summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.classpath3
-rw-r--r--.project6
-rw-r--r--META-INF/MANIFEST.MF13
-rw-r--r--OMakefile187
-rw-r--r--OMakeroot3
-rw-r--r--README83
-rw-r--r--build.number2
-rw-r--r--build.xml442
-rw-r--r--docs/LICENSE2
-rw-r--r--docs/examples/actors/producers.scala16
-rw-r--r--docs/examples/jolib/Ref.scala4
-rw-r--r--docs/examples/jolib/parallelOr.scala14
-rw-r--r--docs/examples/monads/callccInterpreter.scala6
-rw-r--r--docs/examples/monads/directInterpreter.scala4
-rw-r--r--docs/examples/monads/simpleInterpreter.scala4
-rw-r--r--docs/examples/monads/stateInterpreter.scala4
-rw-r--r--docs/examples/parsing/ArithmeticParser.scala16
-rw-r--r--docs/examples/parsing/ArithmeticParsers.scala10
-rw-r--r--docs/examples/parsing/JSON.scala14
-rw-r--r--docs/examples/parsing/ListParser.scala2
-rw-r--r--docs/examples/parsing/ListParsers.scala4
-rw-r--r--docs/examples/parsing/MiniML.scala6
-rw-r--r--docs/examples/parsing/lambda/Main.scala4
-rw-r--r--docs/examples/parsing/lambda/TestParser.scala20
-rw-r--r--docs/examples/parsing/lambda/TestSyntax.scala28
-rw-r--r--docs/examples/pilib/elasticBuffer.scala4
-rw-r--r--docs/examples/pilib/handover.scala6
-rw-r--r--docs/examples/pilib/mobilePhoneProtocol.scala12
-rw-r--r--docs/examples/pilib/piNat.scala2
-rw-r--r--docs/examples/pilib/rwlock.scala4
-rw-r--r--docs/examples/pilib/scheduler.scala2
-rw-r--r--docs/examples/pilib/semaphore.scala2
-rw-r--r--docs/examples/pilib/twoPlaceBuffer.scala2
-rw-r--r--docs/examples/plugintemplate/lib/scalatest.jar.desired.sha12
-rw-r--r--docs/examples/plugintemplate/plugin.properties2
-rw-r--r--docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala2
-rw-r--r--docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala9
-rw-r--r--docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala5
-rw-r--r--docs/examples/tcpoly/collection/HOSeq.scala46
-rw-r--r--docs/examples/tcpoly/monads/Monads.scala14
-rw-r--r--docs/examples/typeinf.scala6
-rw-r--r--docs/examples/xml/phonebook/embeddedBook.scala8
-rw-r--r--docs/examples/xml/phonebook/phonebook.scala14
-rw-r--r--docs/examples/xml/phonebook/phonebook1.scala8
-rw-r--r--docs/examples/xml/phonebook/phonebook2.scala10
-rw-r--r--docs/examples/xml/phonebook/phonebook3.scala32
-rw-r--r--docs/examples/xml/phonebook/verboseBook.scala12
-rw-r--r--docs/licenses/apache_android.txt16
-rw-r--r--docs/licenses/apache_ant.txt16
-rw-r--r--docs/licenses/bsd_jline.txt34
-rw-r--r--docs/licenses/mit_jquery.txt13
-rw-r--r--docs/licenses/mit_sizzle.txt13
-rw-r--r--docs/licenses/mit_tools.tooltip.txt13
l---------lib/ScalaCheck.jar1
-rw-r--r--lib/ant/maven-ant-tasks-2.0.9.jar.desired.sha11
-rw-r--r--lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha11
-rw-r--r--lib/fjbg.jar.desired.sha12
-rw-r--r--lib/forkjoin.jar.desired.sha11
-rw-r--r--lib/msil.jar.desired.sha12
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--lib/scalacheck-1.6dev.jar.desired.sha11
-rw-r--r--scala-lang.ipr1521
-rw-r--r--src/actors/scala/actors/AbstractActor.scala12
-rw-r--r--src/actors/scala/actors/Actor.scala445
-rw-r--r--src/actors/scala/actors/ActorCanReply.scala66
-rw-r--r--src/actors/scala/actors/ActorProxy.scala4
-rw-r--r--src/actors/scala/actors/ActorTask.scala38
-rw-r--r--src/actors/scala/actors/CanReply.scala (renamed from src/actors/scala/actors/Replyable.scala)43
-rw-r--r--src/actors/scala/actors/Channel.scala134
-rw-r--r--src/actors/scala/actors/Combinators.scala46
-rw-r--r--src/actors/scala/actors/DaemonActor.scala2
-rw-r--r--src/actors/scala/actors/Debug.scala39
-rw-r--r--src/actors/scala/actors/Future.scala226
-rw-r--r--src/actors/scala/actors/IScheduler.scala9
-rw-r--r--src/actors/scala/actors/InputChannel.scala20
-rw-r--r--src/actors/scala/actors/MessageQueue.scala111
-rw-r--r--src/actors/scala/actors/OutputChannel.scala24
-rw-r--r--src/actors/scala/actors/ReactChannel.scala5
-rw-r--r--src/actors/scala/actors/Reaction.scala17
-rw-r--r--src/actors/scala/actors/Reactor.scala257
-rw-r--r--src/actors/scala/actors/ReactorCanReply.scala (renamed from src/actors/scala/actors/ReplyableReactor.scala)71
-rw-r--r--src/actors/scala/actors/ReactorTask.scala55
-rw-r--r--src/actors/scala/actors/ReplyReactor.scala128
-rw-r--r--src/actors/scala/actors/ReplyReactorTask.scala36
-rw-r--r--src/actors/scala/actors/ReplyableActor.scala160
-rw-r--r--src/actors/scala/actors/Scheduler.scala15
-rw-r--r--src/actors/scala/actors/SchedulerAdapter.scala9
-rw-r--r--src/actors/scala/actors/UncaughtException.scala33
-rw-r--r--src/actors/scala/actors/package.scala22
-rw-r--r--src/actors/scala/actors/remote/FreshNameCreator.scala3
-rw-r--r--src/actors/scala/actors/remote/JavaSerializer.scala3
-rw-r--r--src/actors/scala/actors/remote/NetKernel.scala5
-rw-r--r--src/actors/scala/actors/remote/Proxy.scala11
-rw-r--r--src/actors/scala/actors/remote/RemoteActor.scala46
-rw-r--r--src/actors/scala/actors/remote/Serializer.scala3
-rw-r--r--src/actors/scala/actors/remote/Service.scala3
-rw-r--r--src/actors/scala/actors/remote/TcpService.scala3
-rw-r--r--src/actors/scala/actors/scheduler/ActorGC.scala (renamed from src/actors/scala/actors/ActorGC.scala)23
-rw-r--r--src/actors/scala/actors/scheduler/DaemonScheduler.scala10
-rw-r--r--src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala49
-rw-r--r--src/actors/scala/actors/scheduler/DelegatingScheduler.scala8
-rw-r--r--src/actors/scala/actors/scheduler/ExecutorScheduler.scala48
-rw-r--r--src/actors/scala/actors/scheduler/ForkJoinScheduler.scala34
-rw-r--r--src/actors/scala/actors/scheduler/QuitControl.scala (renamed from src/actors/scala/actors/AbstractReactor.scala)22
-rw-r--r--src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala13
-rw-r--r--src/actors/scala/actors/scheduler/SchedulerService.scala80
-rw-r--r--src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala43
-rw-r--r--src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala9
-rw-r--r--src/actors/scala/actors/scheduler/TerminationMonitor.scala21
-rw-r--r--src/actors/scala/actors/scheduler/TerminationService.scala25
-rw-r--r--src/actors/scala/actors/scheduler/ThreadPoolConfig.scala41
-rw-r--r--src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala81
-rw-r--r--src/actors/scala/actors/threadpool/AbstractExecutorService.java10
-rw-r--r--src/actors/scala/actors/threadpool/BlockingQueue.java24
-rw-r--r--src/actors/scala/actors/threadpool/Executors.java6
-rw-r--r--src/actors/scala/actors/threadpool/LinkedBlockingQueue.java716
-rw-r--r--src/actors/scala/actors/threadpool/ThreadPoolExecutor.java2
-rw-r--r--src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java2
-rw-r--r--src/actors/scala/actors/threadpool/helpers/Utils.java12
-rw-r--r--src/actors/scala/actors/threadpool/locks/CondVar.java1
-rw-r--r--src/actors/scala/actors/threadpool/locks/FIFOCondVar.java1
-rw-r--r--src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java8
-rw-r--r--src/android-library/scala/ScalaObject.scala3
-rw-r--r--src/attic/README2
-rw-r--r--src/attic/scala/tools/nsc/models/Models.scala (renamed from src/compiler/scala/tools/nsc/models/Models.scala)7
-rw-r--r--src/attic/scala/tools/nsc/models/SemanticTokens.scala (renamed from src/compiler/scala/tools/nsc/models/SemanticTokens.scala)17
-rw-r--r--src/attic/scala/tools/nsc/models/Signatures.scala (renamed from src/compiler/scala/tools/nsc/models/Signatures.scala)15
-rw-r--r--src/attic/scala/tools/nsc/symtab/SymbolWalker.scala (renamed from src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala)44
-rw-r--r--src/build/genprod.scala352
-rw-r--r--src/build/maven/continuations-plugin-pom.xml51
-rw-r--r--src/build/maven/maven-deploy.xml53
-rw-r--r--src/build/pack.xml29
-rw-r--r--src/compiler/scala/tools/ant/FastScalac.scala3
-rw-r--r--src/compiler/scala/tools/ant/Pack200Task.scala6
-rw-r--r--src/compiler/scala/tools/ant/Same.scala5
-rw-r--r--src/compiler/scala/tools/ant/ScalaBazaar.scala11
-rw-r--r--src/compiler/scala/tools/ant/ScalaTool.scala9
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala51
-rw-r--r--src/compiler/scala/tools/ant/ScalacShared.scala25
-rw-r--r--src/compiler/scala/tools/ant/Scaladoc.scala130
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Break.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Compiler.scala5
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Compilers.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala7
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Make.scala5
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala131
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Settings.scala4
-rw-r--r--src/compiler/scala/tools/ant/sabbus/TaskArgs.scala43
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Use.scala5
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-unix.tmpl17
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl2
-rw-r--r--src/compiler/scala/tools/cmd/CommandLine.scala91
-rw-r--r--src/compiler/scala/tools/cmd/Demo.scala84
-rw-r--r--src/compiler/scala/tools/cmd/FromString.scala72
-rw-r--r--src/compiler/scala/tools/cmd/Instance.scala24
-rw-r--r--src/compiler/scala/tools/cmd/Interpolation.scala57
-rw-r--r--src/compiler/scala/tools/cmd/Meta.scala67
-rw-r--r--src/compiler/scala/tools/cmd/Opt.scala91
-rw-r--r--src/compiler/scala/tools/cmd/Parser.scala52
-rw-r--r--src/compiler/scala/tools/cmd/Property.scala71
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala99
-rw-r--r--src/compiler/scala/tools/cmd/Spec.scala52
-rw-r--r--src/compiler/scala/tools/cmd/package.scala28
-rw-r--r--src/compiler/scala/tools/cmd/program/Scmp.scala59
-rw-r--r--src/compiler/scala/tools/cmd/program/Simple.scala81
-rw-r--r--src/compiler/scala/tools/cmd/program/Tokens.scala100
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala18
-rw-r--r--src/compiler/scala/tools/nsc/CompileClient.scala75
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala124
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala119
-rw-r--r--src/compiler/scala/tools/nsc/CompilerCommand.scala111
-rw-r--r--src/compiler/scala/tools/nsc/CompilerRun.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ConsoleWriter.scala3
-rw-r--r--src/compiler/scala/tools/nsc/EvalLoop.scala22
-rw-r--r--src/compiler/scala/tools/nsc/FatalError.scala7
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerCommand.scala96
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerSettings.scala5
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala442
-rw-r--r--src/compiler/scala/tools/nsc/Interpreter.scala1041
-rw-r--r--src/compiler/scala/tools/nsc/InterpreterCommand.scala6
-rw-r--r--src/compiler/scala/tools/nsc/InterpreterLoop.scala413
-rw-r--r--src/compiler/scala/tools/nsc/InterpreterResults.scala3
-rw-r--r--src/compiler/scala/tools/nsc/InterpreterSettings.scala38
-rw-r--r--src/compiler/scala/tools/nsc/Main.scala31
-rw-r--r--src/compiler/scala/tools/nsc/MainGenericRunner.scala135
-rw-r--r--src/compiler/scala/tools/nsc/MainInterpreter.scala3
-rw-r--r--src/compiler/scala/tools/nsc/MainTokenMetric.scala5
-rw-r--r--src/compiler/scala/tools/nsc/NewLinePrintWriter.scala3
-rw-r--r--src/compiler/scala/tools/nsc/NoPhase.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ObjectRunner.scala8
-rw-r--r--src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala10
-rw-r--r--src/compiler/scala/tools/nsc/Phase.scala9
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala197
-rw-r--r--src/compiler/scala/tools/nsc/Properties.scala17
-rw-r--r--src/compiler/scala/tools/nsc/ScalaDoc.scala113
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala140
-rw-r--r--src/compiler/scala/tools/nsc/Settings.scala854
-rw-r--r--src/compiler/scala/tools/nsc/SubComponent.scala3
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala431
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala74
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala16
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala60
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala134
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala88
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreePrinters.scala343
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala1142
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/BracePair.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Change.scala10
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala270
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala382
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Patch.scala8
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala103
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala19
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala72
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala101
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala41
-rw-r--r--src/compiler/scala/tools/nsc/backend/MSILPlatform.scala36
-rw-r--r--src/compiler/scala/tools/nsc/backend/Platform.scala31
-rw-r--r--src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala50
-rw-r--r--src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala130
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala (renamed from src/compiler/scala/tools/nsc/backend/icode/CheckerError.scala)5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Checkers.scala25
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala1
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala1097
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala24
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala143
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala75
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala26
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Primitives.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala7
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Repository.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala65
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala18
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala65
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala (renamed from src/compiler/scala/tools/nsc/backend/icode/analysis/LubError.scala)5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala29
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala20
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala683
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala148
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala1182
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala22
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala26
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala104
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala143
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala140
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Files.scala167
-rw-r--r--src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala307
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocDriver.scala21
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocFactory.scala66
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocUtil.scala104
-rw-r--r--src/compiler/scala/tools/nsc/doc/ModelAdditions.scala412
-rw-r--r--src/compiler/scala/tools/nsc/doc/ModelExtractor.scala453
-rw-r--r--src/compiler/scala/tools/nsc/doc/ModelFrames.scala396
-rw-r--r--src/compiler/scala/tools/nsc/doc/ModelToXML.scala368
-rw-r--r--src/compiler/scala/tools/nsc/doc/Settings.scala38
-rw-r--r--src/compiler/scala/tools/nsc/doc/SourcelessComments.scala249
-rw-r--r--src/compiler/scala/tools/nsc/doc/Universe.scala11
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala79
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala239
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala127
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Source.scala129
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala540
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/class.pngbin0 -> 516 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.pngbin0 -> 3183 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.pngbin0 -> 481 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psdbin0 -> 30823 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.pngbin0 -> 533 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psdbin0 -> 31295 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css204
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js290
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js401
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js154
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js18
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object.pngbin0 -> 518 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.pngbin0 -> 3318 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/package.pngbin0 -> 488 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.pngbin0 -> 3183 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.pngbin0 -> 3186 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psdbin0 -> 28904 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js71
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css496
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js270
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js14
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.pngbin0 -> 494 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.pngbin0 -> 3088 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai6020
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt1
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/Entity.scala190
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala573
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala16
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/TreeFactory.scala89
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala25
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/Body.scala73
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala77
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala865
-rw-r--r--src/compiler/scala/tools/nsc/doc/script.js112
-rw-r--r--src/compiler/scala/tools/nsc/doc/style.css148
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala16
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala390
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ContextTrees.scala14
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala1154
-rw-r--r--src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala47
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala184
-rw-r--r--src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala51
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala48
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala50
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala201
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Response.scala105
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala17
-rw-r--r--src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala13
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ByteCode.scala64
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Completion.scala511
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala130
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala88
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Delimited.scala36
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala54
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/History.scala36
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala19
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineReader.scala30
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Parsed.scala68
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala44
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala112
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala8
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala44
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/package.scala30
-rw-r--r--src/compiler/scala/tools/nsc/io/AbstractFile.scala40
-rw-r--r--src/compiler/scala/tools/nsc/io/Directory.scala34
-rw-r--r--src/compiler/scala/tools/nsc/io/File.scala93
-rw-r--r--src/compiler/scala/tools/nsc/io/FileOperationException.scala3
-rw-r--r--src/compiler/scala/tools/nsc/io/Lexer.scala301
-rw-r--r--src/compiler/scala/tools/nsc/io/NullPrintStream.scala19
-rw-r--r--src/compiler/scala/tools/nsc/io/Path.scala134
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala455
-rw-r--r--src/compiler/scala/tools/nsc/io/PlainFile.scala14
-rw-r--r--src/compiler/scala/tools/nsc/io/PrettyWriter.scala41
-rw-r--r--src/compiler/scala/tools/nsc/io/Process.scala90
-rw-r--r--src/compiler/scala/tools/nsc/io/Replayer.scala74
-rw-r--r--src/compiler/scala/tools/nsc/io/Socket.scala46
-rw-r--r--src/compiler/scala/tools/nsc/io/SourceReader.scala5
-rw-r--r--src/compiler/scala/tools/nsc/io/Streamable.scala55
-rw-r--r--src/compiler/scala/tools/nsc/io/VirtualDirectory.scala15
-rw-r--r--src/compiler/scala/tools/nsc/io/VirtualFile.scala20
-rw-r--r--src/compiler/scala/tools/nsc/io/ZipArchive.scala30
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala9
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala44
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaTokens.scala34
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatchSupport.scala159
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala11
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala57
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala180
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/matching/Patterns.scala107
-rw-r--r--src/compiler/scala/tools/nsc/matching/TransMatcher.scala9
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala11
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginComponent.scala3
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginDescription.scala3
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala3
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala11
-rw-r--r--src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala13
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala18
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala33
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala3
-rw-r--r--src/compiler/scala/tools/nsc/reporters/StoreReporter.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala40
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala137
-rw-r--r--src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala76
-rw-r--r--src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala11
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala604
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala169
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala54
-rw-r--r--src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala39
-rw-r--r--src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala46
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala84
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala118
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Definitions.scala268
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Flags.scala207
-rw-r--r--src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Names.scala49
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Positions.scala5
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Scopes.scala49
-rw-r--r--src/compiler/scala/tools/nsc/symtab/StdNames.scala79
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala187
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTable.scala15
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Symbols.scala669
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Types.scala2396
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala5
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala362
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala135
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala6
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala101
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala835
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala12
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala387
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala147
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala637
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala44
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/InfoTransform.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala68
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala39
-rw-r--r--src/compiler/scala/tools/nsc/transform/LiftCode.scala7
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala146
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala56
-rw-r--r--src/compiler/scala/tools/nsc/transform/Reifiers.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala929
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala158
-rw-r--r--src/compiler/scala/tools/nsc/transform/Transform.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypingTransformers.scala15
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala252
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala50
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala9
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala286
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala17
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala69
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala20
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala626
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala700
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala603
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala180
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala1008
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala328
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala66
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala145
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala268
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala1654
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala42
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Variances.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala43
-rw-r--r--src/compiler/scala/tools/nsc/util/CharArrayReader.scala42
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/Chars.scala85
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala604
-rw-r--r--src/compiler/scala/tools/nsc/util/CommandLineParser.scala145
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/DocStrings.scala138
-rw-r--r--src/compiler/scala/tools/nsc/util/FreshNameCreator.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/HashSet.scala62
-rw-r--r--src/compiler/scala/tools/nsc/util/InterruptReq.scala37
-rw-r--r--src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala16
-rw-r--r--src/compiler/scala/tools/nsc/util/MsilClassPath.scala169
-rw-r--r--src/compiler/scala/tools/nsc/util/MultiHashMap.scala10
-rw-r--r--src/compiler/scala/tools/nsc/util/Position.scala24
-rw-r--r--src/compiler/scala/tools/nsc/util/RegexCache.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala (renamed from src/library/scala/util/ScalaClassLoader.scala)69
-rw-r--r--src/compiler/scala/tools/nsc/util/Set.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/ShowPickled.scala207
-rw-r--r--src/compiler/scala/tools/nsc/util/SourceFile.scala222
-rw-r--r--src/compiler/scala/tools/nsc/util/Statistics.scala299
-rw-r--r--src/compiler/scala/tools/nsc/util/Tracer.scala37
-rw-r--r--src/compiler/scala/tools/nsc/util/TreeSet.scala13
-rw-r--r--src/compiler/scala/tools/nsc/util/WorkScheduler.scala57
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala29
-rw-r--r--src/compiler/scala/tools/util/AbstractTimer.scala3
-rw-r--r--src/compiler/scala/tools/util/ClassPathSettings.scala32
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala253
-rw-r--r--src/compiler/scala/tools/util/SocketConnection.scala3
-rw-r--r--src/compiler/scala/tools/util/SocketServer.scala3
-rw-r--r--src/compiler/scala/tools/util/StringOps.scala26
-rw-r--r--src/compiler/scala/tools/util/Which.scala39
-rw-r--r--src/continuations/library/scala/util/continuations/ControlContext.scala161
-rw-r--r--src/continuations/library/scala/util/continuations/package.scala65
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala462
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala131
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala414
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala60
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala384
-rw-r--r--src/continuations/plugin/scalac-plugin.xml5
-rw-r--r--src/dbc/scala/dbc/DataType.scala3
-rw-r--r--src/dbc/scala/dbc/Database.scala3
-rw-r--r--src/dbc/scala/dbc/Syntax.scala3
-rw-r--r--src/dbc/scala/dbc/Utilities.scala3
-rw-r--r--src/dbc/scala/dbc/Value.scala3
-rw-r--r--src/dbc/scala/dbc/Vendor.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/ApproximateNumeric.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/Boolean.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/Character.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/CharacterLargeObject.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/CharacterString.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/CharacterVarying.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/ExactNumeric.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/Factory.scala5
-rw-r--r--src/dbc/scala/dbc/datatype/Numeric.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/String.scala3
-rw-r--r--src/dbc/scala/dbc/datatype/Unknown.scala3
-rw-r--r--src/dbc/scala/dbc/exception/IncompatibleSchema.scala3
-rw-r--r--src/dbc/scala/dbc/exception/UnsupportedFeature.scala3
-rw-r--r--src/dbc/scala/dbc/result/Field.scala3
-rw-r--r--src/dbc/scala/dbc/result/FieldMetadata.scala3
-rw-r--r--src/dbc/scala/dbc/result/Relation.scala3
-rw-r--r--src/dbc/scala/dbc/result/Status.scala3
-rw-r--r--src/dbc/scala/dbc/result/Tuple.scala3
-rw-r--r--src/dbc/scala/dbc/statement/AccessMode.scala3
-rw-r--r--src/dbc/scala/dbc/statement/DerivedColumn.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Expression.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Insert.scala3
-rw-r--r--src/dbc/scala/dbc/statement/InsertionData.scala3
-rw-r--r--src/dbc/scala/dbc/statement/IsolationLevel.scala3
-rw-r--r--src/dbc/scala/dbc/statement/JoinType.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Jointure.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Relation.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Select.scala3
-rw-r--r--src/dbc/scala/dbc/statement/SetClause.scala3
-rw-r--r--src/dbc/scala/dbc/statement/SetQuantifier.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Statement.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Status.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Table.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Transaction.scala3
-rw-r--r--src/dbc/scala/dbc/statement/Update.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/Aggregate.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/BinaryOperator.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/Constant.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/Default.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/Field.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/FunctionCall.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/Select.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/SetFunction.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/TypeCast.scala3
-rw-r--r--src/dbc/scala/dbc/statement/expression/UnaryOperator.scala3
-rw-r--r--src/dbc/scala/dbc/syntax/DataTypeUtil.scala3
-rw-r--r--src/dbc/scala/dbc/syntax/Database.scala3
-rw-r--r--src/dbc/scala/dbc/syntax/Statement.scala3
-rw-r--r--src/dbc/scala/dbc/syntax/StatementExpression.scala3
-rw-r--r--src/dbc/scala/dbc/value/ApproximateNumeric.scala3
-rw-r--r--src/dbc/scala/dbc/value/Boolean.scala3
-rw-r--r--src/dbc/scala/dbc/value/Character.scala3
-rw-r--r--src/dbc/scala/dbc/value/CharacterLargeObject.scala3
-rw-r--r--src/dbc/scala/dbc/value/CharacterVarying.scala3
-rw-r--r--src/dbc/scala/dbc/value/Conversion.scala3
-rw-r--r--src/dbc/scala/dbc/value/ExactNumeric.scala3
-rw-r--r--src/dbc/scala/dbc/value/Factory.scala3
-rw-r--r--src/dbc/scala/dbc/value/Unknown.scala3
-rw-r--r--src/dbc/scala/dbc/vendor/PostgreSQL.scala3
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JClass.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCode.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java40
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java3
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JField.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLabel.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMember.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMethod.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JType.java1
-rw-r--r--src/fjbg/ch/epfl/lamp/util/ByteArray.java1
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java (renamed from src/library/scala/concurrent/forkjoin/ForkJoinPool.java)6
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java (renamed from src/library/scala/concurrent/forkjoin/ForkJoinTask.java)21
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java (renamed from src/library/scala/concurrent/forkjoin/ForkJoinWorkerThread.java)0
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java (renamed from src/library/scala/concurrent/forkjoin/LinkedTransferQueue.java)0
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java (renamed from src/library/scala/concurrent/forkjoin/RecursiveAction.java)0
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java (renamed from src/library/scala/concurrent/forkjoin/RecursiveTask.java)2
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java (renamed from src/library/scala/concurrent/forkjoin/ThreadLocalRandom.java)0
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java (renamed from src/library/scala/concurrent/forkjoin/TransferQueue.java)0
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/package-info.java (renamed from src/library/scala/concurrent/forkjoin/package-info.java)0
-rw-r--r--src/intellij/actors.iml.SAMPLE (renamed from actors.iml)4
-rw-r--r--src/intellij/compiler.iml.SAMPLE (renamed from compiler.iml)16
-rw-r--r--src/intellij/dbc.iml.SAMPLE (renamed from dbc.iml)4
-rw-r--r--src/intellij/library.iml.SAMPLE (renamed from library.iml)10
-rw-r--r--src/intellij/manual.iml.SAMPLE (renamed from manual.iml)4
-rw-r--r--src/intellij/partest.iml.SAMPLE (renamed from partest.iml)6
-rw-r--r--src/intellij/scala-lang.ipr.SAMPLE1446
-rw-r--r--src/intellij/scalap.iml.SAMPLE (renamed from scalap.iml)4
-rw-r--r--src/intellij/swing.iml.SAMPLE (renamed from swing.iml)4
-rw-r--r--src/library/scala/Annotation.scala3
-rw-r--r--src/library/scala/Application.scala10
-rw-r--r--src/library/scala/Array.scala218
-rw-r--r--src/library/scala/Cell.scala3
-rw-r--r--src/library/scala/ClassfileAnnotation.scala3
-rw-r--r--src/library/scala/Console.scala38
-rw-r--r--src/library/scala/CountedIterator.scala3
-rw-r--r--src/library/scala/Either.scala35
-rw-r--r--src/library/scala/Enumeration.scala144
-rw-r--r--src/library/scala/Equals.scala3
-rw-r--r--src/library/scala/Function.scala14
-rw-r--r--src/library/scala/Function0.scala8
-rw-r--r--src/library/scala/Function1.scala8
-rw-r--r--src/library/scala/Function10.scala20
-rw-r--r--src/library/scala/Function11.scala20
-rw-r--r--src/library/scala/Function12.scala20
-rw-r--r--src/library/scala/Function13.scala20
-rw-r--r--src/library/scala/Function14.scala20
-rw-r--r--src/library/scala/Function15.scala20
-rw-r--r--src/library/scala/Function16.scala20
-rw-r--r--src/library/scala/Function17.scala20
-rw-r--r--src/library/scala/Function18.scala20
-rw-r--r--src/library/scala/Function19.scala20
-rw-r--r--src/library/scala/Function2.scala22
-rw-r--r--src/library/scala/Function20.scala20
-rw-r--r--src/library/scala/Function21.scala20
-rw-r--r--src/library/scala/Function22.scala20
-rw-r--r--src/library/scala/Function3.scala20
-rw-r--r--src/library/scala/Function4.scala20
-rw-r--r--src/library/scala/Function5.scala20
-rw-r--r--src/library/scala/Function6.scala20
-rw-r--r--src/library/scala/Function7.scala20
-rw-r--r--src/library/scala/Function8.scala20
-rw-r--r--src/library/scala/Function9.scala20
-rw-r--r--src/library/scala/Immutable.scala5
-rw-r--r--src/library/scala/LowPriorityImplicits.scala38
-rw-r--r--src/library/scala/MatchError.scala3
-rw-r--r--src/library/scala/Math.scala207
-rw-r--r--src/library/scala/MathCommon.scala143
-rw-r--r--src/library/scala/Mutable.scala3
-rw-r--r--src/library/scala/NotDefinedError.scala4
-rw-r--r--src/library/scala/NotNull.scala3
-rw-r--r--src/library/scala/Option.scala195
-rw-r--r--src/library/scala/PartialFunction.scala48
-rw-r--r--src/library/scala/Predef.scala200
-rw-r--r--src/library/scala/Product.scala5
-rw-r--r--src/library/scala/Product1.scala8
-rw-r--r--src/library/scala/Product10.scala6
-rw-r--r--src/library/scala/Product11.scala6
-rw-r--r--src/library/scala/Product12.scala6
-rw-r--r--src/library/scala/Product13.scala6
-rw-r--r--src/library/scala/Product14.scala6
-rw-r--r--src/library/scala/Product15.scala6
-rw-r--r--src/library/scala/Product16.scala6
-rw-r--r--src/library/scala/Product17.scala6
-rw-r--r--src/library/scala/Product18.scala6
-rw-r--r--src/library/scala/Product19.scala6
-rw-r--r--src/library/scala/Product2.scala8
-rw-r--r--src/library/scala/Product20.scala6
-rw-r--r--src/library/scala/Product21.scala6
-rw-r--r--src/library/scala/Product22.scala6
-rw-r--r--src/library/scala/Product3.scala6
-rw-r--r--src/library/scala/Product4.scala6
-rw-r--r--src/library/scala/Product5.scala6
-rw-r--r--src/library/scala/Product6.scala6
-rw-r--r--src/library/scala/Product7.scala6
-rw-r--r--src/library/scala/Product8.scala6
-rw-r--r--src/library/scala/Product9.scala6
-rw-r--r--src/library/scala/Proxy.scala5
-rw-r--r--src/library/scala/Responder.scala6
-rw-r--r--src/library/scala/ScalaObject.scala3
-rw-r--r--src/library/scala/SerialVersionUID.scala3
-rw-r--r--src/library/scala/StaticAnnotation.scala3
-rw-r--r--src/library/scala/Symbol.scala8
-rw-r--r--src/library/scala/Tuple1.scala8
-rw-r--r--src/library/scala/Tuple10.scala6
-rw-r--r--src/library/scala/Tuple11.scala6
-rw-r--r--src/library/scala/Tuple12.scala6
-rw-r--r--src/library/scala/Tuple13.scala6
-rw-r--r--src/library/scala/Tuple14.scala6
-rw-r--r--src/library/scala/Tuple15.scala6
-rw-r--r--src/library/scala/Tuple16.scala6
-rw-r--r--src/library/scala/Tuple17.scala6
-rw-r--r--src/library/scala/Tuple18.scala6
-rw-r--r--src/library/scala/Tuple19.scala6
-rw-r--r--src/library/scala/Tuple2.scala45
-rw-r--r--src/library/scala/Tuple20.scala6
-rw-r--r--src/library/scala/Tuple21.scala6
-rw-r--r--src/library/scala/Tuple22.scala6
-rw-r--r--src/library/scala/Tuple3.scala31
-rw-r--r--src/library/scala/Tuple4.scala6
-rw-r--r--src/library/scala/Tuple5.scala6
-rw-r--r--src/library/scala/Tuple6.scala6
-rw-r--r--src/library/scala/Tuple7.scala6
-rw-r--r--src/library/scala/Tuple8.scala6
-rw-r--r--src/library/scala/Tuple9.scala6
-rw-r--r--src/library/scala/TypeConstraint.scala3
-rw-r--r--src/library/scala/UninitializedError.scala3
-rw-r--r--src/library/scala/UninitializedFieldError.scala3
-rw-r--r--src/library/scala/annotation/elidable.scala10
-rw-r--r--src/library/scala/annotation/implicitNotFound.scala (renamed from src/library/scala/annotation/experimental.scala)17
-rw-r--r--src/library/scala/annotation/migration.scala28
-rw-r--r--src/library/scala/annotation/switch.scala2
-rw-r--r--src/library/scala/annotation/tailrec.scala2
-rw-r--r--src/library/scala/annotation/target/beanGetter.scala14
-rw-r--r--src/library/scala/annotation/target/beanSetter.scala14
-rw-r--r--src/library/scala/annotation/target/field.scala14
-rw-r--r--src/library/scala/annotation/target/getter.scala14
-rw-r--r--src/library/scala/annotation/target/param.scala51
-rw-r--r--src/library/scala/annotation/target/setter.scala14
-rw-r--r--src/library/scala/annotation/unchecked/uncheckedStable.scala2
-rw-r--r--src/library/scala/annotation/unchecked/uncheckedVariance.scala2
-rw-r--r--src/library/scala/cloneable.scala3
-rw-r--r--src/library/scala/collection/BitSet.scala18
-rw-r--r--src/library/scala/collection/BitSetLike.scala65
-rw-r--r--src/library/scala/collection/BufferedIterator.scala3
-rw-r--r--src/library/scala/collection/DefaultMap.scala5
-rw-r--r--src/library/scala/collection/IndexedSeq.scala22
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala256
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala287
-rw-r--r--src/library/scala/collection/IndexedSeqView.scala38
-rw-r--r--src/library/scala/collection/IndexedSeqViewLike.scala109
-rw-r--r--src/library/scala/collection/Iterable.scala45
-rw-r--r--src/library/scala/collection/IterableLike.scala421
-rw-r--r--src/library/scala/collection/IterableProxy.scala3
-rw-r--r--src/library/scala/collection/IterableProxyLike.scala35
-rw-r--r--src/library/scala/collection/IterableView.scala13
-rw-r--r--src/library/scala/collection/IterableViewLike.scala24
-rw-r--r--src/library/scala/collection/Iterator.scala790
-rw-r--r--src/library/scala/collection/JavaConversions.scala556
-rwxr-xr-xsrc/library/scala/collection/JavaConverters.scala456
-rw-r--r--src/library/scala/collection/LinearSeq.scala24
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala399
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala294
-rw-r--r--src/library/scala/collection/Map.scala45
-rw-r--r--src/library/scala/collection/MapLike.scala269
-rw-r--r--src/library/scala/collection/MapProxy.scala3
-rw-r--r--src/library/scala/collection/MapProxyLike.scala26
-rw-r--r--src/library/scala/collection/RollbackIterator.scala.disabled88
-rw-r--r--src/library/scala/collection/Seq.scala34
-rw-r--r--src/library/scala/collection/SeqLike.scala759
-rw-r--r--src/library/scala/collection/SeqProxy.scala3
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala42
-rw-r--r--src/library/scala/collection/SeqView.scala12
-rw-r--r--src/library/scala/collection/SeqViewLike.scala80
-rw-r--r--src/library/scala/collection/Set.scala30
-rw-r--r--src/library/scala/collection/SetLike.scala188
-rw-r--r--src/library/scala/collection/SetProxy.scala3
-rw-r--r--src/library/scala/collection/SetProxyLike.scala7
-rw-r--r--src/library/scala/collection/SortedMap.scala7
-rw-r--r--src/library/scala/collection/SortedMapLike.scala3
-rw-r--r--src/library/scala/collection/SortedSet.scala3
-rw-r--r--src/library/scala/collection/SortedSetLike.scala3
-rw-r--r--src/library/scala/collection/Traversable.scala34
-rw-r--r--src/library/scala/collection/TraversableLike.scala982
-rw-r--r--src/library/scala/collection/TraversableOnce.scala538
-rw-r--r--src/library/scala/collection/TraversableProxy.scala5
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala72
-rw-r--r--src/library/scala/collection/TraversableView.scala17
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala101
-rw-r--r--src/library/scala/collection/generic/Addable.scala56
-rw-r--r--src/library/scala/collection/generic/BitSetFactory.scala20
-rw-r--r--src/library/scala/collection/generic/CanBuildFrom.scala29
-rwxr-xr-xsrc/library/scala/collection/generic/FilterMonadic.scala11
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala32
-rw-r--r--src/library/scala/collection/generic/GenericSequenceFactory.scala.disabled17
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericTraversableFactory.scala.disabled188
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala72
-rw-r--r--src/library/scala/collection/generic/Growable.scala47
-rwxr-xr-xsrc/library/scala/collection/generic/HasNewBuilder.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableMapFactory.scala11
-rw-r--r--src/library/scala/collection/generic/ImmutableSetFactory.scala18
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedMapFactory.scala13
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedSetFactory.scala13
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala7
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala36
-rw-r--r--src/library/scala/collection/generic/MutableMapFactory.scala21
-rw-r--r--src/library/scala/collection/generic/MutableSetFactory.scala18
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala3
-rw-r--r--src/library/scala/collection/generic/SeqForwarder.scala30
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala27
-rw-r--r--src/library/scala/collection/generic/Shrinkable.scala28
-rw-r--r--src/library/scala/collection/generic/Sorted.scala30
-rw-r--r--src/library/scala/collection/generic/SortedMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/SortedSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/Subtractable.scala57
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala131
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala42
-rw-r--r--src/library/scala/collection/generic/TraversableView.scala.1152
-rw-r--r--src/library/scala/collection/immutable/BitSet.scala25
-rwxr-xr-xsrc/library/scala/collection/immutable/DefaultMap.scala65
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala466
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala407
-rw-r--r--src/library/scala/collection/immutable/ImmutableIterator.scala.disabled117
-rw-r--r--src/library/scala/collection/immutable/IndexedSeq.scala20
-rw-r--r--src/library/scala/collection/immutable/IntMap.scala88
-rw-r--r--src/library/scala/collection/immutable/Iterable.scala22
-rw-r--r--src/library/scala/collection/immutable/LinearSeq.scala16
-rw-r--r--src/library/scala/collection/immutable/List.scala395
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala26
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala23
-rw-r--r--src/library/scala/collection/immutable/LongMap.scala91
-rw-r--r--src/library/scala/collection/immutable/Map.scala40
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala118
-rw-r--r--src/library/scala/collection/immutable/MapProxy.scala29
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala83
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala25
-rw-r--r--src/library/scala/collection/immutable/Queue.scala69
-rw-r--r--src/library/scala/collection/immutable/Range.scala213
-rw-r--r--src/library/scala/collection/immutable/RedBlack.scala85
-rw-r--r--src/library/scala/collection/immutable/Seq.scala17
-rw-r--r--src/library/scala/collection/immutable/Set.scala49
-rw-r--r--src/library/scala/collection/immutable/SetProxy.scala21
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala28
-rw-r--r--src/library/scala/collection/immutable/SortedSet.scala14
-rw-r--r--src/library/scala/collection/immutable/Stack.scala106
-rw-r--r--src/library/scala/collection/immutable/Stream.scala224
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala137
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala30
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala22
-rw-r--r--src/library/scala/collection/immutable/Tree.scala.disabled440
-rw-r--r--src/library/scala/collection/immutable/TreeHashMap.scala3
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala45
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala43
-rw-r--r--src/library/scala/collection/immutable/Vector.scala186
-rw-r--r--src/library/scala/collection/immutable/WrappedString.scala24
-rw-r--r--src/library/scala/collection/interfaces/IterableMethods.scala2
-rw-r--r--src/library/scala/collection/interfaces/MapMethods.scala10
-rw-r--r--src/library/scala/collection/interfaces/SeqMethods.scala13
-rw-r--r--src/library/scala/collection/interfaces/SetMethods.scala8
-rw-r--r--src/library/scala/collection/interfaces/TraversableMethods.scala12
-rw-r--r--src/library/scala/collection/interfaces/TraversableOnceMethods.scala69
-rw-r--r--src/library/scala/collection/mutable/AddingBuilder.scala27
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala92
-rw-r--r--src/library/scala/collection/mutable/ArrayBuilder.scala173
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala33
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala60
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala (renamed from src/library/scala/collection/mutable/GenericArray.scala)49
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala114
-rw-r--r--src/library/scala/collection/mutable/BitSet.scala47
-rw-r--r--src/library/scala/collection/mutable/Buffer.scala16
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala348
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala75
-rw-r--r--src/library/scala/collection/mutable/Builder.scala76
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala7
-rw-r--r--src/library/scala/collection/mutable/CloneableCollection.scala7
-rw-r--r--src/library/scala/collection/mutable/ConcurrentMap.scala68
-rw-r--r--src/library/scala/collection/mutable/DefaultEntry.scala9
-rw-r--r--src/library/scala/collection/mutable/DefaultMapModel.scala8
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala73
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala14
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala80
-rw-r--r--src/library/scala/collection/mutable/GrowingBuilder.scala38
-rw-r--r--src/library/scala/collection/mutable/HashEntry.scala6
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala70
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala36
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala124
-rw-r--r--src/library/scala/collection/mutable/History.scala29
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala20
-rw-r--r--src/library/scala/collection/mutable/ImmutableSetAdaptor.scala7
-rw-r--r--src/library/scala/collection/mutable/IndexedSeq.scala12
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqLike.scala30
-rwxr-xr-xsrc/library/scala/collection/mutable/IndexedSeqOptimized.scala20
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala54
-rw-r--r--src/library/scala/collection/mutable/Iterable.scala24
-rw-r--r--src/library/scala/collection/mutable/LazyBuilder.scala11
-rw-r--r--src/library/scala/collection/mutable/LinearSeq.scala16
-rw-r--r--src/library/scala/collection/mutable/LinkedEntry.scala7
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala63
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala51
-rw-r--r--src/library/scala/collection/mutable/LinkedList.scala29
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala23
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala60
-rw-r--r--src/library/scala/collection/mutable/ListMap.scala32
-rw-r--r--src/library/scala/collection/mutable/Map.scala19
-rw-r--r--src/library/scala/collection/mutable/MapBuilder.scala7
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala293
-rw-r--r--src/library/scala/collection/mutable/MapLikeBase.scala37
-rw-r--r--src/library/scala/collection/mutable/MapProxy.scala32
-rw-r--r--src/library/scala/collection/mutable/MultiMap.scala52
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala23
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala16
-rw-r--r--src/library/scala/collection/mutable/ObservableMap.scala17
-rw-r--r--src/library/scala/collection/mutable/ObservableSet.scala17
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala100
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala196
-rw-r--r--src/library/scala/collection/mutable/PriorityQueueProxy.scala20
-rw-r--r--src/library/scala/collection/mutable/Publisher.scala58
-rw-r--r--src/library/scala/collection/mutable/Queue.scala18
-rw-r--r--src/library/scala/collection/mutable/QueueProxy.scala20
-rw-r--r--src/library/scala/collection/mutable/ResizableArray.scala12
-rw-r--r--src/library/scala/collection/mutable/RevertibleHistory.scala16
-rw-r--r--src/library/scala/collection/mutable/Seq.scala29
-rw-r--r--src/library/scala/collection/mutable/Set.scala26
-rw-r--r--src/library/scala/collection/mutable/SetBuilder.scala20
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala244
-rw-r--r--src/library/scala/collection/mutable/SetProxy.scala6
-rw-r--r--src/library/scala/collection/mutable/Stack.scala48
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala31
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala969
-rw-r--r--src/library/scala/collection/mutable/Subscriber.scala10
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala75
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala26
-rw-r--r--src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala35
-rw-r--r--src/library/scala/collection/mutable/SynchronizedQueue.scala23
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala27
-rw-r--r--src/library/scala/collection/mutable/SynchronizedStack.scala33
-rw-r--r--src/library/scala/collection/mutable/Traversable.scala20
-rw-r--r--src/library/scala/collection/mutable/Undoable.scala7
-rw-r--r--src/library/scala/collection/mutable/WeakHashMap.scala39
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala26
-rw-r--r--src/library/scala/collection/mutable/WrappedArrayBuilder.scala8
-rw-r--r--src/library/scala/collection/package.scala73
-rwxr-xr-xsrc/library/scala/collection/readme-if-you-want-to-add-something.txt50
-rw-r--r--src/library/scala/collection/script/Location.scala3
-rw-r--r--src/library/scala/collection/script/Message.scala3
-rw-r--r--src/library/scala/collection/script/Scriptable.scala3
-rw-r--r--src/library/scala/compat/Platform.scala34
-rw-r--r--src/library/scala/concurrent/Channel.scala3
-rw-r--r--src/library/scala/concurrent/DelayedLazyVal.scala17
-rw-r--r--src/library/scala/concurrent/JavaConversions.scala3
-rw-r--r--src/library/scala/concurrent/Lock.scala3
-rw-r--r--src/library/scala/concurrent/MailBox.scala3
-rw-r--r--src/library/scala/concurrent/ManagedBlocker.scala3
-rw-r--r--src/library/scala/concurrent/SyncChannel.scala3
-rw-r--r--src/library/scala/concurrent/SyncVar.scala16
-rw-r--r--src/library/scala/concurrent/TIMEOUT.scala3
-rw-r--r--src/library/scala/concurrent/TaskRunner.scala8
-rw-r--r--src/library/scala/concurrent/TaskRunners.scala3
-rw-r--r--src/library/scala/concurrent/ThreadPoolRunner.scala3
-rw-r--r--src/library/scala/concurrent/ThreadRunner.scala13
-rw-r--r--src/library/scala/concurrent/jolib.scala82
-rw-r--r--src/library/scala/concurrent/ops.scala19
-rw-r--r--src/library/scala/concurrent/pilib.scala3
-rw-r--r--src/library/scala/deprecated.scala6
-rw-r--r--src/library/scala/inline.scala3
-rw-r--r--src/library/scala/io/BufferedSource.scala9
-rw-r--r--src/library/scala/io/BytePickle.scala3
-rw-r--r--src/library/scala/io/Codec.scala25
-rw-r--r--src/library/scala/io/Position.scala3
-rw-r--r--src/library/scala/io/Source.scala213
-rw-r--r--src/library/scala/io/UTF8Codec.scala5
-rw-r--r--src/library/scala/math/BigDecimal.scala64
-rw-r--r--src/library/scala/math/BigInt.scala30
-rw-r--r--src/library/scala/math/Equiv.scala3
-rw-r--r--src/library/scala/math/Fractional.scala3
-rw-r--r--src/library/scala/math/Integral.scala3
-rw-r--r--src/library/scala/math/Numeric.scala18
-rw-r--r--src/library/scala/math/Ordered.scala3
-rw-r--r--src/library/scala/math/Ordering.scala16
-rw-r--r--src/library/scala/math/PartialOrdering.scala3
-rw-r--r--src/library/scala/math/PartiallyOrdered.scala3
-rw-r--r--src/library/scala/math/ScalaNumber.java21
-rw-r--r--src/library/scala/math/ScalaNumericConversions.scala60
-rw-r--r--src/library/scala/math/package.scala30
-rw-r--r--src/library/scala/mobile/Code.scala3
-rw-r--r--src/library/scala/mobile/Location.scala3
-rw-r--r--src/library/scala/native.scala25
-rw-r--r--src/library/scala/noinline.scala3
-rw-r--r--src/library/scala/package.scala79
-rw-r--r--src/library/scala/ref/PhantomReference.scala11
-rw-r--r--src/library/scala/ref/Reference.scala7
-rw-r--r--src/library/scala/ref/ReferenceQueue.scala26
-rw-r--r--src/library/scala/ref/ReferenceWrapper.scala11
-rw-r--r--src/library/scala/ref/SoftReference.scala14
-rw-r--r--src/library/scala/ref/WeakReference.scala14
-rw-r--r--src/library/scala/reflect/BeanDescription.scala3
-rw-r--r--src/library/scala/reflect/BeanDisplayName.scala3
-rw-r--r--src/library/scala/reflect/BeanInfo.scala3
-rw-r--r--src/library/scala/reflect/BeanInfoSkip.scala3
-rw-r--r--src/library/scala/reflect/BeanProperty.scala6
-rw-r--r--src/library/scala/reflect/BooleanBeanProperty.scala6
-rw-r--r--src/library/scala/reflect/ClassManifest.scala46
-rw-r--r--src/library/scala/reflect/Code.scala5
-rw-r--r--src/library/scala/reflect/Invocation.scala134
-rw-r--r--src/library/scala/reflect/Manifest.scala107
-rwxr-xr-x[-rw-r--r--]src/library/scala/reflect/NameTransformer.scala (renamed from src/library/scala/util/NameTransformer.scala)5
-rw-r--r--src/library/scala/reflect/NoManifest.scala3
-rw-r--r--src/library/scala/reflect/OptManifest.scala3
-rw-r--r--src/library/scala/reflect/Print.scala8
-rw-r--r--src/library/scala/reflect/RichClass.scala93
-rw-r--r--src/library/scala/reflect/ScalaBeanInfo.scala3
-rw-r--r--src/library/scala/reflect/ScalaLongSignature.java13
-rw-r--r--src/library/scala/reflect/ScalaSignature.java13
-rw-r--r--src/library/scala/reflect/Symbol.scala3
-rw-r--r--src/library/scala/reflect/Tree.scala3
-rw-r--r--src/library/scala/reflect/Type.scala3
-rwxr-xr-xsrc/library/scala/reflect/generic/AnnotationInfos.scala42
-rw-r--r--src/library/scala/reflect/generic/ByteCodecs.scala224
-rwxr-xr-x[-rw-r--r--]src/library/scala/reflect/generic/Constants.scala (renamed from src/compiler/scala/tools/nsc/symtab/Constants.scala)45
-rwxr-xr-xsrc/library/scala/reflect/generic/Flags.scala199
-rwxr-xr-xsrc/library/scala/reflect/generic/Names.scala21
-rwxr-xr-x[-rw-r--r--]src/library/scala/reflect/generic/PickleBuffer.scala (renamed from src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala)31
-rwxr-xr-x[-rw-r--r--]src/library/scala/reflect/generic/PickleFormat.scala (renamed from src/compiler/scala/tools/nsc/symtab/classfile/PickleFormat.scala)19
-rwxr-xr-xsrc/library/scala/reflect/generic/Scopes.scala15
-rwxr-xr-xsrc/library/scala/reflect/generic/StandardDefinitions.scala66
-rwxr-xr-xsrc/library/scala/reflect/generic/StdNames.scala30
-rwxr-xr-xsrc/library/scala/reflect/generic/Symbols.scala195
-rwxr-xr-xsrc/library/scala/reflect/generic/Trees.scala739
-rwxr-xr-xsrc/library/scala/reflect/generic/Types.scala156
-rwxr-xr-xsrc/library/scala/reflect/generic/UnPickler.scala796
-rwxr-xr-xsrc/library/scala/reflect/generic/Universe.scala16
-rw-r--r--src/library/scala/remote.scala3
-rw-r--r--src/library/scala/runtime/AbstractFunction0.scala (renamed from src/library/scala/runtime/NonLocalReturnException.scala)9
-rw-r--r--src/library/scala/runtime/AbstractFunction1.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction10.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction11.scala (renamed from src/library/scala/runtime/ExceptionHandling.java)16
-rw-r--r--src/library/scala/runtime/AbstractFunction12.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction13.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction14.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction15.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction16.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction17.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction18.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction19.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction2.scala (renamed from src/library/scala/runtime/BoxedIntArray.scala)16
-rw-r--r--src/library/scala/runtime/AbstractFunction20.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction21.scala (renamed from src/library/scala/runtime/BoxedLongArray.scala)16
-rw-r--r--src/library/scala/runtime/AbstractFunction22.scala (renamed from src/library/scala/runtime/BoxedCharArray.scala)16
-rw-r--r--src/library/scala/runtime/AbstractFunction3.scala (renamed from src/library/scala/runtime/StreamCons.scala)11
-rw-r--r--src/library/scala/runtime/AbstractFunction4.scala (renamed from src/library/scala/concurrent/AsyncInvokable.scala)14
-rw-r--r--src/library/scala/runtime/AbstractFunction5.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction6.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction7.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction8.scala18
-rw-r--r--src/library/scala/runtime/AbstractFunction9.scala18
-rw-r--r--src/library/scala/runtime/AnyValCompanion.scala86
-rw-r--r--src/library/scala/runtime/ArrayRuntime.java16
-rw-r--r--src/library/scala/runtime/BooleanRef.java3
-rw-r--r--src/library/scala/runtime/Boxed.scala3
-rw-r--r--src/library/scala/runtime/BoxedAnyArray.scala224
-rw-r--r--src/library/scala/runtime/BoxedArray.scala165
-rw-r--r--src/library/scala/runtime/BoxedBooleanArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedByteArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedDoubleArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedFloatArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedObjectArray.scala40
-rw-r--r--src/library/scala/runtime/BoxedShortArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedUnit.java3
-rw-r--r--src/library/scala/runtime/BoxedUnitArray.scala28
-rw-r--r--src/library/scala/runtime/BoxesRunTime.java609
-rw-r--r--src/library/scala/runtime/ByteRef.java5
-rw-r--r--src/library/scala/runtime/CharRef.java5
-rw-r--r--src/library/scala/runtime/DoubleRef.java5
-rw-r--r--src/library/scala/runtime/FloatRef.java5
-rw-r--r--src/library/scala/runtime/IntRef.java5
-rw-r--r--src/library/scala/runtime/LongRef.java5
-rw-r--r--src/library/scala/runtime/MethodCache.scala31
-rw-r--r--src/library/scala/runtime/NonLocalReturnControl.scala (renamed from src/library/scala/unsealed.scala)13
-rw-r--r--src/library/scala/runtime/Nothing$.scala3
-rw-r--r--src/library/scala/runtime/Null$.scala3
-rw-r--r--src/library/scala/runtime/ObjectRef.java3
-rw-r--r--src/library/scala/runtime/RichBoolean.scala3
-rw-r--r--src/library/scala/runtime/RichByte.scala3
-rw-r--r--src/library/scala/runtime/RichChar.scala28
-rw-r--r--src/library/scala/runtime/RichDouble.scala19
-rw-r--r--src/library/scala/runtime/RichException.scala14
-rw-r--r--src/library/scala/runtime/RichFloat.scala19
-rw-r--r--src/library/scala/runtime/RichInt.scala3
-rw-r--r--src/library/scala/runtime/RichLong.scala3
-rw-r--r--src/library/scala/runtime/RichShort.scala3
-rw-r--r--src/library/scala/runtime/RichString.scala261
-rw-r--r--src/library/scala/runtime/RichUnit.scala3
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala282
-rw-r--r--src/library/scala/runtime/ShortRef.java4
-rw-r--r--src/library/scala/runtime/StringAdd.scala3
-rwxr-xr-xsrc/library/scala/runtime/TraitSetter.java6
-rwxr-xr-x[-rw-r--r--]src/library/scala/runtime/VolatileBooleanRef.java (renamed from src/library/scala/net/Utility.scala)23
-rwxr-xr-xsrc/library/scala/runtime/VolatileByteRef.java20
-rwxr-xr-xsrc/library/scala/runtime/VolatileCharRef.java20
-rwxr-xr-xsrc/library/scala/runtime/VolatileDoubleRef.java19
-rwxr-xr-xsrc/library/scala/runtime/VolatileFloatRef.java20
-rwxr-xr-xsrc/library/scala/runtime/VolatileIntRef.java19
-rwxr-xr-xsrc/library/scala/runtime/VolatileLongRef.java20
-rwxr-xr-xsrc/library/scala/runtime/VolatileObjectRef.java20
-rwxr-xr-xsrc/library/scala/runtime/VolatileShortRef.java20
-rw-r--r--src/library/scala/serializable.scala3
-rw-r--r--src/library/scala/specialized.scala14
-rw-r--r--src/library/scala/testing/Benchmark.scala3
-rw-r--r--src/library/scala/testing/SUnit.scala23
-rw-r--r--src/library/scala/testing/Show.scala3
-rw-r--r--src/library/scala/text/Document.scala3
-rw-r--r--src/library/scala/throws.scala17
-rw-r--r--src/library/scala/transient.scala6
-rw-r--r--src/library/scala/unchecked.scala3
-rw-r--r--src/library/scala/util/DynamicVariable.scala3
-rw-r--r--src/library/scala/util/Hashable.scala62
-rw-r--r--src/library/scala/util/JenkinsHash.scala191
-rw-r--r--src/library/scala/util/Marshal.scala3
-rw-r--r--src/library/scala/util/MurmurHash.scala196
-rw-r--r--src/library/scala/util/Properties.scala111
-rw-r--r--src/library/scala/util/Random.scala60
-rw-r--r--src/library/scala/util/Sorting.scala123
-rw-r--r--src/library/scala/util/automata/BaseBerrySethi.scala189
-rw-r--r--src/library/scala/util/automata/DetWordAutom.scala39
-rw-r--r--src/library/scala/util/automata/Inclusion.scala3
-rw-r--r--src/library/scala/util/automata/NondetWordAutom.scala28
-rw-r--r--src/library/scala/util/automata/SubsetConstruction.scala5
-rw-r--r--src/library/scala/util/automata/WordBerrySethi.scala244
-rw-r--r--src/library/scala/util/control/Breaks.scala9
-rw-r--r--src/library/scala/util/control/ControlThrowable.scala (renamed from src/library/scala/util/control/ControlException.scala)11
-rw-r--r--src/library/scala/util/control/NoStackTrace.scala2
-rw-r--r--src/library/scala/util/control/TailCalls.scala56
-rw-r--r--src/library/scala/util/control/TailRec.scala24
-rw-r--r--src/library/scala/util/grammar/HedgeRHS.scala3
-rw-r--r--src/library/scala/util/grammar/TreeRHS.scala3
-rw-r--r--src/library/scala/util/logging/ConsoleLogger.scala5
-rw-r--r--src/library/scala/util/logging/Logged.scala3
-rw-r--r--src/library/scala/util/matching/Regex.scala63
-rw-r--r--src/library/scala/util/parsing/ast/AbstractSyntax.scala2
-rw-r--r--src/library/scala/util/parsing/ast/Binders.scala24
-rw-r--r--src/library/scala/util/parsing/combinator/ImplicitConversions.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/JavaTokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala7
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala101
-rw-r--r--src/library/scala/util/parsing/combinator/RegexParsers.scala22
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Lexical.scala12
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Scanners.scala19
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/StdLexical.scala12
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala13
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala12
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala19
-rw-r--r--src/library/scala/util/parsing/combinator/testing/RegexTest.scala1
-rw-r--r--src/library/scala/util/parsing/combinator/testing/Tester.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/token/StdTokens.scala (renamed from src/library/scala/util/parsing/syntax/StdTokens.scala)6
-rw-r--r--src/library/scala/util/parsing/combinator/token/Tokens.scala (renamed from src/library/scala/util/parsing/syntax/Tokens.scala)6
-rw-r--r--src/library/scala/util/parsing/input/CharArrayPosition.scala44
-rw-r--r--src/library/scala/util/parsing/input/CharArrayReader.scala6
-rw-r--r--src/library/scala/util/parsing/input/CharSequenceReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/NoPosition.scala3
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala2
-rw-r--r--src/library/scala/util/parsing/input/PagedSeqReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/Position.scala4
-rw-r--r--src/library/scala/util/parsing/input/Positional.scala2
-rw-r--r--src/library/scala/util/parsing/input/Reader.scala3
-rw-r--r--src/library/scala/util/parsing/input/StreamReader.scala3
-rw-r--r--src/library/scala/util/parsing/json/JSON.scala55
-rw-r--r--src/library/scala/util/parsing/json/Lexer.scala5
-rw-r--r--src/library/scala/util/parsing/json/Parser.scala32
-rw-r--r--src/library/scala/util/parsing/syntax/package.scala19
-rw-r--r--src/library/scala/util/regexp/Base.scala36
-rw-r--r--src/library/scala/util/regexp/PointedHedgeExp.scala3
-rw-r--r--src/library/scala/util/regexp/SyntaxError.scala3
-rw-r--r--src/library/scala/util/regexp/WordExp.scala3
-rw-r--r--src/library/scala/volatile.scala6
-rw-r--r--src/library/scala/xml/Atom.scala21
-rw-r--r--src/library/scala/xml/Attribute.scala50
-rw-r--r--src/library/scala/xml/Comment.scala5
-rw-r--r--src/library/scala/xml/Document.scala8
-rw-r--r--src/library/scala/xml/Elem.scala21
-rw-r--r--src/library/scala/xml/EntityRef.scala5
-rw-r--r--src/library/scala/xml/Equality.scala115
-rw-r--r--src/library/scala/xml/Group.scala61
-rw-r--r--src/library/scala/xml/HasKeyValue.scala4
-rw-r--r--src/library/scala/xml/MalformedAttributeException.scala3
-rw-r--r--src/library/scala/xml/MetaData.scala50
-rw-r--r--src/library/scala/xml/NamespaceBinding.scala18
-rw-r--r--src/library/scala/xml/Node.scala69
-rw-r--r--src/library/scala/xml/NodeBuffer.scala9
-rw-r--r--src/library/scala/xml/NodeSeq.scala86
-rw-r--r--src/library/scala/xml/Null.scala60
-rw-r--r--src/library/scala/xml/PCData.scala8
-rw-r--r--src/library/scala/xml/PrefixedAttribute.scala55
-rw-r--r--src/library/scala/xml/PrettyPrinter.scala23
-rw-r--r--src/library/scala/xml/ProcInstr.scala6
-rw-r--r--src/library/scala/xml/QNode.scala3
-rw-r--r--src/library/scala/xml/SpecialNode.scala7
-rw-r--r--src/library/scala/xml/Text.scala43
-rw-r--r--src/library/scala/xml/TextBuffer.scala7
-rw-r--r--src/library/scala/xml/TopScope.scala8
-rw-r--r--src/library/scala/xml/TypeSymbol.scala3
-rw-r--r--src/library/scala/xml/Unparsed.scala10
-rw-r--r--src/library/scala/xml/UnprefixedAttribute.scala39
-rw-r--r--src/library/scala/xml/Utility.scala84
-rw-r--r--src/library/scala/xml/XML.scala19
-rw-r--r--src/library/scala/xml/Xhtml.scala15
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala18
-rw-r--r--src/library/scala/xml/dtd/ContentModelParser.scala6
-rw-r--r--src/library/scala/xml/dtd/DTD.scala25
-rw-r--r--src/library/scala/xml/dtd/Decl.scala9
-rw-r--r--src/library/scala/xml/dtd/DocType.scala6
-rw-r--r--src/library/scala/xml/dtd/ElementValidator.scala95
-rw-r--r--src/library/scala/xml/dtd/ExternalID.scala8
-rw-r--r--src/library/scala/xml/dtd/Scanner.scala7
-rw-r--r--src/library/scala/xml/dtd/Tokens.scala3
-rw-r--r--src/library/scala/xml/dtd/ValidationException.scala3
-rw-r--r--src/library/scala/xml/factory/Binder.scala5
-rw-r--r--src/library/scala/xml/factory/LoggedNodeFactory.scala5
-rw-r--r--src/library/scala/xml/factory/NodeFactory.scala9
-rw-r--r--src/library/scala/xml/factory/XMLLoader.scala10
-rw-r--r--src/library/scala/xml/include/CircularIncludeException.scala3
-rw-r--r--src/library/scala/xml/include/UnavailableResourceException.scala3
-rw-r--r--src/library/scala/xml/include/XIncludeException.scala5
-rw-r--r--src/library/scala/xml/include/sax/EncodingHeuristics.scala3
-rw-r--r--src/library/scala/xml/include/sax/Main.scala24
-rw-r--r--src/library/scala/xml/include/sax/XIncludeFilter.scala98
-rw-r--r--src/library/scala/xml/include/sax/XIncluder.scala27
-rw-r--r--src/library/scala/xml/package.scala11
-rw-r--r--src/library/scala/xml/parsing/ConstructingHandler.scala3
-rw-r--r--src/library/scala/xml/parsing/ConstructingParser.scala53
-rw-r--r--src/library/scala/xml/parsing/DefaultMarkupHandler.scala5
-rw-r--r--src/library/scala/xml/parsing/ExternalSources.scala8
-rw-r--r--src/library/scala/xml/parsing/FactoryAdapter.scala18
-rw-r--r--src/library/scala/xml/parsing/FatalError.scala10
-rw-r--r--src/library/scala/xml/parsing/MarkupHandler.scala10
-rw-r--r--src/library/scala/xml/parsing/MarkupParser.scala518
-rw-r--r--src/library/scala/xml/parsing/MarkupParserCommon.scala263
-rw-r--r--src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala7
-rw-r--r--src/library/scala/xml/parsing/TokenTests.scala5
-rw-r--r--src/library/scala/xml/parsing/ValidatingMarkupHandler.scala5
-rw-r--r--src/library/scala/xml/parsing/XhtmlEntities.scala6
-rw-r--r--src/library/scala/xml/parsing/XhtmlParser.scala3
-rw-r--r--src/library/scala/xml/persistent/CachedFileStorage.scala5
-rw-r--r--src/library/scala/xml/persistent/Index.scala3
-rw-r--r--src/library/scala/xml/persistent/SetStorage.scala3
-rw-r--r--src/library/scala/xml/pull/XMLEvent.scala45
-rw-r--r--src/library/scala/xml/pull/XMLEventReader.scala25
-rw-r--r--src/library/scala/xml/pull/package.scala41
-rw-r--r--src/library/scala/xml/transform/BasicTransformer.scala3
-rw-r--r--src/library/scala/xml/transform/RewriteRule.scala3
-rw-r--r--src/library/scala/xml/transform/RuleTransformer.scala3
-rw-r--r--src/manual/scala/man1/Command.scala3
-rw-r--r--src/manual/scala/man1/fsc.scala3
-rw-r--r--src/manual/scala/man1/sbaz.scala3
-rw-r--r--src/manual/scala/man1/scala.scala3
-rw-r--r--src/manual/scala/man1/scalac.scala3
-rw-r--r--src/manual/scala/man1/scaladoc.scala129
-rw-r--r--src/manual/scala/man1/scalap.scala3
-rw-r--r--src/manual/scala/tools/docutil/EmitHtml.scala3
-rw-r--r--src/manual/scala/tools/docutil/EmitManPage.scala5
-rw-r--r--src/manual/scala/tools/docutil/ManPage.scala3
-rw-r--r--src/manual/scala/tools/docutil/resources/index.html4
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Assembly.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Attribute.java3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Module.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEFile.java17
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEModule.java14
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEType.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Type.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Version.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala5
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala9
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala19
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala5
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala5
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala145
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala7
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala3
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala33
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java1
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/Table.java1
-rw-r--r--src/partest-alternative/README50
-rw-r--r--src/partest-alternative/scala/tools/partest/Actions.scala231
-rw-r--r--src/partest-alternative/scala/tools/partest/Alarms.scala86
-rw-r--r--src/partest-alternative/scala/tools/partest/BuildContributors.scala102
-rw-r--r--src/partest-alternative/scala/tools/partest/Categories.scala70
-rw-r--r--src/partest-alternative/scala/tools/partest/Compilable.scala106
-rw-r--r--src/partest-alternative/scala/tools/partest/Config.scala129
-rw-r--r--src/partest-alternative/scala/tools/partest/Dispatcher.scala162
-rw-r--r--src/partest-alternative/scala/tools/partest/Entities.scala74
-rw-r--r--src/partest-alternative/scala/tools/partest/Housekeeping.scala187
-rw-r--r--src/partest-alternative/scala/tools/partest/Partest.scala81
-rw-r--r--src/partest-alternative/scala/tools/partest/PartestSpec.scala104
-rw-r--r--src/partest-alternative/scala/tools/partest/Properties.scala17
-rw-r--r--src/partest-alternative/scala/tools/partest/Results.scala121
-rw-r--r--src/partest-alternative/scala/tools/partest/Runner.scala36
-rw-r--r--src/partest-alternative/scala/tools/partest/Statistics.scala46
-rw-r--r--src/partest-alternative/scala/tools/partest/Universe.scala96
-rw-r--r--src/partest-alternative/scala/tools/partest/ant/JavaTask.scala57
-rw-r--r--src/partest-alternative/scala/tools/partest/antlib.xml3
-rw-r--r--src/partest-alternative/scala/tools/partest/category/AllCategories.scala20
-rw-r--r--src/partest-alternative/scala/tools/partest/category/Analysis.scala64
-rw-r--r--src/partest-alternative/scala/tools/partest/category/Compiler.scala140
-rw-r--r--src/partest-alternative/scala/tools/partest/category/Runner.scala108
-rw-r--r--src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala58
-rw-r--r--src/partest-alternative/scala/tools/partest/io/Diff.java873
-rw-r--r--src/partest-alternative/scala/tools/partest/io/DiffPrint.java606
-rw-r--r--src/partest-alternative/scala/tools/partest/io/JUnitReport.scala38
-rw-r--r--src/partest-alternative/scala/tools/partest/io/Logging.scala137
-rw-r--r--src/partest-alternative/scala/tools/partest/nest/StreamAppender.scala94
-rw-r--r--src/partest-alternative/scala/tools/partest/package.scala45
-rw-r--r--src/partest-alternative/scala/tools/partest/util/package.scala61
-rw-r--r--src/partest/README9
-rw-r--r--src/partest/scala/tools/partest/PartestDefaults.scala30
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala212
-rw-r--r--src/partest/scala/tools/partest/nest/AntRunner.scala15
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala199
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala296
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleRunner.scala344
-rw-r--r--src/partest/scala/tools/partest/nest/Diff.java9
-rw-r--r--src/partest/scala/tools/partest/nest/DiffPrint.java3
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala48
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala97
-rw-r--r--src/partest/scala/tools/partest/nest/NestRunner.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/NestUI.scala30
-rw-r--r--src/partest/scala/tools/partest/nest/PathSettings.scala41
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala42
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerUtils.scala39
-rw-r--r--src/partest/scala/tools/partest/nest/StreamAppender.scala70
-rw-r--r--src/partest/scala/tools/partest/nest/TestFile.scala116
-rw-r--r--src/partest/scala/tools/partest/nest/Worker.scala880
-rw-r--r--src/partest/scala/tools/partest/package.scala40
-rw-r--r--src/partest/scala/tools/partest/utils/PrintMgr.scala2
-rw-r--r--src/partest/scala/tools/partest/utils/Properties.scala3
-rw-r--r--src/scalap/decoder.properties2
-rw-r--r--src/scalap/scala/tools/scalap/Arguments.scala3
-rw-r--r--src/scalap/scala/tools/scalap/ByteArrayReader.scala63
-rw-r--r--src/scalap/scala/tools/scalap/Classfile.scala119
-rw-r--r--src/scalap/scala/tools/scalap/Classfiles.scala18
-rw-r--r--src/scalap/scala/tools/scalap/CodeWriter.scala3
-rw-r--r--src/scalap/scala/tools/scalap/Decode.scala101
-rw-r--r--src/scalap/scala/tools/scalap/JavaWriter.scala17
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala64
-rw-r--r--src/scalap/scala/tools/scalap/MetaParser.scala3
-rw-r--r--src/scalap/scala/tools/scalap/Names.scala3
-rw-r--r--src/scalap/scala/tools/scalap/Properties.scala4
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Functors.scala4
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Result.scala10
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Rules.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala4
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala80
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala64
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala264
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala1
-rw-r--r--src/swing/scala/swing/AbstractButton.scala13
-rw-r--r--src/swing/scala/swing/Action.scala19
-rw-r--r--src/swing/scala/swing/Alignment.scala3
-rw-r--r--src/swing/scala/swing/Applet.scala3
-rw-r--r--src/swing/scala/swing/BorderPanel.scala11
-rw-r--r--src/swing/scala/swing/BoxPanel.scala3
-rw-r--r--src/swing/scala/swing/BufferWrapper.scala4
-rw-r--r--src/swing/scala/swing/Button.scala3
-rw-r--r--src/swing/scala/swing/ButtonGroup.scala3
-rw-r--r--src/swing/scala/swing/CheckBox.scala3
-rw-r--r--src/swing/scala/swing/ComboBox.scala8
-rw-r--r--src/swing/scala/swing/Component.scala24
-rw-r--r--src/swing/scala/swing/Container.scala9
-rw-r--r--src/swing/scala/swing/EditorPane.scala5
-rw-r--r--src/swing/scala/swing/FileChooser.scala5
-rw-r--r--src/swing/scala/swing/FlowPanel.scala7
-rw-r--r--src/swing/scala/swing/Font.scala70
-rw-r--r--src/swing/scala/swing/FormattedTextField.scala5
-rw-r--r--src/swing/scala/swing/GUIApplication.scala3
-rw-r--r--src/swing/scala/swing/GridBagPanel.scala9
-rw-r--r--src/swing/scala/swing/GridPanel.scala6
-rw-r--r--src/swing/scala/swing/Label.scala6
-rw-r--r--src/swing/scala/swing/LayoutContainer.scala12
-rw-r--r--src/swing/scala/swing/ListView.scala9
-rw-r--r--src/swing/scala/swing/MainFrame.scala5
-rw-r--r--src/swing/scala/swing/Menu.scala5
-rw-r--r--src/swing/scala/swing/Orientable.scala3
-rw-r--r--src/swing/scala/swing/Orientation.scala3
-rw-r--r--src/swing/scala/swing/Oriented.scala3
-rw-r--r--src/swing/scala/swing/Panel.scala3
-rw-r--r--src/swing/scala/swing/PasswordField.scala5
-rw-r--r--src/swing/scala/swing/ProgressBar.scala5
-rw-r--r--src/swing/scala/swing/Publisher.scala13
-rw-r--r--src/swing/scala/swing/RadioButton.scala5
-rw-r--r--src/swing/scala/swing/Reactions.scala3
-rw-r--r--src/swing/scala/swing/Reactor.scala3
-rw-r--r--src/swing/scala/swing/RichWindow.scala12
-rw-r--r--src/swing/scala/swing/RootPanel.scala15
-rw-r--r--src/swing/scala/swing/ScrollBar.scala5
-rw-r--r--src/swing/scala/swing/ScrollPane.scala22
-rw-r--r--src/swing/scala/swing/Scrollable.scala3
-rw-r--r--src/swing/scala/swing/Separator.scala5
-rw-r--r--src/swing/scala/swing/SequentialContainer.scala3
-rw-r--r--src/swing/scala/swing/SimpleGUIApplication.scala9
-rw-r--r--src/swing/scala/swing/SimpleSwingApplication.scala4
-rw-r--r--src/swing/scala/swing/Slider.scala14
-rw-r--r--src/swing/scala/swing/SplitPane.scala11
-rw-r--r--src/swing/scala/swing/Swing.scala6
-rw-r--r--src/swing/scala/swing/SwingActor.scala3
-rw-r--r--src/swing/scala/swing/TabbedPane.scala9
-rw-r--r--src/swing/scala/swing/Table.scala19
-rw-r--r--src/swing/scala/swing/TextArea.scala8
-rw-r--r--src/swing/scala/swing/TextComponent.scala4
-rw-r--r--src/swing/scala/swing/TextField.scala16
-rw-r--r--src/swing/scala/swing/ToggleButton.scala5
-rw-r--r--src/swing/scala/swing/UIElement.scala20
-rw-r--r--src/swing/scala/swing/Window.scala15
-rw-r--r--src/swing/scala/swing/event/ActionEvent.scala3
-rw-r--r--src/swing/scala/swing/event/AdjustingEvent.scala3
-rw-r--r--src/swing/scala/swing/event/BackgroundChanged.scala3
-rw-r--r--src/swing/scala/swing/event/ButtonClicked.scala3
-rw-r--r--src/swing/scala/swing/event/CaretUpdate.scala3
-rw-r--r--src/swing/scala/swing/event/ComponentEvent.scala13
-rw-r--r--src/swing/scala/swing/event/ContainerEvent.scala3
-rw-r--r--src/swing/scala/swing/event/EditDone.scala3
-rw-r--r--src/swing/scala/swing/event/Event.scala3
-rw-r--r--src/swing/scala/swing/event/FocusEvent.scala3
-rw-r--r--src/swing/scala/swing/event/FontChanged.scala3
-rw-r--r--src/swing/scala/swing/event/ForegroundChanged.scala3
-rw-r--r--src/swing/scala/swing/event/InputEvent.scala3
-rw-r--r--src/swing/scala/swing/event/Key.scala3
-rw-r--r--src/swing/scala/swing/event/KeyEvent.scala20
-rw-r--r--src/swing/scala/swing/event/ListEvent.scala3
-rw-r--r--src/swing/scala/swing/event/MouseEvent.scala43
-rw-r--r--src/swing/scala/swing/event/SelectionEvent.scala3
-rw-r--r--src/swing/scala/swing/event/TableEvent.scala5
-rw-r--r--src/swing/scala/swing/event/UIEvent.scala3
-rw-r--r--src/swing/scala/swing/event/ValueChanged.scala3
-rw-r--r--src/swing/scala/swing/event/WindowActivated.scala3
-rw-r--r--src/swing/scala/swing/event/WindowClosing.scala3
-rw-r--r--src/swing/scala/swing/event/WindowDeactivated.scala3
-rw-r--r--src/swing/scala/swing/event/WindowDeiconified.scala3
-rw-r--r--src/swing/scala/swing/event/WindowEvent.scala3
-rw-r--r--src/swing/scala/swing/event/WindowIconified.scala3
-rw-r--r--src/swing/scala/swing/event/WindowOpened.scala3
-rw-r--r--src/swing/scala/swing/model/Matrix.scala3
-rw-r--r--src/swing/scala/swing/package.scala78
-rw-r--r--src/swing/scala/swing/test/SimpleApplet.scala3
-rw-r--r--src/swing/scala/swing/test/UIDemo.scala6
-rw-r--r--test/attic/files/cli/test1/Main.check.j9vm5 (renamed from test/files/cli/test1/Main.check.j9vm5)0
-rw-r--r--test/attic/files/cli/test1/Main.check.java (renamed from test/files/cli/test1/Main.check.java)0
-rw-r--r--test/attic/files/cli/test1/Main.check.java5 (renamed from test/files/cli/test1/Main.check.java5)0
-rw-r--r--test/attic/files/cli/test1/Main.check.java5_api (renamed from test/files/cli/test1/Main.check.java5_api)0
-rw-r--r--test/attic/files/cli/test1/Main.check.java5_j9 (renamed from test/files/cli/test1/Main.check.java5_j9)0
-rw-r--r--test/attic/files/cli/test1/Main.check.javac (renamed from test/files/cli/test1/Main.check.javac)0
-rw-r--r--test/attic/files/cli/test1/Main.check.javac5 (renamed from test/files/cli/test1/Main.check.javac5)0
-rw-r--r--test/attic/files/cli/test1/Main.check.javac6 (renamed from test/files/cli/test1/Main.check.javac6)0
-rw-r--r--test/attic/files/cli/test1/Main.check.jikes (renamed from test/files/cli/test1/Main.check.jikes)0
-rw-r--r--test/attic/files/cli/test1/Main.check.jikes5 (renamed from test/files/cli/test1/Main.check.jikes5)0
-rw-r--r--test/attic/files/cli/test1/Main.check.scala (renamed from test/files/cli/test1/Main.check.scala)0
-rw-r--r--test/attic/files/cli/test1/Main.check.scala_api (renamed from test/files/cli/test1/Main.check.scala_api)0
-rw-r--r--test/attic/files/cli/test1/Main.check.scala_j9 (renamed from test/files/cli/test1/Main.check.scala_j9)0
-rw-r--r--test/attic/files/cli/test1/Main.check.scalac (renamed from test/files/cli/test1/Main.check.scalac)0
-rw-r--r--test/attic/files/cli/test1/Main.check.scalaint (renamed from test/files/cli/test1/Main.check.scalaint)0
-rw-r--r--test/attic/files/cli/test1/Main.java (renamed from test/files/cli/test1/Main.java)0
-rw-r--r--test/attic/files/cli/test1/Main.scala (renamed from test/files/cli/test1/Main.scala)0
-rw-r--r--test/attic/files/cli/test2/Main.check.j9vm5 (renamed from test/files/cli/test2/Main.check.j9vm5)0
-rw-r--r--test/attic/files/cli/test2/Main.check.java (renamed from test/files/cli/test2/Main.check.java)0
-rw-r--r--test/attic/files/cli/test2/Main.check.java5 (renamed from test/files/cli/test2/Main.check.java5)0
-rw-r--r--test/attic/files/cli/test2/Main.check.java5_api (renamed from test/files/cli/test2/Main.check.java5_api)0
-rw-r--r--test/attic/files/cli/test2/Main.check.java5_j9 (renamed from test/files/cli/test2/Main.check.java5_j9)0
-rw-r--r--test/attic/files/cli/test2/Main.check.javac (renamed from test/files/cli/test2/Main.check.javac)0
-rw-r--r--test/attic/files/cli/test2/Main.check.javac5 (renamed from test/files/cli/test2/Main.check.javac5)0
-rw-r--r--test/attic/files/cli/test2/Main.check.javac6 (renamed from test/files/cli/test2/Main.check.javac6)0
-rw-r--r--test/attic/files/cli/test2/Main.check.jikes (renamed from test/files/cli/test2/Main.check.jikes)0
-rw-r--r--test/attic/files/cli/test2/Main.check.jikes5 (renamed from test/files/cli/test2/Main.check.jikes5)0
-rw-r--r--test/attic/files/cli/test2/Main.check.scala (renamed from test/files/cli/test2/Main.check.scala)0
-rw-r--r--test/attic/files/cli/test2/Main.check.scala_api (renamed from test/files/cli/test2/Main.check.scala_api)0
-rw-r--r--test/attic/files/cli/test2/Main.check.scala_j9 (renamed from test/files/cli/test2/Main.check.scala_j9)0
-rw-r--r--test/attic/files/cli/test2/Main.check.scalac (renamed from test/files/cli/test2/Main.check.scalac)0
-rw-r--r--test/attic/files/cli/test2/Main.check.scalaint (renamed from test/files/cli/test2/Main.check.scalaint)0
-rw-r--r--test/attic/files/cli/test2/Main.java (renamed from test/files/cli/test2/Main.java)0
-rw-r--r--test/attic/files/cli/test2/Main.scala (renamed from test/files/cli/test2/Main.scala)2
-rw-r--r--test/attic/files/cli/test3/Main.check.j9vm5 (renamed from test/files/cli/test3/Main.check.j9vm5)0
-rw-r--r--test/attic/files/cli/test3/Main.check.java (renamed from test/files/cli/test3/Main.check.java)0
-rw-r--r--test/attic/files/cli/test3/Main.check.java5 (renamed from test/files/cli/test3/Main.check.java5)0
-rw-r--r--test/attic/files/cli/test3/Main.check.java5_api (renamed from test/files/cli/test3/Main.check.java5_api)0
-rw-r--r--test/attic/files/cli/test3/Main.check.java5_j9 (renamed from test/files/cli/test3/Main.check.java5_j9)0
-rw-r--r--test/attic/files/cli/test3/Main.check.javac (renamed from test/files/cli/test3/Main.check.javac)0
-rw-r--r--test/attic/files/cli/test3/Main.check.javac5 (renamed from test/files/cli/test3/Main.check.javac5)0
-rw-r--r--test/attic/files/cli/test3/Main.check.javac6 (renamed from test/files/cli/test3/Main.check.javac6)0
-rw-r--r--test/attic/files/cli/test3/Main.check.jikes (renamed from test/files/cli/test3/Main.check.jikes)0
-rw-r--r--test/attic/files/cli/test3/Main.check.jikes5 (renamed from test/files/cli/test3/Main.check.jikes5)0
-rw-r--r--test/attic/files/cli/test3/Main.check.scala (renamed from test/files/cli/test3/Main.check.scala)0
-rw-r--r--test/attic/files/cli/test3/Main.check.scala_api (renamed from test/files/cli/test3/Main.check.scala_api)0
-rw-r--r--test/attic/files/cli/test3/Main.check.scala_j9 (renamed from test/files/cli/test3/Main.check.scala_j9)0
-rw-r--r--test/attic/files/cli/test3/Main.check.scalac (renamed from test/files/cli/test3/Main.check.scalac)0
-rw-r--r--test/attic/files/cli/test3/Main.check.scalaint (renamed from test/files/cli/test3/Main.check.scalaint)0
-rw-r--r--test/attic/files/cli/test3/Main.java (renamed from test/files/cli/test3/Main.java)0
-rw-r--r--test/attic/files/cli/test3/Main.scala (renamed from test/files/cli/test3/Main.scala)0
-rw-r--r--test/debug/buildmanager/.gitignore0
-rw-r--r--test/debug/jvm/.gitignore0
-rw-r--r--test/debug/neg/.gitignore0
-rw-r--r--test/debug/pos/.gitignore0
-rw-r--r--test/debug/res/.gitignore0
-rw-r--r--test/debug/run/.gitignore0
-rw-r--r--test/debug/scalacheck/.gitignore0
-rw-r--r--test/debug/scalap/.gitignore0
-rw-r--r--test/debug/shootout/.gitignore0
-rw-r--r--test/disabled-windows/script/loadAndExecute.check (renamed from test/files/script/loadAndExecute/loadAndExecute.check)0
-rwxr-xr-xtest/disabled-windows/script/loadAndExecute/lAndE1.scala (renamed from test/files/script/loadAndExecute/lAndE1.scala)0
-rwxr-xr-xtest/disabled-windows/script/loadAndExecute/lAndE2.scala (renamed from test/files/script/loadAndExecute/lAndE2.scala)0
-rwxr-xr-xtest/disabled-windows/script/loadAndExecute/loadAndExecute.scala (renamed from test/files/script/loadAndExecute/loadAndExecute.scala)0
-rwxr-xr-xtest/disabled-windows/script/utf8.bat (renamed from test/files/script/utf8.bat)0
-rw-r--r--test/disabled-windows/script/utf8.check (renamed from test/files/script/utf8.check)0
-rwxr-xr-xtest/disabled-windows/script/utf8.scala (renamed from test/files/script/utf8.scala)9
-rw-r--r--test/disabled/buildmanager/t2651_1/A.scala1
-rw-r--r--test/disabled/buildmanager/t2651_1/B.scala2
-rw-r--r--test/disabled/buildmanager/t2651_1/C.scala3
-rw-r--r--test/disabled/buildmanager/t2651_1/D.scala3
-rw-r--r--test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala2
-rw-r--r--test/disabled/buildmanager/t2651_1/t2651_1.check19
-rw-r--r--test/disabled/buildmanager/t2651_1/t2651_1.test3
-rw-r--r--test/disabled/pos/bug2919.scala12
-rw-r--r--test/disabled/pos/spec-traits.scala83
-rw-r--r--test/disabled/run/docgenerator.check (renamed from test/files/run/docgenerator.check)0
-rw-r--r--test/disabled/run/docgenerator.scala (renamed from test/files/run/docgenerator.scala)4
-rw-r--r--test/disabled/run/script-positions.scala86
-rw-r--r--test/disabled/run/sigtp.check7
-rw-r--r--test/disabled/run/sigtp.scala18
-rw-r--r--test/disabled/run/t2946/Parsers.scala4
-rw-r--r--test/disabled/run/t2946/ResponseCommon.scala14
-rw-r--r--test/disabled/run/t2946/Test.scala7
-rw-r--r--test/disabled/scalacheck/redblack.scala157
-rwxr-xr-xtest/files/bench/equality/eq.scala34
-rw-r--r--test/files/bench/equality/eqeq.eqlog42
-rwxr-xr-xtest/files/bench/equality/eqeq.scala46
-rw-r--r--test/files/buildmanager/annotated/A.scala1
-rw-r--r--test/files/buildmanager/annotated/annotated.check6
-rw-r--r--test/files/buildmanager/annotated/annotated.test2
-rw-r--r--test/files/buildmanager/freshnames/A.scala16
-rw-r--r--test/files/buildmanager/freshnames/B.scala4
-rw-r--r--test/files/buildmanager/freshnames/freshnames.check6
-rw-r--r--test/files/buildmanager/freshnames/freshnames.test2
-rw-r--r--test/files/buildmanager/infer/A.scala16
-rw-r--r--test/files/buildmanager/infer/infer.check6
-rw-r--r--test/files/buildmanager/infer/infer.test2
-rw-r--r--test/files/buildmanager/overloaded_1/A.scala11
-rw-r--r--test/files/buildmanager/overloaded_1/overloaded_1.check6
-rw-r--r--test/files/buildmanager/overloaded_1/overloaded_1.test2
-rw-r--r--test/files/buildmanager/simpletest/A.scala3
-rw-r--r--test/files/buildmanager/simpletest/B.scala3
-rw-r--r--test/files/buildmanager/simpletest/simpletest.changes/A1.scala1
-rw-r--r--test/files/buildmanager/simpletest/simpletest.check11
-rw-r--r--test/files/buildmanager/simpletest/simpletest.test3
-rw-r--r--test/files/buildmanager/t2280/A.scala1
-rw-r--r--test/files/buildmanager/t2280/B.java2
-rw-r--r--test/files/buildmanager/t2280/t2280.check6
-rw-r--r--test/files/buildmanager/t2280/t2280.test2
-rw-r--r--test/files/buildmanager/t2556_1/A.scala3
-rw-r--r--test/files/buildmanager/t2556_1/B.scala3
-rw-r--r--test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2556_1/t2556_1.check12
-rw-r--r--test/files/buildmanager/t2556_1/t2556_1.test3
-rw-r--r--test/files/buildmanager/t2556_2/A.scala4
-rw-r--r--test/files/buildmanager/t2556_2/B.scala2
-rw-r--r--test/files/buildmanager/t2556_2/C.scala4
-rw-r--r--test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2556_2/t2556_2.check13
-rw-r--r--test/files/buildmanager/t2556_2/t2556_2.test3
-rw-r--r--test/files/buildmanager/t2556_3/A.scala5
-rw-r--r--test/files/buildmanager/t2556_3/B.scala5
-rw-r--r--test/files/buildmanager/t2556_3/C.scala2
-rw-r--r--test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2556_3/t2556_3.check18
-rw-r--r--test/files/buildmanager/t2556_3/t2556_3.test3
-rw-r--r--test/files/buildmanager/t2557/A.scala4
-rw-r--r--test/files/buildmanager/t2557/B.scala4
-rw-r--r--test/files/buildmanager/t2557/C.scala3
-rw-r--r--test/files/buildmanager/t2557/D.scala1
-rw-r--r--test/files/buildmanager/t2557/E.scala1
-rw-r--r--test/files/buildmanager/t2557/F.scala4
-rw-r--r--test/files/buildmanager/t2557/t2557.changes/D2.scala2
-rw-r--r--test/files/buildmanager/t2557/t2557.check10
-rw-r--r--test/files/buildmanager/t2557/t2557.test3
-rw-r--r--test/files/buildmanager/t2559/A.scala5
-rw-r--r--test/files/buildmanager/t2559/D.scala8
-rw-r--r--test/files/buildmanager/t2559/t2559.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2559/t2559.check14
-rw-r--r--test/files/buildmanager/t2559/t2559.test3
-rw-r--r--test/files/buildmanager/t2562/A.scala7
-rw-r--r--test/files/buildmanager/t2562/B.scala8
-rw-r--r--test/files/buildmanager/t2562/t2562.changes/A2.scala8
-rw-r--r--test/files/buildmanager/t2562/t2562.check12
-rw-r--r--test/files/buildmanager/t2562/t2562.test3
-rw-r--r--test/files/buildmanager/t2649/A.scala3
-rw-r--r--test/files/buildmanager/t2649/B.scala4
-rw-r--r--test/files/buildmanager/t2649/t2649.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2649/t2649.check9
-rw-r--r--test/files/buildmanager/t2649/t2649.test3
-rw-r--r--test/files/buildmanager/t2650_1/A.scala4
-rw-r--r--test/files/buildmanager/t2650_1/B.scala3
-rw-r--r--test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala3
-rw-r--r--test/files/buildmanager/t2650_1/t2650_1.check11
-rw-r--r--test/files/buildmanager/t2650_1/t2650_1.test3
-rw-r--r--test/files/buildmanager/t2650_2/A.scala3
-rw-r--r--test/files/buildmanager/t2650_2/B.scala4
-rw-r--r--test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2650_2/t2650_2.check13
-rw-r--r--test/files/buildmanager/t2650_2/t2650_2.test3
-rw-r--r--test/files/buildmanager/t2650_3/A.scala4
-rw-r--r--test/files/buildmanager/t2650_3/B.scala3
-rw-r--r--test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2650_3/t2650_3.check13
-rw-r--r--test/files/buildmanager/t2650_3/t2650_3.test3
-rw-r--r--test/files/buildmanager/t2650_4/A.scala5
-rw-r--r--test/files/buildmanager/t2650_4/B.scala3
-rw-r--r--test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2650_4/t2650_4.check13
-rw-r--r--test/files/buildmanager/t2650_4/t2650_4.test3
-rw-r--r--test/files/buildmanager/t2651_2/A.scala1
-rw-r--r--test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala1
-rw-r--r--test/files/buildmanager/t2651_2/t2651_2.check6
-rw-r--r--test/files/buildmanager/t2651_2/t2651_2.test3
-rw-r--r--test/files/buildmanager/t2651_3/A.scala3
-rw-r--r--test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala3
-rw-r--r--test/files/buildmanager/t2651_3/t2651_3.check6
-rw-r--r--test/files/buildmanager/t2651_3/t2651_3.test3
-rw-r--r--test/files/buildmanager/t2651_4/A.scala5
-rw-r--r--test/files/buildmanager/t2651_4/B.scala3
-rw-r--r--test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2651_4/t2651_4.check13
-rw-r--r--test/files/buildmanager/t2651_4/t2651_4.test3
-rw-r--r--test/files/buildmanager/t2652/A.scala3
-rw-r--r--test/files/buildmanager/t2652/B.scala4
-rw-r--r--test/files/buildmanager/t2652/t2652.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2652/t2652.check9
-rw-r--r--test/files/buildmanager/t2652/t2652.test3
-rw-r--r--test/files/buildmanager/t2653/A.scala2
-rw-r--r--test/files/buildmanager/t2653/B.scala3
-rw-r--r--test/files/buildmanager/t2653/t2653.changes/A2.scala2
-rw-r--r--test/files/buildmanager/t2653/t2653.check13
-rw-r--r--test/files/buildmanager/t2653/t2653.test3
-rw-r--r--test/files/buildmanager/t2654/A.scala2
-rw-r--r--test/files/buildmanager/t2654/B.scala1
-rw-r--r--test/files/buildmanager/t2654/t2654.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2654/t2654.check6
-rw-r--r--test/files/buildmanager/t2654/t2654.test3
-rw-r--r--test/files/buildmanager/t2655/A.scala4
-rw-r--r--test/files/buildmanager/t2655/B.scala3
-rw-r--r--test/files/buildmanager/t2655/t2655.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2655/t2655.check13
-rw-r--r--test/files/buildmanager/t2655/t2655.test3
-rw-r--r--test/files/buildmanager/t2657/A.scala3
-rw-r--r--test/files/buildmanager/t2657/B.scala4
-rw-r--r--test/files/buildmanager/t2657/t2657.changes/A2.scala3
-rw-r--r--test/files/buildmanager/t2657/t2657.check13
-rw-r--r--test/files/buildmanager/t2657/t2657.test3
-rw-r--r--test/files/buildmanager/t2789/A.scala5
-rw-r--r--test/files/buildmanager/t2789/B.scala3
-rw-r--r--test/files/buildmanager/t2789/t2789.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2789/t2789.check11
-rw-r--r--test/files/buildmanager/t2789/t2789.test3
-rw-r--r--test/files/buildmanager/t2790/A.scala5
-rw-r--r--test/files/buildmanager/t2790/B.scala4
-rw-r--r--test/files/buildmanager/t2790/t2790.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2790/t2790.check14
-rw-r--r--test/files/buildmanager/t2790/t2790.test3
-rw-r--r--test/files/buildmanager/t3045/A.java7
-rw-r--r--test/files/buildmanager/t3045/t3045.check3
-rw-r--r--test/files/buildmanager/t3045/t3045.test1
-rw-r--r--test/files/buildmanager/t3054/bar/Bar.java7
-rw-r--r--test/files/buildmanager/t3054/foo/Foo.scala5
-rw-r--r--test/files/buildmanager/t3054/t3054.check3
-rw-r--r--test/files/buildmanager/t3054/t3054.test1
-rw-r--r--test/files/buildmanager/t3133/A.java7
-rw-r--r--test/files/buildmanager/t3133/t3133.check3
-rw-r--r--test/files/buildmanager/t3133/t3133.test1
-rw-r--r--test/files/continuations-neg/function0.check6
-rw-r--r--test/files/continuations-neg/function0.scala16
-rw-r--r--test/files/continuations-neg/function2.check6
-rw-r--r--test/files/continuations-neg/function2.scala16
-rw-r--r--test/files/continuations-neg/function3.check6
-rw-r--r--test/files/continuations-neg/function3.scala15
-rw-r--r--test/files/continuations-neg/infer0.check4
-rw-r--r--test/files/continuations-neg/infer0.scala14
-rw-r--r--test/files/continuations-neg/infer2.check4
-rw-r--r--test/files/continuations-neg/infer2.scala19
-rw-r--r--test/files/continuations-neg/lazy.check6
-rw-r--r--test/files/continuations-neg/lazy.scala16
-rw-r--r--test/files/continuations-neg/t1929.check6
-rw-r--r--test/files/continuations-neg/t1929.scala17
-rw-r--r--test/files/continuations-neg/t2285.check6
-rw-r--r--test/files/continuations-neg/t2285.scala11
-rw-r--r--test/files/continuations-neg/t2949.check6
-rw-r--r--test/files/continuations-neg/t2949.scala15
-rw-r--r--test/files/continuations-neg/trycatch2.check7
-rw-r--r--test/files/continuations-neg/trycatch2.scala33
-rwxr-xr-xtest/files/continuations-run/basics.check2
-rwxr-xr-xtest/files/continuations-run/basics.scala23
-rw-r--r--test/files/continuations-run/function1.check1
-rw-r--r--test/files/continuations-run/function1.scala16
-rw-r--r--test/files/continuations-run/function4.check1
-rw-r--r--test/files/continuations-run/function4.scala15
-rw-r--r--test/files/continuations-run/function5.check1
-rw-r--r--test/files/continuations-run/function5.scala15
-rw-r--r--test/files/continuations-run/function6.check1
-rw-r--r--test/files/continuations-run/function6.scala16
-rw-r--r--test/files/continuations-run/ifelse0.check2
-rw-r--r--test/files/continuations-run/ifelse0.scala18
-rw-r--r--test/files/continuations-run/ifelse1.check4
-rw-r--r--test/files/continuations-run/ifelse1.scala25
-rw-r--r--test/files/continuations-run/ifelse2.check4
-rw-r--r--test/files/continuations-run/ifelse2.scala16
-rw-r--r--test/files/continuations-run/ifelse3.check2
-rw-r--r--test/files/continuations-run/ifelse3.scala21
-rw-r--r--test/files/continuations-run/infer1.scala33
-rw-r--r--test/files/continuations-run/match0.check2
-rw-r--r--test/files/continuations-run/match0.scala18
-rw-r--r--test/files/continuations-run/match1.check2
-rw-r--r--test/files/continuations-run/match1.scala18
-rw-r--r--test/files/continuations-run/match2.check2
-rw-r--r--test/files/continuations-run/match2.scala26
-rw-r--r--test/files/continuations-run/t1807.check1
-rw-r--r--test/files/continuations-run/t1807.scala14
-rw-r--r--test/files/continuations-run/t1808.scala10
-rw-r--r--test/files/continuations-run/t1820.scala14
-rw-r--r--test/files/continuations-run/t1821.check4
-rw-r--r--test/files/continuations-run/t1821.scala20
-rw-r--r--test/files/continuations-run/t2864.check1
-rw-r--r--test/files/continuations-run/t2864.scala30
-rw-r--r--test/files/continuations-run/t2934.check1
-rw-r--r--test/files/continuations-run/t2934.scala10
-rw-r--r--test/files/continuations-run/t3199.check1
-rw-r--r--test/files/continuations-run/t3199.scala20
-rw-r--r--test/files/continuations-run/t3199b.check1
-rw-r--r--test/files/continuations-run/t3199b.scala11
-rw-r--r--test/files/continuations-run/t3223.check1
-rw-r--r--test/files/continuations-run/t3223.scala19
-rw-r--r--test/files/continuations-run/t3225.check12
-rw-r--r--test/files/continuations-run/t3225.scala56
-rw-r--r--test/files/continuations-run/trycatch0.check2
-rw-r--r--test/files/continuations-run/trycatch0.scala25
-rw-r--r--test/files/continuations-run/trycatch1.check4
-rw-r--r--test/files/continuations-run/trycatch1.scala48
-rw-r--r--test/files/continuations-run/while0.check1
-rw-r--r--test/files/continuations-run/while0.scala22
-rw-r--r--test/files/continuations-run/while1.check11
-rw-r--r--test/files/continuations-run/while1.scala22
-rw-r--r--test/files/continuations-run/while2.check19
-rw-r--r--test/files/continuations-run/while2.scala23
-rw-r--r--test/files/files.iml17
-rw-r--r--test/files/jvm/JavaInteraction.scala23
-rw-r--r--test/files/jvm/actor-exceptions.check12
-rw-r--r--test/files/jvm/actor-exceptions.scala34
-rw-r--r--test/files/jvm/actor-executor.check20
-rw-r--r--test/files/jvm/actor-executor.scala75
-rw-r--r--test/files/jvm/actor-executor2.check21
-rw-r--r--test/files/jvm/actor-executor2.scala88
-rw-r--r--test/files/jvm/actor-executor3.check20
-rw-r--r--test/files/jvm/actor-executor3.scala62
-rw-r--r--test/files/jvm/actor-getstate.check2
-rw-r--r--test/files/jvm/actor-getstate.scala85
-rw-r--r--test/files/jvm/actor-link-getstate.check2
-rw-r--r--test/files/jvm/actor-link-getstate.scala62
-rw-r--r--test/files/jvm/actor-looping.scala5
-rw-r--r--test/files/jvm/actor-normal-exit.scala10
-rw-r--r--test/files/jvm/actor-receivewithin.scala2
-rw-r--r--test/files/jvm/actor-sync-send-timeout.scala47
-rw-r--r--test/files/jvm/actor-termination.scala5
-rw-r--r--test/files/jvm/actor-uncaught-exception.check2
-rw-r--r--test/files/jvm/actor-uncaught-exception.scala63
-rw-r--r--test/files/jvm/actor-uncaught-exception2.check2
-rw-r--r--test/files/jvm/actor-uncaught-exception2.scala63
-rw-r--r--test/files/jvm/annotations.check13
-rw-r--r--test/files/jvm/annotations.scala53
-rw-r--r--test/files/jvm/bigints.scala2
-rw-r--r--test/files/jvm/bug560bis.scala4
-rw-r--r--test/files/jvm/console.scala2
-rw-r--r--test/files/jvm/daemon-actor-termination.scala10
-rw-r--r--test/files/jvm/deprecation.cmds3
-rw-r--r--test/files/jvm/deprecation/Defs.java12
-rw-r--r--test/files/jvm/deprecation/Test_1.scala17
-rw-r--r--test/files/jvm/deprecation/Use_2.java10
-rw-r--r--test/files/jvm/future-alarm.check20
-rw-r--r--test/files/jvm/future-alarm.scala21
-rw-r--r--test/files/jvm/future-awaitall-zero.check1
-rw-r--r--test/files/jvm/future-awaitall-zero.scala22
-rw-r--r--test/files/jvm/future-termination.scala29
-rw-r--r--test/files/jvm/inner.scala6
-rw-r--r--test/files/jvm/interpreter.check20
-rw-r--r--test/files/jvm/interpreter.scala10
-rw-r--r--test/files/jvm/libnatives-32.so (renamed from test/disabled/jvm/libnatives-32.so)bin5359 -> 5359 bytes
-rw-r--r--test/files/jvm/libnatives-64.so (renamed from test/disabled/jvm/libnatives-64.so)bin7466 -> 7466 bytes
-rw-r--r--test/files/jvm/libnatives.jnilib (renamed from test/disabled/jvm/libnatives.jnilib)bin8456 -> 8456 bytes
-rw-r--r--test/files/jvm/methvsfield.java6
-rwxr-xr-xtest/files/jvm/mkLibNatives.bat (renamed from test/disabled/jvm/mkLibNatives.bat)0
-rwxr-xr-xtest/files/jvm/mkLibNatives.sh (renamed from test/disabled/jvm/mkLibNatives.sh)0
-rw-r--r--test/files/jvm/natives-32.dll (renamed from test/disabled/jvm/natives-32.dll)bin40960 -> 40960 bytes
-rw-r--r--test/files/jvm/natives.c (renamed from test/disabled/jvm/natives.c)0
-rw-r--r--test/files/jvm/natives.check (renamed from test/disabled/jvm/natives.check)0
-rw-r--r--test/files/jvm/natives.h (renamed from test/disabled/jvm/natives.h)0
-rw-r--r--test/files/jvm/nest/nest.java38
-rw-r--r--test/files/jvm/nest/nest.scala21
-rw-r--r--test/files/jvm/protectedacc.scala16
-rw-r--r--test/files/jvm/reactor-exceptionOnSend.scala15
-rw-r--r--test/files/jvm/reactor-producer-consumer.check10
-rw-r--r--test/files/jvm/reactor-producer-consumer.scala95
-rw-r--r--test/files/jvm/reactor.scala19
-rw-r--r--test/files/jvm/replyablereactor.scala15
-rw-r--r--test/files/jvm/replyablereactor2.scala12
-rw-r--r--test/files/jvm/replyablereactor3.scala12
-rw-r--r--test/files/jvm/replyablereactor4.scala12
-rw-r--r--test/files/jvm/replyreactor-react-sender.scala10
-rw-r--r--test/files/jvm/replyreactor.scala16
-rw-r--r--test/files/jvm/scheduler-adapter.scala10
-rw-r--r--test/files/jvm/serialization.check229
-rw-r--r--test/files/jvm/serialization.scala578
-rw-r--r--test/files/jvm/stringbuilder.scala6
-rw-r--r--test/files/jvm/t1449.check1
-rw-r--r--test/files/jvm/t1449.scala25
-rw-r--r--test/files/jvm/t1461.scala2
-rw-r--r--test/files/jvm/t1464/MyTrait.scala2
-rw-r--r--test/files/jvm/t1600.scala76
-rw-r--r--test/files/jvm/t1948.scala16
-rw-r--r--test/files/jvm/t2359.scala25
-rw-r--r--test/files/jvm/t2470.check1
-rw-r--r--test/files/jvm/t2470.cmds3
-rw-r--r--test/files/jvm/t2470/Action.java6
-rw-r--r--test/files/jvm/t2470/Read_Classfile_2.scala3
-rw-r--r--test/files/jvm/t2470/Task.java3
-rw-r--r--test/files/jvm/t2470/Test_1.scala11
-rw-r--r--test/files/jvm/t2515.scala40
-rw-r--r--test/files/jvm/t2530.scala14
-rw-r--r--test/files/jvm/t2570.check0
-rw-r--r--test/files/jvm/t2570/Test.scala3
-rw-r--r--test/files/jvm/t2570/Test1.java2
-rw-r--r--test/files/jvm/t2570/Test3.java2
-rw-r--r--test/files/jvm/t2585.check0
-rw-r--r--test/files/jvm/t2585/Test.java16
-rw-r--r--test/files/jvm/t2585/genericouter.scala25
-rw-r--r--test/files/jvm/t2827.check3
-rw-r--r--test/files/jvm/t2827.scala14
-rw-r--r--test/files/jvm/t3003.check1
-rw-r--r--test/files/jvm/t3003.cmds2
-rw-r--r--test/files/jvm/t3003/Annot.java4
-rw-r--r--test/files/jvm/t3003/Test_1.scala8
-rw-r--r--test/files/jvm/t3102.check2
-rw-r--r--test/files/jvm/t3102.scala36
-rw-r--r--test/files/jvm/t3356.check1
-rw-r--r--test/files/jvm/t3356.scala54
-rw-r--r--test/files/jvm/t3365.check5
-rw-r--r--test/files/jvm/t3365.scala65
-rw-r--r--test/files/jvm/t3407.check10
-rw-r--r--test/files/jvm/t3407.scala19
-rw-r--r--test/files/jvm/t3412-channel.check10
-rw-r--r--test/files/jvm/t3412-channel.scala38
-rw-r--r--test/files/jvm/t3412.check10
-rw-r--r--test/files/jvm/t3412.scala32
-rw-r--r--test/files/jvm/t3415/Hello.java3
-rw-r--r--test/files/jvm/t3415/HelloWorld.scala4
-rw-r--r--test/files/jvm/t3470.check3
-rw-r--r--test/files/jvm/t3470.scala30
-rw-r--r--test/files/jvm/t3838.check1
-rw-r--r--test/files/jvm/t3838.scala15
-rw-r--r--test/files/jvm/throws-annot.scala10
-rw-r--r--test/files/jvm/typerep.scala2
-rw-r--r--test/files/jvm/unittest_io.scala8
-rw-r--r--test/files/jvm/unittest_xml.scala18
-rw-r--r--test/files/jvm/xml01.scala88
-rw-r--r--test/files/jvm/xml02.scala26
-rw-r--r--test/files/jvm/xmlattr.scala6
-rw-r--r--test/files/jvm/xmlmore.scala2
-rw-r--r--test/files/jvm/xmlpull.scala4
-rw-r--r--test/files/jvm/xmlstuff.scala50
-rw-r--r--test/files/lib/scalacheck.jar.desired.sha11
-rw-r--r--test/files/neg/abstract-vars.check21
-rw-r--r--test/files/neg/abstract-vars.scala29
-rw-r--r--test/files/neg/accesses.scala2
-rw-r--r--test/files/neg/array-not-seq.check7
-rw-r--r--test/files/neg/array-not-seq.flags1
-rw-r--r--test/files/neg/array-not-seq.scala26
-rw-r--r--test/files/neg/bug1011.scala2
-rw-r--r--test/files/neg/bug1112.scala6
-rw-r--r--test/files/neg/bug112706A.scala2
-rw-r--r--test/files/neg/bug1183.scala4
-rw-r--r--test/files/neg/bug1275.check8
-rw-r--r--test/files/neg/bug1275.scala29
-rw-r--r--test/files/neg/bug1279a.check6
-rw-r--r--test/files/neg/bug1286.check9
-rw-r--r--test/files/neg/bug1392.check4
-rw-r--r--test/files/neg/bug1392.scala1
-rw-r--r--test/files/neg/bug1523.scala4
-rw-r--r--test/files/neg/bug1623.scala4
-rw-r--r--test/files/neg/bug1878.check7
-rw-r--r--test/files/neg/bug1878.scala8
-rw-r--r--test/files/neg/bug2148.check4
-rw-r--r--test/files/neg/bug2148.scala10
-rw-r--r--test/files/neg/bug2206.check5
-rw-r--r--test/files/neg/bug2206.scala15
-rw-r--r--test/files/neg/bug2213.check15
-rw-r--r--test/files/neg/bug2213.scala11
-rw-r--r--test/files/neg/bug278.check5
-rw-r--r--test/files/neg/bug3209.check4
-rw-r--r--test/files/neg/bug3209.scala2
-rw-r--r--test/files/neg/bug3631.check4
-rw-r--r--test/files/neg/bug3631.scala3
-rw-r--r--test/files/neg/bug414.check5
-rw-r--r--test/files/neg/bug414.scala2
-rw-r--r--test/files/neg/bug520.scala2
-rw-r--r--test/files/neg/bug558.scala2
-rw-r--r--test/files/neg/bug563.scala4
-rw-r--r--test/files/neg/bug576.scala8
-rw-r--r--test/files/neg/bug588.scala8
-rw-r--r--test/files/neg/bug591.scala22
-rw-r--r--test/files/neg/bug608.check8
-rw-r--r--test/files/neg/bug608.scala24
-rw-r--r--test/files/neg/bug692.scala6
-rw-r--r--test/files/neg/bug693.scala2
-rw-r--r--test/files/neg/bug700.check2
-rw-r--r--test/files/neg/bug700.scala2
-rw-r--r--test/files/neg/bug715.scala2
-rw-r--r--test/files/neg/bug783.scala2
-rw-r--r--test/files/neg/bug798.scala2
-rw-r--r--test/files/neg/bug836.scala2
-rw-r--r--test/files/neg/bug856.check4
-rw-r--r--test/files/neg/bug856.scala2
-rw-r--r--test/files/neg/bug875.check4
-rw-r--r--test/files/neg/bug875.scala6
-rw-r--r--test/files/neg/bug876.scala6
-rw-r--r--test/files/neg/bug877.scala2
-rw-r--r--test/files/neg/bug882.check2
-rw-r--r--test/files/neg/bug910.check4
-rw-r--r--test/files/neg/bug910.scala2
-rw-r--r--test/files/neg/bug944.scala6
-rw-r--r--test/files/neg/bug961.check5
-rw-r--r--test/files/neg/bug961.scala6
-rw-r--r--test/files/neg/bug987.scala2
-rw-r--r--test/files/neg/checksensible.scala2
-rw-r--r--test/files/neg/constrs.check2
-rw-r--r--test/files/neg/constrs.scala2
-rw-r--r--test/files/neg/depmet_1.check10
-rw-r--r--test/files/neg/depmet_1.flags1
-rw-r--r--test/files/neg/depmet_1.scala5
-rw-r--r--test/files/neg/forward.scala8
-rw-r--r--test/files/neg/gadts1.scala12
-rw-r--r--test/files/neg/illegal-stmt-start.check4
-rw-r--r--test/files/neg/illegal-stmt-start.scala5
-rw-r--r--test/files/neg/implicits.check11
-rw-r--r--test/files/neg/implicits.scala12
-rw-r--r--test/files/neg/java-access-neg.check16
-rw-r--r--test/files/neg/java-access-neg/J.java15
-rw-r--r--test/files/neg/java-access-neg/S2.scala61
-rw-r--r--test/files/neg/lazy-override.scala2
-rw-r--r--test/files/neg/lazyvals.scala4
-rw-r--r--test/files/neg/lubs.scala2
-rw-r--r--test/files/neg/migration28.check8
-rw-r--r--test/files/neg/migration28.flags1
-rw-r--r--test/files/neg/migration28.scala12
-rw-r--r--test/files/neg/multi-array.flags1
-rw-r--r--test/files/neg/names-defaults-neg-ref.check2
-rw-r--r--test/files/neg/names-defaults-neg.check95
-rw-r--r--test/files/neg/names-defaults-neg.scala66
-rw-r--r--test/files/neg/null-unsoundness.scala2
-rw-r--r--test/files/neg/overload-msg.check13
-rw-r--r--test/files/neg/overload-msg.scala4
-rw-r--r--test/files/neg/overload.check2
-rw-r--r--test/files/neg/overload.scala2
-rw-r--r--test/files/neg/override.check5
-rwxr-xr-xtest/files/neg/override.scala15
-rw-r--r--test/files/neg/pat_unreachable.scala2
-rw-r--r--test/files/neg/patmat-type-check.check21
-rw-r--r--test/files/neg/patmat-type-check.scala28
-rw-r--r--test/files/neg/patmatexhaust.check2
-rw-r--r--test/files/neg/patmatexhaust.scala24
-rw-r--r--test/files/neg/patternalts.scala2
-rw-r--r--test/files/neg/plugin-after-terminal/lib/plugins.jar.desired.sha11
-rw-r--r--test/files/neg/plugin-before-parser/lib/plugins.jar.desired.sha11
-rw-r--r--test/files/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha11
-rw-r--r--test/files/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha11
-rw-r--r--test/files/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha11
-rw-r--r--test/files/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha11
-rw-r--r--test/files/neg/saito.scala4
-rw-r--r--test/files/neg/sensitive.scala6
-rw-r--r--test/files/neg/spec-overrides.check7
-rw-r--r--test/files/neg/spec-overrides.scala26
-rw-r--r--test/files/neg/structural.scala28
-rw-r--r--test/files/neg/switch.scala16
-rw-r--r--test/files/neg/t0117.scala2
-rw-r--r--test/files/neg/t0152.scala2
-rw-r--r--test/files/neg/t0218.scala6
-rw-r--r--test/files/neg/t0226.check3
-rw-r--r--test/files/neg/t0503.scala2
-rw-r--r--test/files/neg/t0528neg.scala2
-rw-r--r--test/files/neg/t0764.scala2
-rw-r--r--test/files/neg/t0851.check9
-rw-r--r--test/files/neg/t0851.scala25
-rw-r--r--test/files/neg/t1422.check4
-rw-r--r--test/files/neg/t1422.scala1
-rw-r--r--test/files/neg/t1477.check5
-rw-r--r--test/files/neg/t1477.scala25
-rw-r--r--test/files/neg/t1705.scala2
-rw-r--r--test/files/neg/t2139.check6
-rw-r--r--test/files/neg/t2139.scala15
-rw-r--r--test/files/neg/t2179.check9
-rwxr-xr-xtest/files/neg/t2179.scala3
-rw-r--r--test/files/neg/t2386.check4
-rw-r--r--test/files/neg/t2386.scala3
-rw-r--r--test/files/neg/t2416.check10
-rw-r--r--test/files/neg/t2416.scala14
-rw-r--r--test/files/neg/t2421b.check4
-rw-r--r--test/files/neg/t2421b.scala17
-rw-r--r--test/files/neg/t2462a.check4
-rw-r--r--test/files/neg/t2462a.scala3
-rw-r--r--test/files/neg/t2462b.check14
-rw-r--r--test/files/neg/t2462b.scala12
-rw-r--r--test/files/neg/t2641.check35
-rw-r--r--test/files/neg/t2641.scala31
-rw-r--r--test/files/neg/t2773.check7
-rwxr-xr-xtest/files/neg/t2773.scala8
-rw-r--r--test/files/neg/t2775.check4
-rw-r--r--test/files/neg/t2775.scala1
-rw-r--r--test/files/neg/t2779.check4
-rwxr-xr-xtest/files/neg/t2779.scala25
-rw-r--r--test/files/neg/t2801.check6
-rw-r--r--test/files/neg/t2801.scala3
-rw-r--r--test/files/neg/t2870.check7
-rwxr-xr-xtest/files/neg/t2870.scala9
-rw-r--r--test/files/neg/t2918.check7
-rwxr-xr-xtest/files/neg/t2918.scala3
-rw-r--r--test/files/neg/t3006.check6
-rwxr-xr-xtest/files/neg/t3006.scala10
-rw-r--r--test/files/neg/t3015.check11
-rw-r--r--test/files/neg/t3015.scala8
-rw-r--r--test/files/neg/t3115.check10
-rw-r--r--test/files/neg/t3115.flags1
-rwxr-xr-xtest/files/neg/t3115.scala9
-rw-r--r--test/files/neg/t3118.check7
-rw-r--r--test/files/neg/t3118.scala8
-rw-r--r--test/files/neg/t3222.check13
-rw-r--r--test/files/neg/t3222.scala9
-rw-r--r--test/files/neg/t3224.check6
-rwxr-xr-xtest/files/neg/t3224.scala30
-rw-r--r--test/files/neg/t3399.check4
-rw-r--r--test/files/neg/t3399.scala24
-rw-r--r--test/files/neg/t3403.check4
-rw-r--r--test/files/neg/t3403.scala2
-rw-r--r--test/files/neg/t3453.check21
-rw-r--r--test/files/neg/t3453.scala66
-rw-r--r--test/files/neg/t3507.check4
-rw-r--r--test/files/neg/t3507.scala15
-rw-r--r--test/files/neg/t3604.check7
-rw-r--r--test/files/neg/t3604.scala6
-rw-r--r--test/files/neg/t3653.check7
-rw-r--r--test/files/neg/t3653.scala4
-rw-r--r--test/files/neg/t3663.check4
-rw-r--r--test/files/neg/t3663/PackageProtected.java5
-rw-r--r--test/files/neg/t3663/main.scala14
-rw-r--r--test/files/neg/t3691.check16
-rw-r--r--test/files/neg/t3691.scala11
-rw-r--r--test/files/neg/t3692.check14
-rw-r--r--test/files/neg/t3692.scala17
-rw-r--r--test/files/neg/t3757.check4
-rw-r--r--test/files/neg/t3757/A.java5
-rw-r--r--test/files/neg/t3757/B.scala5
-rw-r--r--test/files/neg/t3769.check10
-rw-r--r--test/files/neg/t3769.scala3
-rw-r--r--test/files/neg/t3773.check4
-rw-r--r--test/files/neg/t3773.flags1
-rw-r--r--test/files/neg/t3773.scala5
-rw-r--r--test/files/neg/t3774.check7
-rw-r--r--test/files/neg/t3774.scala5
-rw-r--r--test/files/neg/t3776.check4
-rw-r--r--test/files/neg/t3776.scala10
-rw-r--r--test/files/neg/t3873.check6
-rw-r--r--test/files/neg/t3873.flags1
-rw-r--r--test/files/neg/t3873.scala12
-rw-r--r--test/files/neg/t742.check5
-rw-r--r--test/files/neg/t742.scala8
-rw-r--r--test/files/neg/t771.check4
-rwxr-xr-xtest/files/neg/t771.scala5
-rw-r--r--test/files/neg/tailrec.check18
-rw-r--r--test/files/neg/tailrec.scala54
-rw-r--r--test/files/neg/tcpoly_infer_ticket1162.scala4
-rw-r--r--test/files/neg/tcpoly_ticket2101.scala8
-rw-r--r--test/files/neg/tcpoly_typealias.scala6
-rw-r--r--test/files/neg/tcpoly_variance_enforce.scala16
-rw-r--r--test/files/neg/typeerror.check2
-rw-r--r--test/files/neg/unit2anyref.check6
-rw-r--r--test/files/neg/variances.check7
-rw-r--r--test/files/neg/variances.scala22
-rw-r--r--test/files/neg/viewtest.scala22
-rw-r--r--test/files/pos/List1.scala2
-rw-r--r--test/files/pos/MailBox.scala4
-rw-r--r--test/files/pos/Transactions.scala18
-rw-r--r--test/files/pos/annotations.scala24
-rw-r--r--test/files/pos/arrays2.scala4
-rw-r--r--test/files/pos/bug0031.scala2
-rw-r--r--test/files/pos/bug0066.scala2
-rw-r--r--test/files/pos/bug0069.scala2
-rw-r--r--test/files/pos/bug0091.scala2
-rw-r--r--test/files/pos/bug0599.scala2
-rw-r--r--test/files/pos/bug0646.scala6
-rw-r--r--test/files/pos/bug1075.scala2
-rw-r--r--test/files/pos/bug1090.scala2
-rw-r--r--test/files/pos/bug1123.scala2
-rw-r--r--test/files/pos/bug1168.scala2
-rw-r--r--test/files/pos/bug1210a.scala4
-rw-r--r--test/files/pos/bug122.scala2
-rw-r--r--test/files/pos/bug1237.scala6
-rw-r--r--test/files/pos/bug1272.scala4
-rw-r--r--test/files/pos/bug1279a.scala6
-rw-r--r--test/files/pos/bug1292.scala2
-rw-r--r--test/files/pos/bug1385.scala2
-rw-r--r--test/files/pos/bug1560.scala6
-rw-r--r--test/files/pos/bug1565.scala2
-rw-r--r--test/files/pos/bug1737/A.java3
-rw-r--r--test/files/pos/bug1737/B.java1
-rw-r--r--test/files/pos/bug1737/c.scala4
-rw-r--r--test/files/pos/bug2018.scala15
-rw-r--r--test/files/pos/bug2023.scala4
-rw-r--r--test/files/pos/bug2081.scala2
-rw-r--r--test/files/pos/bug2168.scala4
-rw-r--r--test/files/pos/bug2187-2.scala7
-rw-r--r--test/files/pos/bug2310.scala38
-rw-r--r--test/files/pos/bug2409/J.java4
-rw-r--r--test/files/pos/bug2409/bug2409.scala1
-rw-r--r--test/files/pos/bug247.scala2
-rw-r--r--test/files/pos/bug262.scala4
-rw-r--r--test/files/pos/bug2691.scala10
-rw-r--r--test/files/pos/bug287.scala2
-rw-r--r--test/files/pos/bug2939.scala13
-rw-r--r--test/files/pos/bug2945.scala12
-rw-r--r--test/files/pos/bug3020.scala9
-rw-r--r--test/files/pos/bug3097.flags1
-rw-r--r--test/files/pos/bug3097.scala31
-rw-r--r--test/files/pos/bug3136.scala19
-rw-r--r--test/files/pos/bug3175.scala7
-rw-r--r--test/files/pos/bug3252.flags1
-rw-r--r--test/files/pos/bug3252.scala15
-rw-r--r--test/files/pos/bug3278.scala15
-rw-r--r--test/files/pos/bug3411.scala8
-rw-r--r--test/files/pos/bug3420.flags1
-rw-r--r--test/files/pos/bug3420.scala5
-rw-r--r--test/files/pos/bug3430.flags1
-rw-r--r--test/files/pos/bug3430.scala13
-rw-r--r--test/files/pos/bug3480.scala4
-rw-r--r--test/files/pos/bug3495.flags1
-rw-r--r--test/files/pos/bug3495.scala2
-rw-r--r--test/files/pos/bug3521/DoubleValue.java7
-rw-r--r--test/files/pos/bug3521/a.scala4
-rw-r--r--test/files/pos/bug3568.scala46
-rw-r--r--test/files/pos/bug3570.scala7
-rw-r--r--test/files/pos/bug3578.scala30
-rw-r--r--test/files/pos/bug430-feb09.scala4
-rw-r--r--test/files/pos/bug432.scala2
-rw-r--r--test/files/pos/bug460.scala6
-rw-r--r--test/files/pos/bug516.scala4
-rw-r--r--test/files/pos/bug577.scala10
-rw-r--r--test/files/pos/bug599.scala2
-rw-r--r--test/files/pos/bug602.scala2
-rw-r--r--test/files/pos/bug613.scala4
-rw-r--r--test/files/pos/bug616.scala2
-rw-r--r--test/files/pos/bug651.scala4
-rw-r--r--test/files/pos/bug675.scala6
-rw-r--r--test/files/pos/bug690.scala2
-rw-r--r--test/files/pos/bug711.scala2
-rw-r--r--test/files/pos/bug715.cmds2
-rw-r--r--test/files/pos/bug715/meredith_1.scala16
-rw-r--r--test/files/pos/bug757.scala4
-rw-r--r--test/files/pos/bug758.scala10
-rw-r--r--test/files/pos/bug767.scala2
-rw-r--r--test/files/pos/bug788.scala2
-rw-r--r--test/files/pos/bug802.scala8
-rw-r--r--test/files/pos/bug807.scala2
-rw-r--r--test/files/pos/bug927.scala2
-rw-r--r--test/files/pos/bug946.scala2
-rw-r--r--test/files/pos/builders.scala10
-rw-r--r--test/files/pos/caseClassInMethod.scala5
-rw-r--r--test/files/pos/channels.scala6
-rw-r--r--test/files/pos/clsrefine.scala4
-rw-r--r--test/files/pos/collectGenericCC.scala6
-rw-r--r--test/files/pos/collections.scala2
-rw-r--r--test/files/pos/context.scala6
-rw-r--r--test/files/pos/cyclics.scala14
-rw-r--r--test/files/pos/depexists.scala5
-rw-r--r--test/files/pos/depmet_1.flags1
-rw-r--r--test/files/pos/depmet_1.scala6
-rw-r--r--test/files/pos/depmet_implicit_chaining_zw.flags1
-rw-r--r--test/files/pos/depmet_implicit_chaining_zw.scala28
-rw-r--r--test/files/pos/depmet_implicit_norm_ret.flags1
-rw-r--r--test/files/pos/depmet_implicit_norm_ret.scala29
-rw-r--r--test/files/pos/depmet_implicit_oopsla_session.flags1
-rw-r--r--test/files/pos/depmet_implicit_oopsla_session.scala63
-rw-r--r--test/files/pos/depmet_implicit_oopsla_session_2.flags1
-rw-r--r--test/files/pos/depmet_implicit_oopsla_session_2.scala87
-rw-r--r--test/files/pos/depmet_implicit_oopsla_session_simpler.flags1
-rw-r--r--test/files/pos/depmet_implicit_oopsla_session_simpler.scala44
-rw-r--r--test/files/pos/depmet_implicit_oopsla_zipwith.flags1
-rw-r--r--test/files/pos/depmet_implicit_oopsla_zipwith.scala44
-rw-r--r--test/files/pos/depmet_implicit_tpbetareduce.flags1
-rw-r--r--test/files/pos/depmet_implicit_tpbetareduce.scala12
-rw-r--r--test/files/pos/gadt-gilles.scala2
-rw-r--r--test/files/pos/gadts2.scala4
-rw-r--r--test/files/pos/gosh.scala16
-rw-r--r--test/files/pos/gui.scala8
-rw-r--r--test/files/pos/imp2.scala2
-rw-r--r--test/files/pos/implicits.scala55
-rw-r--r--test/files/pos/imports.scala4
-rw-r--r--test/files/pos/infer2.scala2
-rw-r--r--test/files/pos/inferbroadtype.scala2
-rw-r--r--test/files/pos/java-access-pos/J.java15
-rw-r--r--test/files/pos/java-access-pos/S1.scala67
-rw-r--r--test/files/pos/lambdalift.scala2
-rw-r--r--test/files/pos/listpattern.scala2
-rw-r--r--test/files/pos/looping-jsig.scala8
-rw-r--r--test/files/pos/manifest1.scala7
-rw-r--r--test/files/pos/michel6.scala2
-rw-r--r--test/files/pos/needstypeearly.scala4
-rw-r--r--test/files/pos/nested2.scala2
-rw-r--r--test/files/pos/nothing_manifest_disambig.scala10
-rw-r--r--test/files/pos/nullary.scala4
-rw-r--r--test/files/pos/nullary_poly.scala4
-rw-r--r--test/files/pos/partialfun.scala2
-rw-r--r--test/files/pos/pat_gilles.scala2
-rw-r--r--test/files/pos/propagate.scala2
-rw-r--r--test/files/pos/relax_implicit_divergence.scala7
-rw-r--r--test/files/pos/return_thistype.scala6
-rw-r--r--test/files/pos/scala-singleton.scala55
-rw-r--r--test/files/pos/scan.scala23
-rw-r--r--test/files/pos/scoping1.scala2
-rw-r--r--test/files/pos/selftails.scala23
-rw-r--r--test/files/pos/signatures/Test.java2
-rw-r--r--test/files/pos/signatures/sig.scala2
-rw-r--r--test/files/pos/spec-Function1.flags1
-rw-r--r--test/files/pos/spec-Function1.scala6
-rw-r--r--test/files/pos/spec-List.flags1
-rw-r--r--test/files/pos/spec-List.scala54
-rw-r--r--test/files/pos/spec-annotations.flags1
-rw-r--r--test/files/pos/spec-arrays.flags1
-rw-r--r--test/files/pos/spec-arrays.scala36
-rw-r--r--test/files/pos/spec-asseenfrom.scala29
-rw-r--r--test/files/pos/spec-constr.scala7
-rw-r--r--test/files/pos/spec-cyclic.flags1
-rw-r--r--test/files/pos/spec-cyclic.scala8
-rw-r--r--test/files/pos/spec-doubledef.scala28
-rw-r--r--test/files/pos/spec-example1.flags1
-rw-r--r--test/files/pos/spec-fields.flags1
-rw-r--r--test/files/pos/spec-foo.flags1
-rw-r--r--test/files/pos/spec-funs.flags1
-rw-r--r--test/files/pos/spec-funs.scala2
-rw-r--r--test/files/pos/spec-lists.flags1
-rw-r--r--test/files/pos/spec-localdefs.flags1
-rw-r--r--test/files/pos/spec-maps.flags1
-rw-r--r--test/files/pos/spec-multiplectors.scala3
-rw-r--r--test/files/pos/spec-params.flags1
-rw-r--r--test/files/pos/spec-params.scala6
-rw-r--r--test/files/pos/spec-partially.scala5
-rw-r--r--test/files/pos/spec-partialmap.scala17
-rw-r--r--test/files/pos/spec-polymeth.flags1
-rw-r--r--test/files/pos/spec-private.scala10
-rw-r--r--test/files/pos/spec-sealed.flags1
-rw-r--r--test/files/pos/spec-sealed.scala4
-rw-r--r--test/files/pos/spec-short.flags1
-rw-r--r--test/files/pos/spec-short.scala6
-rw-r--r--test/files/pos/spec-simple.flags1
-rw-r--r--test/files/pos/spec-sparsearray.scala24
-rw-r--r--test/files/pos/spec-super.flags1
-rw-r--r--test/files/pos/spec-t3497.scala16
-rw-r--r--test/files/pos/spec-tailcall.flags1
-rw-r--r--test/files/pos/spec-tailcall.scala4
-rw-r--r--test/files/pos/spec-thistype.flags1
-rw-r--r--test/files/pos/spec-vector.scala4
-rw-r--r--test/files/pos/strings.scala2
-rw-r--r--test/files/pos/sudoku.scala10
-rw-r--r--test/files/pos/super.cmds2
-rw-r--r--test/files/pos/super/Super_1.java2
-rw-r--r--test/files/pos/switchUnbox.flags2
-rw-r--r--test/files/pos/switchUnbox.scala6
-rw-r--r--test/files/pos/t0227.scala2
-rw-r--r--test/files/pos/t0288/Foo.scala2
-rw-r--r--test/files/pos/t0288/Outer.java2
-rw-r--r--test/files/pos/t0438.scala4
-rw-r--r--test/files/pos/t0453.scala2
-rw-r--r--test/files/pos/t0770.scala2
-rw-r--r--test/files/pos/t0774/unrelated.scala4
-rw-r--r--test/files/pos/t0786.scala12
-rw-r--r--test/files/pos/t0816.scala (renamed from test/pending/pos/t0816.scala)4
-rw-r--r--test/files/pos/t0971.java4
-rw-r--r--test/files/pos/t0999.scala5
-rw-r--r--test/files/pos/t1000.scala2
-rw-r--r--test/files/pos/t1029.cmds2
-rw-r--r--test/files/pos/t1035.scala (renamed from test/pending/pos/t1035.scala)14
-rw-r--r--test/files/pos/t1053.scala (renamed from test/pending/pos/t1053.scala)0
-rw-r--r--test/files/pos/t1059.scala2
-rw-r--r--test/files/pos/t1107/O.scala4
-rw-r--r--test/files/pos/t1107/T.scala2
-rw-r--r--test/files/pos/t1164.scala32
-rw-r--r--test/files/pos/t1226.scala8
-rw-r--r--test/files/pos/t1236.scala14
-rw-r--r--test/files/pos/t1254/t1254.java2
-rw-r--r--test/files/pos/t1263/test.scala2
-rw-r--r--test/files/pos/t1380.flags1
-rw-r--r--test/files/pos/t1380/hallo.scala3
-rw-r--r--test/files/pos/t1422.scala2
-rwxr-xr-xtest/files/pos/t1459/AbstractBase.java5
-rwxr-xr-xtest/files/pos/t1459/App.scala18
-rwxr-xr-xtest/files/pos/t1459/Caller.java7
-rw-r--r--test/files/pos/t1480.scala4
-rwxr-xr-xtest/files/pos/t1545.scala (renamed from test/pending/neg/t1545.scala)4
-rw-r--r--test/files/pos/t1560.scala8
-rw-r--r--test/files/pos/t1569.flags1
-rw-r--r--test/files/pos/t1569.scala5
-rw-r--r--test/files/pos/t1591.scala7
-rw-r--r--test/files/pos/t1591_pos.scala7
-rw-r--r--test/files/pos/t1591b.scala13
-rw-r--r--test/files/pos/t1693.scala9
-rw-r--r--test/files/pos/t1711/Seq.scala2
-rw-r--r--test/files/pos/t1722-A.scala4
-rwxr-xr-xtest/files/pos/t1722/Test.scala2
-rwxr-xr-xtest/files/pos/t1722/Top.scala4
-rw-r--r--test/files/pos/t1745/J.java6
-rw-r--r--test/files/pos/t1751.cmds3
-rwxr-xr-xtest/files/pos/t1756.scala14
-rw-r--r--test/files/pos/t1761.scala2
-rw-r--r--test/files/pos/t1782.cmds2
-rw-r--r--test/files/pos/t1798.scala2
-rw-r--r--test/files/pos/t1836/J.java (renamed from test/pending/pos/t1836/J.java)0
-rw-r--r--test/files/pos/t1836/S.scala (renamed from test/pending/pos/t1836/S.scala)0
-rw-r--r--test/files/pos/t1840/J.java4
-rw-r--r--test/files/pos/t1942.cmds2
-rw-r--r--test/files/pos/t1996.scala (renamed from test/pending/pos/t1996.scala)0
-rw-r--r--test/files/pos/t2023.scala4
-rwxr-xr-xtest/files/pos/t2060.scala2
-rwxr-xr-xtest/files/pos/t2082.scala16
-rw-r--r--test/files/pos/t2133.scala18
-rw-r--r--test/files/pos/t2261.scala2
-rw-r--r--test/files/pos/t2305.scala26
-rw-r--r--test/files/pos/t2331.scala11
-rw-r--r--test/files/pos/t2413/TestJava.java7
-rw-r--r--test/files/pos/t2413/TestScalac.scala23
-rw-r--r--test/files/pos/t2421.scala6
-rw-r--r--test/files/pos/t2421_delitedsl.scala10
-rw-r--r--test/files/pos/t2421b.scala19
-rw-r--r--test/files/pos/t2421c.scala17
-rwxr-xr-xtest/files/pos/t2429.scala4
-rwxr-xr-xtest/files/pos/t2433/A.java4
-rwxr-xr-xtest/files/pos/t2433/B.java4
-rwxr-xr-xtest/files/pos/t2433/Test.scala3
-rw-r--r--test/files/pos/t2444.scala6
-rw-r--r--test/files/pos/t2454.scala25
-rw-r--r--test/files/pos/t2464.cmds3
-rw-r--r--test/files/pos/t2464/JavaOne.java5
-rw-r--r--test/files/pos/t2464/ScalaOne_1.scala6
-rw-r--r--test/files/pos/t2464/t2464_2.scala3
-rwxr-xr-xtest/files/pos/t2484.scala17
-rwxr-xr-xtest/files/pos/t2504.scala2
-rwxr-xr-xtest/files/pos/t2545.scala4
-rw-r--r--test/files/pos/t2569/Child.scala6
-rw-r--r--test/files/pos/t2569/Parent.java6
-rw-r--r--test/files/pos/t2610.scala17
-rw-r--r--test/files/pos/t2619.scala80
-rw-r--r--test/files/pos/t2624.scala4
-rwxr-xr-xtest/files/pos/t2635.scala16
-rw-r--r--test/files/pos/t2660.scala25
-rw-r--r--test/files/pos/t2664.scala9
-rw-r--r--test/files/pos/t2665.scala3
-rw-r--r--test/files/pos/t2667.scala6
-rw-r--r--test/files/pos/t2669.scala28
-rw-r--r--test/files/pos/t2673.scala4
-rwxr-xr-xtest/files/pos/t2683.scala7
-rw-r--r--test/files/pos/t2691.scala9
-rw-r--r--test/files/pos/t2698.scala10
-rw-r--r--test/files/pos/t2708.scala1
-rw-r--r--test/files/pos/t2726.cmds2
-rw-r--r--test/files/pos/t2726/SQLBuilder_1.scala7
-rw-r--r--test/files/pos/t2726/test_2.scala3
-rw-r--r--test/files/pos/t2741/2741-1.scala13
-rw-r--r--test/files/pos/t2741/2741-2.scala5
-rw-r--r--test/files/pos/t2794.scala9
-rw-r--r--test/files/pos/t2795.scala17
-rw-r--r--test/files/pos/t2797.scala9
-rw-r--r--test/files/pos/t2799.flags1
-rw-r--r--test/files/pos/t2799.scala1
-rw-r--r--test/files/pos/t2809.scala20
-rw-r--r--test/files/pos/t2810.scala8
-rw-r--r--test/files/pos/t2868.cmds3
-rw-r--r--test/files/pos/t2868/Jann.java5
-rw-r--r--test/files/pos/t2868/Nest.java3
-rw-r--r--test/files/pos/t2868/pick_1.scala7
-rw-r--r--test/files/pos/t2868/test_2.scala6
-rwxr-xr-xtest/files/pos/t2913.scala53
-rw-r--r--test/files/pos/t294.cmds3
-rw-r--r--test/files/pos/t2940/Cycle.java3
-rw-r--r--test/files/pos/t2940/Error.scala12
-rw-r--r--test/files/pos/t2956/BeanDefinitionVisitor.java6
-rwxr-xr-xtest/files/pos/t2956/t2956.scala7
-rw-r--r--test/files/pos/t2994a.scala27
-rw-r--r--test/files/pos/t2994b.scala7
-rw-r--r--test/files/pos/t3037.scala13
-rw-r--r--test/files/pos/t3071.scala7
-rw-r--r--test/files/pos/t3076/C2.scala4
-rw-r--r--test/files/pos/t3076/T.scala2
-rw-r--r--test/files/pos/t3079.scala17
-rw-r--r--test/files/pos/t3108.scala5
-rw-r--r--test/files/pos/t3152.scala20
-rwxr-xr-xtest/files/pos/t3174.scala14
-rwxr-xr-xtest/files/pos/t3174b.scala12
-rw-r--r--test/files/pos/t3177.scala39
-rw-r--r--test/files/pos/t3249/Test.java5
-rw-r--r--test/files/pos/t3249/a.scala11
-rw-r--r--test/files/pos/t3274.scala9
-rw-r--r--test/files/pos/t3349/AbstractTupleSet.java9
-rw-r--r--test/files/pos/t3349/Table.java9
-rw-r--r--test/files/pos/t3349/Test.scala5
-rw-r--r--test/files/pos/t3349/TupleSet.java4
-rwxr-xr-xtest/files/pos/t3363.scala18
-rw-r--r--test/files/pos/t3373.scala11
-rw-r--r--test/files/pos/t3374.scala6
-rw-r--r--test/files/pos/t3384.scala14
-rw-r--r--test/files/pos/t3404/Base.java3
-rw-r--r--test/files/pos/t3404/Derived.scala3
-rw-r--r--test/files/pos/t3417.scala11
-rw-r--r--test/files/pos/t3419/B_1.scala3
-rw-r--r--test/files/pos/t3419/C_2.scala3
-rw-r--r--test/files/pos/t3429/A.scala12
-rw-r--r--test/files/pos/t3429/Test.java3
-rw-r--r--test/files/pos/t3477.scala7
-rw-r--r--test/files/pos/t3486/JTest.java3
-rw-r--r--test/files/pos/t3486/test.scala6
-rw-r--r--test/files/pos/t3494.scala7
-rw-r--r--test/files/pos/t3560.scala2
-rwxr-xr-xtest/files/pos/t3568.scala46
-rw-r--r--test/files/pos/t3582.scala12
-rw-r--r--test/files/pos/t3582b.scala5
-rw-r--r--test/files/pos/t3612.scala6
-rw-r--r--test/files/pos/t3622/test/AsyncTask.java5
-rw-r--r--test/files/pos/t3622/test/MyAsyncTask.java9
-rw-r--r--test/files/pos/t3622/test/Test.scala5
-rw-r--r--test/files/pos/t3676.scala5
-rw-r--r--test/files/pos/t3688.scala9
-rw-r--r--test/files/pos/t3731.scala13
-rw-r--r--test/files/pos/t3777.scala7
-rw-r--r--test/files/pos/t3859.scala4
-rw-r--r--test/files/pos/t3864/scalaz_2.scala1
-rw-r--r--test/files/pos/t3864/tuples_1.scala78
-rw-r--r--test/files/pos/t3946/A.java5
-rw-r--r--test/files/pos/t3946/Test_1.scala12
-rw-r--r--test/files/pos/t425.scala (renamed from test/pending/pos/t425.scala)0
-rw-r--r--test/files/pos/t5013/Bar_2.scala5
-rw-r--r--test/files/pos/t5013/Foo_1.scala5
-rw-r--r--test/files/pos/tcpoly_boundedmonad.scala18
-rw-r--r--test/files/pos/tcpoly_bounds1.scala4
-rw-r--r--test/files/pos/tcpoly_checkkinds_mix.scala6
-rw-r--r--test/files/pos/tcpoly_gm.scala6
-rw-r--r--test/files/pos/tcpoly_higherorder_bound_method.scala2
-rw-r--r--test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala4
-rw-r--r--test/files/pos/tcpoly_late_method_params.scala2
-rw-r--r--test/files/pos/tcpoly_method.scala2
-rw-r--r--test/files/pos/tcpoly_overloaded.scala14
-rw-r--r--test/files/pos/tcpoly_poly.scala2
-rw-r--r--test/files/pos/tcpoly_return_overriding.scala2
-rw-r--r--test/files/pos/tcpoly_seq.scala44
-rw-r--r--test/files/pos/tcpoly_seq_typealias.scala40
-rw-r--r--test/files/pos/ted.scala2
-rw-r--r--test/files/pos/test5.scala4
-rw-r--r--test/files/pos/test5refine.scala4
-rw-r--r--test/files/pos/thistypes.scala2
-rw-r--r--test/files/pos/ticket0137.scala6
-rw-r--r--test/files/pos/ticket2251.scala (renamed from test/pending/pos/ticket2251.scala)0
-rw-r--r--test/files/pos/traits.scala4
-rw-r--r--test/files/pos/typealias_dubious.scala10
-rw-r--r--test/files/pos/typealiases.scala14
-rw-r--r--test/files/pos/unapplyNeedsMemberType.scala2
-rw-r--r--test/files/pos/unapplySeq.scala2
-rw-r--r--test/files/pos/unapplyVal.scala4
-rw-r--r--test/files/positions/Anon.scala2
-rw-r--r--test/files/positions/Enclosing1.scala2
-rw-r--r--test/files/positions/ExcludedPrefix1.scala16
-rw-r--r--test/files/positions/New1.scala3
-rw-r--r--test/files/positions/Overlap3.scala2
-rw-r--r--test/files/positions/Overlap7.scala3
-rw-r--r--test/files/positions/Scaladoc2.scala4
-rw-r--r--test/files/positions/Scaladoc3.scala2
-rw-r--r--test/files/positions/Scaladoc4.scala2
-rw-r--r--test/files/positions/Scaladoc6.scala10
-rw-r--r--test/files/positions/Scaladoc7.scala6
-rw-r--r--test/files/positions/Scaladoc8.scala6
-rw-r--r--test/files/positions/Unsupported2.scala5
-rw-r--r--test/files/res/bug597/Test.scala2
-rw-r--r--test/files/res/bug722/Parser.scala2
-rw-r--r--test/files/res/bug735/ScalaExpressions.scala2
-rw-r--r--test/files/res/bug743/BracesXXX.scala2
-rw-r--r--test/files/res/bug743/ParserXXX.scala6
-rw-r--r--test/files/res/bug785/ScalaNewTyper.scala2
-rw-r--r--test/files/res/bug831/NewScalaParserXXX.scala18
-rw-r--r--test/files/run/Course-2002-02.scala8
-rw-r--r--test/files/run/Course-2002-05.scala2
-rw-r--r--test/files/run/Course-2002-08.scala2
-rw-r--r--test/files/run/Course-2002-09.scala8
-rw-r--r--test/files/run/Course-2002-13.scala4
-rw-r--r--test/files/run/MutableListTest.scala252
-rw-r--r--test/files/run/OrderingTest.scala8
-rw-r--r--test/files/run/QueueTest.scala594
-rw-r--r--test/files/run/ReplacementMatching.scala47
-rw-r--r--test/files/run/ReverseSeqView.scala25
-rw-r--r--test/files/run/SymbolsTest.scala283
-rw-r--r--test/files/run/absoverride.scala10
-rw-r--r--test/files/run/adding-growing-set.scala11
-rw-r--r--test/files/run/arrayclone.scala106
-rw-r--r--test/files/run/arraycopy.scala31
-rw-r--r--test/files/run/arybufgrow.scala4
-rw-r--r--test/files/run/bigDecimalCache.scala9
-rw-r--r--test/files/run/bitsets-msil.check32
-rw-r--r--test/files/run/boolexprs.scala2
-rw-r--r--test/files/run/bug0325.scala8
-rw-r--r--test/files/run/bug1005.scala2
-rw-r--r--test/files/run/bug1074.check2
-rw-r--r--test/files/run/bug1141.scala2
-rw-r--r--test/files/run/bug1220.scala2
-rw-r--r--test/files/run/bug1300.scala4
-rw-r--r--test/files/run/bug1309.scala2
-rw-r--r--test/files/run/bug1766.scala16
-rw-r--r--test/files/run/bug2029.scala4
-rw-r--r--test/files/run/bug2124.scala2
-rw-r--r--test/files/run/bug2125.scala4
-rw-r--r--test/files/run/bug2276.scala2
-rw-r--r--test/files/run/bug2354.scala17
-rw-r--r--test/files/run/bug2378.scala9
-rw-r--r--test/files/run/bug2512.scala4
-rw-r--r--test/files/run/bug2514.scala4
-rw-r--r--test/files/run/bug2552.check48
-rw-r--r--test/files/run/bug2552.scala34
-rw-r--r--test/files/run/bug2636.scala35
-rw-r--r--test/files/run/bug2721.check2
-rw-r--r--test/files/run/bug2721.scala12
-rw-r--r--test/files/run/bug2876.scala7
-rw-r--r--test/files/run/bug2958.scala16
-rw-r--r--test/files/run/bug3004.scala14
-rw-r--r--test/files/run/bug3126.scala9
-rw-r--r--test/files/run/bug3175.check11
-rw-r--r--test/files/run/bug3175.scala55
-rw-r--r--test/files/run/bug3269.check2
-rw-r--r--test/files/run/bug3269.scala9
-rw-r--r--test/files/run/bug3327.check1
-rw-r--r--test/files/run/bug3327.scala8
-rw-r--r--test/files/run/bug3395.check2
-rw-r--r--test/files/run/bug3395.scala13
-rw-r--r--test/files/run/bug3397.scala7
-rw-r--r--test/files/run/bug3516.check3
-rw-r--r--test/files/run/bug3516.scala13
-rw-r--r--test/files/run/bug3529.scala14
-rw-r--r--test/files/run/bug3540.scala7
-rw-r--r--test/files/run/bug3563.scala21
-rw-r--r--test/files/run/bug3616.check1
-rw-r--r--test/files/run/bug3616.scala12
-rw-r--r--test/files/run/bug363.scala2
-rw-r--r--test/files/run/bug408.scala12
-rw-r--r--test/files/run/bug4238/J.java4
-rw-r--r--test/files/run/bug594.scala2
-rw-r--r--test/files/run/bug603.scala2
-rw-r--r--test/files/run/bug627.scala2
-rw-r--r--test/files/run/bug744.scala2
-rw-r--r--test/files/run/bug751.scala6
-rw-r--r--test/files/run/bug920.scala2
-rw-r--r--test/files/run/bugs.scala2
-rw-r--r--test/files/run/bugs2087-and-2400.scala20
-rw-r--r--test/files/run/bytecodecs.scala39
-rw-r--r--test/files/run/caseClassEquality.scala36
-rw-r--r--test/files/run/castsingleton.scala2
-rw-r--r--test/files/run/checked.scala6
-rw-r--r--test/files/run/classof.scala4
-rw-r--r--test/files/run/collections.scala4
-rw-r--r--test/files/run/colltest1.check32
-rw-r--r--test/files/run/colltest1.scala34
-rw-r--r--test/files/run/concurrent-stream.check3
-rw-r--r--test/files/run/concurrent-stream.scala36
-rw-r--r--test/files/run/constrained-types.check6
-rw-r--r--test/files/run/constrained-types.scala6
-rw-r--r--test/files/run/ctor-order.scala2
-rw-r--r--test/files/run/distinct.check1
-rw-r--r--test/files/run/distinct.scala15
-rw-r--r--test/files/run/elidable.check1
-rw-r--r--test/files/run/elidable.flags1
-rw-r--r--test/files/run/elidable.scala16
-rw-r--r--test/files/run/equality.scala40
-rw-r--r--test/files/run/exceptions-2.scala61
-rw-r--r--test/files/run/exceptions-nest.check13
-rw-r--r--test/files/run/exceptions-nest.scala157
-rw-r--r--test/files/run/existentials.scala4
-rw-r--r--test/files/run/forvaleq.scala30
-rw-r--r--test/files/run/gadts.scala4
-rw-r--r--test/files/run/groupby.scala18
-rw-r--r--test/files/run/hashCodeBoxesRunTime.scala14
-rw-r--r--test/files/run/hashhash.scala15
-rw-r--r--test/files/run/infix.scala2
-rw-r--r--test/files/run/inliner-infer.scala6
-rw-r--r--test/files/run/iq.scala24
-rw-r--r--test/files/run/issue192.scala32
-rw-r--r--test/files/run/iterator-iterate-lazy.scala5
-rw-r--r--test/files/run/iterator3444.scala23
-rw-r--r--test/files/run/iterators.check2
-rw-r--r--test/files/run/iterators.scala16
-rw-r--r--test/files/run/json.check21
-rw-r--r--test/files/run/json.scala103
-rw-r--r--test/files/run/lazy-concurrent.check1
-rw-r--r--test/files/run/lazy-concurrent.scala17
-rw-r--r--test/files/run/lazy-exprs.scala16
-rw-r--r--test/files/run/lazy-locals.scala10
-rw-r--r--test/files/run/lazy-override.scala4
-rw-r--r--test/files/run/lazy-traits.scala74
-rw-r--r--test/files/run/lisp.scala4
-rw-r--r--test/files/run/lists.scala12
-rw-r--r--test/files/run/manifests.scala147
-rw-r--r--test/files/run/mapValues.scala8
-rw-r--r--test/files/run/map_java_conversions.scala60
-rw-r--r--test/files/run/matcharraytail.check2
-rw-r--r--test/files/run/matchbytes.scala2
-rw-r--r--test/files/run/matchintasany.scala2
-rw-r--r--test/files/run/matchnull.scala2
-rw-r--r--test/files/run/misc.scala2
-rw-r--r--test/files/run/missingparams.scala4
-rw-r--r--test/files/run/names-defaults.check37
-rw-r--r--test/files/run/names-defaults.scala129
-rw-r--r--test/files/run/nodebuffer-array.check3
-rw-r--r--test/files/run/nodebuffer-array.scala15
-rw-r--r--test/files/run/numbereq.scala41
-rw-r--r--test/files/run/packrat1.scala8
-rw-r--r--test/files/run/packrat2.scala14
-rw-r--r--test/files/run/packrat3.scala12
-rw-r--r--test/files/run/patmat-seqs.check13
-rw-r--r--test/files/run/patmat-seqs.scala42
-rw-r--r--test/files/run/patmatnew.scala132
-rw-r--r--test/files/run/priorityQueue.scala354
-rw-r--r--test/files/run/programmatic-main.check25
-rw-r--r--test/files/run/programmatic-main.scala12
-rw-r--r--test/files/run/proxy.scala2
-rw-r--r--test/files/run/randomAccessSeq-apply.scala8
-rw-r--r--test/files/run/range.scala27
-rw-r--r--test/files/run/regularpatmat.check126
-rw-r--r--test/files/run/runtime-richChar.scala10
-rw-r--r--test/files/run/sequenceComparisons.scala52
-rw-r--r--test/files/run/slice-strings.scala19
-rw-r--r--test/files/run/spec-absfun.scala43
-rw-r--r--test/files/run/spec-ame.check2
-rw-r--r--test/files/run/spec-ame.scala17
-rw-r--r--test/files/run/spec-constr.check2
-rw-r--r--test/files/run/spec-constr.scala14
-rw-r--r--test/files/run/spec-early.check4
-rw-r--r--test/files/run/spec-early.scala15
-rw-r--r--test/files/run/spec-init.check9
-rw-r--r--test/files/run/spec-init.scala41
-rw-r--r--test/files/run/spec-matrix.check1
-rw-r--r--test/files/run/spec-matrix.scala70
-rw-r--r--test/files/run/spec-overrides.check0
-rw-r--r--test/files/run/spec-overrides.scala20
-rw-r--r--test/files/run/spec-patmatch.check19
-rw-r--r--test/files/run/spec-patmatch.scala52
-rw-r--r--test/files/run/streamWithFilter.check5
-rw-r--r--test/files/run/streamWithFilter.scala11
-rw-r--r--test/files/run/stream_length.check1
-rw-r--r--test/files/run/stream_length.scala15
-rw-r--r--test/files/run/streams.scala2
-rw-r--r--test/files/run/stringbuilder.scala16
-rw-r--r--test/files/run/structural.scala52
-rw-r--r--test/files/run/t0017.check2
-rw-r--r--test/files/run/t0017.scala2
-rw-r--r--test/files/run/t0421.scala10
-rw-r--r--test/files/run/t0432.scala15
-rw-r--r--test/files/run/t0508.scala2
-rw-r--r--test/files/run/t0528.scala2
-rw-r--r--test/files/run/t0631.scala2
-rw-r--r--test/files/run/t0677.scala4
-rw-r--r--test/files/run/t0807.scala2
-rw-r--r--test/files/run/t0883.scala12
-rw-r--r--test/files/run/t1167.check3
-rw-r--r--test/files/run/t1167.scala25
-rw-r--r--test/files/run/t1323.scala36
-rw-r--r--test/files/run/t1423.scala2
-rw-r--r--test/files/run/t1500.scala30
-rw-r--r--test/files/run/t1501.scala28
-rw-r--r--test/files/run/t1524.scala2
-rw-r--r--test/files/run/t153.check2
-rw-r--r--test/files/run/t153.scala2
-rw-r--r--test/files/run/t1591.check1
-rw-r--r--test/files/run/t1591.scala14
-rw-r--r--test/files/run/t1718.scala4
-rw-r--r--test/files/run/t1773.scala4
-rw-r--r--test/files/run/t1829.scala2
-rw-r--r--test/files/run/t2074.scala3
-rw-r--r--test/files/run/t2074_2.check6
-rw-r--r--test/files/run/t2074_2.scala9
-rw-r--r--test/files/run/t2212.scala10
-rw-r--r--test/files/run/t2417.check12
-rw-r--r--test/files/run/t2417.scala77
-rw-r--r--test/files/run/t2526.scala53
-rw-r--r--test/files/run/t2594_tcpoly.check0
-rw-r--r--test/files/run/t2594_tcpoly.scala18
-rw-r--r--test/files/run/t2754.scala39
-rw-r--r--test/files/run/t2849.scala46
-rw-r--r--test/files/run/t2857.check1
-rw-r--r--test/files/run/t2857.scala9
-rw-r--r--test/files/run/t2867.scala15
-rw-r--r--test/files/run/t2886.check1
-rw-r--r--test/files/run/t2886.scala7
-rw-r--r--test/files/run/t3026.check2
-rwxr-xr-xtest/files/run/t3026.scala8
-rw-r--r--test/files/run/t3112.check4
-rw-r--r--test/files/run/t3112.scala11
-rw-r--r--test/files/run/t3158.check1
-rw-r--r--test/files/run/t3158.scala9
-rw-r--r--test/files/run/t3186.check1
-rw-r--r--test/files/run/t3186.scala7
-rw-r--r--test/files/run/t3241.check1
-rw-r--r--test/files/run/t3241.scala23
-rw-r--r--test/files/run/t3242.check18
-rw-r--r--test/files/run/t3242.scala49
-rw-r--r--test/files/run/t3242b.scala17
-rw-r--r--test/files/run/t3361.scala100
-rw-r--r--test/files/run/t3493.scala15
-rw-r--r--test/files/run/t3496.scala15
-rw-r--r--test/files/run/t3502.scala24
-rw-r--r--test/files/run/t3508.scala11
-rw-r--r--test/files/run/t3580.scala17
-rw-r--r--test/files/run/t3603.scala18
-rw-r--r--test/files/run/t3645.scala6
-rw-r--r--test/files/run/t3667.check3
-rw-r--r--test/files/run/t3667.scala53
-rw-r--r--test/files/run/t3687.check2
-rw-r--r--test/files/run/t3687.scala6
-rw-r--r--test/files/run/t3719.check4
-rw-r--r--test/files/run/t3719.scala35
-rw-r--r--test/files/run/t3726.check2
-rw-r--r--test/files/run/t3726.scala8
-rw-r--r--test/files/run/t3763.scala3
-rw-r--r--test/files/run/t3950.check3
-rw-r--r--test/files/run/t3950.scala17
-rw-r--r--test/files/run/tailcalls.scala22
-rw-r--r--test/files/run/takeAndDrop.scala4
-rw-r--r--test/files/run/tcpoly_monads.scala8
-rw-r--r--test/files/run/tcpoly_parseridioms.scala44
-rw-r--r--test/files/run/treePrint.check5
-rw-r--r--test/files/run/treePrint.scala40
-rw-r--r--test/files/run/try-2.scala12
-rw-r--r--test/files/run/try.scala6
-rw-r--r--test/files/run/typealias_overriding.scala8
-rw-r--r--test/files/run/unapply.scala12
-rw-r--r--test/files/run/unapplyArray.scala2
-rw-r--r--test/files/run/unittest_collection.scala18
-rw-r--r--test/files/run/unittest_io.scala4
-rw-r--r--test/files/run/unittest_iterator.scala26
-rw-r--r--test/files/run/vector1.scala34
-rw-r--r--test/files/run/viewtest.check12
-rwxr-xr-xtest/files/run/viewtest.scala6
-rwxr-xr-xtest/files/run/weakconform.scala4
-rw-r--r--test/files/run/withIndex.scala2
-rw-r--r--test/files/run/xml-loop-bug.scala6
-rw-r--r--test/files/scalacheck/.gitignore0
-rw-r--r--test/files/scalacheck/array.scala39
-rw-r--r--test/files/scalap/caseClass/A.scala2
-rw-r--r--test/files/scalap/caseClass/result.test9
-rw-r--r--test/files/scalap/caseObject/A.scala2
-rw-r--r--test/files/scalap/cbnParam/A.scala2
-rw-r--r--test/files/scalap/classPrivate/A.scala9
-rw-r--r--test/files/scalap/classPrivate/result.test10
-rw-r--r--test/files/scalap/classWithExistential/result.test4
-rw-r--r--test/files/scalap/covariantParam/result.test4
-rw-r--r--test/files/scalap/defaultParameter/A.scala3
-rw-r--r--test/files/scalap/defaultParameter/result.test3
-rw-r--r--test/files/scalap/typeAnnotations/A.scala9
-rw-r--r--test/files/scalap/typeAnnotations/result.test8
-rwxr-xr-x[-rw-r--r--]test/files/script/fact.scala0
-rwxr-xr-xtest/files/script/t1017.scala4
-rwxr-xr-xtest/partest9
-rw-r--r--test/partest-tests/jvm/actor-receivewithin.check16
-rw-r--r--test/partest-tests/jvm/actor-receivewithin.scala69
-rw-r--r--test/partest-tests/run/crash.scala6
-rw-r--r--test/partest-tests/run/streamWithFilter.check5
-rw-r--r--test/partest-tests/run/streamWithFilter.scala11
-rw-r--r--test/partest-tests/run/timeout.scala5
-rwxr-xr-xtest/partest.bat4
-rw-r--r--test/pending/buildmanager/t2443/BitSet.scala2
-rw-r--r--test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala1
-rw-r--r--test/pending/buildmanager/t2443/t2443.check6
-rw-r--r--test/pending/buildmanager/t2443/t2443.test3
-rw-r--r--test/pending/continuations-run/example0.scala9
-rw-r--r--test/pending/continuations-run/example1.scala9
-rw-r--r--test/pending/continuations-run/example16.scala9
-rw-r--r--test/pending/continuations-run/example2.scala9
-rw-r--r--test/pending/continuations-run/example3.scala9
-rw-r--r--test/pending/continuations-run/example4.scala9
-rw-r--r--test/pending/continuations-run/example5.scala9
-rw-r--r--test/pending/continuations-run/example6.scala9
-rw-r--r--test/pending/continuations-run/example7.scala9
-rw-r--r--test/pending/continuations-run/example8.scala9
-rw-r--r--test/pending/continuations-run/example9.scala9
-rw-r--r--test/pending/continuations-run/foreach.check4
-rw-r--r--test/pending/continuations-run/foreach.scala33
-rw-r--r--test/pending/jvm/actor-executor4.check21
-rw-r--r--test/pending/jvm/actor-executor4.scala64
-rw-r--r--test/pending/jvm/actorgc_leak.scala2
-rw-r--r--test/pending/jvm/natives.scala (renamed from test/disabled/jvm/natives.scala)6
-rw-r--r--test/pending/jvm/t1801.check (renamed from test/files/jvm/t1801.check)0
-rw-r--r--test/pending/jvm/t1801.scala (renamed from test/files/jvm/t1801.scala)0
-rw-r--r--test/pending/jvm/t2515.check (renamed from test/files/jvm/t2515.check)0
-rw-r--r--test/pending/jvm/t2515.scala43
-rw-r--r--test/pending/jvm/t2705/GenericInterface.java1
-rw-r--r--test/pending/jvm/t2705/Methods.java4
-rw-r--r--test/pending/jvm/t2705/t2705.scala5
-rw-r--r--test/pending/jvm/timeout.scala2
-rw-r--r--test/pending/neg/bug112506A.scala2
-rw-r--r--test/pending/neg/bug1210.check (renamed from test/files/neg/bug1210.check)0
-rw-r--r--test/pending/neg/bug1210.scala2
-rw-r--r--test/pending/neg/bug3189.check7
-rw-r--r--test/pending/neg/bug3189.scala3
-rw-r--r--test/pending/neg/plugin-after-terminal.check (renamed from test/files/neg/plugin-after-terminal.check)0
-rw-r--r--test/pending/neg/plugin-after-terminal.flags (renamed from test/files/neg/plugin-after-terminal.flags)0
-rw-r--r--test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-after-terminal/misc/build.sh (renamed from test/files/neg/plugin-after-terminal/misc/build.sh)0
-rw-r--r--test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml (renamed from test/files/neg/plugin-after-terminal/misc/scalac-plugin.xml)0
-rw-r--r--test/pending/neg/plugin-after-terminal/src/ThePlugin.scala (renamed from test/files/neg/plugin-after-terminal/src/ThePlugin.scala)6
-rw-r--r--test/pending/neg/plugin-after-terminal/testsource.scala (renamed from test/files/neg/plugin-after-terminal/testsource.scala)0
-rw-r--r--test/pending/neg/plugin-before-parser.check (renamed from test/files/neg/plugin-before-parser.check)0
-rw-r--r--test/pending/neg/plugin-before-parser.flags (renamed from test/files/neg/plugin-before-parser.flags)0
-rw-r--r--test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-before-parser/misc/build.sh (renamed from test/files/neg/plugin-before-parser/misc/build.sh)0
-rw-r--r--test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml (renamed from test/files/neg/plugin-before-parser/misc/scalac-plugin.xml)0
-rw-r--r--test/pending/neg/plugin-before-parser/src/ThePlugin.scala (renamed from test/files/neg/plugin-before-parser/src/ThePlugin.scala)6
-rw-r--r--test/pending/neg/plugin-before-parser/testsource.scala (renamed from test/files/neg/plugin-before-parser/testsource.scala)0
-rw-r--r--test/pending/neg/plugin-cyclic-dependency.check (renamed from test/files/neg/plugin-cyclic-dependency.check)0
-rw-r--r--test/pending/neg/plugin-cyclic-dependency.flags (renamed from test/files/neg/plugin-cyclic-dependency.flags)0
-rw-r--r--test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-cyclic-dependency/misc/build.sh (renamed from test/files/neg/plugin-cyclic-dependency/misc/build.sh)0
-rw-r--r--test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml (renamed from test/files/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml)0
-rw-r--r--test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala (renamed from test/files/neg/plugin-cyclic-dependency/src/ThePlugin.scala)10
-rw-r--r--test/pending/neg/plugin-cyclic-dependency/testsource.scala (renamed from test/files/neg/plugin-cyclic-dependency/testsource.scala)0
-rw-r--r--test/pending/neg/plugin-multiple-rafter.check (renamed from test/files/neg/plugin-multiple-rafter.check)0
-rw-r--r--test/pending/neg/plugin-multiple-rafter.flags (renamed from test/files/neg/plugin-multiple-rafter.flags)0
-rw-r--r--test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-multiple-rafter/misc/build.sh (renamed from test/files/neg/plugin-multiple-rafter/misc/build.sh)0
-rw-r--r--test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml (renamed from test/files/neg/plugin-multiple-rafter/misc/scalac-plugin.xml)0
-rw-r--r--test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala (renamed from test/files/neg/plugin-multiple-rafter/src/ThePlugin.scala)6
-rw-r--r--test/pending/neg/plugin-multiple-rafter/testsource.scala (renamed from test/files/neg/plugin-multiple-rafter/testsource.scala)0
-rw-r--r--test/pending/neg/plugin-rafter-before-1.check (renamed from test/files/neg/plugin-rafter-before-1.check)0
-rw-r--r--test/pending/neg/plugin-rafter-before-1.flags (renamed from test/files/neg/plugin-rafter-before-1.flags)0
-rw-r--r--test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-rafter-before-1/misc/build.sh (renamed from test/files/neg/plugin-rafter-before-1/misc/build.sh)0
-rw-r--r--test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml (renamed from test/files/neg/plugin-rafter-before-1/misc/scalac-plugin.xml)0
-rw-r--r--test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala (renamed from test/files/neg/plugin-rafter-before-1/src/ThePlugin.scala)6
-rw-r--r--test/pending/neg/plugin-rafter-before-1/testsource.scala (renamed from test/files/neg/plugin-rafter-before-1/testsource.scala)0
-rw-r--r--test/pending/neg/plugin-rightafter-terminal.check (renamed from test/files/neg/plugin-rightafter-terminal.check)0
-rw-r--r--test/pending/neg/plugin-rightafter-terminal.flags (renamed from test/files/neg/plugin-rightafter-terminal.flags)0
-rw-r--r--test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-rightafter-terminal/misc/build.sh (renamed from test/files/neg/plugin-rightafter-terminal/misc/build.sh)0
-rw-r--r--test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml (renamed from test/files/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml)0
-rw-r--r--test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala (renamed from test/files/neg/plugin-rightafter-terminal/src/ThePlugin.scala)8
-rw-r--r--test/pending/neg/plugin-rightafter-terminal/testsource.scala (renamed from test/files/neg/plugin-rightafter-terminal/testsource.scala)0
-rw-r--r--test/pending/neg/t1477.scala2
-rw-r--r--test/pending/neg/t2079.scala4
-rw-r--r--test/pending/neg/t2080.scala2
-rw-r--r--test/pending/neg/tcpoly_typealias_eta.scala6
-rw-r--r--test/pending/neg/tcpoly_variance_enforce_getter_setter.scala4
-rw-r--r--test/pending/pos/bug0305.scala2
-rw-r--r--test/pending/pos/bug112606B.scala4
-rw-r--r--test/pending/pos/bug1357.scala (renamed from test/files/pos/bug1357.scala)2
-rw-r--r--test/pending/pos/bug3420.flags1
-rw-r--r--test/pending/pos/bug3420.scala5
-rw-r--r--test/pending/pos/bug563.scala2
-rw-r--r--test/pending/pos/bug572.scala8
-rw-r--r--test/pending/pos/bug573.scala10
-rw-r--r--test/pending/pos/bug579.scala2
-rw-r--r--test/pending/pos/bug586.scala10
-rw-r--r--test/pending/pos/misc/B.scala2
-rw-r--r--test/pending/pos/moors.scala4
-rw-r--r--test/pending/pos/sig/sigs.scala2
-rw-r--r--test/pending/pos/t0621.scala2
-rw-r--r--test/pending/pos/t0756.scala2
-rw-r--r--test/pending/pos/t0805.scala2
-rw-r--r--test/pending/pos/t1004.scala2
-rw-r--r--test/pending/pos/t1380/gnujaxp.jar.desired.sha1 (renamed from test/files/pos/t1380/gnujaxp.jar.desired.sha1)0
-rw-r--r--test/pending/pos/t1380/hallo.scala3
-rw-r--r--test/pending/pos/t1659.scala4
-rw-r--r--test/pending/pos/t1786.scala4
-rw-r--r--test/pending/pos/t2060.scala28
-rw-r--r--test/pending/pos/t2625.scala9
-rw-r--r--test/pending/pos/t2635.scala16
-rw-r--r--test/pending/pos/t2641.scala16
-rw-r--r--test/pending/pos/unappgadteval.scala12
-rw-r--r--test/pending/pos/virt.scala4
-rw-r--r--test/pending/res/bug837/DeadCode.scala2
-rw-r--r--test/pending/run/array_casts.scala4
-rw-r--r--test/pending/run/arrays-2.scala2
-rw-r--r--test/pending/run/bug1697.scala (renamed from test/files/run/bug1697.scala)0
-rw-r--r--test/pending/run/bug2087.scala2
-rw-r--r--test/pending/run/bug2364.check1
-rw-r--r--test/pending/run/bug2364.scala60
-rw-r--r--test/pending/run/bug2365/Test.scala35
-rw-r--r--test/pending/run/bug2365/bug2365.javaopts1
-rwxr-xr-xtest/pending/run/bug2365/run13
-rw-r--r--test/pending/run/bug3050.scala10
-rw-r--r--test/pending/run/bug3150.scala10
-rw-r--r--test/pending/run/bug874.scala2
-rw-r--r--test/pending/run/bugs425-and-816.scala27
-rw-r--r--test/pending/run/castsingleton.scala2
-rw-r--r--test/pending/run/collections.scala4
-rw-r--r--test/pending/run/deprecated.scala18
-rw-r--r--test/pending/run/hashCodeDistribution.flags (renamed from test/files/run/hashCodeDistribution.flags)0
-rw-r--r--test/pending/run/hashCodeDistribution.scala (renamed from test/files/run/hashCodeDistribution.scala)2
-rw-r--r--test/pending/run/instanceOfAndTypeMatching.scala193
-rw-r--r--test/pending/run/records.scala6
-rw-r--r--test/pending/run/string-reverse.scala22
-rw-r--r--test/pending/run/t0508x.scala6
-rw-r--r--test/pending/run/t0807.scala2
-rw-r--r--test/pending/run/t0947.scala2
-rw-r--r--test/pending/run/t1980.scala2
-rwxr-xr-xtest/pending/run/t3609.scala11
-rw-r--r--test/pending/scalacheck/CheckEither.scala52
-rw-r--r--test/pending/scalacheck/array.scala37
-rw-r--r--test/pending/scalacheck/eqeq.scala37
-rw-r--r--test/pending/scalacheck/list.scala (renamed from test/files/scalacheck/list.scala)6
-rw-r--r--test/pending/scalacheck/range.scala205
-rw-r--r--test/pending/scalacheck/scan.scala17
-rw-r--r--test/pending/script/bug2365.javaopts1
-rw-r--r--test/pending/script/bug2365/Test.scala35
-rwxr-xr-xtest/pending/script/bug2365/bug2365.scala9
-rw-r--r--test/pending/shootout/fasta.check171
-rw-r--r--test/pending/shootout/fasta.scala162
-rw-r--r--test/pending/shootout/fasta.scala.runner3
-rw-r--r--test/pending/shootout/harmonic.scala-2.scala14
-rw-r--r--test/pending/shootout/harmonic.scala-2.scala.runner16
-rw-r--r--test/pending/shootout/harmonic.scala-3.scala15
-rw-r--r--test/pending/shootout/harmonic.scala-3.scala.runner3
-rw-r--r--test/pending/shootout/heapsort.scala72
-rw-r--r--test/pending/shootout/heapsort.scala.runner3
-rw-r--r--test/pending/shootout/mandelbrot.scala-2.checkbin0 -> 5011 bytes
-rw-r--r--test/pending/shootout/mandelbrot.scala-2.scala79
-rw-r--r--test/pending/shootout/mandelbrot.scala-2.scala.runner3
-rw-r--r--test/pending/shootout/message.check1
-rw-r--r--test/pending/shootout/message.javaopts1
-rw-r--r--test/pending/shootout/message.scala47
-rw-r--r--test/pending/shootout/message.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala496
-rw-r--r--test/pending/shootout/meteor.scala-2.scala496
-rw-r--r--test/pending/shootout/meteor.scala-2.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala-3.scala557
-rw-r--r--test/pending/shootout/meteor.scala-3.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala-4.scala587
-rw-r--r--test/pending/shootout/meteor.scala-4.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala.runner3
-rw-r--r--test/pending/shootout/methcall.scala58
-rw-r--r--test/pending/shootout/methcall.scala.runner3
-rw-r--r--test/pending/shootout/nsieve.scala-4.check9
-rw-r--r--test/pending/shootout/nsieve.scala-4.scala45
-rw-r--r--test/pending/shootout/nsieve.scala-4.scala.runner3
-rw-r--r--test/pending/shootout/pidigits.check100
-rw-r--r--test/pending/shootout/pidigits.scala69
-rw-r--r--test/pending/shootout/pidigits.scala.runner3
-rw-r--r--test/pending/shootout/prodcons.scala64
-rw-r--r--test/pending/shootout/prodcons.scala.runner3
-rw-r--r--test/pending/shootout/random.scala32
-rw-r--r--test/pending/shootout/random.scala.runner3
-rw-r--r--test/pending/shootout/revcomp.scala-2.check171
-rw-r--r--test/pending/shootout/revcomp.scala-2.scala92
-rw-r--r--test/pending/shootout/revcomp.scala-2.scala.runner6
-rw-r--r--test/pending/shootout/revcomp.scala-3.check171
-rw-r--r--test/pending/shootout/revcomp.scala-3.scala147
-rw-r--r--test/pending/shootout/revcomp.scala-3.scala.runner6
-rw-r--r--test/pending/shootout/sieve.scala43
-rw-r--r--test/pending/shootout/sieve.scala.runner3
-rw-r--r--test/postreview.py2540
-rwxr-xr-xtest/review44
-rw-r--r--test/simplejson/__init__.py318
-rw-r--r--test/simplejson/decoder.py354
-rw-r--r--test/simplejson/encoder.py440
-rw-r--r--test/simplejson/scanner.py65
-rw-r--r--test/simplejson/tool.py37
-rw-r--r--test/support/annotations/NestedAnnotations.java (renamed from test/files/jvm/NestedAnnotations.java)8
-rw-r--r--test/support/annotations/OuterEnum.java (renamed from test/files/jvm/OuterEnum.java)0
-rw-r--r--test/support/annotations/OuterTParams.java (renamed from test/files/jvm/OuterTParams.java)0
-rw-r--r--test/support/annotations/SourceAnnotation.java (renamed from test/files/jvm/SourceAnnotation.java)0
-rwxr-xr-xtest/support/annotations/mkAnnotationsJar.sh (renamed from test/files/jvm/mkAnnotationsJar.sh)0
-rwxr-xr-xtools/abspath9
-rwxr-xr-xtools/cpof30
-rwxr-xr-xtools/diffPickled51
-rwxr-xr-xtools/epfl-build28
-rwxr-xr-xtools/epfl-build-2.x.x35
-rwxr-xr-xtools/epfl-publish50
-rwxr-xr-xtools/git-get-rev5
-rwxr-xr-xtools/packcp5
-rwxr-xr-xtools/pathResolver11
-rwxr-xr-xtools/quickcp8
-rwxr-xr-xtools/scalawhich4
-rwxr-xr-xtools/scmp4
-rwxr-xr-xtools/showPickled32
-rwxr-xr-xtools/starrcp5
-rwxr-xr-xtools/strapcp8
-rwxr-xr-xtools/tokens4
-rwxr-xr-xtools/truncate7
2991 files changed, 87595 insertions, 38777 deletions
diff --git a/.classpath b/.classpath
index 1e83ae27e7..6ccb74aae0 100644
--- a/.classpath
+++ b/.classpath
@@ -2,9 +2,10 @@
<classpath>
<classpathentry kind="src" path="src/compiler"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
+ <classpathentry exported="true" kind="lib" path="lib/msil.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry exported="true" kind="lib" path="lib/fjbg.jar"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="output" path="bin"/>
</classpath>
diff --git a/.project b/.project
index 71b4782bbe..68938dd571 100644
--- a/.project
+++ b/.project
@@ -1,12 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>scala</name>
+ <name>scala-2.8.x</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
- <name>ch.epfl.lamp.sdt.core.scalabuilder</name>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
<arguments>
</arguments>
</buildCommand>
@@ -23,7 +23,7 @@
</buildSpec>
<natures>
<nature>org.eclipse.pde.PluginNature</nature>
- <nature>ch.epfl.lamp.sdt.core.scalanature</nature>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 71fdb152c1..0d9aa5edff 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Scala Distribution
Bundle-SymbolicName: scala.tools.nsc;singleton:=true
-Bundle-Version: 2.8.0
+Bundle-Version: 2.8.3.alpha
Eclipse-LazyStart: true
Bundle-ClassPath:
.,
@@ -28,12 +28,17 @@ Export-Package:
scala.tools.nsc.matching,
scala.tools.nsc.plugins,
scala.tools.nsc.reporters,
+ scala.tools.nsc.settings,
scala.tools.nsc.symtab,
scala.tools.nsc.symtab.classfile,
scala.tools.nsc.transform,
scala.tools.nsc.typechecker,
scala.tools.nsc.util,
- ch.epfl.lamp.fjbg
+ scala.tools.util,
+ ch.epfl.lamp.compiler.msil,
+ ch.epfl.lamp.compiler.msil.emit,
+ ch.epfl.lamp.compiler.msil.util,
+ ch.epfl.lamp.fjbg,
+ ch.epfl.lamp.util
Require-Bundle:
- org.apache.ant,
- scala.library
+ org.apache.ant
diff --git a/OMakefile b/OMakefile
deleted file mode 100644
index 29f0616480..0000000000
--- a/OMakefile
+++ /dev/null
@@ -1,187 +0,0 @@
-######################################################################
-
-# If you aren't running on a LAMP system, you need to make sure you
-# have JAVA_HOME and JAVACMD (and optionally JAVAC) set correctly.
-JAVA_HOME = $(getenv JAVA_HOME, /home/linuxsoft/apps/java-1.6/)
-JAVACMD = $(getenv JAVACMD, $(JAVA_HOME)$(DIRSEP)bin$(DIRSEP)java)
-JAVAC = $(getenv JAVAC, $(JAVA_HOME)$(DIRSEP)bin$(DIRSEP)javac)
-JAVAP = $(JAVA_HOME)$(DIRSEP)bin$(DIRSEP)javap
-
-# Default options for the JVM
-JAVA_OPTS = $(getenv JAVA_OPTS, -Xms1024M -Xmx1024M -XX:MaxPermSize=256M)
-
-DIFF=diff
-DIFF_OPTS=-r
-
-######################################################################
-
-# The current copyright string
-COPYRIGHT_STRING = Copyright 2002-2009, LAMP/EPFL
-# Need to generate this correctly
-VERSION_NUMBER = 2.7.2
-
-# The directory where the STARR is kept
-LIB_DIR = .$(DIRSEP)lib
-# The directory where we store the built files
-BUILD_DIR = .$(DIRSEP)build
-# The directory where the locker files are kept
-LOCKER_DIR = $(BUILD_DIR)$(DIRSEP)locker
-LOCKER_CLASS_DIR = $(LOCKER_DIR)$(DIRSEP)classes
-# The directory where the quick files are kept
-QUICK_DIR = $(BUILD_DIR)$(DIRSEP)quick
-QUICK_CLASS_DIR = $(QUICK_DIR)$(DIRSEP)classes
-# The directory where the strap files are kept
-STRAP_DIR = $(BUILD_DIR)$(DIRSEP)strap
-STRAP_CLASS_DIR = $(STRAP_DIR)$(DIRSEP)classes
-
-# Scala compiler class
-SCALAC_CLASS = scala.tools.nsc.Main
-
-# The Partest class
-PARTEST_CLASS = scala.tools.partest.nest.NestRunner
-
-# Arguments used to configure which tests are run
-PARTEST_ARGS = --pos --neg --run --jvm --jvm5 --res --shootout
-
-######################################################################
-
-# CLASSPATHs for the various build modes
-COMMON_CLASSPATH = $(addprefix $(LIB_DIR)$(DIRSEP), jline.jar msil.jar fjbg.jar)
-
-STARR_CLASSPATH = $(array $(addprefix $(LIB_DIR)$(DIRSEP), scala-library.jar scala-compiler.jar) $(COMMON_CLASSPATH))
-# Debugging remove
-#println($(string $(STARR_CLASSPATH)))
-LOCKER_CLASSPATH = $(array $(addprefix $(LOCKER_CLASS_DIR)$(DIRSEP), compiler library) $(COMMON_CLASSPATH))
-QUICK_CLASSPATH = $(array $(addprefix $(QUICK_CLASS_DIR)$(DIRSEP), compiler library partest) $(COMMON_CLASSPATH))
-STRAP_CLASSPATH = $(array $(addprefix $(STRAP_CLASS_DIR)$(DIRSEP), compiler library partest) $(COMMON_CLASSPATH))
-
-######################################################################
-
-# Helper functions
-
-# Convert a sequence to a path by concatenating it together with
-# the appropriate separator for the current platform
-topath(seq) =
- return $(concat $(PATHSEP), $(seq))
-
-# Create an empty file
-touch(file) =
- close($(fopen $(file), w))
-
-# "idempotent-mkdir", create a directory if it doesn't already exist
-idem-mkdir(dir) =
- if $(not $(file-exists $(dir)))
- # println(Creating directory $(dir))
- mkdir(-p $(dir))
-
-# compare two classes using javap
-javap-diff(cpath1, cpath2, cls) =
- tmp1 = $(tmpfile javap1, .txt)
- tmp2 = $(tmpfile javap2, .txt)
- $(JAVAP) -classpath $(cpath1) -private $(cls) > $(tmp1)
- $(JAVAP) -classpath $(cpath2) -private $(cls) > $(tmp2)
- ($(DIFF) $(tmp1) $(tmp2)) || echo $(cls) is different in $(cpath1) and $(cpath2)
-
-# Write a property file
-propfile(file) =
- handle = $(fopen $(file), w)
- # Need to correctly generated date/time
- fprint($(handle), \# Generated at some time\n)
- fprint($(handle), copyright.string=$(COPYRIGHT_STRING)\n)
- fprint($(handle), version.number=$(VERSION_NUMBER)\n)
- close($(handle))
-
-# Compile the Scala files in the sequence args with the specified
-# classpath, placing the generated class files in the directory outdir
-scalac(classpath, outdir, args) =
- # println(scalac compiling $(string $(args)))
- $(JAVACMD) -cp $(topath $(classpath)) $(JAVA_OPTS) $(SCALAC_CLASS) -d $(outdir) $(args)
-
-# Compile the Java files in the sequence args with the specified
-# classpath, placing the generated class files in the directory outdir
-javac(classpath, outdir, args) =
- # println(javac compiling $(string $(args)))
- $(JAVAC) -cp $(topath $(classpath)) -d $(outdir) $(args)
-
-# Build an instance of the Scala compiler and libraries using
-# the compiler in the specified classpath as the bootstrap
-# compiler, and placing the result in the directory outdir
-buildscala(classpath, outdir) =
- idem-mkdir($(outdir)$(DIRSEP)classes$(DIRSEP)compiler)
- scalac($(classpath), $(outdir)$(DIRSEP)classes$(DIRSEP)compiler, \
- $(find ./src/compiler/scala/tools/nsc -name *.scala) $(find ./src/compiler/scala/tools/util -name *.scala))
- propfile($(outdir)$(DIRSEP)classes$(DIRSEP)compiler$(DIRSEP)compiler.properties)
- # For compatibility with the old ant script we'll create a flag
- # touch($(outdir)$(DIRSEP)compiler.complete)
- idem-mkdir($(outdir)$(DIRSEP)classes$(DIRSEP)library)
- javac($(classpath), $(outdir)$(DIRSEP)classes$(DIRSEP)library, \
- $(find ./src/library -name *.java) $(find ./src/actors -name *.java))
- scalac($(classpath), $(outdir)$(DIRSEP)classes$(DIRSEP)library, \
- $(find ./src/library -name *.scala) \
- $(find ./src/dbc -name *.scala) \
- $(find ./src/swing -name *.scala) \
- $(find ./src/actors -name *.scala))
- # Need to copy over script.js and style.css for scaladoc to find them
- cp($(addprefix ./src/compiler/scala/tools/nsc/doc/,script.js style.css) \
- $(outdir)$(DIRSEP)classes$(DIRSEP)compiler$(DIRSEP)scala$(DIRSEP)tools$(DIRSEP)nsc$(DIRSEP)doc)
- propfile($(outdir)$(DIRSEP)classes$(DIRSEP)library$(DIRSEP)library.properties)
- # For compatibility with the old ant script we'll create some flags
- touch($(outdir)$(DIRSEP)library.complete)
- # touch($(outdir)$(DIRSEP)all.complete)
-
-# Run partest with respect to the specified classpath
-partest(classpath) =
- # println(testing)
- $(JAVACMD) -cp $(topath $(classpath)) $(JAVA_OPTS) \
- $(PARTEST_CLASS) --classpath $(QUICK_CLASS_DIR) --show-diff $(PARTEST_ARGS)
-
-######################################################################
-
-# Specify those targets that are "phony", as in, they do not
-# correspond to actual files that will be created.
-
-.PHONY : locker quick partest test clean all.clean locker.clean strap stability
-
-# Specify the default target
-.DEFAULT : test
-
-######################################################################
-
-# Just clean out the quick build
-clean :
- $(rm -rf $(QUICK_DIR))
-
-# Just clean out the locker
-locker.clean :
- $(rm -rf $(LOCKER_DIR))
-
-# Clean up everything
-all.clean :
- $(rm -rf $(BUILD_DIR))
-
-######################################################################
-
-locker $(LOCKER_DIR) :
- buildscala($(STARR_CLASSPATH), $(LOCKER_DIR))
-
-quick $(QUICK_DIR) : $(LOCKER_DIR)
- buildscala($(LOCKER_CLASSPATH), $(QUICK_DIR))
-
-strap $(STRAP_DIR) : $(QUICK_DIR)
- buildscala($(QUICK_CLASSPATH), $(STRAP_DIR))
-
-test.stability : $(STRAP_DIR)
-# javap-diff($(QUICK_CLASS_DIR)/library, $(STRAP_CLASS_DIR)/library, "scala.swing.Key")
- $(DIFF) $(DIFF_OPTS) $(QUICK_CLASS_DIR) $(STRAP_CLASS_DIR)
-
-partest : quick
- idem-mkdir($(QUICK_CLASS_DIR)$(DIRSEP)partest)
- javac($(LOCKER_CLASSPATH), $(QUICK_CLASS_DIR)$(DIRSEP)partest, \
- $(find ./src/partest/scala -name *.java))
- scalac($(LOCKER_CLASSPATH), $(QUICK_CLASS_DIR)$(DIRSEP)partest, \
- $(filter-out %PartestTask.scala %AntRunner.scala, $(find ./src/partest/scala -name *.scala)))
- # For compatibility with the old ant script we'll create a flag
- # touch($(QUICK_DIR)$(DIRSEP)partest.complete)
-
-test : partest
- partest($(QUICK_CLASSPATH))
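The test.stability target of this (now deleted) OMakefile boils down to a recursive diff of the class files produced by the quick and strap layers. A minimal shell sketch of the same check, assuming the default build/quick and build/strap output directories named in the variables above:

  # Compare the classes emitted by quick and strap; any difference means
  # the compiler did not rebuild itself identically.
  diff -r build/quick/classes build/strap/classes \
    && echo "build is stable" \
    || echo "quick and strap classes differ"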
diff --git a/OMakeroot b/OMakeroot
deleted file mode 100644
index cdb7699a6e..0000000000
--- a/OMakeroot
+++ /dev/null
@@ -1,3 +0,0 @@
-open build/Common
-
-.SUBDIRS: .
diff --git a/README b/README
index 61425f8002..106503399b 100644
--- a/README
+++ b/README
@@ -4,7 +4,7 @@
================================================================================
This document describes the Scala core (core library and compiler) repository
-and how to build it. For information about Scala as a languages, you can visit
+and how to build it. For information about Scala as a language, you can visit
the web site http://www.scala-lang.org/
Part I. The repository layout
@@ -12,9 +12,10 @@ Part I. The repository layout
Follows the file layout of the Scala repository. Files marked with a † are not
part of the Subversion repository but are either automatically generated by the
-build script or user-created if needed.
+build script or user-created if needed. This is not a complete listing.
scala/
+ bin/ Developer utilities.
build/ † Temporary staging area for build products.
build.excludes † An optional build configuration file.
build.number The version number of the current distribution.
@@ -23,80 +24,106 @@ scala/
dist/ † The destination folder of Scala distributions.
docs/ Documentation of Scala. More in its own module.
development/ Developer documentation.
- examples/ Scala example files.
- man/ UNIX manual files.
+ examples/ Scala source code examples.
lib/ Pre-compiled libraries for the build.
fjbg.jar The Java byte-code generation library.
- scala-compiler.jar The last stable version of the Scala compiler.
- scala-library.jar The last stable version of the Scala library.
+ scala-compiler.jar The stable reference version (aka 'starr')
+ of the Scala compiler
+ scala-library.jar The stable reference version (aka 'starr')
+ of the Scala library.
+ scala-library-src.jar A snapshot of the source code which was used
+ to build starr.
ant/ Support libraries for the build tool.
ant-contrib.jar Provides additional features for Ant
vizant.jar Provides DOT graph generation for Ant
README The file you are currently reading.
sandbox/ † A folder to test code etc.
src/ All the source files of Scala.
+ actors/ The sources of the Actor library.
compiler/ The sources of the Scala compiler.
- library/ The sources of the Scala library.
+ library/ The sources of the core Scala library.
+ swing/ The sources of the Swing library.
test/ The Scala test suite.
-Any change to this structure requires a modification of the 'build.xml' file.
-
-Part IV. Building Scala with SABBUS
+Part II. Building Scala with SABBUS
--------------------------------------------------------------------------------
-SABBUS is the name of the Ant build script used to compile Scala. It is mostly automated and takes care of managing the dependencies.
+SABBUS is the name of the Ant build script used to compile Scala. It is mostly
+automated and takes care of managing the dependencies.
LAYERS:
-In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds Scala in layers. Each layer is a complete compiled Scala compiler and library. A superior layer is always compiled by the layer just below it. Here is a short description of the four layers that SABBUS uses, from bottom to top:
+In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds
+Scala in layers. Each layer is a complete compiled Scala compiler and library.
+A superior layer is always compiled by the layer just below it. Here is a short
+description of the four layers that SABBUS uses, from bottom to top:
-'starr': the stable reference Scala release which is shared by all the developers. It is found in the repository as 'lib/scala.compiler.jar' and 'lib/scala-library.jar'. Any committable source code must be compiled directly by starr to guarantee the bootstrapping of the compiler.
+'starr': the stable reference Scala release which is shared by all the
+developers. It is found in the repository as 'lib/scala-compiler.jar' and
+'lib/scala-library.jar'. Any committable source code must be compiled directly
+by starr to guarantee the bootstrapping of the compiler.
-'locker': the local reference which is compiled by starr and is the work compiler in a typical development cycle. When it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly required (see below).
+'locker': the local reference which is compiled by starr and is the work
+compiler in a typical development cycle. When it has been built once, it is
+“frozen” in this state. Updating it to fit the current source code must be
+explicitly required (see below).
-'quick': the layer which is incrementally built when testing changes in the compiler or library. This is considered a actual new version when locker is up-to-date in relation to the source code.
+'quick': the layer which is incrementally built when testing changes in the
+compiler or library. This is considered an actual new version when locker is
+up-to-date in relation to the source code.
'strap': a test layer used to check stability of the build.
DEPENDANT CHANGES:
-SABBUS compiles, for each layer, the Scala library first and the compiler next. That means that any changes in the library can immediately be used in the compiler without an intermediate build. On the other hand, if building the library requires changes in the compiler a new starr (or in some case only locker if bootstrapping is still possible) compiler must be built in-between.
+SABBUS compiles, for each layer, the Scala library first and the compiler next.
+That means that any changes in the library can immediately be used in the
+compiler without an intermediate build. On the other hand, if building the
+library requires changes in the compiler, a new locker must be built if
+bootstrapping is still possible, or a new starr if it is not.
Part III. Requirements for SABBUS
--------------------------------------------------------------------------------
-The Scala build system is based on Apache Ant. Most required pre-compiled libraries are part of the repository (in 'lib/'). The following however is assumed to be installed on the build machine:
- - A Java runtime environment (JRE) or SDK 1.5 or above.
+The Scala build system is based on Apache Ant. Most required pre-compiled
+libraries are part of the repository (in 'lib/'). The following however is
+assumed to be installed on the build machine:
+ - A Java runtime environment (JRE) or SDK 1.6 or above.
- Apache Ant version 1.7.0 or above.
-Part IV. Common use-cases
+Part IV. Common use cases
--------------------------------------------------------------------------------
'ant -p'
- Prints-out information about all available targets in the build script.
+ Prints out information about the commonly used ant targets. The interested
+ developer can find the rest in the XML files.
'ant' or 'ant build'
A quick compilation (to quick) of your changes using the locker compiler.
- This will rebuild all quick if locker changed.
- This will also rebuild locker if starr changed.
-'ln -s build/quick/bin bin' (once)
-'ant && bin/scalac -d sandbox sandbox/test.scala && bin/scala -cp sandbox Test'
+'ln -s build/quick/bin qbin' (once)
+'ant && qbin/scalac -d sandbox sandbox/test.scala && qbin/scala -cp sandbox Test'
Incrementally builds quick, and then uses it to compile and run the file
'sandbox/test.scala'. This is a typical debug cycle.
-'ant newlocker'
+'ant replacelocker'
"unfreezes" locker by updating it to match the current source code.
- This will delete quick so as not to mix classes compiled with different
versions of locker.
'ant test'
Tests that your code is working and fit to be committed.
- - Runs the test suite on quick.
+ - Runs the test suite and bootstrapping test on quick.
+ - You can run the suite only (skipping strap) with 'ant test.suite'.
'ant docs'
Generates the HTML documentation for the library from the sources using the
- scaladoc tool in quick.
+ scaladoc tool in quick. Note: on most machines this requires more heap than
+ is allocated by default. You can adjust the parameters with ANT_OPTS.
+ Example command line:
+ ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
'ant dist'
Builds a distribution.
@@ -115,7 +142,9 @@ Part IV. Common use-cases
'ant all.clean'
Removes all build files (including locker) and all distributions.
-Additional ant targets can be seen via 'ant -p'.
+Many of these targets offer a variant which runs with -optimise enabled.
+Optimized targets include build-opt, test-opt, dist-opt, fastdist-opt,
+replacestarr-opt, replacelocker-opt, and distpack-opt.
Part V. Contributing to Scala
--------------------------------------------------------------------------------
@@ -124,7 +153,7 @@ If you wish to contribute, you can find all of the necessary information on
the official Scala website: www.scala-lang.org.
Specifically, you can subscribe to the Scala mailing lists, read all of the
-available documentation, and browse the live SVN repository. You can contact
+available documentation, and browse the live SVN repository. You can contact
the Scala team by sending us a message on one of the mailing lists, or by using
the available contact form.
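The use cases listed in this README revision describe one edit-compile-test loop. A minimal sketch that strings the quoted commands together, assuming the scratch file sandbox/test.scala and object Test used as placeholders in the README example:

  # one-time: expose the launchers of the quick build
  ln -s build/quick/bin qbin

  # rebuild quick with the frozen locker compiler, then try a scratch file
  ant build
  qbin/scalac -d sandbox sandbox/test.scala
  qbin/scala -cp sandbox Test

  # refresh locker when it no longer matches the current sources
  ant replacelocker

  # before committing: run the test suite plus the bootstrapping (strap) check
  ant test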
diff --git a/build.number b/build.number
index 88a6ad113e..81f218da8d 100644
--- a/build.number
+++ b/build.number
@@ -1,5 +1,5 @@
#Tue Sep 11 19:21:09 CEST 2007
version.minor=8
-version.patch=0
+version.patch=3
version.suffix=alpha
version.major=2
diff --git a/build.xml b/build.xml
index 47e5a679ca..8f1e48d026 100644
--- a/build.xml
+++ b/build.xml
@@ -125,10 +125,15 @@ END-USER TARGETS
</target>
<target name="newlibs"
- description="Requires compiler libraries (MSIL and FJBG) to be rebuilt. Add this target before any other if class file format is incompatible.">
+ description="Requires libraries (MSIL, FJBG) to be rebuilt. Add this target before any other if class file format is incompatible.">
<property name="libs.outdated" value="yes"/>
</target>
+ <target name="newforkjoin"
+ description="Requires forkjoin library to be rebuilt. Add this target before any other if class file format is incompatible.">
+ <property name="forkjoin.outdated" value="yes"/>
+ </target>
+
<!-- ===========================================================================
PROPERTIES
============================================================================ -->
@@ -142,7 +147,7 @@ PROPERTIES
<property name="lib.dir" value="${basedir}/lib"/>
<property name="lib-ant.dir" value="${lib.dir}/ant"/>
<property name="src.dir" value="${basedir}/src"/>
- <property name="test.dir" value="${basedir}/test"/>
+ <property name="partest.dir" value="${basedir}/test"/>
<!-- Loads custom properties definitions -->
<property file="${basedir}/build.properties"/>
@@ -156,6 +161,7 @@ PROPERTIES
<property name="comp.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
<property name="jline.jar" value="${lib.dir}/jline.jar"/>
<property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
+ <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
<!-- Sets location of build folders -->
<property name="build.dir" value="${basedir}/build"/>
@@ -169,7 +175,7 @@ PROPERTIES
<property name="dists.dir" value="${basedir}/dists"/>
- <property name="copyright.string" value="Copyright 2002-2009, LAMP/EPFL"/>
+ <property name="copyright.string" value="Copyright 2002-2010, LAMP/EPFL"/>
<property name="partest.version.number" value="0.9.2"/>
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
@@ -178,16 +184,14 @@ PROPERTIES
<!-- if ANT_OPTS is already set by the environment, it will be unaltered,
but if it is unset it will take this default value. -->
- <property name="env.ANT_OPTS" value="-Xms512M -Xmx1024M -Xss1M -XX:MaxPermSize=128M" />
+ <property name="env.ANT_OPTS" value="-Xms512M -Xmx1536M -Xss1M -XX:MaxPermSize=128M" />
<!-- to find max heap usage: -Xaprof ; currently at 980M for locker.comp -->
+ <echo message="Using ANT_OPTS: ${env.ANT_OPTS}" />
<property
name="scalacfork.jvmargs"
value="${env.ANT_OPTS}"/>
- <property name="javac.cmd" value="${env.JAVA_HOME}/bin/javac"/>
- <property name="java.cmd" value="${env.JAVA_HOME}/bin/java"/>
-
<!-- ===========================================================================
INITIALISATION
============================================================================ -->
@@ -212,16 +216,34 @@ INITIALISATION
<condition property="os.win">
<os family="windows"/>
</condition>
- <!-- Finding out SVN revision -->
+ <!-- Finding out SVN revision, svn style -->
<exec executable="svn" outputproperty="svn.out"
failifexecutionfails="false">
<arg line=" info ${basedir}"/>
</exec>
<propertyregex
- property="svn.number" input="${svn.out}" select="\1"
+ property="svn.number.svn" input="${svn.out}" select="\1"
regexp="Revision: ([0-9]+)"
defaultValue="0"/>
+
+ <!-- Both clauses of the conditional set svn.number -->
+ <if>
+ <equals arg1="${svn.number.svn}" arg2="0" />
+ <then>
+ <!-- Finding SVN revision, git style -->
+ <exec osfamily="unix" executable="tools/git-get-rev" outputproperty="svn.number.git" failifexecutionfails="false" />
+ <propertyregex
+ property="svn.number" input="${svn.number.git}" select="\1"
+ regexp="\D*?(\d+)"
+ defaultValue="0"/>
+ </then>
+ <else>
+ <property name="svn.number" value="${svn.number.svn}" />
+ </else>
+ </if>
+
<property name="init.avail" value="yes"/>
+
<!-- Generating version number -->
<property file="${basedir}/build.number"/>
<property
@@ -236,8 +258,18 @@ INITIALISATION
<pathelement location="${comp.starr.jar}"/>
<pathelement location="${lib.dir}/fjbg.jar"/>
<pathelement location="${lib.dir}/msil.jar"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
</path>
+ <!-- What to have on the compilation path when compiling during certain phases -->
+ <path id="quick.compilation.path">
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </path>
+ <path id="strap.compilation.path">
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </path>
<taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
</target>
@@ -320,8 +352,9 @@ LOCAL REFERENCE BUILD (LOCKER)
<pathelement location="${build-locker.dir}/classes/compiler"/>
<pathelement location="${lib.dir}/fjbg.jar"/>
<pathelement location="${lib.dir}/msil.jar"/>
- <pathelement location="${jline.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-locker.dir}/classes/compiler/compiler.properties">
@@ -333,13 +366,18 @@ LOCAL REFERENCE BUILD (LOCKER)
<include name="**/*.tmpl"/>
<include name="**/*.xml"/>
<include name="**/*.js"/>
+ <include name="**/*.html"/>
<include name="**/*.css"/>
+ <include name="**/*.properties"/>
+ <include name="**/*.swf"/>
+ <include name="**/*.png"/>
+
</fileset>
</copy>
<touch file="${build-locker.dir}/compiler.complete" verbose="no"/>
<stopwatch name="locker.comp.timer" action="total"/>
</target>
-
+
<target name="locker.done" depends="locker.comp">
<touch file="${build-locker.dir}/all.complete" verbose="no"/>
<path id="locker.classpath">
@@ -347,6 +385,7 @@ LOCAL REFERENCE BUILD (LOCKER)
<pathelement location="${build-locker.dir}/classes/compiler"/>
<pathelement location="${lib.dir}/fjbg.jar"/>
<pathelement location="${lib.dir}/msil.jar"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
</path>
</target>
@@ -408,13 +447,14 @@ QUICK BUILD (QUICK)
============================================================================ -->
<target name="quick.start" depends="locker.done"/>
-
+
<target name="quick.pre-lib" depends="quick.start">
<uptodate property="quick.lib.available" targetfile="${build-quick.dir}/library.complete">
<srcfiles dir="${src.dir}">
<include name="library/**"/>
<include name="dbc/**"/>
<include name="actors/**"/>
+ <include name="continuations/**"/>
<include name="swing/**"/>
</srcfiles>
</uptodate>
@@ -447,9 +487,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
@@ -458,9 +496,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
@@ -469,9 +505,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/dbc"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
@@ -480,9 +514,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/swing"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
@@ -501,18 +533,26 @@ QUICK BUILD (QUICK)
</target>
<target name="quick.newlibs" depends="quick.lib" if="libs.outdated">
- <antcall target="libs.done">
- <param name="fjbg.jar" value="${build-libs.dir}/fjbg.jar"/>
- <param name="msil.jar" value="${build-libs.dir}/msil.jar"/>
- </antcall>
+ <antcall target="libs.done" inheritRefs="true"/>
+ <property name="fjbg.jar" value="${build-libs.dir}/fjbg.jar"/>
+ <property name="msil.jar" value="${build-libs.dir}/msil.jar"/>
</target>
<target name="quick.libs" depends="quick.newlibs" unless="libs.outdated">
<property name="fjbg.jar" value="${lib.dir}/fjbg.jar"/>
<property name="msil.jar" value="${lib.dir}/msil.jar"/>
</target>
-
- <target name="quick.pre-comp" depends="quick.libs">
+
+ <target name="quick.newforkjoin" depends="quick.libs" if="forkjoin.outdated">
+ <antcall target="forkjoin.done" inheritRefs="true"/>
+ <property name="forkjoin.jar" value="${build-libs.dir}/forkjoin.jar"/>
+ </target>
+
+ <target name="quick.forkjoin" depends="quick.newforkjoin" unless="forkjoin.outdated">
+ <property name="forkjoin.jar" value="${lib.dir}/forkjoin.jar"/>
+ </target>
+
+ <target name="quick.pre-comp" depends="quick.forkjoin">
<uptodate property="quick.comp.available" targetfile="${build-quick.dir}/compiler.complete">
<srcfiles dir="${src.dir}/compiler"/>
</uptodate>
@@ -533,8 +573,9 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
- <pathelement location="${jline.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/compiler/compiler.properties">
@@ -547,13 +588,61 @@ QUICK BUILD (QUICK)
<include name="**/*.xml"/>
<include name="**/*.js"/>
<include name="**/*.css"/>
+ <include name="**/*.html"/>
+ <include name="**/*.properties"/>
+ <include name="**/*.swf"/>
+ <include name="**/*.png"/>
</fileset>
</copy>
<touch file="${build-quick.dir}/compiler.complete" verbose="no"/>
<stopwatch name="quick.comp.timer" action="total"/>
</target>
- <target name="quick.pre-scalap" depends="quick.comp">
+ <target name="quick.pre-plugins" depends="quick.comp">
+ <uptodate property="quick.plugins.available" targetfile="${build-quick.dir}/plugins.complete">
+ <srcfiles dir="${src.dir}/continuations"/>
+ </uptodate>
+ </target>
+
+ <target name="quick.plugins" depends="quick.pre-plugins" unless="quick.plugins.available">
+ <stopwatch name="quick.plugins.timer"/>
+ <mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/continuations-plugin"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/continuations/plugin"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
+ </compilationpath>
+ </scalacfork>
+ <copy
+ file="${src.dir}/continuations/plugin/scalac-plugin.xml"
+ todir="${build-quick.dir}/classes/continuations-plugin"/>
+ <!-- not very nice to create jar here but needed to load plugin -->
+ <mkdir dir="${build-quick.dir}/misc/scala-devel/plugins"/>
+ <jar destfile="${build-quick.dir}/misc/scala-devel/plugins/continuations.jar">
+ <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
+ </jar>
+ <!-- might split off library part into its own ant target -->
+ <scalacfork
+ destdir="${build-quick.dir}/classes/library"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick} -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable"
+ srcdir="${src.dir}/continuations/library"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="quick.compilation.path"/>
+ </scalacfork>
+ <touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
+ <stopwatch name="quick.plugins.timer" action="total"/>
+ </target>
+
+ <target name="quick.pre-scalap" depends="quick.plugins">
<uptodate property="quick.scalap.available" targetfile="${build-quick.dir}/scalap.complete">
<srcfiles dir="${src.dir}/scalap"/>
</uptodate>
@@ -572,8 +661,8 @@ QUICK BUILD (QUICK)
<compilationpath>
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
</compilationpath>
</scalacfork>
@@ -616,6 +705,8 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ <pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/partest/partest.properties">
@@ -644,6 +735,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${jline.jar}"/>
</path>
<taskdef name="quick-bin" classname="scala.tools.ant.ScalaTool" classpathref="quick.bin.classpath"/>
@@ -687,6 +779,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${ant.jar}"/>
</path>
</target>
@@ -715,6 +808,7 @@ PACKED QUICK BUILD (PACK)
<exclude name="scala/dbc/**"/>
<exclude name="scala/swing/**"/>
</fileset>
+ <zipfileset dirmode="755" filemode="644" src="${forkjoin.jar}"/>
</jar>
<jar destfile="${build-pack.dir}/lib/scala-dbc.jar">
<fileset dir="${build-quick.dir}/classes/library">
@@ -754,7 +848,21 @@ PACKED QUICK BUILD (PACK)
<copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
</target>
- <target name="pack.pre-partest" depends="pack.comp">
+ <target name="pack.pre-plugins" depends="pack.comp">
+ <uptodate
+ property="pack.plugins.available"
+ targetfile="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar"
+ srcfile="${build-quick.dir}/plugins.complete"/>
+ </target>
+
+ <target name="pack.plugins" depends="pack.pre-plugins" unless="pack.plugins.available">
+ <mkdir dir="${build-pack.dir}/misc/scala-devel/plugins"/>
+ <jar destfile="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar">
+ <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
+ </jar>
+ </target>
+
+ <target name="pack.pre-partest" depends="pack.plugins">
<uptodate
property="pack.partest.available"
targetfile="${build-pack.dir}/lib/scala-partest.jar"
@@ -888,9 +996,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
@@ -899,9 +1005,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
@@ -910,9 +1014,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/dbc"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
@@ -921,9 +1023,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/swing"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<propertyfile file="${build-strap.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
@@ -962,8 +1062,9 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/compiler"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
- <pathelement location="${jline.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-strap.dir}/classes/compiler/compiler.properties">
@@ -976,13 +1077,64 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.xml"/>
<include name="**/*.js"/>
<include name="**/*.css"/>
- </fileset>
+ <include name="**/*.html"/>
+ <include name="**/*.properties"/>
+ <include name="**/*.swf"/>
+ <include name="**/*.png"/>
+ </fileset>
</copy>
<touch file="${build-strap.dir}/compiler.complete" verbose="no"/>
<stopwatch name="strap.comp.timer" action="total"/>
</target>
-
- <target name="strap.pre-scalap" depends="strap.comp">
+
+ <target name="strap.pre-plugins" depends="strap.comp">
+ <uptodate property="strap.plugins.available" targetfile="${build-strap.dir}/plugins.complete">
+ <srcfiles dir="${src.dir}/continuations"/>
+ </uptodate>
+ </target>
+
+ <target name="strap.plugins" depends="strap.pre-plugins" unless="strap.plugins.available">
+ <stopwatch name="strap.plugins.timer"/>
+ <mkdir dir="${build-strap.dir}/classes/continuations-plugin"/>
+ <scalacfork
+ destdir="${build-strap.dir}/classes/continuations-plugin"
+ compilerpathref="pack.classpath"
+ params="${scalac.args.all}"
+ srcdir="${src.dir}/continuations/plugin"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <pathelement location="${build-strap.dir}/classes/compiler"/>
+ <pathelement location="${build-strap.dir}/classes/continuations-plugin"/>
+ </compilationpath>
+ </scalacfork>
+ <copy
+ file="${src.dir}/continuations/plugin/scalac-plugin.xml"
+ todir="${build-strap.dir}/classes/continuations-plugin"/>
+ <!-- not very nice to create jar here but needed to load plugin -->
+ <mkdir dir="${build-strap.dir}/misc/scala-devel/plugins"/>
+ <jar destfile="${build-strap.dir}/misc/scala-devel/plugins/continuations.jar">
+ <fileset dir="${build-strap.dir}/classes/continuations-plugin"/>
+ </jar>
+ <!-- might split off library part into its own ant target -->
+ <scalacfork
+ destdir="${build-strap.dir}/classes/library"
+ compilerpathref="pack.classpath"
+ params="${scalac.args.all} -Xpluginsdir ${build-strap.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable"
+ srcdir="${src.dir}/continuations/library"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
+ <stopwatch name="strap.plugins.timer" action="total"/>
+ </target>
+
+ <target name="strap.pre-scalap" depends="strap.plugins">
<uptodate property="strap.scalap.available" targetfile="${build-strap.dir}/scalap.complete">
<srcfiles dir="${src.dir}/scalap"/>
</uptodate>
@@ -1001,8 +1153,8 @@ BOOTSTRAPPING BUILD (STRAP)
<compilationpath>
<pathelement location="${build-strap.dir}/classes/library"/>
<pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
<pathelement location="${build-strap.dir}/classes/scalap"/>
+ <pathelement location="${build-strap.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
</compilationpath>
</scalacfork>
@@ -1045,6 +1197,8 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/scalap"/>
<pathelement location="${build-strap.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ <pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
<copy todir="${build-strap.dir}/classes/partest">
@@ -1068,6 +1222,48 @@ LIBRARIES (MSIL, FJBG maybe later)
<target name="libs.start"/>
+ <target name="libs.pre-forkjoin" depends="libs.start">
+ <property name="java6.home" value="/home/linuxsoft/apps/java-1.6"/>
+ <fail message="Compiling forkjoin.jar requires java 1.6. Please set the property `java6.home` in build.properties or set it using `-Djava6.home=/path/to/java6`">
+ <condition><not>
+ <available file="${java6.home}/bin/javac"/>
+ </not></condition>
+ </fail>
+
+ <uptodate property="libs.forkjoin.available" targetfile="${build-libs.dir}/forkjoin.complete">
+ <srcfiles dir="${src.dir}/forkjoin">
+ <include name="**/*.java"/>
+ <include name="**/*.scala"/>
+ </srcfiles>
+ </uptodate>
+ </target>
+
+ <target name="libs.forkjoin" depends="libs.pre-forkjoin" unless="libs.forkjoin.available">
+ <mkdir dir="${build-libs.dir}/classes/forkjoin"/>
+ <javac
+ executable="${java6.home}/bin/javac"
+ fork="yes"
+ compiler="javac1.6"
+ srcdir="${src.dir}/forkjoin"
+ destdir="${build-libs.dir}/classes/forkjoin"
+ classpath="${build-libs.dir}/classes/forkjoin"
+ includes="**/*.java"
+ debug="true"
+ target="1.5" source="1.5">
+ <compilerarg line="${javac.args}"/>
+ </javac>
+ <touch file="${build-libs.dir}/forkjoin.complete" verbose="no"/>
+ </target>
+
+ <target name="libs.pre-forkjoinpack" depends="libs.forkjoin">
+ </target>
+
+ <target name="libs.forkjoinpack" depends="libs.pre-forkjoinpack" unless="libs.forkjoinpack.available">
+ <jar destfile="${build-libs.dir}/forkjoin.jar">
+ <fileset dir="${build-libs.dir}/classes/forkjoin"/>
+ </jar>
+ </target>
+
<target name="libs.pre-msil" depends="libs.start">
<uptodate property="libs.msil.available" targetfile="${build-libs.dir}/msil.complete">
<srcfiles dir="${src.dir}/msil">
@@ -1085,6 +1281,7 @@ LIBRARIES (MSIL, FJBG maybe later)
classpath="${build-libs.dir}/classes/msil"
includes="**/*.java"
excludes="**/tests/**"
+ debug="true"
target="1.5" source="1.4">
<compilerarg line="${javac.args}"/>
</javac>
@@ -1128,23 +1325,10 @@ LIBRARIES (MSIL, FJBG maybe later)
destdir="${build-libs.dir}/classes/fjbg"
classpath="${build-libs.dir}/classes/fjbg"
includes="**/*.java"
+ debug="true"
target="1.5" source="1.4">
<compilerarg line="${javac.args}"/>
</javac>
- <!-- For now, JFBG is written in pure Java
- <scalacfork
- destdir="${build-libs.dir}/classes/fjbg"
- compilerpathref="locker.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/fjbg"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-libs.dir}/classes/fjbg"/>
- </compilationpath>
- </scalacfork>
- -->
<touch file="${build-libs.dir}/fjbg.complete" verbose="no"/>
</target>
@@ -1156,9 +1340,11 @@ LIBRARIES (MSIL, FJBG maybe later)
<fileset dir="${build-libs.dir}/classes/fjbg"/>
</jar>
</target>
-
+
<target name="libs.done" depends="libs.msilpack, libs.fjbgpack"/>
+ <target name="forkjoin.done" depends="libs.forkjoinpack"/>
+
<target name="libs.clean" depends="pack.clean">
<delete dir="${build-libs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
</target>
@@ -1185,13 +1371,16 @@ DOCUMENTATION
<mkdir dir="${build-docs.dir}/library"/>
<scaladoc
destdir="${build-docs.dir}/library"
- windowtitle="Scala Library"
- doctitle="Scala ${version.number} API"
+ doctitle="Scala Standard Library"
+ docversion="${version.number}"
+ docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/branches/2.8.x/src/€{FILE_PATH}.scala#L1"
+ sourcepath="${src.dir}"
classpathref="pack.classpath">
<src>
<files includes="${src.dir}/actors"/>
<files includes="${src.dir}/library/scala"/>
<files includes="${src.dir}/swing"/>
+ <files includes="${src.dir}/continuations/library"/>
</src>
<include name="**/*.scala"/>
<exclude name="reflect/Code.scala"/>
@@ -1206,6 +1395,7 @@ DOCUMENTATION
<exclude name="runtime/ScalaRunTime.scala"/>
<exclude name="runtime/StreamCons.scala"/>
<exclude name="runtime/StringAdd.scala"/>
+ <exclude name="scala/swing/test/**"/>
</scaladoc>
<touch file="${build-docs.dir}/library.complete" verbose="no"/>
<stopwatch name="docs.lib.timer" action="total"/>
@@ -1268,8 +1458,10 @@ DOCUMENTATION
<mkdir dir="${build-docs.dir}/compiler"/>
<scaladoc
destdir="${build-docs.dir}/compiler"
- windowtitle="Scala Compiler"
- doctitle="Scala Compiler ${version.number} API"
+ doctitle="Scala Compiler"
+ docversion="${version.number}"
+ docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/branches/2.8.x/src/€{FILE_PATH}.scala#L1"
+ sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/compiler">
<include name="**/*.scala"/>
@@ -1294,18 +1486,26 @@ BOOTRAPING TEST AND TEST SUITE
<exclude name="**/*.properties"/>
<exclude name="bin/**"/>
<exclude name="*.complete"/>
+ <exclude name="misc/scala-devel/plugins/*.jar"/>
</same>
</target>
+
+ <!-- this target will run only those tests found in test/debug -->
+ <target name="test.debug">
+ <antcall target="test.suite">
+ <param name="partest.srcdir" value="debug" />
+ </antcall>
+ </target>
<target name="test.run" depends="pack.done">
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="1200000" javaccmd="${javac.cmd}"
+ timeout="1200000"
scalacopts="${scalac.args.optimise}">
- <classpath>
+ <compilationpath>
<path refid="pack.classpath"/>
- <fileset dir="${test.dir}/files/lib" includes="*.jar"/>
- </classpath>
- <runtests dir="${test.dir}/files">
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ </compilationpath>
+ <runtests dir="${partest.dir}/files">
<include name="run/**/*.scala"/>
<include name="jvm/**/*.scala"/>
</runtests>
@@ -1313,26 +1513,48 @@ BOOTRAPING TEST AND TEST SUITE
</target>
<target name="test.suite" depends="pack.done">
+ <property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="2400000" javaccmd="${javac.cmd}"
+ timeout="2400000"
+ srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <classpath>
+ <compilationpath>
<path refid="pack.classpath"/>
- <fileset dir="${test.dir}/files/lib" includes="*.jar"/>
- </classpath>
- <postests dir="${test.dir}/files/pos" includes="*.scala"/>
- <negtests dir="${test.dir}/files/neg" includes="*.scala"/>
- <runtests dir="${test.dir}/files">
+ <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ </compilationpath>
+ <postests dir="${partest.dir}/${partest.srcdir}/pos" includes="*.scala"/>
+ <negtests dir="${partest.dir}/${partest.srcdir}/neg" includes="*.scala"/>
+ <runtests dir="${partest.dir}/${partest.srcdir}">
<include name="run/**/*.scala"/>
</runtests>
- <jvmtests dir="${test.dir}/files/jvm" includes="*.scala"/>
- <residenttests dir="${test.dir}/files/res" includes="*.res"/>
- <!-- <scripttests dir="${test.dir}/files/script" includes="*.scala"/> -->
- <scalaptests dir="${test.dir}/files/scalap" includes="**/*.scala"/>
+ <jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
+ <scalachecktests dir="${partest.dir}/${partest.srcdir}/scalacheck" includes="**/*.scala"/>
+ <residenttests dir="${partest.dir}/${partest.srcdir}/res" includes="*.res"/>
+ <buildmanagertests dir="${partest.dir}/${partest.srcdir}/buildmanager" includes="*"/>
+ <scalaptests dir="${partest.dir}/${partest.srcdir}/scalap" includes="**/*.scala"/>
+ <!-- <scripttests dir="${partest.dir}/${partest.srcdir}/script" includes="*.scala"/> -->
</partest>
</target>
-
- <target name="test.done" depends="test.suite, test.stability"/>
+
+ <target name="test.continuations.suite" depends="pack.done">
+ <property name="partest.srcdir" value="files" />
+ <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
+ timeout="2400000"
+ srcdir="${partest.srcdir}"
+ scalacopts="${scalac.args.optimise} -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable">
+ <compilationpath>
+ <path refid="pack.classpath"/>
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ </compilationpath>
+ <negtests dir="${partest.dir}/${partest.srcdir}/continuations-neg" includes="*.scala"/>
+ <runtests dir="${partest.dir}/${partest.srcdir}">
+ <include name="continuations-run/**/*.scala"/>
+ </runtests>
+ </partest>
+ </target>
+
+ <target name="test.done" depends="test.suite, test.continuations.suite, test.stability"/>
<!-- ===========================================================================
DISTRIBUTION
@@ -1356,6 +1578,10 @@ DISTRIBUTION
<chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
+ <mkdir dir="${dist.dir}/misc/scala-devel/plugins"/>
+ <copy toDir="${dist.dir}/misc/scala-devel/plugins">
+ <fileset dir="${build-pack.dir}/misc/scala-devel/plugins"/>
+ </copy>
</target>
<target name="dist.doc" depends="dist.base">
@@ -1390,6 +1616,7 @@ DISTRIBUTION
<jar destfile="${dist.dir}/src/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
<fileset dir="${src.dir}/actors"/>
+ <fileset dir="${src.dir}/continuations/library"/>
</jar>
<jar destfile="${dist.dir}/src/scala-dbc-src.jar">
<fileset dir="${src.dir}/dbc"/>
@@ -1462,23 +1689,23 @@ STABLE REFERENCE (STARR)
</target>
<target name="starr.lib" depends="starr.start">
- <copy file="${basedir}/build/pack/lib/scala-library.jar"
- toFile="${basedir}/lib/scala-library.jar"
- overwrite="yes"/>
+ <jar destfile="${basedir}/lib/scala-library.jar">
+ <fileset dir="${basedir}/build/quick/classes/library"/>
+ </jar>
</target>
<target name="starr.comp" depends="starr.lib">
- <delete file="${basedir}/lib/scala-compiler.jar"/>
<jar destfile="${basedir}/lib/scala-compiler.jar">
<fileset dir="${basedir}/build/quick/classes/compiler"/>
</jar>
</target>
<target name="starr.src" depends="starr.comp">
- <delete file="${basedir}/lib/scala-library-src.jar"/>
<jar destfile="${basedir}/lib/scala-library-src.jar">
<fileset dir="${basedir}/src/library"/>
<fileset dir="${basedir}/src/actors"/>
+ <fileset dir="${basedir}/src/swing"/>
+ <fileset dir="${basedir}/src/dbc"/>
</jar>
</target>
@@ -1487,6 +1714,7 @@ STABLE REFERENCE (STARR)
<fileset dir="${build-libs.dir}">
<include name="fjbg.jar"/>
<include name="msil.jar"/>
+ <include name="forkjoin.jar"/>
</fileset>
</copy>
</target>
@@ -1500,9 +1728,15 @@ FORWARDED TARGETS FOR PACKAGING
<target name="distpack" depends="dist.done">
<ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
</target>
+
+ <target name="distpack-opt"
+ description="Builds an optimised distribution.">
+ <antcall target="distpack">
+ <param name="scalac.args.optimise" value="-optimise"/>
+ </antcall>
+ </target>
<!-- Used by the scala-installer script -->
- <target name="alldistpack" depends="distpack"/>
<target name="allallclean" depends="all.clean"/>
<!-- ===========================================================================
@@ -1520,13 +1754,25 @@ FORWARDED TARGETS FOR NIGHTLY BUILDS
<ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
</target>
+ <target name="nightly.checkinit">
+ <antcall target="nightly-nopt">
+ <param name="scalac.args.optimise" value="-Xcheckinit"/>
+ </antcall>
+ </target>
+
+ <target name="nightly.checkall">
+ <antcall target="nightly-nopt">
+ <param name="partest.scalacopts" value="-Ycheck:all"/>
+ </antcall>
+ </target>
+
<!-- ===========================================================================
POSITIONS
============================================================================ -->
<target name="test.positions" depends="quick.comp">
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/positions"/>
+ <param name="test.tests.srcs" value="${partest.dir}/files/positions"/>
</antcall>
<antcall target="test.positions.sub" inheritRefs="true">
<param name="test.srcs" value="${src.dir}/compiler"/>
@@ -1550,13 +1796,13 @@ POSITIONS
<param name="test.srcs" value="${src.dir}/scalap"/>
</antcall>
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/pos"/>
+ <param name="test.tests.srcs" value="${partest.dir}/files/pos"/>
</antcall>
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/run"/>
+ <param name="test.tests.srcs" value="${partest.dir}/files/run"/>
</antcall>
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/neg"/>
+ <param name="test.tests.srcs" value="${partest.dir}/files/neg"/>
</antcall>
</target>
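The quick.plugins and test.continuations.suite targets added above enable the continuations plugin through three scalac options. A hedged sketch of the equivalent manual invocation, assuming the plugin jar created by quick.plugins under the quick build tree, the qbin symlink from the README, and a placeholder source file Example.scala:

  # Compile one file with the continuations plugin loaded from the quick build tree.
  qbin/scalac \
    -Xpluginsdir build/quick/misc/scala-devel/plugins \
    -Xplugin-require:continuations \
    -P:continuations:enable \
    Example.scala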
diff --git a/docs/LICENSE b/docs/LICENSE
index fd4d83e7bd..b75bb425b4 100644
--- a/docs/LICENSE
+++ b/docs/LICENSE
@@ -1,6 +1,6 @@
SCALA LICENSE
-Copyright (c) 2002-2009 EPFL, Lausanne, unless otherwise specified.
+Copyright (c) 2002-2010 EPFL, Lausanne, unless otherwise specified.
All rights reserved.
This software was developed by the Programming Methods Laboratory of the
diff --git a/docs/examples/actors/producers.scala b/docs/examples/actors/producers.scala
index d3ff903f5b..80e5ae33d3 100644
--- a/docs/examples/actors/producers.scala
+++ b/docs/examples/actors/producers.scala
@@ -6,13 +6,13 @@ import scala.actors.Actor._
abstract class Producer[T] {
/** A signal that the next value should be produced. */
- private val Next = new Object
+ private val Next = new Object
/** A label for an undefined state of the iterators. */
private val Undefined = new Object
/** A signal to stop the coordinator. */
- private val Stop = new Object
+ private val Stop = new Object
protected def produce(x: T) {
coordinator ! Some(x)
@@ -53,10 +53,10 @@ abstract class Producer[T] {
}
private val producer: Actor = actor {
- receive {
- case Next =>
+ receive {
+ case Next =>
produceValues
- coordinator ! None
+ coordinator ! None
}
}
}
@@ -70,7 +70,7 @@ object producers extends Application {
def tree = node(node(node(3), 4, node(6)), 8, node(node(9), 10, node(11)))
class PreOrder(n: Tree) extends Producer[Int] {
- def produceValues = traverse(n)
+ def produceValues = traverse(n)
def traverse(n: Tree) {
if (n != null) {
produce(n.elem)
@@ -81,7 +81,7 @@ object producers extends Application {
}
class PostOrder(n: Tree) extends Producer[Int] {
- def produceValues = traverse(n)
+ def produceValues = traverse(n)
def traverse(n: Tree) {
if (n != null) {
traverse(n.left)
@@ -92,7 +92,7 @@ object producers extends Application {
}
class InOrder(n: Tree) extends Producer[Int] {
- def produceValues = traverse(n)
+ def produceValues = traverse(n)
def traverse(n: Tree) {
if (n != null) {
traverse(n.left)
diff --git a/docs/examples/jolib/Ref.scala b/docs/examples/jolib/Ref.scala
index 39c0123f09..5f655f16b1 100644
--- a/docs/examples/jolib/Ref.scala
+++ b/docs/examples/jolib/Ref.scala
@@ -12,7 +12,7 @@ import concurrent.SyncVar;
import concurrent.jolib._;
class Ref[a](init: a) extends Join {
-
+
object get extends Synchr[a](this) { case class C() extends SyncVar[a]; }
object set extends Synchr[unit](this) { case class C(x: a) extends SyncVar[unit]; }
object state extends Asynchr(this) { case class C(x: a); }
@@ -25,7 +25,7 @@ class Ref[a](init: a) extends Join {
);
state(state.C(init));
-
+
def Get: a = get(get.C());
def Set(x: a): unit = set(set.C(x));
}
diff --git a/docs/examples/jolib/parallelOr.scala b/docs/examples/jolib/parallelOr.scala
index 4c4a852c4a..72d282bee3 100644
--- a/docs/examples/jolib/parallelOr.scala
+++ b/docs/examples/jolib/parallelOr.scala
@@ -13,27 +13,27 @@ import concurrent.SyncVar;
/** Implementation in the join-calculus of a parallel OR. */
object or extends Join {
-
+
object res extends Synchr[boolean](this) { case class C() extends SyncVar[boolean] };
object res1 extends Asynchr(this) { case class C(b: boolean); }
object res2 extends Asynchr(this) { case class C(b: boolean); }
object res1False extends Synchr[boolean](this) { case class C() extends SyncVar[boolean] };
object res2False extends Synchr[boolean](this) { case class C() extends SyncVar[boolean] };
-
+
rules(
Pair(List(res, res1), { case List(r @ res.C(), res1.C(b)) =>
if (b) r.set(b) else r.set(res1False(res1False.C())) }),
-
+
Pair(List(res, res2), { case List(r @ res.C(), res2.C(b)) =>
if (b) r.set(b) else r.set(res2False(res2False.C())) }),
-
+
Pair(List(res1False, res2), { case List(r @ res1False.C(), res2.C(b)) =>
r.set(b) }),
-
+
Pair(List(res2False, res1), { case List(r @ res2False.C(), res1.C(b)) =>
r.set(b) })
);
-
+
def apply(b1: => boolean, b2: => boolean): boolean = {
concurrent.ops.spawn(res1(res1.C(b1)));
concurrent.ops.spawn(res2(res2.C(b2)));
@@ -42,7 +42,7 @@ object or extends Join {
}
*/
object parallelOr {
-
+
def main(args: Array[String]): unit = {
def loop: boolean = { while (true) {}; true };
/*
diff --git a/docs/examples/monads/callccInterpreter.scala b/docs/examples/monads/callccInterpreter.scala
index 5b09b4c285..5b556bd8fa 100644
--- a/docs/examples/monads/callccInterpreter.scala
+++ b/docs/examples/monads/callccInterpreter.scala
@@ -14,7 +14,7 @@ object callccInterpreter {
def showM(m: M[Value]): String = (m in id).toString();
- def callCC[A](h: (A => M[A]) => M[A]) =
+ def callCC[A](h: (A => M[A]) => M[A]) =
M[A](c => h(a => M[A](d => c(a))) in c);
type Name = String;
@@ -30,7 +30,7 @@ object callccInterpreter {
trait Value;
case object Wrong extends Value {
override def toString() = "wrong"
- }
+ }
case class Num(n: Int) extends Value {
override def toString() = n.toString();
}
@@ -70,7 +70,7 @@ object callccInterpreter {
case Ccc(x, t) => callCC(k => interp(t, Pair(x, Fun(k)) :: e))
}
- def test(t: Term): String =
+ def test(t: Term): String =
showM(interp(t, List()));
val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
diff --git a/docs/examples/monads/directInterpreter.scala b/docs/examples/monads/directInterpreter.scala
index a80c9e4ed0..06fffba8e2 100644
--- a/docs/examples/monads/directInterpreter.scala
+++ b/docs/examples/monads/directInterpreter.scala
@@ -45,11 +45,11 @@ object directInterpreter {
case App(f, t) => apply(interp(f, e), interp(t, e))
}
- def test(t: Term): String =
+ def test(t: Term): String =
showval(interp(t, List()));
val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
- def main(args: Array[String]) =
+ def main(args: Array[String]) =
System.out.println(test(term0));
}
diff --git a/docs/examples/monads/simpleInterpreter.scala b/docs/examples/monads/simpleInterpreter.scala
index 219b137c31..cde3a92dbb 100644
--- a/docs/examples/monads/simpleInterpreter.scala
+++ b/docs/examples/monads/simpleInterpreter.scala
@@ -22,7 +22,7 @@ object simpleInterpreter {
trait Value;
case object Wrong extends Value {
override def toString() = "wrong"
- }
+ }
case class Num(n: Int) extends Value {
override def toString() = n.toString();
}
@@ -61,7 +61,7 @@ object simpleInterpreter {
yield c
}
- def test(t: Term): String =
+ def test(t: Term): String =
showM(interp(t, List()));
val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
diff --git a/docs/examples/monads/stateInterpreter.scala b/docs/examples/monads/stateInterpreter.scala
index 35568fb314..97f3335dab 100644
--- a/docs/examples/monads/stateInterpreter.scala
+++ b/docs/examples/monads/stateInterpreter.scala
@@ -7,7 +7,7 @@ object stateInterpreter {
val tickS = new M(s => Pair((), s + 1));
case class M[A](in: State => Pair[A, State]) {
- def bind[B](k: A => M[B]) = M[B]{ s0 =>
+ def bind[B](k: A => M[B]) = M[B]{ s0 =>
val Pair(a, s1) = this in s0; k(a) in s1
}
def map[B](f: A => B): M[B] = bind(x => unitM(f(x)));
@@ -72,7 +72,7 @@ object stateInterpreter {
yield c
}
- def test(t: Term): String =
+ def test(t: Term): String =
showM(interp(t, List()));
val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
diff --git a/docs/examples/parsing/ArithmeticParser.scala b/docs/examples/parsing/ArithmeticParser.scala
index ebb6c38421..cbd8493453 100644
--- a/docs/examples/parsing/ArithmeticParser.scala
+++ b/docs/examples/parsing/ArithmeticParser.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,16 +15,16 @@ import scala.util.parsing.combinator.syntactical.StdTokenParsers
* a term is a sequence of factors, separated by * or /
* a factor is a parenthesized expression or a number
*
- * @author Adriaan Moors
- */
-object arithmeticParser extends StdTokenParsers {
+ * @author Adriaan Moors
+ */
+object arithmeticParser extends StdTokenParsers {
type Tokens = StdLexical ; val lexical = new StdLexical
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
lazy val expr = term*("+" ^^^ {(x: int, y: int) => x + y} | "-" ^^^ {(x: int, y: int) => x - y})
lazy val term = factor*("*" ^^^ {(x: int, y: int) => x * y} | "/" ^^^ {(x: int, y: int) => x / y})
lazy val factor: Parser[int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)
-
+
def main(args: Array[String]) {
println(
if (args.length == 1) {
@@ -37,14 +37,14 @@ object arithmeticParser extends StdTokenParsers {
}
-object arithmeticParserDesugared extends StdTokenParsers {
+object arithmeticParserDesugared extends StdTokenParsers {
type Tokens = StdLexical ; val lexical = new StdLexical
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
lazy val expr = chainl1(term, (keyword("+").^^^{(x: int, y: int) => x + y}).|(keyword("-").^^^{(x: int, y: int) => x - y}))
lazy val term = chainl1(factor, (keyword("*").^^^{(x: int, y: int) => x * y}).|(keyword("/").^^^{(x: int, y: int) => x / y}))
- lazy val factor: Parser[int] = keyword("(").~>(expr.<~(keyword(")"))).|(numericLit.^^(x => x.toInt))
-
+ lazy val factor: Parser[int] = keyword("(").~>(expr.<~(keyword(")"))).|(numericLit.^^(x => x.toInt))
+
def main(args: Array[String]) {
println(
if (args.length == 1) {
diff --git a/docs/examples/parsing/ArithmeticParsers.scala b/docs/examples/parsing/ArithmeticParsers.scala
index 8fb3af7acb..62d7a61862 100644
--- a/docs/examples/parsing/ArithmeticParsers.scala
+++ b/docs/examples/parsing/ArithmeticParsers.scala
@@ -2,7 +2,7 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object ArithmeticParsers extends StandardTokenParsers {
+object ArithmeticParsers extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
def expr: Parser[Any] = term ~ rep("+" ~ term | "-" ~ term)
@@ -16,11 +16,11 @@ object ArithmeticParsers extends StandardTokenParsers {
}
}
-object ArithmeticParsers1 extends StandardTokenParsers {
+object ArithmeticParsers1 extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
val reduceList: Int ~ List[String ~ Int] => Int = {
- case i ~ ps => (i /: ps)(reduce)
+ case i ~ ps => (i /: ps)(reduce)
}
def reduce(x: Int, r: String ~ Int) = (r: @unchecked) match {
@@ -45,11 +45,11 @@ class Expr
case class BinOp(op: String, l: Expr, r: Expr) extends Expr
case class Num(n: Int) extends Expr
-object ArithmeticParsers2 extends StandardTokenParsers {
+object ArithmeticParsers2 extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
val reduceList: Expr ~ List[String ~ Expr] => Expr = {
- case i ~ ps => (i /: ps)(reduce)
+ case i ~ ps => (i /: ps)(reduce)
}
def reduce(l: Expr, r: String ~ Expr) = BinOp(r._1, l, r._2)
diff --git a/docs/examples/parsing/JSON.scala b/docs/examples/parsing/JSON.scala
index bbba25f744..abfa242e9f 100644
--- a/docs/examples/parsing/JSON.scala
+++ b/docs/examples/parsing/JSON.scala
@@ -2,14 +2,14 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object JSON extends StandardTokenParsers {
+object JSON extends StandardTokenParsers {
lexical.delimiters += ("{", "}", "[", "]", ":", ",")
lexical.reserved += ("null", "true", "false")
def obj : Parser[Any] = "{" ~ repsep(member, ",") ~ "}"
def arr : Parser[Any] = "[" ~ repsep(value, ",") ~ "]"
def member: Parser[Any] = ident ~ ":" ~ value
- def value : Parser[Any] = ident | numericLit | obj | arr |
+ def value : Parser[Any] = ident | numericLit | obj | arr |
"null" | "true" | "false"
def main(args: Array[String]) {
@@ -18,20 +18,20 @@ object JSON extends StandardTokenParsers {
println(phrase(value)(tokens))
}
}
-object JSON1 extends StandardTokenParsers {
+object JSON1 extends StandardTokenParsers {
lexical.delimiters += ("{", "}", "[", "]", ":", ",")
lexical.reserved += ("null", "true", "false")
- def obj: Parser[Map[String, Any]] =
+ def obj: Parser[Map[String, Any]] =
"{" ~> repsep(member, ",") <~ "}" ^^ (Map() ++ _)
def arr: Parser[List[Any]] =
- "[" ~> repsep(value, ",") <~ "]"
+ "[" ~> repsep(value, ",") <~ "]"
- def member: Parser[(String, Any)] =
+ def member: Parser[(String, Any)] =
ident ~ ":" ~ value ^^ { case name ~ ":" ~ value => (name -> value) }
- def value: Parser[Any] =
+ def value: Parser[Any] =
ident | numericLit ^^ (_.toInt) | obj | arr |
"null" ^^^ null | "true" ^^^ true | "false" ^^^ false
diff --git a/docs/examples/parsing/ListParser.scala b/docs/examples/parsing/ListParser.scala
index 12805e5e50..59fc292c1d 100644
--- a/docs/examples/parsing/ListParser.scala
+++ b/docs/examples/parsing/ListParser.scala
@@ -14,7 +14,7 @@ object listParser {
class ListParsers extends Parsers {
type Elem = Char
-
+
lazy val ident = rep1(elem("letter", isLetter), elem("letter or digit", isLetterOrDigit)) ^^ {cs => Id(mkString(cs))}
lazy val number = chainl1(elem("digit", isDigit) ^^ (_ - '0'), success{(accum: Int, d: Int) => accum * 10 + d}) ^^ Num
lazy val list = '(' ~> repsep(expr, ',') <~ ')' ^^ Lst
diff --git a/docs/examples/parsing/ListParsers.scala b/docs/examples/parsing/ListParsers.scala
index f503a0139f..b449c4a641 100644
--- a/docs/examples/parsing/ListParsers.scala
+++ b/docs/examples/parsing/ListParsers.scala
@@ -2,7 +2,7 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object ListParsers extends StandardTokenParsers {
+object ListParsers extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", ",")
def expr: Parser[Any] = "(" ~ exprs ~ ")" | ident | numericLit
@@ -15,7 +15,7 @@ object ListParsers extends StandardTokenParsers {
}
}
-object ListParsers1 extends StandardTokenParsers {
+object ListParsers1 extends StandardTokenParsers {
lexical.delimiters ++= List("(", ")", ",")
def expr: Parser[Any] = "(" ~> exprs <~ ")" | ident | numericLit
diff --git a/docs/examples/parsing/MiniML.scala b/docs/examples/parsing/MiniML.scala
index ffc7c2ac92..f7f7172e8d 100644
--- a/docs/examples/parsing/MiniML.scala
+++ b/docs/examples/parsing/MiniML.scala
@@ -3,7 +3,7 @@ package examples.parsing
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-object MiniML extends StandardTokenParsers {
+object MiniML extends StandardTokenParsers {
lexical.delimiters += ("(", ")", ".", "=")
lexical.reserved += ("lambda", "let", "in")
@@ -30,7 +30,7 @@ case class Lambda(x: String, expr: Expr) extends Expr
case class Apply(fun: Expr, arg: Expr) extends Expr
case class Var(x: String) extends Expr
-object MiniML1 extends StandardTokenParsers {
+object MiniML1 extends StandardTokenParsers {
lexical.delimiters += ("(", ")", ".", "=")
lexical.reserved += ("lambda", "let", "in")
@@ -41,7 +41,7 @@ object MiniML1 extends StandardTokenParsers {
)
def simpleExpr: Parser[Expr] = (
ident ^^ { Var }
- | "(" ~> expr <~ ")"
+ | "(" ~> expr <~ ")"
)
def main(args: Array[String]) {
diff --git a/docs/examples/parsing/lambda/Main.scala b/docs/examples/parsing/lambda/Main.scala
index 81a175de77..165e82b670 100644
--- a/docs/examples/parsing/lambda/Main.scala
+++ b/docs/examples/parsing/lambda/Main.scala
@@ -27,8 +27,8 @@ object Main extends Application with TestParser
{
Console.println("Term: \n"+term)
}
- case Failure(msg, remainder) => Console.println("Failure: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
- case Error(msg, remainder) => Console.println("Error: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
+ case Failure(msg, remainder) => Console.println("Failure: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
+ case Error(msg, remainder) => Console.println("Error: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
}
}
}
diff --git a/docs/examples/parsing/lambda/TestParser.scala b/docs/examples/parsing/lambda/TestParser.scala
index 22257c1731..d26589da1b 100644
--- a/docs/examples/parsing/lambda/TestParser.scala
+++ b/docs/examples/parsing/lambda/TestParser.scala
@@ -17,10 +17,10 @@ trait TestParser extends StdTokenParsers with ImplicitConversions with TestSynt
lexical.reserved ++= List("unit", "let", "in", "if", "then", "else")
lexical.delimiters ++= List("=>", "->", "==", "(", ")", "=", "\\", "+", "-", "*", "/")
-
+
def name : Parser[Name] = ident ^^ Name
-
- // meaning of the argumens to the closure during subsequent iterations
+
+ // meaning of the arguments to the closure during subsequent iterations
// (...(expr2 op1 expr1) ... op1 expr1)
// ^a^^^ ^o^ ^b^^^
// ^^^^^^^a^^^^^^^ ^o^ ^^b^^
@@ -29,10 +29,10 @@ trait TestParser extends StdTokenParsers with ImplicitConversions with TestSynt
def expr2 : Parser[Term] =
chainl1(expr3, expr2, op2 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
+
def expr3 : Parser[Term] =
chainl1(expr4, expr3, op3 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
+
def expr4 : Parser[Term] =
( "\\" ~> lambdas
| ("let" ~> name) ~ ("=" ~> expr1) ~ ("in" ~> expr1) ^^ flatten3(Let)
@@ -42,27 +42,27 @@ trait TestParser extends StdTokenParsers with ImplicitConversions with TestSynt
def lambdas : Parser[Term] =
name ~ ("->" ~> expr1 | lambdas) ^^ flatten2(Lam)
-
+
def aexpr : Parser[Term] =
( numericLit ^^ (_.toInt) ^^ Lit
| name ^^ Ref
| "unit" ^^^ Unit()
| "(" ~> expr1 <~ ")"
)
-
+
def op1 : Parser[Term] =
"==" ^^^ Ref(Name("=="))
-
+
def op2 : Parser[Term] =
( "+" ^^^ Ref(Name("+"))
| "-" ^^^ Ref(Name("-"))
)
-
+
def op3 : Parser[Term] =
( "*" ^^^ Ref(Name("*"))
| "/" ^^^ Ref(Name("/"))
)
-
+
def parse(r: Reader[char]) : ParseResult[Term] =
phrase(expr1)(new lexical.Scanner(r))
}
diff --git a/docs/examples/parsing/lambda/TestSyntax.scala b/docs/examples/parsing/lambda/TestSyntax.scala
index 531ae4bd54..7edca6ccdc 100644
--- a/docs/examples/parsing/lambda/TestSyntax.scala
+++ b/docs/examples/parsing/lambda/TestSyntax.scala
@@ -5,25 +5,25 @@ package examples.parsing.lambda
*
* @author Miles Sabin (adapted slightly by Adriaan Moors)
*/
-trait TestSyntax
+trait TestSyntax
{
- trait Term
-
+ trait Term
+
case class Unit extends Term
{
override def toString = "unit"
}
-
+
case class Lit(n: int) extends Term
{
override def toString = n.toString
}
-
+
case class Bool(b: boolean) extends Term
{
override def toString = b.toString
}
-
+
case class Name(name: String) extends Term
{
override def toString = name
@@ -33,27 +33,27 @@ trait TestSyntax
{
def value = n
}
-
+
case class Lam(n: Name, l: Term) extends Term
{
override def toString = "(\\ "+n+" -> "+l+")"
- }
-
+ }
+
case class App(t1: Term, t2: Term) extends Term
{
override def toString = "("+t1+" "+t2+")"
- }
-
+ }
+
case class Let(n: Name, t1: Term, t2: Term) extends Term
{
override def toString = "let "+n+" = "+t1+" in "+t2
}
-
+
case class If(c: Term, t1: Term, t2: Term) extends Term
{
override def toString = "if "+c+" then "+t1+" else "+t2
}
-
+
trait PrimTerm extends Term
{
def apply(n: Lit) : Term
@@ -68,7 +68,7 @@ trait TestSyntax
{
def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n-y.n) }
}
-
+
case class PrimMultiply extends PrimTerm
{
def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n*y.n) }
diff --git a/docs/examples/pilib/elasticBuffer.scala b/docs/examples/pilib/elasticBuffer.scala
index e156cafbc2..5fec96ab6c 100644
--- a/docs/examples/pilib/elasticBuffer.scala
+++ b/docs/examples/pilib/elasticBuffer.scala
@@ -25,7 +25,7 @@ object elasticBuffer {
/**
* A buffer cell containing a value, ready to receive (o,r) from the right.
*/
- def Cl(i: Chan[String], l: MetaChan,
+ def Cl(i: Chan[String], l: MetaChan,
o: Chan[String], r: MetaChan, content: String): Unit =
choice (
o(content) * (Bl(i,l,o,r)),
@@ -64,7 +64,7 @@ object elasticBuffer {
def Consumer(get: Chan[String]): Unit = {
Thread.sleep(1 + random.nextInt(1000))
val msg = get.read
- System.out.println("Consummer took " + msg)
+ System.out.println("Consumer took " + msg)
Consumer(get)
}
diff --git a/docs/examples/pilib/handover.scala b/docs/examples/pilib/handover.scala
index 9725382c96..c9b6156c2c 100644
--- a/docs/examples/pilib/handover.scala
+++ b/docs/examples/pilib/handover.scala
@@ -32,7 +32,7 @@ object handoverRecursive {
* Control center.
*/
def Control(talk1: Chan[unit], switch1: Switch,
- gain1: Switch, lose1: Switch,
+ gain1: Switch, lose1: Switch,
talk2: Chan[unit], switch2: Switch,
gain2: Switch, lose2: Switch): unit
= {
@@ -108,7 +108,7 @@ object handoverCast {
def Car(talk: Chan[Any], switch: Chan[Any]): unit =
choice (
switch * (o => {
- val Pair(t,s) = o.asInstanceOf[Pair[Chan[Any],Chan[Any]]];
+ val Pair(t,s) = o.asInstanceOf[Pair[Chan[Any],Chan[Any]]];
Car(t, s)
}),
talk(()) * ( {
@@ -122,7 +122,7 @@ object handoverCast {
* Control center.
*/
def Control(talk1: Chan[Any], switch1: Chan[Any],
- gain1: Chan[Any], lose1: Chan[Any],
+ gain1: Chan[Any], lose1: Chan[Any],
talk2: Chan[Any], switch2: Chan[Any],
gain2: Chan[Any], lose2: Chan[Any]): unit
= {
diff --git a/docs/examples/pilib/mobilePhoneProtocol.scala b/docs/examples/pilib/mobilePhoneProtocol.scala
index 385836318b..e8c0ac1dc4 100644
--- a/docs/examples/pilib/mobilePhoneProtocol.scala
+++ b/docs/examples/pilib/mobilePhoneProtocol.scala
@@ -21,7 +21,7 @@ object mobilePhoneProtocol {
case class HoCom() extends Message; // handover complete
case class HoFail() extends Message; // handover fail
case class ChRel() extends Message; // release
- case class Voice(s: String) extends Message; // voice
+ case class Voice(s: String) extends Message; // voice
case class Channel(n: Chan[Message]) extends Message; // channel
def MobileSystem(in: Chan[String], out: Chan[String]): unit = {
@@ -144,9 +144,9 @@ object mobilePhoneProtocol {
}
//***************** Entry function ******************//
-
+
def main(args: Array[String]): unit = {
-
+
def Producer(n: Int, put: Chan[String]): unit = {
Thread.sleep(1 + random.nextInt(1000));
val msg = "object " + n;
@@ -154,14 +154,14 @@ object mobilePhoneProtocol {
System.out.println("Producer gave " + msg);
Producer(n + 1, put)
}
-
+
def Consumer(get: Chan[String]): unit = {
Thread.sleep(1 + random.nextInt(1000));
val msg = get.read;
- System.out.println("Consummer took " + msg);
+ System.out.println("Consumer took " + msg);
Consumer(get)
}
-
+
val put = new Chan[String];
val get = new Chan[String];
spawn < Producer(0, put) | Consumer(get) | MobileSystem(put, get) >
diff --git a/docs/examples/pilib/piNat.scala b/docs/examples/pilib/piNat.scala
index ee9e5ba1af..a1a0e682e1 100644
--- a/docs/examples/pilib/piNat.scala
+++ b/docs/examples/pilib/piNat.scala
@@ -4,7 +4,7 @@ import scala.concurrent.pilib._
/** Church encoding of naturals in the Pi-calculus */
object piNat extends Application {
-
+
/** Locations of Pi-calculus natural */
class NatChan extends Chan[Triple[Chan[Unit], Chan[NatChan], Chan[NatChan]]]
diff --git a/docs/examples/pilib/rwlock.scala b/docs/examples/pilib/rwlock.scala
index 931f622f5a..bb1c26bdf2 100644
--- a/docs/examples/pilib/rwlock.scala
+++ b/docs/examples/pilib/rwlock.scala
@@ -250,7 +250,7 @@ object rwlock {
def endRead = er.send
def endWrite = ew.send
- private def Reading(nr: int, nw: int): unit =
+ private def Reading(nr: int, nw: int): unit =
if (nr == 0 && nw == 0)
choice (
sr * (x => Reading(1, 0)),
@@ -264,7 +264,7 @@ object rwlock {
choice (
sr * (x => Reading(nr + 1, 0)),
er * (x => Reading(nr - 1, 0)),
- ww * (x => Reading(nr, 1))
+ ww * (x => Reading(nr, 1))
)
else if (nr != 0 && nw != 0)
choice (
diff --git a/docs/examples/pilib/scheduler.scala b/docs/examples/pilib/scheduler.scala
index 9205ae3f0c..fd8fd52600 100644
--- a/docs/examples/pilib/scheduler.scala
+++ b/docs/examples/pilib/scheduler.scala
@@ -87,7 +87,7 @@ object scheduler {
/**
* A cell is modelled as a function that takes as parameters
- * input and output channels and which returns nothing.
+ * input and output channels and which returns nothing.
*/
type Cell = (Chan[Unit], Chan[Unit]) => Unit
diff --git a/docs/examples/pilib/semaphore.scala b/docs/examples/pilib/semaphore.scala
index ed224890e2..951c90e8d4 100644
--- a/docs/examples/pilib/semaphore.scala
+++ b/docs/examples/pilib/semaphore.scala
@@ -65,7 +65,7 @@ object semaphore {
println("b1");
Thread.sleep(1 + random.nextInt(100));
println("b2")
- } )
+ } )
} >;
}
}
diff --git a/docs/examples/pilib/twoPlaceBuffer.scala b/docs/examples/pilib/twoPlaceBuffer.scala
index f0f278317a..255f70ca06 100644
--- a/docs/examples/pilib/twoPlaceBuffer.scala
+++ b/docs/examples/pilib/twoPlaceBuffer.scala
@@ -16,7 +16,7 @@ object twoPlaceBuffer extends Application {
def B1(x: A): Unit = choice (
out(x) * (B0),
- in * (y => B2(x, y))
+ in * (y => B2(x, y))
)
def B2(x: A, y: A): Unit = choice (
diff --git a/docs/examples/plugintemplate/lib/scalatest.jar.desired.sha1 b/docs/examples/plugintemplate/lib/scalatest.jar.desired.sha1
index ab3c5ffd2c..14c20f874f 100644
--- a/docs/examples/plugintemplate/lib/scalatest.jar.desired.sha1
+++ b/docs/examples/plugintemplate/lib/scalatest.jar.desired.sha1
@@ -1 +1 @@
-462624d9413123e32d073863fa8759457cf8721e ?scalatest.jar
+8b6ba65c8146217333f0762087fe2340d572e832 ?scalatest.jar
diff --git a/docs/examples/plugintemplate/plugin.properties b/docs/examples/plugintemplate/plugin.properties
index 44e4900f8a..131f96110f 100644
--- a/docs/examples/plugintemplate/plugin.properties
+++ b/docs/examples/plugintemplate/plugin.properties
@@ -1,4 +1,4 @@
-scala.home=/Users/luc/scala/trunk/build/pack
+scala.home=../../../build/pack
plugin.name=plugintemplate
plugin.commandname=runplugintemplate
diff --git a/docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala b/docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala
index b241072301..6cda37d4e3 100644
--- a/docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala
+++ b/docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala
@@ -16,7 +16,7 @@ class TemplatePlugin(val global: Global) extends Plugin {
/** A short description of the plugin, read from the properties file */
val description = PluginProperties.pluginDescription
-
+
/** @todo A description of the plugin's options */
override val optionsHelp = Some(
" -P:"+ name +":option sets some option for this plugin")
diff --git a/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala b/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
index 042409c000..19d2613649 100644
--- a/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
+++ b/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
@@ -1,10 +1,11 @@
package plugintemplate.standalone
+import plugintemplate.PluginProperties
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.Settings
/** An object for running the plugin as standalone application.
- *
+ *
* @todo: print, parse and apply plugin options !!!
* ideally re-use the TemplatePlugin (-> runsAfter, optionsHelp,
* processOptions, components, annotationChecker) instead of
@@ -14,9 +15,9 @@ object Main {
def main(args: Array[String]) {
val settings = new Settings
- val command = new CompilerCommand(args.toList, settings, println, false) {
+ val command = new CompilerCommand(args.toList, settings) {
/** The command name that will be printed in in the usage message.
- * This is autmatically set to the value of 'plugin.commandname' in the
+ * This is automatically set to the value of 'plugin.commandname' in the
* file build.properties.
*/
override val cmdName = PluginProperties.pluginCommand
@@ -25,7 +26,7 @@ object Main {
if (!command.ok)
return()
- /** The version number of this plugin is read from the porperties file
+ /** The version number of this plugin is read from the properties file
*/
if (settings.version.value) {
println(command.cmdName +" version "+ PluginProperties.versionString)
diff --git a/docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala b/docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala
index 4240c1278c..786d72d37c 100644
--- a/docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala
+++ b/docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala
@@ -1,5 +1,6 @@
package plugintemplate.standalone
+import plugintemplate.{TemplateAnnotationChecker, TemplatePlugin}
import scala.tools.nsc.{Global, Settings, SubComponent}
import scala.tools.nsc.reporters.{ConsoleReporter, Reporter}
@@ -11,7 +12,7 @@ extends Global(settings, reporter) {
def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
val annotChecker = new TemplateAnnotationChecker {
- val global: PluginRunner.this.type = PluginRunner.this
+ val global: PluginRunner.this.type = PluginRunner.this
}
addAnnotationChecker(annotChecker.checker)
@@ -19,7 +20,7 @@ extends Global(settings, reporter) {
*
* @todo: Adapt to specific plugin implementation
*/
- override protected def computeInternalPhases() : Unit = {
+ override protected def computeInternalPhases() {
phasesSet += syntaxAnalyzer
phasesSet += analyzer.namerFactory
phasesSet += analyzer.typerFactory
diff --git a/docs/examples/tcpoly/collection/HOSeq.scala b/docs/examples/tcpoly/collection/HOSeq.scala
index a9414e3b3c..a6757b95ba 100644
--- a/docs/examples/tcpoly/collection/HOSeq.scala
+++ b/docs/examples/tcpoly/collection/HOSeq.scala
@@ -6,40 +6,40 @@ trait HOSeq {
// values implementing this interface, in order to provide more performant ways of building that structure
trait Accumulator[+coll[x], elT] {
def += (el: elT): Unit
- def result: coll[elT]
+ def result: coll[elT]
}
-
-
+
+
// Iterable abstracts over the type of its structure as well as its elements (see PolyP's Bifunctor)
- // m[x] is intentionally unbounded: fold can then be defined nicely
- // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
+ // m[x] is intentionally unbounded: fold can then be defined nicely
+ // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
// is an invariant position -- should probably rule that out?
trait Iterable[+m[+x], +t] {
//def unit[a](orig: a): m[a]
def iterator: Iterator[t]
-
+
// construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
def accumulator[t]: Accumulator[m, t]
-
+
def filter(p: t => Boolean): m[t] = {
val buf = accumulator[t]
val elems = elements
while (elems.hasNext) { val x = elems.next; if (p(x)) buf += x }
buf.result
}
-
+
def map[s](f: t => s): m[s] = {
val buf = accumulator[s]
val elems = elements
while (elems.hasNext) buf += f(elems.next)
buf.result
}
-
+
// flatMap is a more specialized map, it only works if the mapped function produces Iterable values,
// which are then added to the result one by one
// the compiler should be able to find the right accumulator (implicit buf) to build the result
// to get concat, resColl = SingletonIterable, f = unit for SingletonIterable
- def flatMap[resColl[x] <: Iterable[resColl, x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
+ def flatMap[resColl[x] <: Iterable[resColl, x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
// TODO: would a viewbound for resColl[x] be better?
// -- 2nd-order type params are not yet in scope in view bound
val elems = elements
@@ -48,9 +48,9 @@ trait HOSeq {
while (elemss.hasNext) buf += elemss.next
}
buf.result
- }
+ }
}
-
+
final class ListBuffer[A] {
private var start: List[A] = Nil
private var last: ::[A] = _
@@ -78,7 +78,7 @@ trait HOSeq {
exported = !start.isEmpty
start
}
-
+
/** Clears the buffer contents.
*/
def clear: unit = {
@@ -97,13 +97,13 @@ trait HOSeq {
}
}
}
-
+
implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {
private[this] val buff = new ListBuffer[elT]
def += (el: elT): Unit = buff += el
def result: List[elT] = buff.toList
}
-
+
trait List[+t] extends Iterable[List, t] {
def head: t
def tail: List[t]
@@ -111,16 +111,16 @@ trait HOSeq {
def iterator: Iterator[t] = error("TODO")
// construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
- def accumulator[t]: Accumulator[List, t] = error("TODO")
+ def accumulator[t]: Accumulator[List, t] = error("TODO")
}
-
+
// TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
def head = hd
def tail = if(tl==null) this else tl // hack
override def isEmpty: boolean = false
}
-
+
case object Nil extends List[Nothing] {
def isEmpty = true
def head: Nothing =
@@ -149,18 +149,18 @@ trait HOSeq {
def filter(f: T=>Boolean): FilterResult
def subseq(from: int, to: int): Subseq
def flatMap[S <: Seq[K], K](f: T => S): S#Concat // legal?
- def concat(others: Seq[T]): Concat
+ def concat(others: Seq[T]): Concat
*/
-
+
/*trait Iterator[t] {
// @post hasAdvanced implies hasNext
// model def hasAdvanced: Boolean
-
+
def hasNext: Boolean // pure
-
+
// @pre hasAdvanced
def current: t // pure
-
+
// @pre hasNext
// @post hasAdvanced
def advance: Unit
diff --git a/docs/examples/tcpoly/monads/Monads.scala b/docs/examples/tcpoly/monads/Monads.scala
index 5a966ce960..b6e3d5b9a8 100644
--- a/docs/examples/tcpoly/monads/Monads.scala
+++ b/docs/examples/tcpoly/monads/Monads.scala
@@ -6,18 +6,18 @@ trait Monads {
* (>>=) :: m a -> (a -> m b) -> m b
* return :: a -> m a
*
- * MonadTC encodes the above Haskell type class,
+ * MonadTC encodes the above Haskell type class,
* an instance of MonadTC corresponds to a method dictionary.
* (see http://lampwww.epfl.ch/~odersky/talks/wg2.8-boston06.pdf)
*
* Note that the identity (`this') of the method dictionary does not really correspond
- * to the instance of m[x] (`self') that is `wrapped': e.g., unit does not use `self' (which
+ * to the instance of m[x] (`self') that is `wrapped': e.g., unit does not use `self' (which
* corresponds to the argument of the implicit conversion that encodes an instance of this type class)
*/
// Option =:= [x] => Option[x] <: [x] => Any
-// trait MonadTC[m <: [x] => Any, a] {
+// trait MonadTC[m <: [x] => Any, a] {
// MonadTC[m[x], a] x is a type parameter too -- should not write e.g., m[Int] here
- trait MonadTC[m[x], a] {
+ trait MonadTC[m[x], a] {
def unit[a](orig: a): m[a]
// >>='s first argument comes from the implicit definition constructing this "method dictionary"
@@ -32,7 +32,7 @@ trait Monads {
*/
trait OptionMonad extends Monads {
// this implicit method encodes the Monad type class instance for Option
- implicit def OptionInstOfMonad[a](self: Option[a]): MonadTC[Option, a]
+ implicit def OptionInstOfMonad[a](self: Option[a]): MonadTC[Option, a]
= new MonadTC[Option, a] {
def unit[a](orig: a) = Some(orig)
def >>=[b](fun: a => Option[b]): Option[b] = self match {
@@ -47,8 +47,8 @@ object main extends OptionMonad with Application {
}
-/*
-trait MonadTC[m[x], a] requires m[x] {
+/*
+trait MonadTC[m[x], a] requires m[x] {
def unit[a](orig: a): m[a]
// >>='s first argument comes from the implicit definition constructing this "method dictionary"
diff --git a/docs/examples/typeinf.scala b/docs/examples/typeinf.scala
index 80980ebc9a..d4bc8bf3e1 100644
--- a/docs/examples/typeinf.scala
+++ b/docs/examples/typeinf.scala
@@ -69,14 +69,14 @@ object typeInfer {
case Tycon(k, ts) => (List[Tyvar]() /: ts) ((tvs, t) => tvs union tyvars(t))
}
- def tyvars(ts: TypeScheme): List[Tyvar] =
+ def tyvars(ts: TypeScheme): List[Tyvar] =
tyvars(ts.tpe) diff ts.tyvars;
def tyvars(env: Env): List[Tyvar] =
(List[Tyvar]() /: env) ((tvs, nt) => tvs union tyvars(nt._2))
def mgu(t: Type, u: Type, s: Subst): Subst = Pair(s(t), s(u)) match {
- case Pair(Tyvar(a), Tyvar(b)) if (a == b) =>
+ case Pair(Tyvar(a), Tyvar(b)) if (a == b) =>
s
case Pair(Tyvar(a), _) if !(tyvars(u) contains a) =>
s.extend(Tyvar(a), u)
@@ -181,7 +181,7 @@ object typeInfer {
yield Lam(x, t): Term )
|||
( for (
- letid <- id if letid == "let";
+ letid <- id if letid == "let";
x <- ident;
_ <- wschr('=');
t <- term;
diff --git a/docs/examples/xml/phonebook/embeddedBook.scala b/docs/examples/xml/phonebook/embeddedBook.scala
index 8ea9628212..3286485f0b 100644
--- a/docs/examples/xml/phonebook/embeddedBook.scala
+++ b/docs/examples/xml/phonebook/embeddedBook.scala
@@ -1,5 +1,5 @@
/* examples/phonebook/embeddedBook.scala */
-package phonebook
+package phonebook
object embeddedBook {
@@ -8,14 +8,14 @@ object embeddedBook {
val last = "Emir"
val location = "work"
- val embBook =
+ val embBook =
<phonebook>
<descr>
- This is the <b>phonebook</b> of the
+ This is the <b>phonebook</b> of the
{company} corporation.
</descr>
<entry>
- <name>{ first+" "+last }</name>
+ <name>{ first+" "+last }</name>
<phone where={ location }>+41 21 693 68 {val x = 60 + 7; x}</phone>
</entry>
</phonebook>;
diff --git a/docs/examples/xml/phonebook/phonebook.scala b/docs/examples/xml/phonebook/phonebook.scala
index 4813c2d20d..3c0dfbd837 100644
--- a/docs/examples/xml/phonebook/phonebook.scala
+++ b/docs/examples/xml/phonebook/phonebook.scala
@@ -2,14 +2,14 @@ package phonebook ;
object phonebook {
- val labPhoneBook =
+ val labPhoneBook =
<phonebook>
<descr>
- This is the <b>phonebook</b> of the
+ This is the <b>phonebook</b> of the
<a href="http://acme.org">ACME</a> corporation.
</descr>
<entry>
- <name>Burak</name>
+ <name>Burak</name>
<phone where="work"> +41 21 693 68 67</phone>
<phone where="mobile">+41 79 602 23 23</phone>
</entry>
@@ -22,14 +22,14 @@ object phonebook {
import scala.xml.{ Node, Text };
def add( phonebook:Node, newEntry:Node ):Node = phonebook match {
- case <phonebook>{ ch @ _* }</phonebook> =>
+ case <phonebook>{ ch @ _* }</phonebook> =>
<phonebook>{ ch }{ newEntry }</phonebook>
}
- val pb2 =
- add( labPhoneBook,
+ val pb2 =
+ add( labPhoneBook,
<entry>
- <name>Kim</name>
+ <name>Kim</name>
<phone where="work"> +41 21 111 11 11</phone>
</entry> );
diff --git a/docs/examples/xml/phonebook/phonebook1.scala b/docs/examples/xml/phonebook/phonebook1.scala
index 3a7a165202..316c6c1995 100644
--- a/docs/examples/xml/phonebook/phonebook1.scala
+++ b/docs/examples/xml/phonebook/phonebook1.scala
@@ -1,16 +1,16 @@
/* examples/phonebook/phonebook1.scala */
-package phonebook
+package phonebook
object phonebook1 {
- val labPhoneBook =
+ val labPhoneBook =
<phonebook>
<descr>
- This is the <b>phonebook</b> of the
+ This is the <b>phonebook</b> of the
<a href="http://acme.org">ACME</a> corporation.
</descr>
<entry>
- <name>Burak Emir</name>
+ <name>Burak Emir</name>
<phone where="work">+41 21 693 68 67</phone>
</entry>
</phonebook>;
diff --git a/docs/examples/xml/phonebook/phonebook2.scala b/docs/examples/xml/phonebook/phonebook2.scala
index ba50379369..2a708daf7c 100644
--- a/docs/examples/xml/phonebook/phonebook2.scala
+++ b/docs/examples/xml/phonebook/phonebook2.scala
@@ -8,18 +8,18 @@ object phonebook2 {
/** adds an entry to a phonebook */
def add( p: Node, newEntry: Node ): Node = p match {
- case <phonebook>{ ch @ _* }</phonebook> =>
+ case <phonebook>{ ch @ _* }</phonebook> =>
<phonebook>{ ch }{ newEntry }</phonebook>
}
- val pb2 =
- add( phonebook1.labPhoneBook,
+ val pb2 =
+ add( phonebook1.labPhoneBook,
<entry>
- <name>Kim</name>
+ <name>Kim</name>
<phone where="work">+41 21 111 11 11</phone>
</entry> );
- def main( args: Array[String] ) =
+ def main( args: Array[String] ) =
Console.println( pb2 )
}
diff --git a/docs/examples/xml/phonebook/phonebook3.scala b/docs/examples/xml/phonebook/phonebook3.scala
index 0dfae351b5..12f2deaa79 100644
--- a/docs/examples/xml/phonebook/phonebook3.scala
+++ b/docs/examples/xml/phonebook/phonebook3.scala
@@ -17,57 +17,57 @@ object phonebook3 {
import xml.Utility.{trim,trimProper} //removes whitespace nodes, which are annoying in matches
- for( val c <- ch ) yield
+ for( val c <- ch ) yield
trimProper(c) match {
// if the node is the particular entry we are looking for, return an updated copy
- case x @ <entry><name>{ Text(Name) }</name>{ ch1 @ _* }</entry> =>
+ case x @ <entry><name>{ Text(Name) }</name>{ ch1 @ _* }</entry> =>
var updated = false;
val ch2 = for(c <- ch1) yield c match { // does it have the phone number?
- case y @ <phone>{ _* }</phone> if y \ "@where" == Where =>
+ case y @ <phone>{ _* }</phone> if y \ "@where" == Where =>
updated = true
<phone where={ Where }>{ newPhone }</phone>
-
+
case y => y
-
+
}
if( !updated ) { // no, so we add as first entry
-
+
<entry>
<name>{ Name }</name>
<phone where={ Where }>{ newPhone }</phone>
{ ch1 }
</entry>
-
+
} else { // yes, and we changed it as we should
-
+
<entry>
{ ch2 }
</entry>
-
- }
+
+ }
// end case x @ <entry>...
-
+
// other entries are copied without changing them
- case x =>
+ case x =>
x
-
+
}
} ; // for ... yield ... returns an Iterator[Node]
-
+
// decompose phonebook, apply updates
phonebook match {
case <phonebook>{ ch @ _* }</phonebook> =>
<phonebook>{ copyOrChange( ch.iterator ) }</phonebook>
}
-
+
}
- val pb2 =
+ val pb2 =
change( phonebook1.labPhoneBook, "John", "work", "+41 55 555 55 55" );
val pp = new PrettyPrinter( 80, 5 );
diff --git a/docs/examples/xml/phonebook/verboseBook.scala b/docs/examples/xml/phonebook/verboseBook.scala
index 611cf5370e..2dcb155480 100644
--- a/docs/examples/xml/phonebook/verboseBook.scala
+++ b/docs/examples/xml/phonebook/verboseBook.scala
@@ -1,24 +1,24 @@
/* examples/xml/phonebook/verboseBook.scala */
-package phonebook
+package phonebook
object verboseBook {
- import scala.xml.{ UnprefixedAttribute, Elem, Node, Null, Text, TopScope }
+ import scala.xml.{ UnprefixedAttribute, Elem, Node, Null, Text, TopScope }
- val pbookVerbose =
+ val pbookVerbose =
Elem(null, "phonebook", Null, TopScope,
Elem(null, "descr", Null, TopScope,
- Text("This is a "),
+ Text("This is a "),
Elem(null, "b", Null, TopScope, Text("sample")),
Text("description")
),
Elem(null, "entry", Null, TopScope,
Elem(null, "name", Null, TopScope, Text("Burak Emir")),
- Elem(null, "phone", new UnprefixedAttribute("where","work", Null), TopScope,
+ Elem(null, "phone", new UnprefixedAttribute("where","work", Null), TopScope,
Text("+41 21 693 68 67"))
)
)
- def main(args: Array[String]) =
+ def main(args: Array[String]) =
Console.println( pbookVerbose )
}
diff --git a/docs/licenses/apache_android.txt b/docs/licenses/apache_android.txt
new file mode 100644
index 0000000000..00f339625f
--- /dev/null
+++ b/docs/licenses/apache_android.txt
@@ -0,0 +1,16 @@
+Scala includes various example files for Android:
+
+Copyright (c) 2005-2009, The Android Open Source Project
+Copyright (c) 2007, Steven Osborn
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. \ No newline at end of file
diff --git a/docs/licenses/apache_ant.txt b/docs/licenses/apache_ant.txt
new file mode 100644
index 0000000000..ac637d760d
--- /dev/null
+++ b/docs/licenses/apache_ant.txt
@@ -0,0 +1,16 @@
+Scala includes Ant as a library needed for build with sbt
+
+Copyright © 1999-2010, The Apache Software Foundation.
+ http://ant.apache.org/
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/docs/licenses/bsd_jline.txt b/docs/licenses/bsd_jline.txt
new file mode 100644
index 0000000000..4ac4a378ed
--- /dev/null
+++ b/docs/licenses/bsd_jline.txt
@@ -0,0 +1,34 @@
+Scala includes the jLine library:
+
+Copyright (c) 2002-2006, Marc Prud'hommeaux <mwp1@cornell.edu>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or
+without modification, are permitted provided that the following
+conditions are met:
+
+Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with
+the distribution.
+
+Neither the name of JLine nor the names of its contributors
+may be used to endorse or promote products derived from this
+software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
+BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/docs/licenses/mit_jquery.txt b/docs/licenses/mit_jquery.txt
new file mode 100644
index 0000000000..ef2c570469
--- /dev/null
+++ b/docs/licenses/mit_jquery.txt
@@ -0,0 +1,13 @@
+Scala includes the jQuery library:
+
+Copyright (c) 2010 John Resig
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
diff --git a/docs/licenses/mit_sizzle.txt b/docs/licenses/mit_sizzle.txt
new file mode 100644
index 0000000000..d81d30aa0f
--- /dev/null
+++ b/docs/licenses/mit_sizzle.txt
@@ -0,0 +1,13 @@
+Scala includes the Sizzle library:
+
+Copyright (c) 2010 The Dojo Foundation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
diff --git a/docs/licenses/mit_tools.tooltip.txt b/docs/licenses/mit_tools.tooltip.txt
new file mode 100644
index 0000000000..27a4dbc788
--- /dev/null
+++ b/docs/licenses/mit_tools.tooltip.txt
@@ -0,0 +1,13 @@
+Scala includes the Tools Tooltip library:
+
+Copyright (c) 2009 Tero Piirainen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
diff --git a/lib/ScalaCheck.jar b/lib/ScalaCheck.jar
deleted file mode 120000
index 587b63a458..0000000000
--- a/lib/ScalaCheck.jar
+++ /dev/null
@@ -1 +0,0 @@
-scalacheck-1.6dev.jar \ No newline at end of file
diff --git a/lib/ant/maven-ant-tasks-2.0.9.jar.desired.sha1 b/lib/ant/maven-ant-tasks-2.0.9.jar.desired.sha1
deleted file mode 100644
index 9274ad0519..0000000000
--- a/lib/ant/maven-ant-tasks-2.0.9.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b539e357a1413e53ae429e4607310db7851257d3 ?maven-ant-tasks-2.0.9.jar
diff --git a/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1 b/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1
new file mode 100644
index 0000000000..06dcb1e312
--- /dev/null
+++ b/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1
@@ -0,0 +1 @@
+7e50e3e227d834695f1e0bf018a7326e06ee4c86 ?maven-ant-tasks-2.1.1.jar
diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1
index f7bb459d8a..ff2228066d 100644
--- a/lib/fjbg.jar.desired.sha1
+++ b/lib/fjbg.jar.desired.sha1
@@ -1 +1 @@
-3ddd34e6fda096e9624306b9fba8a1ee8e2ba97d ?fjbg.jar
+06769295d0216cf44826d06eda7836c6358110a9 ?fjbg.jar
diff --git a/lib/forkjoin.jar.desired.sha1 b/lib/forkjoin.jar.desired.sha1
new file mode 100644
index 0000000000..758ecb4baf
--- /dev/null
+++ b/lib/forkjoin.jar.desired.sha1
@@ -0,0 +1 @@
+12c479a33ee283599fdb7aa91d6a1df0197a52cf ?forkjoin.jar
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
index 843cb568c5..a5722deadd 100644
--- a/lib/msil.jar.desired.sha1
+++ b/lib/msil.jar.desired.sha1
@@ -1 +1 @@
-e28574c5a6ca797c755284a530519254f0abb5e4 ?msil.jar
+682f60e7a3315c8dc3e7a39c10ba8069f0b0fca4 ?msil.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index c83384afed..f2a687e44e 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-0b932d9548029d992264f0780fd5e24b2d0b7c61 ?scala-compiler.jar
+47cd1c12567af0aa7d93b4cf2877db26bd908fe7 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index c0ba53e5f4..f25112f05b 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-6761a76f6314540a9a69f4b45419a1d4d8b71773 ?scala-library-src.jar
+d42ea573aada13c9ea6b05da483c3e08522ea1fe ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 0772bdf9d7..10dba65a67 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-6ac2e1d59fdb05902788b1e55ca8dbf391504a32 ?scala-library.jar
+82a0de3721dc7299d57d385b1d19286d63a5e763 ?scala-library.jar
diff --git a/lib/scalacheck-1.6dev.jar.desired.sha1 b/lib/scalacheck-1.6dev.jar.desired.sha1
deleted file mode 100644
index 810b5d7f8e..0000000000
--- a/lib/scalacheck-1.6dev.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-41a805f4ccfab57be082e73f9de416fe6028d694 ?scalacheck-1.6dev.jar
diff --git a/scala-lang.ipr b/scala-lang.ipr
deleted file mode 100644
index 4cf15390b4..0000000000
--- a/scala-lang.ipr
+++ /dev/null
@@ -1,1521 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project relativePaths="false" version="4">
- <component name="AntConfiguration">
- <defaultAnt bundledAnt="true" />
- <buildFile url="file://$PROJECT_DIR$/build.xml">
- <additionalClassPath />
- <antReference projectDefault="true" />
- <customJdkName value="" />
- <maximumHeapSize value="128" />
- <maximumStackSize value="32" />
- <properties />
- </buildFile>
- </component>
- <component name="BuildJarProjectSettings">
- <option name="BUILD_JARS_ON_MAKE" value="false" />
- </component>
- <component name="CodeStyleSettingsManager">
- <option name="PER_PROJECT_SETTINGS">
- <value>
- <ADDITIONAL_INDENT_OPTIONS fileType="java">
- <option name="INDENT_SIZE" value="4" />
- <option name="CONTINUATION_INDENT_SIZE" value="8" />
- <option name="TAB_SIZE" value="4" />
- <option name="USE_TAB_CHARACTER" value="false" />
- <option name="SMART_TABS" value="false" />
- <option name="LABEL_INDENT_SIZE" value="0" />
- <option name="LABEL_INDENT_ABSOLUTE" value="false" />
- </ADDITIONAL_INDENT_OPTIONS>
- <ADDITIONAL_INDENT_OPTIONS fileType="jsp">
- <option name="INDENT_SIZE" value="4" />
- <option name="CONTINUATION_INDENT_SIZE" value="8" />
- <option name="TAB_SIZE" value="4" />
- <option name="USE_TAB_CHARACTER" value="false" />
- <option name="SMART_TABS" value="false" />
- <option name="LABEL_INDENT_SIZE" value="0" />
- <option name="LABEL_INDENT_ABSOLUTE" value="false" />
- </ADDITIONAL_INDENT_OPTIONS>
- <ADDITIONAL_INDENT_OPTIONS fileType="xml">
- <option name="INDENT_SIZE" value="4" />
- <option name="CONTINUATION_INDENT_SIZE" value="8" />
- <option name="TAB_SIZE" value="4" />
- <option name="USE_TAB_CHARACTER" value="false" />
- <option name="SMART_TABS" value="false" />
- <option name="LABEL_INDENT_SIZE" value="0" />
- <option name="LABEL_INDENT_ABSOLUTE" value="false" />
- </ADDITIONAL_INDENT_OPTIONS>
- </value>
- </option>
- <option name="USE_PER_PROJECT_SETTINGS" value="false" />
- </component>
- <component name="CompilerAPISettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="DEPRECATION" value="true" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="CompilerConfiguration">
- <option name="DEFAULT_COMPILER" value="Javac" />
- <option name="DEPLOY_AFTER_MAKE" value="0" />
- <resourceExtensions>
- <entry name=".+\.(properties|xml|html|dtd|tld)" />
- <entry name=".+\.(gif|png|jpeg|jpg)" />
- </resourceExtensions>
- <wildcardResourcePatterns>
- <entry name="?*.properties" />
- <entry name="?*.xml" />
- <entry name="?*.gif" />
- <entry name="?*.png" />
- <entry name="?*.jpeg" />
- <entry name="?*.jpg" />
- <entry name="?*.html" />
- <entry name="?*.dtd" />
- <entry name="?*.tld" />
- <entry name="?*.ftl" />
- </wildcardResourcePatterns>
- </component>
- <component name="CopyrightManager" default="">
- <module2copyright />
- </component>
- <component name="DependencyValidationManager">
- <option name="SKIP_IMPORT_STATEMENTS" value="false" />
- </component>
- <component name="EclipseCompilerSettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="true" />
- <option name="DEPRECATION" value="false" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="EclipseEmbeddedCompilerSettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="true" />
- <option name="DEPRECATION" value="false" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
- <component name="FacetAutodetectingManager">
- <autodetection-disabled>
- <facet-type id="Scala">
- <modules>
- <module name="files">
- <files>
- <file url="file://$PROJECT_DIR$/test/files/android/HelloAndroid.scala" />
- <file url="file://$PROJECT_DIR$/test/files/ant/fsc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/ant/scalac.scala" />
- <file url="file://$PROJECT_DIR$/test/files/ant/scaladoc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cldc/randoms.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test1/Main.check.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test1/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test2/Main.check.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test2/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test3/Main.check.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test3/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/JavaInteraction.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/bigints.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/bug560bis.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/inner.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/manifests.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/methvsfield.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/natives.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/nest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/protectedacc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/serialization.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t0632.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t1116.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t1143-2/t1143-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t1143.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/typerep.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/unittest_io.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/unittest_xml.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml01.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml02.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml03syntax.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml04embed.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlattr.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlmore.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlpull.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlstuff.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/annotations.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/bug676.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/bug680.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/console.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/genericNest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/interpreter.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/outerEnum.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/stringbuilder.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/sync-var.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/t0014.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/t1461.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/t1464/MyTrait.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/throws-annot.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/typerep.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/abstract.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/accesses.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/annot-nonconst.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/badtok-1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/badtok-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/badtok-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1010.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1011.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1041.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1106.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1112.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug112706A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1181.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1183.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1224.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1241.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1275.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1392.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1523.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1623.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1838.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug200.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug276.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug278.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug284.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug343.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug391.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug409.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug412.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug414.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug418.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug421.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug452.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug473.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug500.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug501.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug510.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug512.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug515.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug520.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug521.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug545.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug550.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug555.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug556.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug558.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug562.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug563.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug565.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug576.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug585.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug588.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug591.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug593.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug608.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug630.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug631.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug633.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug639.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug649.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug650.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug663.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug664.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug667.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug668.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug677.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug691.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug692.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug693.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug696.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug700.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug708.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug712.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug715.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug729.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug752.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug765.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug766.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug779.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug783.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug798.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug800.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug835.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug836.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug845.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug846.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug856.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug875.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug876.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug877.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug882.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug900.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug908.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug909.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug910.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug935.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug944.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug960.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug961.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug987.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug997.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/checksensible.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/constrs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/cyclics.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/divergent-implicit.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/faculty.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/forward.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/gadts1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/higherkind_novalue.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/imp2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/implicits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/lazy-override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/lazyvals.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/lubs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/mixins.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/multi-array.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/nopredefs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/null-unsoundness.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/overload.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/parstar.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/patmatexhaust.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/patternalts.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-after-terminal/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-after-terminal/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-before-parser/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-before-parser/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-cyclic-dependency/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-cyclic-dependency/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-multiple-rafter/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-multiple-rafter/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rafter-before-1/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rafter-before-1/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rightafter-terminal/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rightafter-terminal/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/sabin2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/saito.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/sensitive.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/structural.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/switch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0003.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0015.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0117.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0152.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0204.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0207.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0209.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0214.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0218.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0226.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0259.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0345.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0351.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0503.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0528neg.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0590.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0606.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0673/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0699/A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0699/B.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0764.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0842.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0899.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0903.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1009.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1033.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1163.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1168.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1215.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1371.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1659.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tailrec.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_bounds.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_typealias.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_variance.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_variance_enforce.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/typeerror.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/unreachablechar.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/variances.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/viewtest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/volatile-intersection.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/volatile.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_app.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_app2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_bounds.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_wrongarity.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_wrongarity2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmlcorner.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated6.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/List1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/MailBox.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/NoCyclicReference.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S8.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/Transactions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/X.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/Z.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/abstract.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/aliases.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/annot-inner.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/annotations.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/arrays2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/attributes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bounds.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0002.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0020.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0029.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0030.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0031.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0032.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0036.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0039.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0053.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0054.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0061.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0064.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0066.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0068.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0069.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0076.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0081.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0082.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0085.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0091.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0093.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0123.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0204.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0304.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0325.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0422.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0599.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0646.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1000.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1001.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1006.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1014.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1034.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1050.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1056.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1070.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1075.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1085.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1087.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1090.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1107.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1119.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1123.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug112606A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1136.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug115.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug116.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1168.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1185.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug119.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1203.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug121.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1210.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1210a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug122.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1237.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug124.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1241.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1279a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1292.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1385.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug151.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1565.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug159.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug160.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug175.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug177.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug183.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1858.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug201.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug210.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug211.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug229.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug245.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug247.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug262.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug267.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug284.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug287.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug289.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug295.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug296.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug304.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug318.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug319.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug342.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug344.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug348plus.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug359.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug360.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug361.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug372.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug374.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug389.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug397.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug402.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug404.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug415.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug419.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug422.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug430-feb09.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug430.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug432.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug439.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug443.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug460.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug514.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug516.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug522.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug530.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug531.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug532.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug533.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug566.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug577.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug592.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug595.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug596.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug599.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug602.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug604.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug607.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug611.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug613.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug615.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug616.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug628.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug640.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug651.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug661.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug675.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug684.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug690.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug694.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug697.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug698.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug703.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug704.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug711.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug720.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug756.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug757.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug757a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug758.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug759.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug762.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug767.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug780.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug788.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug789.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug796.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug802.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug803.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug805.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug807.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug812.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug839.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug851.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug873.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug880.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug892.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug911.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug927.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug946.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/builders.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/caseaccs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cfcrash.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/channels.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cls.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cls1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/clsrefine.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/code.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/collections.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/comp-rec-test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/compile.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/compile1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/compound.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/constfold.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/contrib467.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/contrib701.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cyclics.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/escapes2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/eta.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/exceptions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/functions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gadt-gilles.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gadts2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gosh.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gui.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/homonym.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/imp2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/imports.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/infer.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/infer2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/inferbroadtype.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/init.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/itay.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/jesper.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/kinzer.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/ksbug1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/lambda.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/lambdalift.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/lambdalift1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/largecasetest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/listpattern.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/localmodules.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/looping-jsig.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/manifest1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matchStarlift.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/maxim1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel6.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/mixins.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/modules.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/modules1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/moduletrans.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/needstypeearly.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nested.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nested2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/null.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nullary.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nullary_poly.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/partialfun.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/pat_gilles.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/pat_iuli.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns1213.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/pmbug.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/proj-rec-test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/propagate.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/protected-t1010.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/rebind.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/return_thistype.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/scoping1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/scoping2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/scoping3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/seqtest2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/simplelists.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/stable.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/strings.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/sudoku.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0055.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0154.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0165.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0227.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0231.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0273.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0288/Foo.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0301.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0438.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0453.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0504.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0586.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0591.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0651.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0654.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0674.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0710.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0770.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0774/deathname.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0774/unrelated.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0786.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0851.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0872.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0904.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0905.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0999.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1001.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1027.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1059.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1087.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1107/O.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1107/T.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1131.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1146.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1147.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1159.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1164.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1280.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1380/hallo.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1391.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1409/ConcreteImpl.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1438.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1439.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1480.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1648.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1675.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1761.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1789.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1840/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_boundedmonad.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_bounds1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_checkkinds_mix.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_gm.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_higherorder_bound_method.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_late_method_params.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_method.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_overloaded.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_param_scoping.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_poly.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_return_overriding.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_seq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_seq_typealias.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_subst.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_typeapp.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_typesub.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_variance.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_wildcards.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/ted.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test4a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test4refine.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test5refine.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/testcast.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/thistype.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/thistypes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/ticket0137.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tinondefcons.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/traits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tryexpr.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typealias_dubious.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typealiases.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typerep-stephane.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typerep.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typesafecons.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapply.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyComplex.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyContexts2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyGeneric.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyNeedsMemberType.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplySeq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyVal.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unicode-decode.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/valdefs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/variances.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/viewtest1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/viewtest2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/chang/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/ilya/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/ilya2/A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/signatures/sig.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t0695/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1101/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1102/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1150/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1152/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1176/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1196/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1197/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1203/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1230/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1231/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1232/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1263/test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1711/Seq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug597/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug597/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug687/QueryA.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug687/QueryB.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug722/IfElse.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug722/Parser.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug722/ScanBased.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug735/ScalaExpressions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug735/ScalaTyper.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug743/BracesXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug743/ParserXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug785/ScalaNewTyper.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug785/ScalaTrees.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug831/NewScalaParserXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug831/NewScalaTestXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-01.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-02.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-03.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-04.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-05.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-06.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-07.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-08.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-09.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-10.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-13.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/NestedClasses.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/absoverride.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/amp.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/arrays.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/arybufgrow.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bitsets.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/boolexprs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/boolord.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bridges.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug0325.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug1074.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug1192.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug1220.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug216.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug405.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug428.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug429.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug594.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug601.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug603.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug627.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug629.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug657.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug744.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug889.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug920.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug949.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug978.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bugs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/byname.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/caseclasses.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/checked.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/classof.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/collection-stacks.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/collections.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/colltest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/complicatedmatch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/constrained-types.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/constructors.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/contrib674.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/ctor-order.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/deeps.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/docgenerator.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/enums.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/exceptions-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/exceptions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/existentials.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/exoticnames.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/fors.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/forvaleq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/gadts.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/impconvtimes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/implicits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/imports.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/infiniteloop.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/infix.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/intmap.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/iq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/issue192.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/iterables.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/iterators.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/json.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/jtptest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-exprs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-locals.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-traits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lisp.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lists.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/literals.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/map_test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matcharraytail.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchbytes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchemptyarray.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchintasany.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchonstream.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/misc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/missingparams.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/mixins.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/multi-array.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/overloads.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/patmatnew.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/promotion.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/randomAccessSeq-apply.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/range.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/retclosure.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/retsynch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/richs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/runtime-richChar.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/runtime.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/slices.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/sort.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/streams.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/structural.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/syncchannel.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0005.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0042.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0091.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0412.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0421.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0485.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0486.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0508.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0528.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0607.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0631.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0663.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0668.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0677.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0700.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0807.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0883.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0911.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0936.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1323.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1368.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1423.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1500.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1501.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1505.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1524.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1535.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1618.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1620.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1718.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1747.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1829.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tailcalls.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tcpoly_monads.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tcpoly_overriding.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tcpoly_parseridioms.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/try-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/try.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tuples.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/typealias_overriding.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unapply.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unapplyArray.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unboxingBug.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unittest_collection.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unittest_io.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/verify-ctor.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/withIndex.scala" />
- <file url="file://$PROJECT_DIR$/test/files/scalacheck/list.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/fact.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/second.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/t1015.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/t1017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/utf8.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/ackermann.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/ary.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/binarytrees.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/binarytrees.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/binarytrees.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/chameneos.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/except.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/fannkuch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/fannkuch.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/fibo.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/harmonic.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/hash.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/hash2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/hello.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/knucleotide.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/knucleotide.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/lists.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/message.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nbody.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nestedloop.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsieve.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsieve.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsievebits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsievebits.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsievebits.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/objinst.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/partialsums.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/partialsums.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/recursive.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/regexdna.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/spectralnorm.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/strcat.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/sumcol.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/sumcol.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/takfp.scala" />
- </files>
- </module>
- <module name="library">
- <files>
- <file url="file://$PROJECT_DIR$/src/android-library/scala/ScalaObject.scala" />
- <file url="file://$PROJECT_DIR$/src/android-library/scala/reflect/ScalaBeanInfo.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Application.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/BigDecimal.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/BigInt.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Console.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Math.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Numeric.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Ordering.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Predef.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Range.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Symbol.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/JavaConversions.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/immutable/List.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/immutable/PagedSeq.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/mutable/OpenHashMap.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/mutable/StringBuilder.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/mutable/WeakHashMap.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/compat/Platform.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/DelayedLazyVal.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/jolib.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/ops.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/pilib.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/BufferedSource.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/BytePickle.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/Codec.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/File.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/Position.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/Source.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/UTF8Codec.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/mobile/Code.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/mobile/Location.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/net/Utility.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/PhantomReference.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/ReferenceQueue.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/ReferenceWrapper.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/SoftReference.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/WeakReference.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/reflect/Invocation.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/reflect/Manifest.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/reflect/ScalaBeanInfo.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/MethodCache.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/NonLocalReturnException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichChar.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichClass.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichDouble.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichFloat.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichInt.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichLong.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichString.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/StringAdd.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/testing/Benchmark.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/testing/SUnit.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/testing/Show.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/text/Document.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/ClassLoader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/DynamicVariable.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/Marshal.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/NameTransformer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/Properties.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/Random.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/BaseBerrySethi.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/DetWordAutom.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/Inclusion.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/NondetWordAutom.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/SubsetConstruction.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/WordBerrySethi.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/control/Exception.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/grammar/HedgeRHS.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/grammar/TreeRHS.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/logging/ConsoleLogger.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/logging/Logged.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/matching/Regex.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/ast/AbstractSyntax.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/ast/Binders.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/ImplicitConversions.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/JavaTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/PackratParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/Parsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/RegexParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/lexical/Lexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/lexical/Scanners.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/lexical/StdLexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/syntactical/TokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/testing/RegexTest.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/testing/Tester.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/$tilde.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/ImplicitConversions.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/Parsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/lexical/Lexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/lexical/Scanners.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/lexical/StdLexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/syntactical/BindingParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/syntactical/StdTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/syntactical/TokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/testing/Tester.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/CharArrayPosition.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/CharArrayReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/CharSequenceReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/NoPosition.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/OffsetPosition.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/PagedSeqReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/Position.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/Positional.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/Reader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/StreamReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/json/JSON.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/json/Lexer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/json/Parser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/syntax/StdTokens.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/syntax/Tokens.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/Base.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/PointedHedgeExp.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/SyntaxError.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/WordExp.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Atom.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Attribute.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Comment.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Document.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Elem.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/EntityRef.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Group.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/HasKeyValue.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/MalformedAttributeException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/MetaData.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NamespaceBinding.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Node.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NodeBuffer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NodeSeq.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NodeTraverser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Null.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/PCData.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Parsing.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/PrefixedAttribute.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/PrettyPrinter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/ProcInstr.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/QNode.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/SpecialNode.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Text.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/TextBuffer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/TopScope.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/TypeSymbol.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Unparsed.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/UnprefixedAttribute.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Utility.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/XML.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Xhtml.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ContentModel.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ContentModelParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/DTD.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/Decl.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/DocType.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/DtdTypeSymbol.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ElementValidator.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ExternalID.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/Scanner.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/Tokens.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ValidationException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/Binder.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/LoggedNodeFactory.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/NodeFactory.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/XMLLoader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/CircularIncludeException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/UnavailableResourceException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/XIncludeException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/EncodingHeuristics.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/Main.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/XIncludeFilter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/XIncluder.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ConstructingHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ConstructingParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/DefaultMarkupHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ExternalSources.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/FactoryAdapter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/FatalError.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/MarkupHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/MarkupParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/NoBindingFactoryAdapter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/TokenTests.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ValidatingMarkupHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/XhtmlEntities.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/XhtmlParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/path/Expression.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/CachedFileStorage.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/Index.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/IndexedStorage.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/SetStorage.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/pull/XMLEvent.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/pull/XMLEventReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/transform/BasicTransformer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/transform/RewriteRule.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/transform/RuleTransformer.scala" />
- <file url="file://$PROJECT_DIR$/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala" />
- </files>
- </module>
- </modules>
- </facet-type>
- </autodetection-disabled>
- </component>
- <component name="IdProvider" IDEtalkID="522B472C2EA573563CC2BA93160270BE" />
- <component name="InspectionProjectProfileManager">
- <option name="PROJECT_PROFILE" value="Project Default" />
- <option name="USE_PROJECT_PROFILE" value="true" />
- <version value="1.0" />
- <profiles>
- <profile version="1.0" is_locked="false">
- <option name="myName" value="Project Default" />
- <option name="myLocal" value="false" />
- </profile>
- </profiles>
- <list size="5">
- <item index="0" class="java.lang.String" itemvalue="TYPO" />
- <item index="1" class="java.lang.String" itemvalue="SERVER PROBLEM" />
- <item index="2" class="java.lang.String" itemvalue="INFO" />
- <item index="3" class="java.lang.String" itemvalue="WARNING" />
- <item index="4" class="java.lang.String" itemvalue="ERROR" />
- </list>
- </component>
- <component name="JavacSettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="DEPRECATION" value="true" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="JavadocGenerationManager">
- <option name="OUTPUT_DIRECTORY" />
- <option name="OPTION_SCOPE" value="protected" />
- <option name="OPTION_HIERARCHY" value="true" />
- <option name="OPTION_NAVIGATOR" value="true" />
- <option name="OPTION_INDEX" value="true" />
- <option name="OPTION_SEPARATE_INDEX" value="true" />
- <option name="OPTION_DOCUMENT_TAG_USE" value="false" />
- <option name="OPTION_DOCUMENT_TAG_AUTHOR" value="false" />
- <option name="OPTION_DOCUMENT_TAG_VERSION" value="false" />
- <option name="OPTION_DOCUMENT_TAG_DEPRECATED" value="true" />
- <option name="OPTION_DEPRECATED_LIST" value="true" />
- <option name="OTHER_OPTIONS" value="" />
- <option name="HEAP_SIZE" />
- <option name="LOCALE" />
- <option name="OPEN_IN_BROWSER" value="true" />
- </component>
- <component name="JikesSettings">
- <option name="JIKES_PATH" value="" />
- <option name="DEBUGGING_INFO" value="true" />
- <option name="DEPRECATION" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="IS_EMACS_ERRORS_MODE" value="true" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- </component>
- <component name="Palette2">
- <group name="Swing">
- <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
- </item>
- <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
- </item>
- <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
- <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
- <initial-values>
- <property name="text" value="Button" />
- </initial-values>
- </item>
- <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="RadioButton" />
- </initial-values>
- </item>
- <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="CheckBox" />
- </initial-values>
- </item>
- <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="Label" />
- </initial-values>
- </item>
- <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
- <preferred-size width="200" height="200" />
- </default-constraints>
- </item>
- <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
- <preferred-size width="200" height="200" />
- </default-constraints>
- </item>
- <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
- </item>
- <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
- <preferred-size width="-1" height="20" />
- </default-constraints>
- </item>
- <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
- </item>
- <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
- </item>
- </group>
- </component>
- <component name="ProjectDetails">
- <option name="projectName" value="scala-lang" />
- </component>
- <component name="ProjectDictionaryState">
- <dictionary name="odersky" />
- <dictionary name="dragos" />
- </component>
- <component name="ProjectFileVersion" converted="true" />
- <component name="ProjectKey">
- <option name="state" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk/scala-lang.ipr" />
- </component>
- <component name="ProjectModuleManager">
- <modules>
- <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
- <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/dbc.iml" filepath="$PROJECT_DIR$/dbc.iml" />
- <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
- <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
- <module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
- <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
- <module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
- </modules>
- </component>
- <component name="ProjectRootManager" version="2" languageLevel="JDK_1_5" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
- <output url="file://$PROJECT_DIR$/out" />
- </component>
- <component name="ResourceManagerContainer">
- <option name="myResourceBundles">
- <value>
- <list size="0" />
- </value>
- </option>
- </component>
- <component name="RmicSettings">
- <option name="IS_EANABLED" value="false" />
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="GENERATE_IIOP_STUBS" value="false" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- </component>
- <component name="ScalacSettings">
- <option name="MAXIMUM_HEAP_SIZE" value="1024" />
- <option name="DEPRECATION" value="false" />
- <option name="UNCHECKED" value="false" />
- </component>
- <component name="SvnBranchConfigurationManager">
- <option name="myConfigurationMap">
- <map>
- <entry key="$PROJECT_DIR$">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/bin">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src/actors">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src/compiler">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src/library">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/disabled/lib/annotations.jar">
- <value>
- <SvnBranchConfiguration />
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/disabled/lib/enums.jar">
- <value>
- <SvnBranchConfiguration />
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/disabled/lib/nest.jar">
- <value>
- <SvnBranchConfiguration />
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/files">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- </map>
- </option>
- <option name="myVersion" value="124" />
- <option name="mySupportsUserInfoFilter" value="true" />
- </component>
- <component name="VcsDirectoryMappings">
- <mapping directory="" vcs="svn" />
- </component>
- <component name="WebServicesPlugin" addRequiredLibraries="true" />
- <component name="libraryTable">
- <library name="Project ant library">
- <CLASSES>
- <root url="jar://$PROJECT_DIR$/lib/ant/ant-contrib.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ant/vizant.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ant/maven-ant-tasks-2.0.9.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ant/ant-dotnet-1.0.jar!/" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- <library name="Project Scala SDK">
- <CLASSES>
- <root url="jar://$PROJECT_DIR$/lib/jline.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/fjbg.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ScalaCheck.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/msil.jar!/" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- </component>
-</project>
-
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index b2ae3d300a..c25e785d76 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -1,24 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
/**
* The <code>AbstractActor</code> trait.
*
- * @version 0.9.18
* @author Philipp Haller
+ *
+ * @define actor actor
*/
-trait AbstractActor extends OutputChannel[Any] with Replyable[Any, Any] {
+trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
+
+ type Future[+R] = scala.actors.Future[R]
- private[actors] var exiting = false
+ private[actors] def exiting: Boolean = false
private[actors] def linkTo(to: AbstractActor): Unit
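The notable change in AbstractActor is the switch from Replyable to CanReply together with the concrete alias `type Future[+R] = scala.actors.Future[R]`: code written against the AbstractActor interface now gets a real Future back from `!!` rather than a bare thunk. A minimal sketch of what that buys callers (the askLater helper and the echo actor are illustrative, not part of this patch):

import scala.actors.{AbstractActor, Future}
import scala.actors.Actor._

// AbstractActor fixes `type Future[+R] = scala.actors.Future[R]`, so a send
// with !! against the AbstractActor interface yields a genuine Future value.
def askLater(a: AbstractActor, msg: Any): Future[Any] = a !! msg

val echo = actor {
  loop { react { case m => reply(m) } }
}

val f = askLater(echo, "hello")
println(f())   // forces the future, blocking until the reply is available; prints "hello"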
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index b563104f41..006e6439a3 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -1,18 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import java.util.{Timer, TimerTask}
-import java.util.concurrent.{ExecutionException, Callable}
/**
* The <code>Actor</code> object provides functions for the definition of
@@ -22,14 +20,42 @@ import java.util.concurrent.{ExecutionException, Callable}
*
* @author Philipp Haller
*/
-object Actor {
+object Actor extends Combinators {
+
+ /** An actor state. An actor can be in one of the following states:
+ * <ul>
+ * <li>New<br>
+ * An actor that has not yet started is in this state.</li>
+ * <li>Runnable<br>
+ * An actor executing is in this state.</li>
+ * <li>Suspended<br>
+ * An actor that is suspended waiting in a react is in this state.</li>
+ * <li>TimedSuspended<br>
+ * An actor that is suspended waiting in a reactWithin is in this state.</li>
+ * <li>Blocked<br>
+ * An actor that is blocked waiting in a receive is in this state.</li>
+ * <li>TimedBlocked<br>
+ * An actor that is blocked waiting in a receiveWithin is in this state.</li>
+ * <li>Terminated<br>
+ * An actor that has terminated is in this state.</li>
+ * </ul>
+ */
+ object State extends Enumeration {
+ val New,
+ Runnable,
+ Suspended,
+ TimedSuspended,
+ Blocked,
+ TimedBlocked,
+ Terminated = Value
+ }
- private[actors] val tl = new ThreadLocal[Reactor]
+ private[actors] val tl = new ThreadLocal[ReplyReactor]
// timer thread runs as daemon
private[actors] val timer = new Timer(true)
- private[actors] val suspendException = new SuspendActorException
+ private[actors] val suspendException = new SuspendActorControl
/**
* Returns the currently executing actor. Should be used instead
@@ -43,9 +69,10 @@ object Actor {
private[actors] def self(sched: IScheduler): Actor =
rawSelf(sched).asInstanceOf[Actor]
- private[actors] def rawSelf: Reactor = rawSelf(Scheduler)
+ private[actors] def rawSelf: ReplyReactor =
+ rawSelf(Scheduler)
- private[actors] def rawSelf(sched: IScheduler): Reactor = {
+ private[actors] def rawSelf(sched: IScheduler): ReplyReactor = {
val s = tl.get
if (s eq null) {
val r = new ActorProxy(currentThread, sched)
@@ -88,17 +115,17 @@ object Actor {
}
/**
- * <p>This is a factory method for creating actors.</p>
+ * This is a factory method for creating actors.
*
- * <p>The following example demonstrates its usage:</p>
+ * The following example demonstrates its usage:
*
- * <pre>
+ * {{{
* import scala.actors.Actor._
* ...
* val a = actor {
* ...
* }
- * </pre>
+ * }}}
*
* @param body the code block to be executed by the newly created actor
* @return the newly created actor. Note that it is automatically started.
@@ -113,14 +140,12 @@ object Actor {
}
/**
- * <p>
* This is a factory method for creating actors whose
- * body is defined using a <code>Responder</code>.
- * </p>
+ * body is defined using a `Responder`.
*
- * <p>The following example demonstrates its usage:</p>
+ * The following example demonstrates its usage:
*
- * <pre>
+ * {{{
* import scala.actors.Actor._
* import Responder.exec
* ...
@@ -130,9 +155,9 @@ object Actor {
* if exec(println("result: "+res))
* } yield {}
* }
- * </pre>
+ * }}}
*
- * @param body the <code>Responder</code> to be executed by the newly created actor
+ * @param body the `Responder` to be executed by the newly created actor
* @return the newly created actor. Note that it is automatically started.
*/
def reactor(body: => Responder[Unit]): Actor = {
@@ -208,7 +233,7 @@ object Actor {
def eventloop(f: PartialFunction[Any, Unit]): Nothing =
rawSelf.react(new RecursiveProxyHandler(rawSelf, f))
- private class RecursiveProxyHandler(a: Reactor, f: PartialFunction[Any, Unit])
+ private class RecursiveProxyHandler(a: ReplyReactor, f: PartialFunction[Any, Unit])
extends PartialFunction[Any, Unit] {
def isDefinedAt(m: Any): Boolean =
true // events are immediately removed from the mailbox
@@ -222,21 +247,21 @@ object Actor {
* Returns the actor which sent the last received message.
*/
def sender: OutputChannel[Any] =
- rawSelf.asInstanceOf[ReplyReactor].sender
+ rawSelf.sender
/**
* Send <code>msg</code> to the actor waiting in a call to
* <code>!?</code>.
*/
def reply(msg: Any): Unit =
- rawSelf.asInstanceOf[ReplyReactor].reply(msg)
+ rawSelf.reply(msg)
/**
* Send <code>()</code> to the actor waiting in a call to
* <code>!?</code>.
*/
def reply(): Unit =
- rawSelf.asInstanceOf[ReplyReactor].reply(())
+ rawSelf.reply(())
/**
* Returns the number of messages in <code>self</code>'s mailbox
@@ -246,20 +271,18 @@ object Actor {
def mailboxSize: Int = rawSelf.mailboxSize
/**
- * <p>
* Converts a synchronous event-based operation into
- * an asynchronous <code>Responder</code>.
- * </p>
+ * an asynchronous `Responder`.
*
- * <p>The following example demonstrates its usage:</p>
+ * The following example demonstrates its usage:
*
- * <pre>
+ * {{{
* val adder = reactor {
* for {
* _ <- respondOn(react) { case Add(a, b) => reply(a+b) }
* } yield {}
* }
- * </pre>
+ * }}}
*/
def respondOn[A, B](fun: PartialFunction[A, Unit] => Nothing):
PartialFunction[A, B] => Responder[B] =
@@ -276,26 +299,6 @@ object Actor {
}
/**
- * Causes <code>self</code> to repeatedly execute
- * <code>body</code>.
- *
- * @param body the code block to be executed
- */
- def loop(body: => Unit): Unit = body andThen loop(body)
-
- /**
- * Causes <code>self</code> to repeatedly execute
- * <code>body</code> while the condition
- * <code>cond</code> is <code>true</code>.
- *
- * @param cond the condition to test
- * @param body the code block to be executed
- */
- def loopWhile(cond: => Boolean)(body: => Unit): Unit =
- if (cond) { body andThen loopWhile(cond)(body) }
- else continue
-
- /**
* Links <code>self</code> to actor <code>to</code>.
*
* @param to the actor to link to
@@ -316,7 +319,7 @@ object Actor {
*
* @param from the actor to unlink from
*/
- def unlink(from: Actor): Unit = self.unlink(from)
+ def unlink(from: AbstractActor): Unit = self.unlink(from)
/**
* <p>
@@ -348,16 +351,51 @@ object Actor {
* <code>Exit(self, 'normal)</code> to <code>a</code>.
* </p>
*/
- def exit(): Nothing = self.exit()
+ def exit(): Nothing = rawSelf.exit()
- def continue: Unit = throw new KillActorException
}
/**
* <p>
- * This class provides an implementation of event-based actors.
- * The main ideas of our approach are explained in the two papers
+ * This trait provides lightweight, concurrent actors. Actors are
+ * created by extending the `Actor` trait (alternatively, one of the
+ * factory methods in its companion object can be used). The
+ * behavior of an `Actor` subclass is defined by implementing its
+ * `act` method:
+ *
+ * {{{
+ * class MyActor extends Actor {
+ * def act() {
+ * // actor behavior goes here
+ * }
+ * }
+ * }}}
+ *
+ * A new `Actor` instance is started by invoking its `start` method.
+ *
+ * '''Note:''' care must be taken when invoking thread-blocking methods
+ * other than those provided by the `Actor` trait or its companion
+ * object (such as `receive`). Blocking the underlying thread inside
+ * an actor may lead to starvation of other actors. This also
+ * applies to actors hogging their thread for a long time between
+ * invoking `receive`/`react`.
+ *
+ * If actors use blocking operations (for example, methods for
+ * blocking I/O), there are several options:
+ * <ul>
+ * <li>The run-time system can be configured to use a larger thread pool size
+ * (for example, by setting the `actors.corePoolSize` JVM property).</li>
+ *
+ * <li>The `scheduler` method of the `Actor` trait can be overridden to return a
+ * `ResizableThreadPoolScheduler`, which resizes its thread pool to
+ * avoid starvation caused by actors that invoke arbitrary blocking methods.</li>
+ *
+ * <li>The `actors.enableForkJoin` JVM property can be set to `false`, in which
+ * case a `ResizableThreadPoolScheduler` is used by default to execute actors.</li>
+ * </ul>
* </p>
+ * <p>
+ * The main ideas of the implementation are explained in the two papers
* <ul>
* <li>
* <a href="http://lampwww.epfl.ch/~odersky/papers/jmlc06.pdf">
@@ -372,11 +410,15 @@ object Actor {
* Philipp Haller and Martin Odersky, <i>Proc. COORDINATION 2007</i>.
* </li>
* </ul>
+ * </p>
*
* @author Philipp Haller
+ *
+ * @define actor actor
+ * @define channel actor's mailbox
*/
@serializable @SerialVersionUID(-781154067877019505L)
-trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
+trait Actor extends AbstractActor with ReplyReactor with ActorCanReply with InputChannel[Any] {
/* The following two fields are only used when the actor
* suspends by blocking its underlying thread, for example,
@@ -392,15 +434,9 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
@volatile
private var received: Option[Any] = None
- /* This option holds a TimerTask when the actor waits in a
- * reactWithin/receiveWithin. The TimerTask is cancelled when
- * the actor can continue.
- */
- private var onTimeout: Option[TimerTask] = None
-
protected[actors] override def scheduler: IScheduler = Scheduler
- private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: Any => Boolean) =
+ private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
if (isSuspended) {
() => synchronized {
mailbox.append(msg, replyTo)
@@ -408,26 +444,46 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
}
} else super.startSearch(msg, replyTo, handler)
- private[actors] override def makeReaction(fun: () => Unit): Runnable =
- new ActorTask(this, fun)
-
- private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), onSameThread: Boolean) {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
+ // we override this method to check `shouldExit` before suspending
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
+ resumeOnSameThread: Boolean) {
+ var tmpMbox = startMbox
+ var done = false
+ while (!done) {
+ val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler.isDefinedAt(msg)
+ })
+ if (tmpMbox ne mailbox)
+ tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ tmpMbox = new MQueue[Any]("Temp")
+ drainSendBuffer(tmpMbox)
+ // keep going
+ } else {
+ // very important to check for `shouldExit` at this point
+ // since linked actors might have set it after we checked
+ // last time (e.g., at the beginning of `react`)
+ if (shouldExit) exit()
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
+ }
+ }
+ } else {
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
+ done = true
}
}
- senders = List(item._2)
- super.resumeReceiver(item, onSameThread)
}
- /**
- * Receives a message from this actor's mailbox.
- *
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ActorTask(this, fun, handler, msg)
+
def receive[R](f: PartialFunction[Any, R]): R = {
assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
@@ -451,7 +507,7 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
drainSendBuffer(mailbox)
// keep going
} else {
- waitingFor = f.isDefinedAt
+ waitingFor = f
isSuspended = true
scheduler.managedBlock(blocker)
drainSendBuffer(mailbox)
@@ -471,14 +527,6 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
result
}
- /**
- * Receives a message from this actor's mailbox within a certain
- * time span.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
@@ -517,14 +565,24 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
done = true
receiveTimeout
} else {
- waitingFor = f.isDefinedAt
+ if (onTimeout.isEmpty) {
+ if (!f.isDefinedAt(TIMEOUT))
+ error("unhandled timeout")
+
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() {
+ thisActor.send(TIMEOUT, thisActor)
+ }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ }
+
+ // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
+ // See SI-4759
+ waitingFor = f
received = None
isSuspended = true
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
- })
- Actor.timer.schedule(onTimeout.get, msec)
scheduler.managedBlock(blocker)
drainSendBuffer(mailbox)
// keep going
@@ -551,109 +609,32 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
result
}
- /**
- * Receives a message from this actor's mailbox.
- * <p>
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param f a partial function with message patterns and actions
- */
- override def react(f: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.self(scheduler) == this, "react on channel belonging to other actor")
+ override def react(handler: PartialFunction[Any, Unit]): Nothing = {
synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
+ if (shouldExit) exit()
}
- continuation = f
- searchMailbox(mailbox, f.isDefinedAt, false)
- throw Actor.suspendException
+ super.react(handler)
}
- /**
- * Receives a message from this actor's mailbox within a certain
- * time span.
- * <p>
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- */
- def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.self(scheduler) == this, "react on channel belonging to other actor")
-
+ override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- val receiveTimeout = () => {
- if (f.isDefinedAt(TIMEOUT)) {
- senders = List(this)
- scheduleActor(f, TIMEOUT)
- } else
- error("unhandled timeout")
+ if (shouldExit) exit()
}
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- f.isDefinedAt(m)
- })
- if (null eq qel) {
- val todo = synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- } else if (msec == 0L) {
- done = true
- receiveTimeout
- } else {
- waitingFor = f.isDefinedAt
- continuation = f
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- done = true
- () => {}
- }
- }
- todo()
- } else {
- senders = List(qel.session)
- scheduleActor(f, qel.msg)
- done = true
- }
- }
-
- throw Actor.suspendException
+ super.reactWithin(msec)(handler)
}
- /**
- * Receives the next message from this actor's mailbox.
- */
def ? : Any = receive {
case x => x
}
// guarded by lock of this
- private[actors] override def scheduleActor(f: PartialFunction[Any, Unit], msg: Any) =
- if ((f eq null) && (continuation eq null)) {
+ // never throws SuspendActorControl
+ private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
+ if (f eq null) {
// do nothing (timeout is handled instead)
}
else {
- val task = new Reaction(this,
- if (f eq null) continuation else f,
- msg)
+ val task = new ActorTask(this, null, f, msg)
scheduler executeFromActor task
}
@@ -684,26 +665,39 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
notify()
}
- /**
- * Starts this actor.
- */
- override def start(): Actor = synchronized {
+ private[actors] override def exiting = synchronized {
+ _state == Actor.State.Terminated
+ }
+
+ // guarded by this
+ private[actors] override def dostart() {
// Reset various flags.
//
// Note that we do *not* reset `trapExit`. The reason is that
// users should be able to set the field in the constructor
// and before `act` is called.
-
exitReason = 'normal
- exiting = false
shouldExit = false
- scheduler.newActor(this)
- scheduler.execute(new Reaction(this))
+ super.dostart()
+ }
+ override def start(): Actor = synchronized {
+ super.start()
this
}
+ override def getState: Actor.State.Value = synchronized {
+ if (isSuspended) {
+ if (onTimeout.isEmpty)
+ Actor.State.Blocked
+ else
+ Actor.State.TimedBlocked
+ } else
+ super.getState
+ }
+
+ // guarded by this
private[actors] var links: List[AbstractActor] = Nil
/**
@@ -753,8 +747,11 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
links = links.filterNot(from.==)
}
+ @volatile
var trapExit = false
- private[actors] var exitReason: AnyRef = 'normal
+ // guarded by this
+ private var exitReason: AnyRef = 'normal
+ // guarded by this
private[actors] var shouldExit = false
/**
@@ -775,7 +772,9 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
* </p>
*/
protected[actors] def exit(reason: AnyRef): Nothing = {
- exitReason = reason
+ synchronized {
+ exitReason = reason
+ }
exit()
}
@@ -783,28 +782,40 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
* Terminates with exit reason <code>'normal</code>.
*/
protected[actors] override def exit(): Nothing = {
- // links
- if (!links.isEmpty)
- exitLinked()
- terminated()
- throw Actor.suspendException
+ val todo = synchronized {
+ if (!links.isEmpty)
+ exitLinked()
+ else
+ () => {}
+ }
+ todo()
+ super.exit()
}
// Assume !links.isEmpty
- private[actors] def exitLinked() {
- exiting = true
+ // guarded by this
+ private[actors] def exitLinked(): () => Unit = {
+ _state = Actor.State.Terminated
+ // reset waitingFor, otherwise getState returns Suspended
+ waitingFor = Reactor.waitingForNone
// remove this from links
val mylinks = links.filterNot(this.==)
- // exit linked processes
- mylinks.foreach((linked: AbstractActor) => {
- unlink(linked)
- if (!linked.exiting)
- linked.exit(this, exitReason)
- })
+ // unlink actors
+ mylinks.foreach(unlinkFrom(_))
+ // return closure that locks linked actors
+ () => {
+ mylinks.foreach((linked: AbstractActor) => {
+ linked.synchronized {
+ if (!linked.exiting)
+ linked.exit(this, exitReason)
+ }
+ })
+ }
}
// Assume !links.isEmpty
- private[actors] def exitLinked(reason: AnyRef) {
+ // guarded by this
+ private[actors] def exitLinked(reason: AnyRef): () => Unit = {
exitReason = reason
exitLinked()
}
@@ -823,8 +834,17 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
// (because shouldExit == true)
if (isSuspended)
resumeActor()
- else if (waitingFor ne waitingForNone) {
- scheduleActor(continuation, null)
+ else if (waitingFor ne Reactor.waitingForNone) {
+ waitingFor = Reactor.waitingForNone
+ // it doesn't matter what partial function we are passing here
+ scheduleActor(waitingFor, null)
+ /* Here we should not throw a SuspendActorControl,
+ since the current method is called from an actor that
+ is in the process of exiting.
+
+ Therefore, the contract for scheduleActor is that
+ it never throws a SuspendActorControl.
+ */
}
}
}
@@ -853,12 +873,18 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
* <b>case</b> TIMEOUT <b>=&gt;</b> ...
* }</pre>
*
- * @version 0.9.8
* @author Philipp Haller
*/
case object TIMEOUT
+/** An `Exit` message (an instance of this class) is sent to an actor
+ * with `trapExit` set to `true` whenever one of its linked actors
+ * terminates.
+ *
+ * @param from the actor that terminated
+ * @param reason the reason that caused the actor to terminate
+ */
case class Exit(from: AbstractActor, reason: AnyRef)
/** <p>
@@ -866,7 +892,6 @@ case class Exit(from: AbstractActor, reason: AnyRef)
* executions.
* </p>
*
- * @version 0.9.8
* @author Philipp Haller
*/
-private[actors] class SuspendActorException extends Throwable with ControlException
+private[actors] class SuspendActorControl extends ControlThrowable
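The rewritten Scaladoc above spells out the canonical usage pattern: extend Actor, implement act, call start. A small self-contained sketch along those lines (not taken from the patch; the Counter class and its message strings are chosen purely for illustration):

import scala.actors.Actor
import scala.actors.Actor._

class Counter extends Actor {
  def act() {
    var n = 0
    loop {
      react {
        case "incr" => n += 1
        case "get"  => reply(n)   // answers the sender of a !? request
        case "stop" => exit()     // terminates with reason 'normal
      }
    }
  }
}

object CounterDemo {
  def main(args: Array[String]) {
    val c = new Counter
    c.start()            // a new Actor instance must be started explicitly
    c ! "incr"
    c ! "incr"
    println(c !? "get")  // synchronous send-and-wait; prints 2
    c ! "stop"
  }
}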
diff --git a/src/actors/scala/actors/ActorCanReply.scala b/src/actors/scala/actors/ActorCanReply.scala
new file mode 100644
index 0000000000..9d52081a9d
--- /dev/null
+++ b/src/actors/scala/actors/ActorCanReply.scala
@@ -0,0 +1,66 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.actors
+
+import scala.concurrent.SyncVar
+
+/**
+ * The `ActorCanReply` trait provides message send operations that
+ * may result in a response from the receiver.
+ *
+ * @author Philipp Haller
+ */
+private[actors] trait ActorCanReply extends ReactorCanReply {
+ this: AbstractActor with ReplyReactor =>
+
+ override def !?(msg: Any): Any = {
+ val replyCh = new Channel[Any](Actor.self(scheduler))
+ send(msg, replyCh)
+ replyCh.?
+ }
+
+ override def !?(msec: Long, msg: Any): Option[Any] = {
+ val replyCh = new Channel[Any](Actor.self(scheduler))
+ send(msg, replyCh)
+ replyCh.receiveWithin(msec) {
+ case TIMEOUT => None
+ case x => Some(x)
+ }
+ }
+
+ override def !![A](msg: Any, handler: PartialFunction[Any, A]): Future[A] = {
+ val c = new Channel[A](Actor.self(scheduler))
+ val fun = (res: SyncVar[A]) => {
+ val ftch = new Channel[A](Actor.self(scheduler))
+ send(msg, new OutputChannel[Any] {
+ def !(msg: Any) =
+ ftch ! handler(msg)
+ def send(msg: Any, replyTo: OutputChannel[Any]) =
+ ftch.send(handler(msg), replyTo)
+ def forward(msg: Any) =
+ ftch.forward(handler(msg))
+ def receiver =
+ ftch.receiver
+ })
+ ftch.react {
+ case any => res.set(any)
+ }
+ }
+ val a = new FutureActor[A](fun, c)
+ a.start()
+ a
+ }
+
+ override def !!(msg: Any): Future[Any] = {
+ val noTransform: PartialFunction[Any, Any] = { case x => x }
+ this !! (msg, noTransform)
+ }
+
+}
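ActorCanReply is what backs the three result-bearing send forms on actors: blocking `!?`, timed `!?`, and future-returning `!!`. A short usage sketch (the Add message and the service actor are invented for the example, not part of the patch):

import scala.actors.Actor._

case class Add(a: Int, b: Int)

val svc = actor {
  loop { react { case Add(a, b) => reply(a + b) } }
}

val sum: Any           = svc !? Add(1, 2)           // blocks until the reply arrives
val maybe: Option[Any] = svc !? (500, Add(3, 4))    // None if no reply within 500 ms
val fut = svc !! (Add(5, 6), { case n: Int => n })  // Future[Int]; the handler recovers the type
println(fut())                                      // forces the future; prints 11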
diff --git a/src/actors/scala/actors/ActorProxy.scala b/src/actors/scala/actors/ActorProxy.scala
index 221368c6aa..be54ca98c2 100644
--- a/src/actors/scala/actors/ActorProxy.scala
+++ b/src/actors/scala/actors/ActorProxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
@@ -16,7 +15,6 @@ import java.lang.Thread
* The class <code>ActorProxy</code> provides a dynamic actor proxy for normal
* Java threads.
*
- * @version 0.9.8
* @author Philipp Haller
*/
private[actors] class ActorProxy(t: Thread, override final val scheduler: IScheduler) extends Actor {
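ActorProxy is what lets a plain JVM thread, one that is not an Actor subclass, take part in the messaging operations used above: Actor.self wraps the calling thread in a proxy on first use. A tiny illustration (the doubler actor is hypothetical):

import scala.actors.Actor._

val doubler = actor {
  loop { react { case n: Int => reply(n * 2) } }
}

// The main thread is not an Actor, yet the blocking !? below works,
// because rawSelf/self transparently create an ActorProxy for it.
println(doubler !? 21)   // prints 42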
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index df248053e1..66c47f65a5 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
@@ -17,18 +16,39 @@ package scala.actors
*
* @author Philipp Haller
*/
-private[actors] class ActorTask(actor: Actor, fun: () => Unit) extends ReactorTask[Actor](actor, fun) {
+private[actors] class ActorTask(actor: Actor,
+ fun: () => Unit,
+ handler: PartialFunction[Any, Any],
+ msg: Any)
+ extends ReplyReactorTask(actor, fun, handler, msg) {
- protected override def beforeExecuting() {
- if (actor.shouldExit)
- actor.exit()
+ protected override def beginExecution() {
+ super.beginExecution()
+ actor.synchronized { // shouldExit guarded by actor
+ if (actor.shouldExit)
+ actor.exit()
+ }
}
- protected override def afterExecuting(e: Exception) {
- actor.synchronized {
+ protected override def terminateExecution(e: Throwable) {
+ val senderInfo = try { Some(actor.sender) } catch {
+ case _: Exception => None
+ }
+ val uncaught = UncaughtException(actor,
+ if (msg != null) Some(msg) else None,
+ senderInfo,
+ currentThread,
+ e)
+
+ val todo = actor.synchronized {
if (!actor.links.isEmpty)
- actor.exitLinked(e)
+ actor.exitLinked(uncaught)
+ else {
+ super.terminateExecution(e)
+ () => {}
+ }
}
+ todo()
}
}
diff --git a/src/actors/scala/actors/Replyable.scala b/src/actors/scala/actors/CanReply.scala
index 62247a6b8e..eaaaef1a29 100644
--- a/src/actors/scala/actors/Replyable.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -1,26 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
/**
- * The Replyable trait defines result-bearing message send operations
- * on replyable actors.
+ * The <code>CanReply</code> trait defines result-bearing message send operations.
*
* @author Philipp Haller
+ *
+ * @define actor `CanReply`
*/
-trait Replyable[-T, +R] {
+trait CanReply[-T, +R] {
+
+ type Future[+P] <: () => P
/**
- * Sends <code>msg</code> to this Replyable and awaits reply
- * (synchronous).
+ * Sends <code>msg</code> to this $actor and
+ * awaits reply (synchronous).
*
* @param msg the message to be sent
* @return the reply
@@ -28,8 +30,9 @@ trait Replyable[-T, +R] {
def !?(msg: T): R
/**
- * Sends <code>msg</code> to this Replyable and awaits reply
- * (synchronous) within <code>msec</code> milliseconds.
+ * Sends <code>msg</code> to this $actor and
+ * awaits reply (synchronous) within <code>msec</code>
+ * milliseconds.
*
* @param msec the time span before timeout
* @param msg the message to be sent
@@ -39,27 +42,25 @@ trait Replyable[-T, +R] {
def !?(msec: Long, msg: T): Option[R]
/**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
+ * Sends <code>msg</code> to this $actor and
+ * immediately returns a future representing the reply value.
*
* @param msg the message to be sent
* @return the future
*/
- def !!(msg: T): () => R =
- () => this !? msg
+ def !!(msg: T): Future[R]
/**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
+ * Sends <code>msg</code> to this $actor and
+ * immediately returns a future representing the reply value.
* The reply is post-processed using the partial function
- * <code>f</code>. This also allows to recover a more
+ * <code>handler</code>. This also makes it possible to recover a more
* precise type for the reply value.
*
- * @param msg the message to be sent
- * @param f the function to be applied to the response
- * @return the future
+ * @param msg the message to be sent
+ * @param handler the function to be applied to the response
+ * @return the future
*/
- def !![P](msg: T, f: PartialFunction[R, P]): () => P =
- () => f(this !? msg)
+ def !![P](msg: T, handler: PartialFunction[R, P]): Future[P]
}
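
Because the abstract Future type is bounded by () => P, a caller can await any CanReply simply by applying the returned future. A tiny sketch under that assumption (the ask helper is a hypothetical name used only for illustration):

import scala.actors.CanReply

object Ask {
  // Hypothetical helper: send msg to any CanReply and block on the reply.
  // Works because Future[+P] <: () => P, so the future can be applied directly.
  def ask[T, R](target: CanReply[T, R], msg: T): R = (target !! msg)()
}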
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index 0c9beacca6..16f43fbd17 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -1,31 +1,29 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
+import scala.concurrent.SyncVar
-/** <p>
- * This class is used to pattern match on values that were sent
- * to some channel <code>Chan<sub>n</sub></code> by the current
- * actor <code>self</code>.
- * </p>
- * <p>
- * The following example demonstrates its usage:
- * </p><pre>
+/**
+ * This class is used to pattern match on values that were sent
+ * to some channel <code>Chan<sub>n</sub></code> by the current
+ * actor <code>self</code>.
+ *
+ * The following example demonstrates its usage:
+ * {{{
* receive {
* <b>case</b> Chan1 ! msg1 => ...
* <b>case</b> Chan2 ! msg2 => ...
* }
- * </pre>
+ * }}}
*
- * @version 0.9.8
* @author Philipp Haller
*/
case class ! [a](ch: Channel[a], msg: a)
@@ -35,87 +33,48 @@ case class ! [a](ch: Channel[a], msg: a)
* actors. Only the actor creating an instance of a
* <code>Channel</code> may receive from it.
*
- * @version 0.9.17
* @author Philipp Haller
+ *
+ * @define actor channel
+ * @define channel channel
*/
-class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputChannel[Msg] {
+class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
+
+ type Future[+P] = scala.actors.Future[P]
def this() = this(Actor.self)
- /**
- * Sends a message to this <code>Channel</code>.
- *
- * @param msg the message to be sent
- */
def !(msg: Msg) {
receiver ! scala.actors.!(this, msg)
}
- /**
- * Sends a message to this <code>Channel</code>
- * (asynchronous) supplying explicit reply destination.
- *
- * @param msg the message to send
- * @param replyTo the reply destination
- */
def send(msg: Msg, replyTo: OutputChannel[Any]) {
receiver.send(scala.actors.!(this, msg), replyTo)
}
- /**
- * Forwards <code>msg</code> to <code>this</code> keeping the
- * last sender as sender instead of <code>self</code>.
- */
def forward(msg: Msg) {
receiver forward scala.actors.!(this, msg)
}
- /**
- * Receives a message from this <code>Channel</code>.
- *
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
def receive[R](f: PartialFunction[Msg, R]): R = {
val C = this.asInstanceOf[Channel[Any]]
- val recvActor = receiver.asInstanceOf[Actor]
- recvActor.receive {
+ receiver.receive {
case C ! msg if (f.isDefinedAt(msg.asInstanceOf[Msg])) => f(msg.asInstanceOf[Msg])
}
}
- /**
- * Receives the next message from this <code>Channel</code>.
- */
def ? : Msg = receive {
case x => x
}
- /**
- * Receives a message from this <code>Channel</code> within a certain
- * time span.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
val C = this.asInstanceOf[Channel[Any]]
- val recvActor = receiver.asInstanceOf[Actor]
- recvActor.receiveWithin(msec) {
+ receiver.receiveWithin(msec) {
case C ! msg if (f.isDefinedAt(msg)) => f(msg)
case TIMEOUT => f(TIMEOUT)
}
}
- /**
- * Receives a message from this <code>Channel</code>.
- * <p>
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param f a partial function with message patterns and actions
- */
def react(f: PartialFunction[Msg, Unit]): Nothing = {
val C = this.asInstanceOf[Channel[Any]]
receiver.react {
@@ -123,32 +82,14 @@ class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputCha
}
}
- /**
- * Receives a message from this <code>Channel</code> within a certain
- * time span.
- * <p>
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- */
def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing = {
val C = this.asInstanceOf[Channel[Any]]
- val recvActor = receiver.asInstanceOf[Actor]
- recvActor.reactWithin(msec) {
+ receiver.reactWithin(msec) {
case C ! msg if (f.isDefinedAt(msg)) => f(msg)
case TIMEOUT => f(TIMEOUT)
}
}
- /**
- * Sends a message to this <code>Channel</code> and
- * awaits reply.
- *
- * @param msg the message to be sent
- * @return the reply
- */
def !?(msg: Msg): Any = {
val replyCh = new Channel[Any](Actor.self(receiver.scheduler))
receiver.send(scala.actors.!(this, msg), replyCh)
@@ -157,15 +98,6 @@ class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputCha
}
}
- /**
- * Sends a message to this <code>Channel</code> and
- * awaits reply within a certain time span.
- *
- * @param msec the time span before timeout
- * @param msg the message to be sent
- * @return <code>None</code> in case of timeout, otherwise
- * <code>Some(x)</code> where <code>x</code> is the reply
- */
def !?(msec: Long, msg: Msg): Option[Any] = {
val replyCh = new Channel[Any](Actor.self(receiver.scheduler))
receiver.send(scala.actors.!(this, msg), replyCh)
@@ -175,4 +107,32 @@ class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputCha
}
}
+ def !![A](msg: Msg, handler: PartialFunction[Any, A]): Future[A] = {
+ val c = new Channel[A](Actor.self(receiver.scheduler))
+ val fun = (res: SyncVar[A]) => {
+ val ftch = new Channel[A](Actor.self(receiver.scheduler))
+ receiver.send(scala.actors.!(this, msg), new OutputChannel[Any] {
+ def !(msg: Any) =
+ ftch ! handler(msg)
+ def send(msg: Any, replyTo: OutputChannel[Any]) =
+ ftch.send(handler(msg), replyTo)
+ def forward(msg: Any) =
+ ftch.forward(handler(msg))
+ def receiver =
+ ftch.receiver
+ })
+ ftch.react {
+ case any => res.set(any)
+ }
+ }
+ val a = new FutureActor[A](fun, c)
+ a.start()
+ a
+ }
+
+ def !!(msg: Msg): Future[Any] = {
+ val noTransform: PartialFunction[Any, Any] = { case x => x }
+ this !! (msg, noTransform)
+ }
+
}
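
The scaladoc example above shows only the receiving side. A slightly fuller sketch (object and channel names are illustrative): one actor creates the channels, another actor writes to them, and the owner pattern-matches on the ! case class to tell the channels apart.

import scala.actors.Channel
import scala.actors.Actor._

object ChannelDemo {
  def main(args: Array[String]) {
    actor {
      val Strings = new Channel[String]   // only the creating actor may receive from it
      val Ints    = new Channel[Int]

      actor { Strings ! "hello"; Ints ! 42 }   // another actor writes to the channels

      for (_ <- 1 to 2) receive {
        case Strings ! s => println("string: " + s)
        case Ints ! n    => println("int: " + n)
      }
    }
  }
}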
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
new file mode 100644
index 0000000000..6082f92323
--- /dev/null
+++ b/src/actors/scala/actors/Combinators.scala
@@ -0,0 +1,46 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id$
+
+package scala.actors
+
+private[actors] trait Combinators {
+
+ /**
+ * Enables the composition of suspendable closures using `andThen`,
+ * `loop`, `loopWhile`, etc.
+ */
+ implicit def mkBody[a](body: => a): Actor.Body[a]
+
+ /**
+ * Repeatedly executes `body`.
+ *
+ * @param body the block to be executed
+ */
+ def loop(body: => Unit): Unit = body andThen loop(body)
+
+ /**
+ * Repeatedly executes `body` while the condition `cond` is `true`.
+ *
+ * @param cond the condition to test
+ * @param body the block to be executed
+ */
+ def loopWhile(cond: => Boolean)(body: => Unit): Unit =
+ if (cond) { body andThen loopWhile(cond)(body) }
+ else continue
+
+ /**
+ * Continues with the execution of the closure registered as
+ * continuation following `andThen`. Continues with the execution
+ * of the next loop iteration when invoked inside the body of `loop`
+ * or `loopWhile`.
+ */
+ def continue: Unit = throw new KillActorControl
+
+}
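
Roughly how these combinators read in user code (CountDown is an illustrative name): loopWhile re-runs its body as a continuation after each react, and andThen chains a final step once the condition fails.

import scala.actors.Actor._

object CountDown {
  def main(args: Array[String]) {
    val counter = actor {
      var n = 3
      loopWhile(n > 0) {
        react { case "tick" => n -= 1 }
      } andThen println("done")
    }
    for (_ <- 1 to 3) counter ! "tick"
  }
}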
diff --git a/src/actors/scala/actors/DaemonActor.scala b/src/actors/scala/actors/DaemonActor.scala
index a81d2c8148..0dd828fbc1 100644
--- a/src/actors/scala/actors/DaemonActor.scala
+++ b/src/actors/scala/actors/DaemonActor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
index e94509e048..7140e5817a 100644
--- a/src/actors/scala/actors/Debug.scala
+++ b/src/actors/scala/actors/Debug.scala
@@ -1,46 +1,47 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
/**
+ * Provides methods for generating debugging output.
+ *
* @author Philipp Haller
*/
-object Debug {
+object Debug extends Logger("") {}
+
+private[actors] class Logger(tag: String) {
private var lev = 2
def level = lev
def level_= (lev: Int) = { this.lev = lev }
+ private val tagString = if (tag == "") "" else " ["+tag+"]"
+
def info(s: String) =
- if (lev > 2) System.out.println("Info: " + s)
+ if (lev > 2) System.out.println("Info" + tagString + ": " + s)
def warning(s: String) =
- if (lev > 1) System.err.println("Warning: " + s)
+ if (lev > 1) System.err.println("Warning" + tagString + ": " + s)
def error(s: String) =
- if (lev > 0) System.err.println("Error: " + s)
-}
-
-class Debug(tag: String) {
- private var lev = 2
-
- def level = lev
- def level_= (lev: Int) = { this.lev = lev }
+ if (lev > 0) System.err.println("Error" + tagString + ": " + s)
- def info(s: String) =
- if (lev > 2) System.out.println(tag + " (info): " + s)
+ def doInfo(b: => Unit) =
+ if (lev > 2) b
- def warning(s: String) =
- if (lev > 1) System.err.println(tag + " (warn): " + s)
+ def doWarning(b: => Unit) =
+ if (lev > 1) b
- def error(s: String) =
- if (lev > 0) System.err.println(tag + " (erro): " + s)
+ def doError(b: => Unit) =
+ if (lev > 0) b
}
+
+@deprecated("this class is going to be removed in a future release")
+class Debug(tag: String) extends Logger(tag) {}
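
In user code the knobs look roughly like this; the level thresholds are taken from the conditions above:

import scala.actors.Debug

object LoggingDemo {
  def main(args: Array[String]) {
    Debug.level = 3                    // 1: errors, 2: +warnings (default), 3: +info
    Debug.info("scheduler started")    // printed only when level > 2
    Debug.doInfo { println("evaluated only when level > 2") }
  }
}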
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 38b268d795..5050c84c8a 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -1,89 +1,186 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
-import scheduler.DefaultThreadPoolScheduler
-
-/**
- * <p>
- * A <code>Future[T]</code> is a function of arity 0 that
- * returns a value of type <code>T</code>.
- * Applying a future blocks the current actor (<code>self</code>)
- * until the future's value is available.
- * </p>
- * <p>
- * A future can be queried to find out whether its value
- * is already available.
- * </p>
+import scala.actors.scheduler.DaemonScheduler
+import scala.concurrent.SyncVar
+
+/** A `Future[T]` is a function of arity 0 that returns
+ * a value of type `T`.
+ * Applying a future blocks the current actor (`Actor.self`)
+ * until the future's value is available.
+ *
+ * A future can be queried to find out whether its value
+ * is already available without blocking.
*
- * @author Philipp Haller
+ * @author Philipp Haller
*/
-abstract class Future[+T](val inputChannel: InputChannel[T]) extends Responder[T] with Function0[T] {
+abstract class Future[+T] extends Responder[T] with Function0[T] {
+
+ @volatile
+ private[actors] var fvalue: Option[Any] = None
+ private[actors] def fvalueTyped = fvalue.get.asInstanceOf[T]
+
@deprecated("this member is going to be removed in a future release")
- protected var value: Option[Any] = None
+ def ch: InputChannel[Any] = inputChannel
+
+ @deprecated("this member is going to be removed in a future release")
+ protected def value: Option[Any] = fvalue
+ @deprecated("this member is going to be removed in a future release")
+ protected def value_=(x: Option[Any]) { fvalue = x }
+
+ /** Tests whether the future's result is available.
+ *
+ * @return `true` if the future's result is available,
+ * `false` otherwise.
+ */
def isSet: Boolean
+
+ /** Returns an input channel that can be used to receive the future's result.
+ *
+ * @return the future's input channel
+ */
+ def inputChannel: InputChannel[T]
+
}
-/**
- * The <code>Futures</code> object contains methods that operate on Futures.
- *
- * @author Philipp Haller
- */
-object Futures {
+private case object Eval
- private case object Eval
+private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T]) extends Future[T] with DaemonActor {
- def future[T](body: => T): Future[T] = {
- val a = new DaemonActor {
- def act() {
- Actor.react {
- case Eval => Actor.reply(body)
+ var enableChannel = false // guarded by this
+
+ def isSet = !fvalue.isEmpty
+
+ def apply(): T = {
+ if (fvalue.isEmpty) {
+ this !? Eval
+ }
+ fvalueTyped
+ }
+
+ def respond(k: T => Unit) {
+ if (isSet) k(fvalueTyped)
+ else {
+ val ft = this !! Eval
+ ft.inputChannel.react {
+ case _ => k(fvalueTyped)
+ }
+ }
+ }
+
+ def inputChannel: InputChannel[T] = {
+ synchronized {
+ if (!enableChannel) {
+ if (isSet)
+ channel ! fvalueTyped
+ enableChannel = true
+ }
+ }
+ channel
+ }
+
+ def act() {
+ val res = new SyncVar[T]
+
+ {
+ fun(res)
+ } andThen {
+
+ synchronized {
+ val v = res.get
+ fvalue = Some(v)
+ if (enableChannel)
+ channel ! v
+ }
+
+ loop {
+ react {
+ case Eval => reply()
}
}
}
+ }
+}
+
+/** The `Futures` object contains methods that operate on futures.
+ *
+ * @author Philipp Haller
+ */
+object Futures {
+
+ /** Arranges for the asynchronous execution of `body`,
+ * returning a future representing the result.
+ *
+ * @param body the computation to be carried out asynchronously
+ * @return the future representing the result of the
+ * computation
+ */
+ def future[T](body: => T): Future[T] = {
+ val c = new Channel[T](Actor.self(DaemonScheduler))
+ val a = new FutureActor[T](_.set(body), c)
a.start()
- a !! (Eval, { case any => any.asInstanceOf[T] })
+ a
}
- def alarm(t: Long) = future {
- Actor.reactWithin(t) {
- case TIMEOUT => {}
+ /** Creates a future that resolves after a given time span.
+ *
+ * @param timespan the time span in ms after which the future resolves
+ * @return the future
+ */
+ def alarm(timespan: Long): Future[Unit] = {
+ val c = new Channel[Unit](Actor.self(DaemonScheduler))
+ val fun = (res: SyncVar[Unit]) => {
+ Actor.reactWithin(timespan) {
+ case TIMEOUT => res.set({})
+ }
}
+ val a = new FutureActor[Unit](fun, c)
+ a.start()
+ a
}
- def awaitEither[a, b](ft1: Future[a], ft2: Future[b]): Any = {
+ /** Waits for the first result returned by one of two
+ * given futures.
+ *
+ * @param ft1 the first future
+ * @param ft2 the second future
+ * @return the result of the future that resolves first
+ */
+ def awaitEither[A, B >: A](ft1: Future[A], ft2: Future[B]): B = {
val FutCh1 = ft1.inputChannel
val FutCh2 = ft2.inputChannel
Actor.receive {
- case FutCh1 ! arg1 => arg1
- case FutCh2 ! arg2 => arg2
+ case FutCh1 ! arg1 => arg1.asInstanceOf[B]
+ case FutCh2 ! arg2 => arg2.asInstanceOf[B]
}
}
- /**
- * <p>
- * Awaits all futures returning an option containing a list of replies,
- * or timeouts returning <code>None</code>.
- * </p>
- * <p>
- * Note that some of the futures might already have been awaited.
- * </p>
+ /** Waits until either all futures are resolved or a given
+ * time span has passed. Results are collected in a list of
+ * options. The result of a future that resolved during the
+ * time span is its value wrapped in `Some`. The result of a
+ * future that did not resolve during the time span is `None`.
+ *
+ * Note that some of the futures might already have been awaited,
+ * in which case their value is returned wrapped in `Some`.
+ * Passing a timeout of 0 causes `awaitAll` to return immediately.
+ *
+ * @param timeout the time span in ms after which waiting is
+ * aborted
+ * @param fts the futures to be awaited
+ * @return the list of optional future values
+ * @throws java.lang.IllegalArgumentException if timeout is negative,
+ * or timeout + `System.currentTimeMillis()` is negative.
*/
def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- val thisActor = Actor.self
- val timerTask = new java.util.TimerTask {
- def run() { thisActor ! TIMEOUT }
- }
- Actor.timer.schedule(timerTask, timeout)
-
var resultsMap: collection.mutable.Map[Int, Option[Any]] = new collection.mutable.HashMap[Int, Option[Any]]
var cnt = 0
@@ -103,6 +200,12 @@ object Futures {
singleCase
})
+ val thisActor = Actor.self
+ val timerTask = new java.util.TimerTask {
+ def run() { thisActor ! TIMEOUT }
+ }
+ Actor.timer.schedule(timerTask, timeout)
+
def awaitWith(partFuns: Seq[PartialFunction[Any, Pair[Int, Any]]]) {
val reaction: PartialFunction[Any, Unit] = new PartialFunction[Any, Unit] {
def isDefinedAt(msg: Any) = msg match {
@@ -142,25 +245,4 @@ object Futures {
results
}
- private[actors] def fromInputChannel[T](inputChannel: InputChannel[T]): Future[T] =
- new Future[T](inputChannel) {
- def apply() =
- if (isSet) value.get.asInstanceOf[T]
- else inputChannel.receive {
- case any => value = Some(any); value.get.asInstanceOf[T]
- }
- def respond(k: T => Unit): Unit =
- if (isSet) k(value.get.asInstanceOf[T])
- else inputChannel.react {
- case any => value = Some(any); k(value.get.asInstanceOf[T])
- }
- def isSet = value match {
- case None => inputChannel.receiveWithin(0) {
- case TIMEOUT => false
- case any => value = Some(any); true
- }
- case Some(_) => true
- }
- }
-
}
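
A short sketch of the Futures API from the caller's side (FuturesDemo is an illustrative name; the printed values depend on timing):

import scala.actors.Futures._

object FuturesDemo {
  def main(args: Array[String]) {
    val fast = future { 21 * 2 }
    val slow = future { Thread.sleep(200); "slow" }

    println(fast())                    // applying a future blocks until its value is set
    println(fast.isSet)                // true once the value is available

    // wait at most 100 ms for both; futures that did not resolve yield None
    println(awaitAll(100, fast, slow)) // e.g. List(Some(42), None)
  }
}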
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
index 8afed3aa86..2080cd79e5 100644
--- a/src/actors/scala/actors/IScheduler.scala
+++ b/src/actors/scala/actors/IScheduler.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
@@ -49,14 +48,14 @@ trait IScheduler {
*
* @param a the actor to be registered
*/
- def newActor(a: Reactor): Unit
+ def newActor(a: TrackedReactor): Unit
/** Unregisters an actor from this scheduler, because it
* has terminated.
*
* @param a the actor to be registered
*/
- def terminated(a: Reactor): Unit
+ def terminated(a: TrackedReactor): Unit
/** Registers a closure to be executed when the specified
* actor terminates.
@@ -64,7 +63,7 @@ trait IScheduler {
* @param a the actor
* @param f the closure to be registered
*/
- def onTerminate(a: Reactor)(f: => Unit): Unit
+ def onTerminate(a: TrackedReactor)(f: => Unit): Unit
def managedBlock(blocker: scala.concurrent.ManagedBlocker): Unit
diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala
index b7fb0d96b3..3f5bd7183f 100644
--- a/src/actors/scala/actors/InputChannel.scala
+++ b/src/actors/scala/actors/InputChannel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
@@ -14,13 +13,14 @@ package scala.actors
* The <code>InputChannel</code> trait provides a common interface
* for all channels from which values can be received.
*
- * @version 0.9.8
* @author Philipp Haller
+ *
+ * @define channel `InputChannel`
*/
trait InputChannel[+Msg] {
/**
- * Receives a message from this <code>InputChannel</code>.
+ * Receives a message from this $channel.
*
* @param f a partial function with message patterns and actions
* @return result of processing the received value
@@ -28,7 +28,7 @@ trait InputChannel[+Msg] {
def receive[R](f: PartialFunction[Msg, R]): R
/**
- * Receives a message from this <code>InputChannel</code> within
+ * Receives a message from this $channel within
* a certain time span.
*
* @param msec the time span before timeout
@@ -38,8 +38,8 @@ trait InputChannel[+Msg] {
def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R
/**
- * Receives a message from this <code>InputChannel</code>.
- * <p>
+ * Receives a message from this $channel.
+ *
* This method never returns. Therefore, the rest of the computation
* has to be contained in the actions of the partial function.
*
@@ -48,9 +48,9 @@ trait InputChannel[+Msg] {
def react(f: PartialFunction[Msg, Unit]): Nothing
/**
- * Receives a message from this <code>InputChannel</code> within
+ * Receives a message from this $channel within
* a certain time span.
- * <p>
+ *
* This method never returns. Therefore, the rest of the computation
* has to be contained in the actions of the partial function.
*
@@ -60,7 +60,7 @@ trait InputChannel[+Msg] {
def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing
/**
- * Receives the next message from this <code>Channel</code>.
+ * Receives the next message from this $channel.
*/
def ? : Msg
}
diff --git a/src/actors/scala/actors/MessageQueue.scala b/src/actors/scala/actors/MessageQueue.scala
index 469b24c1c1..6936bf63a8 100644
--- a/src/actors/scala/actors/MessageQueue.scala
+++ b/src/actors/scala/actors/MessageQueue.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
@@ -18,11 +17,16 @@ package scala.actors
*/
@serializable @SerialVersionUID(7124278808020037465L)
@deprecated("this class is going to be removed in a future release")
-class MessageQueueElement(val msg: Any, val session: OutputChannel[Any], var next: MessageQueueElement) {
+class MessageQueueElement(msg: Any, session: OutputChannel[Any], next: MessageQueueElement) extends MQueueElement[Any](msg, session, next) {
def this() = this(null, null, null)
def this(msg: Any, session: OutputChannel[Any]) = this(msg, session, null)
}
+private[actors] class MQueueElement[Msg >: Null](val msg: Msg, val session: OutputChannel[Any], var next: MQueueElement[Msg]) {
+ def this() = this(null, null, null)
+ def this(msg: Msg, session: OutputChannel[Any]) = this(msg, session, null)
+}
+
/**
* The class <code>MessageQueue</code> provides an efficient
* implementation of a message queue specialized for this actor
@@ -33,9 +37,11 @@ class MessageQueueElement(val msg: Any, val session: OutputChannel[Any], var nex
*/
@serializable @SerialVersionUID(2168935872884095767L)
@deprecated("this class is going to be removed in a future release")
-class MessageQueue(protected val label: String) {
- protected var first: MessageQueueElement = null
- protected var last: MessageQueueElement = null // last eq null iff list is empty
+class MessageQueue(label: String) extends MQueue[Any](label)
+
+private[actors] class MQueue[Msg >: Null](protected val label: String) {
+ protected var first: MQueueElement[Msg] = null
+ protected var last: MQueueElement[Msg] = null // last eq null iff list is empty
private var _size = 0
def size = _size
@@ -45,9 +51,18 @@ class MessageQueue(protected val label: String) {
_size += diff
}
- def append(msg: Any, session: OutputChannel[Any]) {
+ def append(msg: Msg, session: OutputChannel[Any]) {
+ changeSize(1) // size always increases by 1
+ val el = new MQueueElement(msg, session)
+
+ if (isEmpty) first = el
+ else last.next = el
+
+ last = el
+ }
+
+ def append(el: MQueueElement[Msg]) {
changeSize(1) // size always increases by 1
- val el = new MessageQueueElement(msg, session)
if (isEmpty) first = el
else last.next = el
@@ -55,7 +70,7 @@ class MessageQueue(protected val label: String) {
last = el
}
- def foreach(f: (Any, OutputChannel[Any]) => Unit) {
+ def foreach(f: (Msg, OutputChannel[Any]) => Unit) {
var curr = first
while (curr != null) {
f(curr.msg, curr.session)
@@ -63,7 +78,26 @@ class MessageQueue(protected val label: String) {
}
}
- def foldLeft[B](z: B)(f: (B, Any) => B): B = {
+ def foreachAppend(target: MQueue[Msg]) {
+ var curr = first
+ while (curr != null) {
+ target.append(curr)
+ curr = curr.next
+ }
+ }
+
+ def foreachDequeue(target: MQueue[Msg]) {
+ var curr = first
+ while (curr != null) {
+ target.append(curr)
+ curr = curr.next
+ }
+ first = null
+ last = null
+ _size = 0
+ }
+
+ def foldLeft[B](z: B)(f: (B, Msg) => B): B = {
var acc = z
var curr = first
while (curr != null) {
@@ -76,10 +110,10 @@ class MessageQueue(protected val label: String) {
/** Returns the n-th message that satisfies the predicate <code>p</code>
* without removing it.
*/
- def get(n: Int)(p: Any => Boolean): Option[Any] = {
+ def get(n: Int)(p: Msg => Boolean): Option[Msg] = {
var pos = 0
- def test(msg: Any): Boolean =
+ def test(msg: Msg): Boolean =
p(msg) && (pos == n || { pos += 1; false })
var curr = first
@@ -92,23 +126,60 @@ class MessageQueue(protected val label: String) {
/** Removes the n-th message that satisfies the predicate <code>p</code>.
*/
- def remove(n: Int)(p: (Any, OutputChannel[Any]) => Boolean): Option[(Any, OutputChannel[Any])] =
+ def remove(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[(Msg, OutputChannel[Any])] =
removeInternal(n)(p) map (x => (x.msg, x.session))
/** Extracts the first message that satisfies the predicate <code>p</code>
* or <code>null</code> if <code>p</code> fails for all of them.
*/
- def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MessageQueueElement =
+ def extractFirst(p: (Msg, OutputChannel[Any]) => Boolean): MQueueElement[Msg] =
removeInternal(0)(p) orNull
- private def removeInternal(n: Int)(p: (Any, OutputChannel[Any]) => Boolean): Option[MessageQueueElement] = {
+ def extractFirst(pf: PartialFunction[Msg, Any]): MQueueElement[Msg] = {
+ if (isEmpty) // early return
+ return null
+
+ // special handling if returning the head
+ if (pf.isDefinedAt(first.msg)) {
+ val res = first
+ first = first.next
+ if (res eq last)
+ last = null
+
+ changeSize(-1)
+ res
+ }
+ else {
+ var curr = first.next // init to element #2
+ var prev = first
+
+ while (curr != null) {
+ if (pf.isDefinedAt(curr.msg)) {
+ prev.next = curr.next
+ if (curr eq last)
+ last = prev
+
+ changeSize(-1)
+ return curr // early return
+ }
+ else {
+ prev = curr
+ curr = curr.next
+ }
+ }
+ // not found
+ null
+ }
+ }
+
+ private def removeInternal(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[MQueueElement[Msg]] = {
var pos = 0
- def foundMsg(x: MessageQueueElement) = {
+ def foundMsg(x: MQueueElement[Msg]) = {
changeSize(-1)
Some(x)
}
- def test(msg: Any, session: OutputChannel[Any]): Boolean =
+ def test(msg: Msg, session: OutputChannel[Any]): Boolean =
p(msg, session) && (pos == n || { pos += 1 ; false })
if (isEmpty) // early return
@@ -148,7 +219,7 @@ class MessageQueue(protected val label: String) {
/** Debugging trait.
*/
-private[actors] trait MessageQueueTracer extends MessageQueue
+private[actors] trait MessageQueueTracer extends MQueue[Any]
{
private val queueNumber = MessageQueueTracer.getQueueNumber
@@ -166,7 +237,7 @@ private[actors] trait MessageQueueTracer extends MessageQueue
printQueue("REMOVE %s" format res)
res
}
- override def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MessageQueueElement = {
+ override def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MQueueElement[Any] = {
val res = super.extractFirst(p)
printQueue("EXTRACT_FIRST %s" format res)
res
@@ -181,7 +252,7 @@ private[actors] trait MessageQueueTracer extends MessageQueue
override def toString() = "%s:%d".format(label, queueNumber)
}
-object MessageQueueTracer {
+private[actors] object MessageQueueTracer {
// for tracing purposes
private var queueNumberAssigner = 0
private def getQueueNumber = synchronized {
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index aba8327970..c86cfbad32 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
@@ -14,20 +13,21 @@ package scala.actors
* The <code>OutputChannel</code> trait provides a common interface
* for all channels to which values can be sent.
*
- * @version 0.9.17
* @author Philipp Haller
+ *
+ * @define actor `OutputChannel`
*/
-trait OutputChannel[-Msg] extends AbstractReactor[Msg] {
+trait OutputChannel[-Msg] {
/**
- * Sends <code>msg</code> to this
- * <code>OutputChannel</code> (asynchronous).
+ * Sends <code>msg</code> to this $actor (asynchronous).
+ *
+ * @param msg the message to send
*/
def !(msg: Msg): Unit
/**
- * Sends <code>msg</code> to this
- * <code>OutputChannel</code> (asynchronous) supplying
+ * Sends <code>msg</code> to this $actor (asynchronous) supplying
* explicit reply destination.
*
* @param msg the message to send
@@ -36,14 +36,14 @@ trait OutputChannel[-Msg] extends AbstractReactor[Msg] {
def send(msg: Msg, replyTo: OutputChannel[Any]): Unit
/**
- * Forwards <code>msg</code> to this
- * <code>OutputChannel</code> (asynchronous).
+ * Forwards <code>msg</code> to this $actor (asynchronous).
+ *
+ * @param msg the message to forward
*/
def forward(msg: Msg): Unit
/**
- * Returns the <code>Reactor</code> that is
- * receiving from this <code>OutputChannel</code>.
+ * Returns the <code>Actor</code> that is receiving from this $actor.
*/
def receiver: Actor
}
diff --git a/src/actors/scala/actors/ReactChannel.scala b/src/actors/scala/actors/ReactChannel.scala
index ba0828a807..69312e41d6 100644
--- a/src/actors/scala/actors/ReactChannel.scala
+++ b/src/actors/scala/actors/ReactChannel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id: InputChannel.scala 18844 2009-09-30 20:28:49Z phaller $
package scala.actors
@@ -15,7 +14,7 @@ package scala.actors
*
* @author Philipp Haller
*/
-private[actors] class ReactChannel[Msg](receiver: Reactor) extends InputChannel[Msg] {
+private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputChannel[Msg] {
private case class SendToReactor(channel: ReactChannel[Msg], msg: Msg)
diff --git a/src/actors/scala/actors/Reaction.scala b/src/actors/scala/actors/Reaction.scala
index 80db4b2c1e..83eedf4730 100644
--- a/src/actors/scala/actors/Reaction.scala
+++ b/src/actors/scala/actors/Reaction.scala
@@ -1,20 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import java.lang.{InterruptedException, Runnable}
-private[actors] class KillActorException extends Throwable with ControlException
+private[actors] class KillActorControl extends ControlThrowable
/** <p>
* The abstract class <code>Reaction</code> associates
@@ -23,15 +22,11 @@ private[actors] class KillActorException extends Throwable with ControlException
* <code>java.lang.Runnable</code></a>.
* </p>
*
- * @deprecated("this class is going to be removed in a future release")
* @author Philipp Haller
*/
-class Reaction(a: Actor, f: PartialFunction[Any, Unit], msg: Any) extends ActorTask(a, () => {
- if (f == null)
- a.act()
- else
- f(msg)
-}) {
+@deprecated("This class will be removed in a future release")
+class Reaction(a: Actor, f: PartialFunction[Any, Any], msg: Any)
+extends ActorTask(a, if (f == null) (() => a.act()) else null, f, msg) {
def this(a: Actor) = this(a, null, null)
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index 1015b2eb15..1c1dfdbd7a 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -1,66 +1,92 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
-import scala.actors.scheduler.{DelegatingScheduler, DefaultThreadPoolScheduler}
-import scala.collection.mutable.Queue
+import scala.actors.scheduler.{DelegatingScheduler, ExecutorScheduler,
+ ForkJoinScheduler, ThreadPoolConfig}
+import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue}
+
+private[actors] object Reactor {
-private object Reactor {
val scheduler = new DelegatingScheduler {
def makeNewScheduler: IScheduler = {
- val s = new DefaultThreadPoolScheduler(false)
- Debug.info(this+": starting new "+s+" ["+s.getClass+"]")
- s.start()
- s
+ val sched = if (!ThreadPoolConfig.useForkJoin) {
+ // default is non-daemon
+ val workQueue = new LinkedBlockingQueue[Runnable]
+ ExecutorScheduler(
+ new ThreadPoolExecutor(ThreadPoolConfig.corePoolSize,
+ ThreadPoolConfig.maxPoolSize,
+ 60000L,
+ TimeUnit.MILLISECONDS,
+ workQueue,
+ new ThreadPoolExecutor.CallerRunsPolicy))
+ } else {
+ // default is non-daemon, non-fair
+ val s = new ForkJoinScheduler(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, false, false)
+ s.start()
+ s
+ }
+ Debug.info(this+": starting new "+sched+" ["+sched.getClass+"]")
+ sched
}
}
+
+ val waitingForNone = new PartialFunction[Any, Unit] {
+ def isDefinedAt(x: Any) = false
+ def apply(x: Any) {}
+ }
+
}
/**
* The Reactor trait provides lightweight actors.
*
* @author Philipp Haller
+ *
+ * @define actor reactor
*/
-trait Reactor extends OutputChannel[Any] {
+trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
- /* The actor's mailbox. */
- private[actors] val mailbox = new MessageQueue("Reactor")
+ /* The $actor's mailbox. */
+ private[actors] val mailbox = new MQueue[Msg]("Reactor")
// guarded by this
- private[actors] val sendBuffer = new Queue[(Any, OutputChannel[Any])]
-
- /* If the actor waits in a react, continuation holds the
- * message handler that react was called with.
- */
- @volatile
- private[actors] var continuation: PartialFunction[Any, Unit] = null
+ private[actors] val sendBuffer = new MQueue[Msg]("SendBuffer")
- /* Whenever this Actor executes on some thread, waitingFor is
- * guaranteed to be equal to waitingForNone.
+ /* Whenever this $actor executes on some thread, `waitingFor` is
+ * guaranteed to be equal to `Reactor.waitingForNone`.
+ *
+ * In other words, whenever `waitingFor` is not equal to
+ * `Reactor.waitingForNone`, this $actor is guaranteed not to execute
+ * on some thread.
+ *
+ * If the $actor waits in a `react`, `waitingFor` holds the
+ * message handler that `react` was called with.
*
- * In other words, whenever waitingFor is not equal to
- * waitingForNone, this Actor is guaranteed not to execute on some
- * thread.
+ * guarded by this
*/
- private[actors] val waitingForNone = (m: Any) => false
+ private[actors] var waitingFor: PartialFunction[Msg, Any] =
+ Reactor.waitingForNone
- // guarded by lock of this
- private[actors] var waitingFor: Any => Boolean = waitingForNone
+ // guarded by this
+ private[actors] var _state: Actor.State.Value = Actor.State.New
/**
- * The behavior of an actor is specified by implementing this
- * abstract method.
+ * The $actor's behavior is specified by implementing this method.
*/
def act(): Unit
+ /**
+ * This partial function is applied to exceptions that propagate out of
+ * this $actor's body.
+ */
protected[actors] def exceptionHandler: PartialFunction[Exception, Unit] =
Map()
@@ -70,151 +96,202 @@ trait Reactor extends OutputChannel[Any] {
protected[actors] def mailboxSize: Int =
mailbox.size
- /**
- * Sends <code>msg</code> to this actor (asynchronous) supplying
- * explicit reply destination.
- *
- * @param msg the message to send
- * @param replyTo the reply destination
- */
- def send(msg: Any, replyTo: OutputChannel[Any]) {
+ def send(msg: Msg, replyTo: OutputChannel[Any]) {
val todo = synchronized {
- if (waitingFor ne waitingForNone) {
+ if (waitingFor ne Reactor.waitingForNone) {
val savedWaitingFor = waitingFor
- waitingFor = waitingForNone
+ waitingFor = Reactor.waitingForNone
startSearch(msg, replyTo, savedWaitingFor)
} else {
- sendBuffer.enqueue((msg, replyTo))
+ sendBuffer.append(msg, replyTo)
() => { /* do nothing */ }
}
}
todo()
}
- private[actors] def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: Any => Boolean) =
- () => scheduler execute (makeReaction(() => {
- val startMbox = new MessageQueue("Start")
+ private[actors] def startSearch(msg: Msg, replyTo: OutputChannel[Any], handler: PartialFunction[Msg, Any]) =
+ () => scheduler execute makeReaction(() => {
+ val startMbox = new MQueue[Msg]("Start")
synchronized { startMbox.append(msg, replyTo) }
searchMailbox(startMbox, handler, true)
- }))
+ })
- private[actors] def makeReaction(fun: () => Unit): Runnable =
- new ReactorTask(this, fun)
+ private[actors] final def makeReaction(fun: () => Unit): Runnable =
+ makeReaction(fun, null, null)
- /* Note that this method is called without holding a lock.
- * Therefore, to read an up-to-date continuation, it must be @volatile.
- */
- private[actors] def resumeReceiver(item: (Any, OutputChannel[Any]), onSameThread: Boolean) {
- // assert continuation != null
+ /* This method is supposed to be overridden. */
+ private[actors] def makeReaction(fun: () => Unit, handler: PartialFunction[Msg, Any], msg: Msg): Runnable =
+ new ReactorTask(this, fun, handler, msg)
+
+ private[actors] def resumeReceiver(item: (Msg, OutputChannel[Any]), handler: PartialFunction[Msg, Any], onSameThread: Boolean) {
if (onSameThread)
- continuation(item._1)
+ makeReaction(null, handler, item._1).run()
else
- scheduleActor(continuation, item._1)
+ scheduleActor(handler, item._1)
+
+ /* Here, we throw a SuspendActorControl to avoid
+ terminating this actor when the current ReactorTask
+ is finished.
+
+ The SuspendActorControl skips the termination code
+ in ReactorTask.
+ */
+ throw Actor.suspendException
}
- def !(msg: Any) {
+ def !(msg: Msg) {
send(msg, null)
}
- def forward(msg: Any) {
+ def forward(msg: Msg) {
send(msg, null)
}
def receiver: Actor = this.asInstanceOf[Actor]
// guarded by this
- private[actors] def drainSendBuffer(mbox: MessageQueue) {
- while (!sendBuffer.isEmpty) {
- val item = sendBuffer.dequeue()
- mbox.append(item._1, item._2)
- }
+ private[actors] def drainSendBuffer(mbox: MQueue[Msg]) {
+ sendBuffer.foreachDequeue(mbox)
}
- // assume continuation != null
- private[actors] def searchMailbox(startMbox: MessageQueue,
- handlesMessage: Any => Boolean,
+ private[actors] def searchMailbox(startMbox: MQueue[Msg],
+ handler: PartialFunction[Msg, Any],
resumeOnSameThread: Boolean) {
var tmpMbox = startMbox
var done = false
while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => handlesMessage(msg))
+ val qel = tmpMbox.extractFirst(handler)
if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ tmpMbox.foreachAppend(mailbox)
if (null eq qel) {
synchronized {
// in mean time new stuff might have arrived
if (!sendBuffer.isEmpty) {
- tmpMbox = new MessageQueue("Temp")
+ tmpMbox = new MQueue[Msg]("Temp")
drainSendBuffer(tmpMbox)
// keep going
} else {
- waitingFor = handlesMessage
- done = true
+ waitingFor = handler
+ /* Here, we throw a SuspendActorControl to avoid
+ terminating this actor when the current ReactorTask
+ is finished.
+
+ The SuspendActorControl skips the termination code
+ in ReactorTask.
+ */
+ throw Actor.suspendException
}
}
} else {
- resumeReceiver((qel.msg, qel.session), resumeOnSameThread)
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
done = true
}
}
}
- protected[actors] def react(f: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+ /**
+ * Receives a message from this $actor's mailbox.
+ *
+ * This method never returns. Therefore, the rest of the computation
+ * has to be contained in the actions of the partial function.
+ *
+ * @param handler a partial function with message patterns and actions
+ */
+ protected def react(handler: PartialFunction[Msg, Unit]): Nothing = {
synchronized { drainSendBuffer(mailbox) }
- continuation = f
- searchMailbox(mailbox, f.isDefinedAt, false)
+ searchMailbox(mailbox, handler, false)
throw Actor.suspendException
}
/* This method is guaranteed to be executed from inside
- * an actors act method.
+ * an $actor's act method.
*
* assume handler != null
+ *
+ * never throws SuspendActorControl
*/
- private[actors] def scheduleActor(handler: PartialFunction[Any, Unit], msg: Any) = {
- val fun = () => handler(msg)
- val task = new ReactorTask(this, fun)
- scheduler executeFromActor task
+ private[actors] def scheduleActor(handler: PartialFunction[Msg, Any], msg: Msg) {
+ scheduler executeFromActor makeReaction(null, handler, msg)
+ }
+
+ // guarded by this
+ private[actors] def dostart() {
+ _state = Actor.State.Runnable
+ scheduler newActor this
+ scheduler execute makeReaction(() => act(), null, null)
}
- def start(): Reactor = {
- scheduler.newActor(this)
- val task = new ReactorTask(this, () => act())
- scheduler execute task
+ /**
+ * Starts this $actor. This method is idempotent.
+ */
+ def start(): Reactor[Msg] = synchronized {
+ if (_state == Actor.State.New)
+ dostart()
this
}
+ /**
+ * Restarts this $actor.
+ *
+ * @throws java.lang.IllegalStateException if the $actor is not in state `Actor.State.Terminated`
+ */
+ def restart(): Unit = synchronized {
+ if (_state == Actor.State.Terminated)
+ dostart()
+ else
+ throw new IllegalStateException("restart only in state "+Actor.State.Terminated)
+ }
+
+ /** Returns the execution state of this $actor.
+ *
+ * @return the execution state
+ */
+ def getState: Actor.State.Value = synchronized {
+ if (waitingFor ne Reactor.waitingForNone)
+ Actor.State.Suspended
+ else
+ _state
+ }
+
+ implicit def mkBody[A](body: => A) = new Actor.Body[A] {
+ def andThen[B](other: => B): Unit = Reactor.this.seq(body, other)
+ }
+
/* This closure is used to implement control-flow operations
* built on top of `seq`. Note that the only invocation of
- * `kill` is supposed to be inside `Reaction.run`.
+ * `kill` is supposed to be inside `ReactorTask.run`.
*/
@volatile
private[actors] var kill: () => Unit =
() => { exit() }
private[actors] def seq[a, b](first: => a, next: => b): Unit = {
- val s = Actor.rawSelf(scheduler)
- val killNext = s.kill
- s.kill = () => {
- s.kill = killNext
+ val killNext = this.kill
+ this.kill = () => {
+ this.kill = killNext
// to avoid stack overflow:
// instead of directly executing `next`,
// schedule as continuation
- scheduleActor({ case _ => next }, 1)
+ scheduleActor({ case _ => next }, null)
throw Actor.suspendException
}
first
- throw new KillActorException
+ throw new KillActorControl
}
- protected[this] def exit(): Nothing = {
+ protected[actors] def exit(): Nothing = {
terminated()
throw Actor.suspendException
}
private[actors] def terminated() {
+ synchronized {
+ _state = Actor.State.Terminated
+ // reset waitingFor, otherwise getState returns Suspended
+ waitingFor = Reactor.waitingForNone
+ }
scheduler.terminated(this)
}
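
A minimal Reactor subclass, to show the reworked public surface (Echo and EchoDemo are illustrative names): only act, react, !, start and getState are involved; there is no sender tracking and no reply.

import scala.actors.Reactor

class Echo extends Reactor[String] {
  def act() {
    loop {
      react {
        case s => println("got: " + s)
      }
    }
  }
}

object EchoDemo {
  def main(args: Array[String]) {
    val e = new Echo
    e.start()
    e ! "hello"
    e ! "world"
    println(e.getState)   // Runnable or Suspended, depending on timing
  }
}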
diff --git a/src/actors/scala/actors/ReplyableReactor.scala b/src/actors/scala/actors/ReactorCanReply.scala
index 6ec1d31d1f..884c0ceecc 100644
--- a/src/actors/scala/actors/ReplyableReactor.scala
+++ b/src/actors/scala/actors/ReactorCanReply.scala
@@ -1,44 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
/**
- * The ReplyableReactor trait provides
- * message send operations that may result in a
- * response from the receiver.
+ * The ReactorCanReply trait provides message send operations that
+ * may result in a response from the receiver.
*
* @author Philipp Haller
*/
-private[actors] trait ReplyableReactor extends Replyable[Any, Any] {
+private[actors] trait ReactorCanReply extends CanReply[Any, Any] {
_: ReplyReactor =>
- /**
- * Sends <code>msg</code> to this actor and awaits reply
- * (synchronous).
- *
- * @param msg the message to be sent
- * @return the reply
- */
+ override type Future[+P] = scala.actors.Future[P]
+
def !?(msg: Any): Any =
(this !! msg)()
- /**
- * Sends <code>msg</code> to this actor and awaits reply
- * (synchronous) within <code>msec</code> milliseconds.
- *
- * @param msec the time span before timeout
- * @param msg the message to be sent
- * @return <code>None</code> in case of timeout, otherwise
- * <code>Some(x)</code> where <code>x</code> is the reply
- */
def !?(msec: Long, msg: Any): Option[Any] = {
val myself = Actor.rawSelf(this.scheduler)
val res = new scala.concurrent.SyncVar[Any]
@@ -56,38 +40,27 @@ private[actors] trait ReplyableReactor extends Replyable[Any, Any] {
res.get(msec)
}
- /**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
- */
- override def !!(msg: Any): Future[Any] =
+ def !!(msg: Any): Future[Any] =
this !! (msg, { case x => x })
- /**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
- * The reply is post-processed using the partial function
- * <code>f</code>. This also allows to recover a more
- * precise type for the reply value.
- */
- override def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] = {
+ def !![A](msg: Any, handler: PartialFunction[Any, A]): Future[A] = {
val myself = Actor.rawSelf(this.scheduler)
val ftch = new ReactChannel[A](myself)
val res = new scala.concurrent.SyncVar[A]
val out = new OutputChannel[Any] {
def !(msg: Any) = {
- val msg1 = f(msg)
+ val msg1 = handler(msg)
ftch ! msg1
res set msg1
}
def send(msg: Any, replyTo: OutputChannel[Any]) = {
- val msg1 = f(msg)
+ val msg1 = handler(msg)
ftch.send(msg1, replyTo)
res set msg1
}
def forward(msg: Any) = {
- val msg1 = f(msg)
+ val msg1 = handler(msg)
ftch forward msg1
res set msg1
}
@@ -97,21 +70,21 @@ private[actors] trait ReplyableReactor extends Replyable[Any, Any] {
this.send(msg, out)
- new Future[A](ftch) {
- def apply() =
- if (isSet) value.get.asInstanceOf[A]
- else {
- value = Some(res.get)
- value.get.asInstanceOf[A]
- }
+ new Future[A] {
+ def apply() = {
+ if (!isSet)
+ fvalue = Some(res.get)
+
+ fvalueTyped
+ }
def respond(k: A => Unit): Unit =
- if (isSet) k(value.get.asInstanceOf[A])
+ if (isSet) k(fvalueTyped)
else inputChannel.react {
- case any => value = Some(any); k(value.get.asInstanceOf[A])
+ case any => fvalue = Some(any); k(fvalueTyped)
}
def isSet =
- !value.isEmpty
+ !fvalue.isEmpty
+ def inputChannel = ftch
}
}
-
}
diff --git a/src/actors/scala/actors/ReactorTask.scala b/src/actors/scala/actors/ReactorTask.scala
index b29e770bc4..c4eab2a65a 100644
--- a/src/actors/scala/actors/ReactorTask.scala
+++ b/src/actors/scala/actors/ReactorTask.scala
@@ -1,64 +1,77 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.actors
import java.lang.Runnable
import java.util.concurrent.Callable
+import scala.concurrent.forkjoin.RecursiveAction
+
/** <p>
* The class <code>ReactorTask</code>.
* </p>
*
* @author Philipp Haller
*/
-private[actors] class ReactorTask[T <: Reactor](var reactor: T, var fun: () => Unit)
- extends Callable[Unit] with Runnable {
+private[actors] class ReactorTask[Msg >: Null](var reactor: Reactor[Msg],
+ var fun: () => Any,
+ var handler: PartialFunction[Msg, Any],
+ var msg: Msg)
+ extends RecursiveAction with Callable[Unit] with Runnable {
def run() {
- val saved = Actor.tl.get
- Actor.tl set reactor
try {
- beforeExecuting()
+ beginExecution()
try {
- try {
+ if (fun eq null)
+ handler(msg)
+ else
fun()
- } catch {
- case e: Exception if (reactor.exceptionHandler.isDefinedAt(e)) =>
- reactor.exceptionHandler(e)
- }
} catch {
- case _: KillActorException =>
+ case _: KillActorControl =>
+ // do nothing
+
+ case e: Exception if reactor.exceptionHandler.isDefinedAt(e) =>
+ reactor.exceptionHandler(e)
}
reactor.kill()
}
catch {
- case _: SuspendActorException =>
+ case _: SuspendActorControl =>
// do nothing (continuation is already saved)
- case e: Exception =>
- Debug.info(reactor+": caught "+e)
+ case e: Throwable =>
+ terminateExecution(e)
reactor.terminated()
- afterExecuting(e)
+ if (!e.isInstanceOf[Exception])
+ throw e
} finally {
- Actor.tl set saved
+ suspendExecution()
this.reactor = null
this.fun = null
+ this.handler = null
+ this.msg = null
}
}
def call() = run()
- protected def beforeExecuting() {}
+ def compute() = run()
- protected def afterExecuting(e: Exception) {}
+ protected def beginExecution() {}
+
+ protected def suspendExecution() {}
+
+ protected def terminateExecution(e: Throwable) {
+ Console.err.println(reactor+": caught "+e)
+ e.printStackTrace()
+ }
}
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index 03c97ebdae..af97e286d1 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -1,15 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
+import java.util.{Timer, TimerTask}
+
/** <p>
* The <code>ReplyReactor</code> trait extends the <code>Reactor</code>
* trait with methods to reply to the sender of a message.
@@ -18,18 +19,29 @@ package scala.actors
* </p>
*
* @author Philipp Haller
+ *
+ * @define actor `ReplyReactor`
*/
-trait ReplyReactor extends Reactor with ReplyableReactor {
+trait ReplyReactor extends Reactor[Any] with ReactorCanReply {
/* A list of the current senders. The head of the list is
* the sender of the message that was received last.
*/
@volatile
- private[actors] var senders: List[OutputChannel[Any]] =
- Nil
+ private[actors] var senders: List[OutputChannel[Any]] = List()
- protected[actors] def sender: OutputChannel[Any] =
- senders.head
+ /* This option holds a TimerTask when the actor waits in a
+ * reactWithin. The TimerTask is cancelled when the actor
+ * resumes.
+ *
+ * guarded by this
+ */
+ private[actors] var onTimeout: Option[TimerTask] = None
+
+ /**
+ * Returns the $actor which sent the last received message.
+ */
+ protected[actors] def sender: OutputChannel[Any] = senders.head
/**
* Replies with <code>msg</code> to the sender.
@@ -38,39 +50,34 @@ trait ReplyReactor extends Reactor with ReplyableReactor {
sender ! msg
}
- /**
- * Sends <code>msg</code> to this actor (asynchronous).
- */
override def !(msg: Any) {
send(msg, Actor.rawSelf(scheduler))
}
- /**
- * Forwards <code>msg</code> to this actor (asynchronous).
- */
override def forward(msg: Any) {
send(msg, Actor.sender)
}
- private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), onSameThread: Boolean) {
+ private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
+ }
senders = List(item._2)
- // assert continuation != null
- if (onSameThread)
- continuation(item._1)
- else
- scheduleActor(continuation, item._1)
+ super.resumeReceiver(item, handler, onSameThread)
}
- // assume continuation != null
- private[actors] override def searchMailbox(startMbox: MessageQueue,
- handlesMessage: Any => Boolean,
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
resumeOnSameThread: Boolean) {
var tmpMbox = startMbox
var done = false
while (!done) {
val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
senders = List(replyTo)
- handlesMessage(msg)
+ handler.isDefinedAt(msg)
})
if (tmpMbox ne mailbox)
tmpMbox.foreach((m, s) => mailbox.append(m, s))
@@ -78,19 +85,86 @@ trait ReplyReactor extends Reactor with ReplyableReactor {
synchronized {
// in mean time new stuff might have arrived
if (!sendBuffer.isEmpty) {
- tmpMbox = new MessageQueue("Temp")
+ tmpMbox = new MQueue[Any]("Temp")
drainSendBuffer(tmpMbox)
// keep going
} else {
- waitingFor = handlesMessage
- done = true
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
}
}
} else {
- resumeReceiver((qel.msg, qel.session), resumeOnSameThread)
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
done = true
}
}
}
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ReplyReactorTask(this, fun, handler, msg)
+
+ protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+ super.react(handler)
+ }
+
+ /**
+ * Receives a message from this $actor's mailbox within a certain
+ * time span.
+ *
+ * This method never returns. Therefore, the rest of the computation
+ * has to be contained in the actions of the partial function.
+ *
+ * @param msec the time span before timeout
+ * @param handler a partial function with message patterns and actions
+ */
+ protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+
+ synchronized { drainSendBuffer(mailbox) }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ while (true) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler isDefinedAt m
+ })
+ if (null eq qel) {
+ synchronized {
+          // in the meantime new messages might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else if (msec == 0L) {
+ // throws Actor.suspendException
+ resumeReceiver((TIMEOUT, this), handler, false)
+ } else {
+ waitingFor = handler
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() { thisActor.send(TIMEOUT, thisActor) }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ throw Actor.suspendException
+ }
+ }
+ } else
+ resumeReceiver((qel.msg, qel.session), handler, false)
+ }
+ throw Actor.suspendException
+ }
+
+ override def getState: Actor.State.Value = synchronized {
+ if (waitingFor ne Reactor.waitingForNone) {
+ if (onTimeout.isEmpty)
+ Actor.State.Suspended
+ else
+ Actor.State.TimedSuspended
+ } else
+ _state
+ }
+
}
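
For orientation, a minimal sketch of how the reworked reactWithin is typically used from client code; the object name, the 2000 ms timeout and the "ping"/"pong" messages are illustrative only, while TIMEOUT is the marker object the actors package already provides:

    import scala.actors.Actor._
    import scala.actors.TIMEOUT

    object ReactWithinDemo {
      def main(args: Array[String]) {
        val worker = actor {
          // wait at most 2000 ms for a "ping" before giving up
          reactWithin(2000) {
            case "ping"  => reply("pong")
            case TIMEOUT => println("no ping received, giving up")
          }
        }
        worker ! "ping"
      }
    }

Because reactWithin never returns, any follow-up work has to live inside the partial function, exactly as the Scaladoc above states.
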
diff --git a/src/actors/scala/actors/ReplyReactorTask.scala b/src/actors/scala/actors/ReplyReactorTask.scala
new file mode 100644
index 0000000000..59150276c0
--- /dev/null
+++ b/src/actors/scala/actors/ReplyReactorTask.scala
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id$
+
+package scala.actors
+
+/** <p>
+ *  The <code>ReplyReactorTask</code> class runs a <code>ReplyReactor</code>'s
+ *  reaction, installing it in the actor thread-local for the duration of the
+ *  execution and restoring the previous value afterwards.
+ * </p>
+ *
+ * @author Philipp Haller
+ */
+private[actors] class ReplyReactorTask(reactor: ReplyReactor,
+ fun: () => Unit,
+ handler: PartialFunction[Any, Any],
+ msg: Any)
+ extends ReactorTask(reactor, fun, handler, msg) {
+
+ var saved: ReplyReactor = _
+
+ protected override def beginExecution() {
+ saved = Actor.tl.get
+ Actor.tl set reactor
+ }
+
+ protected override def suspendExecution() {
+ Actor.tl set saved
+ }
+
+}
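
Beyond what ReactorTask provides, this task only installs the executing reactor in Actor's thread-local (Actor.tl) before the body runs and restores the previous value when execution is suspended. A stand-alone sketch of that save/restore discipline, using an illustrative ThreadLocal rather than the library's own:

    object ThreadLocalContext {
      private val current = new ThreadLocal[String]

      // Install `ctx` for the duration of `body`, then restore whatever was
      // there before: the same shape as beginExecution/suspendExecution.
      def withValue[T](ctx: String)(body: => T): T = {
        val saved = current.get
        current.set(ctx)
        try body
        finally current.set(saved)
      }

      def main(args: Array[String]) {
        withValue("outer") {
          withValue("inner") { println(current.get) }  // prints: inner
          println(current.get)                         // prints: outer
        }
      }
    }
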
diff --git a/src/actors/scala/actors/ReplyableActor.scala b/src/actors/scala/actors/ReplyableActor.scala
deleted file mode 100644
index bf9703a629..0000000000
--- a/src/actors/scala/actors/ReplyableActor.scala
+++ /dev/null
@@ -1,160 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors
-
-import java.util.concurrent.ExecutionException
-
-/**
- * The ReplyableActor trait provides
- * message send operations that may result in a
- * response from the receiver.
- *
- * @author Philipp Haller
- */
-private[actors] trait ReplyableActor extends ReplyableReactor {
- thiz: AbstractActor with ReplyReactor =>
-
- /**
- * Sends <code>msg</code> to this actor and awaits reply
- * (synchronous).
- *
- * @param msg the message to be sent
- * @return the reply
- */
- override def !?(msg: Any): Any = {
- val replyCh = new Channel[Any](Actor.self(thiz.scheduler))
- thiz.send(msg, replyCh)
- replyCh.receive {
- case x => x
- }
- }
-
- /**
- * Sends <code>msg</code> to this actor and awaits reply
- * (synchronous) within <code>msec</code> milliseconds.
- *
- * @param msec the time span before timeout
- * @param msg the message to be sent
- * @return <code>None</code> in case of timeout, otherwise
- * <code>Some(x)</code> where <code>x</code> is the reply
- */
- override def !?(msec: Long, msg: Any): Option[Any] = {
- val replyCh = new Channel[Any](Actor.self(thiz.scheduler))
- thiz.send(msg, replyCh)
- replyCh.receiveWithin(msec) {
- case TIMEOUT => None
- case x => Some(x)
- }
- }
-
- /**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
- * The reply is post-processed using the partial function
- * <code>f</code>. This also allows to recover a more
- * precise type for the reply value.
- */
- override def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] = {
- val ftch = new Channel[A](Actor.self(thiz.scheduler))
- thiz.send(msg, new OutputChannel[Any] {
- def !(msg: Any) =
- ftch ! f(msg)
- def send(msg: Any, replyTo: OutputChannel[Any]) =
- ftch.send(f(msg), replyTo)
- def forward(msg: Any) =
- ftch.forward(f(msg))
- def receiver =
- ftch.receiver
- })
- Futures.fromInputChannel(ftch)
- }
-
- /**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
- */
- override def !!(msg: Any): Future[Any] = {
- val ftch = new Channel[Any](Actor.self(thiz.scheduler))
- val linkedChannel = new AbstractActor {
- type Future[+R] = scala.actors.Future[R]
- def !(msg: Any) =
- ftch ! msg
- def send(msg: Any, replyTo: OutputChannel[Any]) =
- ftch.send(msg, replyTo)
- def forward(msg: Any) =
- ftch.forward(msg)
- def receiver =
- ftch.receiver
- def linkTo(to: AbstractActor) { /* do nothing */ }
- def unlinkFrom(from: AbstractActor) { /* do nothing */ }
- def exit(from: AbstractActor, reason: AnyRef) {
- ftch.send(Exit(from, reason), thiz)
- }
- // should never be invoked; return dummy value
- def !?(msg: Any) = msg
- // should never be invoked; return dummy value
- def !?(msec: Long, msg: Any): Option[Any] = Some(msg)
- // should never be invoked; return dummy value
- override def !!(msg: Any): Future[Any] = {
- val someChan = new Channel[Any](Actor.self(thiz.scheduler))
- Futures.fromInputChannel(someChan)
- }
- // should never be invoked; return dummy value
- override def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] = {
- val someChan = new Channel[A](Actor.self(thiz.scheduler))
- Futures.fromInputChannel(someChan)
- }
- }
- thiz.linkTo(linkedChannel)
- thiz.send(msg, linkedChannel)
- new Future[Any](ftch) {
- var exitReason: Option[Any] = None
- val handleReply: PartialFunction[Any, Unit] = {
- case Exit(from, reason) =>
- exitReason = Some(reason)
- case any =>
- value = Some(any)
- }
-
- def apply(): Any =
- if (isSet) {
- if (!value.isEmpty)
- value.get
- else if (!exitReason.isEmpty) {
- val reason = exitReason.get
- if (reason.isInstanceOf[Throwable])
- throw new ExecutionException(reason.asInstanceOf[Throwable])
- else
- throw new ExecutionException(new Exception(reason.toString()))
- }
- } else inputChannel.receive(handleReply andThen {(x: Unit) => apply()})
-
- def respond(k: Any => Unit): Unit =
- if (isSet)
- apply()
- else
- inputChannel.react(handleReply andThen {(x: Unit) => k(apply())})
-
- def isSet = (value match {
- case None =>
- val handleTimeout: PartialFunction[Any, Boolean] = {
- case TIMEOUT =>
- false
- }
- val whatToDo =
- handleTimeout orElse (handleReply andThen {(x: Unit) => true})
- inputChannel.receiveWithin(0)(whatToDo)
- case Some(_) => true
- }) || !exitReason.isEmpty
- }
- }
-
-}
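
The send-and-await operations this deleted trait implemented (!?, the timed !? returning an Option, and the future-returning !!) remain available on actors through the new *CanReply traits (ReplyReactor now mixes in ReactorCanReply, see above). A minimal usage sketch; the echo actor, the 500 ms timeout and the messages are illustrative:

    import scala.actors.Actor._

    object SyncSendDemo {
      def main(args: Array[String]) {
        val echo = actor {
          loop {
            react {
              case "stop" => exit()
              case msg    => reply("echo: " + msg)
            }
          }
        }

        val r1 = echo !? "hello"                      // blocks until the reply arrives
        println(r1)                                   // echo: hello

        val r2: Option[Any] = echo !? (500, "again")  // None if no reply within 500 ms
        println(r2)                                   // Some(echo: again)

        val f = echo !! "later"                       // future; apply() forces the result
        println(f())                                  // echo: later

        echo ! "stop"                                 // let the actor (and the scheduler) shut down
      }
    }
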
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index 080abbbf41..ad5d83e911 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -1,21 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
import java.util.concurrent._
-import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolScheduler}
+import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolScheduler, ThreadPoolConfig}
/**
* The <code>Scheduler</code> object is used by <code>Actor</code> to
- * execute tasks of an execution of an actor.
+ * execute tasks of an actor execution.
*
* @author Philipp Haller
*/
@@ -24,15 +23,13 @@ object Scheduler extends DelegatingScheduler {
Debug.info("initializing "+this+"...")
def makeNewScheduler: IScheduler = {
- // test on which JVM we are running
- val jvmVendor = System.getProperty("java.vm.vendor")
- val sched = if (jvmVendor.indexOf("IBM") != -1) {
- Debug.info(this+": running on a "+jvmVendor+" JVM")
- // on IBM J9 1.6 do not use ForkJoinPool
+ val sched = if (!ThreadPoolConfig.useForkJoin) {
+ // default is non-daemon
val s = new ResizableThreadPoolScheduler(false)
s.start()
s
} else {
+ // default is non-daemon, fair
val s = new ForkJoinScheduler
s.start()
s
diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala
index 2c5ccd2dd8..75c8405c4b 100644
--- a/src/actors/scala/actors/SchedulerAdapter.scala
+++ b/src/actors/scala/actors/SchedulerAdapter.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
@@ -42,7 +41,7 @@ trait SchedulerAdapter extends IScheduler {
*
* @param a the actor to be registered
*/
- def newActor(a: Reactor) =
+ def newActor(a: TrackedReactor) =
Scheduler.newActor(a)
/** Unregisters an actor from this scheduler, because it
@@ -50,7 +49,7 @@ trait SchedulerAdapter extends IScheduler {
*
* @param a the actor to be unregistered
*/
- def terminated(a: Reactor) =
+ def terminated(a: TrackedReactor) =
Scheduler.terminated(a)
/** Registers a closure to be executed when the specified
@@ -59,7 +58,7 @@ trait SchedulerAdapter extends IScheduler {
* @param a the actor
* @param f the closure to be registered
*/
- def onTerminate(a: Reactor)(f: => Unit) =
+ def onTerminate(a: TrackedReactor)(f: => Unit) =
Scheduler.onTerminate(a)(f)
def managedBlock(blocker: scala.concurrent.ManagedBlocker) {
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
new file mode 100644
index 0000000000..2b61b1ad7a
--- /dev/null
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -0,0 +1,33 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.actors
+
+/**
+ * The exit reason when an actor fails to catch an exception.
+ *
+ * @param actor the actor that threw the exception
+ * @param message the message the actor was processing, or None if no message (e.g. on initial startup)
+ * @param sender the sender of the most recent message
+ * @param thread the thread on which the actor was running
+ * @param cause the uncaught exception
+ *
+ * @author Philipp Haller
+ * @author Erik Engbrecht
+ */
+case class UncaughtException(actor: Actor,
+ message: Option[Any],
+ sender: Option[OutputChannel[Any]],
+ thread: Thread,
+ cause: Throwable)
+extends Exception(cause) {
+
+ override def toString() =
+ "UncaughtException("+actor+","+message+","+sender+","+cause+")"
+
+}
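
Because UncaughtException extends Exception and carries the failing actor, the message being processed and the original cause, a linked actor that traps exits can pattern-match on it as the Exit reason. A hypothetical sketch; the actor structure and the division by zero are illustrative only:

    import scala.actors.{Exit, UncaughtException}
    import scala.actors.Actor._

    object SupervisionDemo {
      def main(args: Array[String]) {
        actor {
          self.trapExit = true

          val failing = actor {
            react {
              case n: Int => reply(100 / n)   // n == 0 throws ArithmeticException
            }
          }

          link(failing)
          failing ! 0

          react {
            case Exit(_, UncaughtException(victim, msg, _, _, cause)) =>
              println(victim + " failed on " + msg + ": " + cause)
            case Exit(_, reason) =>
              println("linked actor exited: " + reason)
          }
        }
      }
    }
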
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index 7075518931..66ba05b1dd 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -1,18 +1,32 @@
package scala
package object actors {
- @deprecated("use scala.actors.scheduler.ForkJoinScheduler instead")
+
+ // type of Reactors tracked by termination detector
+ private[actors] type TrackedReactor = Reactor[A] forSome { type A >: Null }
+
+ @deprecated("use scheduler.ForkJoinScheduler instead")
type FJTaskScheduler2 = scala.actors.scheduler.ForkJoinScheduler
- @deprecated("use scala.actors.scheduler.ForkJoinScheduler instead")
+ @deprecated("use scheduler.ForkJoinScheduler instead")
type TickedScheduler = scala.actors.scheduler.ForkJoinScheduler
- @deprecated("use scala.actors.scheduler.ForkJoinScheduler instead")
+ @deprecated("use scheduler.ForkJoinScheduler instead")
type WorkerThreadScheduler = scala.actors.scheduler.ForkJoinScheduler
@deprecated("this class is going to be removed in a future release")
type WorkerThread = java.lang.Thread
+ @deprecated("use scheduler.SingleThreadedScheduler instead")
+ type SingleThreadedScheduler = scala.actors.scheduler.SingleThreadedScheduler
+
+ // This used to do a blind cast and throw a CCE after the package
+  // object was loaded. I have replaced it with a variation that should work
+  // in whatever cases were previously working, but fail less exceptionally for
+ // those not intentionally using it.
@deprecated("this value is going to be removed in a future release")
- val ActorGC = scala.actors.Scheduler.impl.asInstanceOf[scala.actors.scheduler.ThreadPoolScheduler]
+ val ActorGC = scala.actors.Scheduler.impl match {
+ case x: scala.actors.scheduler.ActorGC => x
+ case _ => null
+ }
}
diff --git a/src/actors/scala/actors/remote/FreshNameCreator.scala b/src/actors/scala/actors/remote/FreshNameCreator.scala
index 9be6d06b5b..f7cfcab687 100644
--- a/src/actors/scala/actors/remote/FreshNameCreator.scala
+++ b/src/actors/scala/actors/remote/FreshNameCreator.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package remote
diff --git a/src/actors/scala/actors/remote/JavaSerializer.scala b/src/actors/scala/actors/remote/JavaSerializer.scala
index 4e8073055e..5d9f9816d1 100644
--- a/src/actors/scala/actors/remote/JavaSerializer.scala
+++ b/src/actors/scala/actors/remote/JavaSerializer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package remote
diff --git a/src/actors/scala/actors/remote/NetKernel.scala b/src/actors/scala/actors/remote/NetKernel.scala
index 8673b03697..a5ec3e3999 100644
--- a/src/actors/scala/actors/remote/NetKernel.scala
+++ b/src/actors/scala/actors/remote/NetKernel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package remote
@@ -140,7 +139,7 @@ private[remote] class NetKernel(service: Service) {
def terminate() {
// tell all proxies to terminate
- proxies.valuesIterator foreach { p => p.send(Terminate, null) }
+ proxies.values foreach { _.send(Terminate, null) }
// tell service to terminate
service.terminate()
diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala
index 60bcd34221..625ce8bb0b 100644
--- a/src/actors/scala/actors/remote/Proxy.scala
+++ b/src/actors/scala/actors/remote/Proxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package remote
@@ -20,8 +19,6 @@ import scala.collection.mutable.HashMap
private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: NetKernel) extends AbstractActor {
import java.io.{IOException, ObjectOutputStream, ObjectInputStream}
- type Future[+R] = scala.actors.Future[R]
-
@transient
private[remote] var del: Actor = null
startDelegate()
@@ -44,7 +41,7 @@ private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: Net
}
private def setupKernel() {
- kernel = RemoteActor.someKernel
+ kernel = RemoteActor.someNetKernel
kernel.registerProxy(node, name, this)
}
@@ -66,10 +63,10 @@ private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: Net
def !?(msec: Long, msg: Any): Option[Any] =
del !? (msec, msg)
- override def !!(msg: Any): Future[Any] =
+ def !!(msg: Any): Future[Any] =
del !! msg
- override def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] =
+ def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] =
del !! (msg, f)
def linkTo(to: AbstractActor): Unit =
diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala
index 7316017e32..c5397bc1be 100644
--- a/src/actors/scala/actors/remote/RemoteActor.scala
+++ b/src/actors/scala/actors/remote/RemoteActor.scala
@@ -1,45 +1,41 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package remote
-/** <p>
- * This object provides methods for creating, registering, and
- * selecting remotely accessible actors.
- * </p>
- * <p>
- * A remote actor is typically created like this:
- * </p><pre>
+/**
+ * This object provides methods for creating, registering, and
+ * selecting remotely accessible actors.
+ *
+ * A remote actor is typically created like this:
+ * {{{
* actor {
* alive(9010)
* register('myName, self)
*
* // behavior
* }
- * </pre>
- * <p>
- * It can be accessed by an actor running on a (possibly)
- * different node by selecting it in the following way:
- * </p><pre>
+ * }}}
+ * It can be accessed by an actor running on a (possibly)
+ * different node by selecting it in the following way:
+ * {{{
* actor {
* // ...
- * <b>val</b> c = select(Node("127.0.0.1", 9010), 'myName)
+ * val c = select(Node("127.0.0.1", 9010), 'myName)
* c ! msg
* // ...
* }
- * </pre>
+ * }}}
*
- * @version 0.9.18
* @author Philipp Haller
*/
object RemoteActor {
@@ -60,10 +56,10 @@ object RemoteActor {
* <code>port</code>.
*/
def alive(port: Int): Unit = synchronized {
- createKernelOnPort(port)
+ createNetKernelOnPort(port)
}
- def createKernelOnPort(port: Int): NetKernel = {
+ private def createNetKernelOnPort(port: Int): NetKernel = {
val serv = TcpService(port, cl)
val kern = serv.kernel
val s = Actor.self
@@ -85,6 +81,10 @@ object RemoteActor {
kern
}
+ @deprecated("this member is going to be removed in a future release")
+ def createKernelOnPort(port: Int): NetKernel =
+ createNetKernelOnPort(port)
+
/**
* Registers <code>a</code> under <code>name</code> on this
* node.
@@ -105,7 +105,7 @@ object RemoteActor {
case None =>
// establish remotely accessible
// return path (sender)
- createKernelOnPort(TcpService.generatePort)
+ createNetKernelOnPort(TcpService.generatePort)
case Some(k) =>
k
}
@@ -118,8 +118,12 @@ object RemoteActor {
selfKernel.getOrCreateProxy(node, sym)
}
- def someKernel: NetKernel =
+ private[remote] def someNetKernel: NetKernel =
kernels.valuesIterator.next
+
+ @deprecated("this member is going to be removed in a future release")
+ def someKernel: NetKernel =
+ someNetKernel
}
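
Putting the two Scaladoc fragments above together, a self-contained sketch that runs both ends in a single JVM; the port 9010 and the 'echoService name are arbitrary, and in practice the two actors would live in separate processes:

    import scala.actors.Actor._
    import scala.actors.remote.RemoteActor._
    import scala.actors.remote.Node

    object RemoteEchoDemo {
      def main(args: Array[String]) {
        // server side: make this node reachable and register the actor
        actor {
          alive(9010)
          register('echoService, self)
          react {
            case msg => reply("echo: " + msg)
          }
        }

        // client side: select the remote actor by node and name
        actor {
          val svc = select(Node("127.0.0.1", 9010), 'echoService)
          println(svc !? "hello")   // prints: echo: hello
        }
      }
    }
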
diff --git a/src/actors/scala/actors/remote/Serializer.scala b/src/actors/scala/actors/remote/Serializer.scala
index fe3ddb0fae..e6f5bd5ba7 100644
--- a/src/actors/scala/actors/remote/Serializer.scala
+++ b/src/actors/scala/actors/remote/Serializer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
diff --git a/src/actors/scala/actors/remote/Service.scala b/src/actors/scala/actors/remote/Service.scala
index 86891b3987..298d87e3c5 100644
--- a/src/actors/scala/actors/remote/Service.scala
+++ b/src/actors/scala/actors/remote/Service.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package remote
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index 9a43f938c0..a6f7adffed 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
diff --git a/src/actors/scala/actors/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala
index 68d8bda8e2..059eedf7f4 100644
--- a/src/actors/scala/actors/ActorGC.scala
+++ b/src/actors/scala/actors/scheduler/ActorGC.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
+package scheduler
import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
import scala.collection.mutable.HashSet
-import scala.actors.scheduler.TerminationMonitor
/**
* ActorGC keeps track of the number of live actors being managed by a
@@ -29,19 +28,19 @@ trait ActorGC extends TerminationMonitor {
self: IScheduler =>
/** Actors are added to refQ in newActor. */
- private val refQ = new ReferenceQueue[Reactor]
+ private val refQ = new ReferenceQueue[TrackedReactor]
/**
* This is a set of references to all the actors registered with
* this ActorGC. It is maintained so that the WeakReferences will not be GC'd
* before the actors to which they point.
*/
- private val refSet = new HashSet[Reference[t] forSome { type t <: Reactor }]
+ private val refSet = new HashSet[Reference[t] forSome { type t <: TrackedReactor }]
/** newActor is invoked whenever a new actor is started. */
- override def newActor(a: Reactor) = synchronized {
+ override def newActor(a: TrackedReactor) = synchronized {
// registers a reference to the actor with the ReferenceQueue
- val wr = new WeakReference[Reactor](a, refQ)
+ val wr = new WeakReference[TrackedReactor](a, refQ)
refSet += wr
activeActors += 1
}
@@ -67,24 +66,24 @@ trait ActorGC extends TerminationMonitor {
}
/** Checks whether all actors have terminated. */
- override def allTerminated: Boolean = synchronized {
+ override private[actors] def allActorsTerminated: Boolean = synchronized {
activeActors <= 0
}
- override def onTerminate(a: Reactor)(f: => Unit): Unit = synchronized {
+ override def onTerminate(a: TrackedReactor)(f: => Unit): Unit = synchronized {
terminationHandlers += (a -> (() => f))
}
- override def terminated(a: Reactor) = {
+ override def terminated(a: TrackedReactor) = {
super.terminated(a)
synchronized {
// find the weak reference that points to the terminated actor, if any
- refSet.find((ref: Reference[t] forSome { type t <: Reactor }) => ref.get() == a) match {
+ refSet.find((ref: Reference[t] forSome { type t <: TrackedReactor }) => ref.get() == a) match {
case Some(r) =>
// invoking clear will not cause r to be enqueued
r.clear()
- refSet -= r.asInstanceOf[Reference[t] forSome { type t <: Reactor }]
+ refSet -= r.asInstanceOf[Reference[t] forSome { type t <: TrackedReactor }]
case None =>
// do nothing
}
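
The bookkeeping above is the standard weak-reference idiom: every tracked actor is wrapped in a WeakReference registered with a ReferenceQueue, the references themselves are kept in refSet so they stay reachable, and draining the queue reveals actors that were collected without terminating. A stand-alone sketch of the same mechanism, tracking plain objects instead of actors (names and sizes are illustrative):

    import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
    import scala.collection.mutable.HashSet

    object WeakTrackingDemo {
      def main(args: Array[String]) {
        val refQ   = new ReferenceQueue[AnyRef]
        val refSet = new HashSet[Reference[_ <: AnyRef]]  // keeps the WeakReferences alive
        var live   = 0

        def track(obj: AnyRef) {
          refSet += new WeakReference[AnyRef](obj, refQ)  // enqueued once obj is collected
          live += 1
        }

        val keep = new Array[Byte](16)
        track(keep)                       // stays strongly reachable
        track(new Array[Byte](16))        // becomes garbage immediately

        System.gc()
        Thread.sleep(100)

        // drain the queue: every reference found here is one fewer live object
        var ref = refQ.poll()
        while (ref ne null) {
          refSet -= ref
          live -= 1
          ref = refQ.poll()
        }
        println("still tracked as live: " + live)   // typically prints 1
        println(keep.length)                        // keeps `keep` reachable until here
      }
    }
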
diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
index e421984d8b..257e847a6a 100644
--- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,12 +16,8 @@ package scheduler
*/
object DaemonScheduler extends DelegatingScheduler {
- def makeNewScheduler(): IScheduler = {
- // test on which JVM we are running
- val jvmVendor = System.getProperty("java.vm.vendor")
- val sched = if (jvmVendor.indexOf("IBM") != -1) {
- Debug.info(this+": running on a "+jvmVendor+" JVM")
- // on IBM J9 1.6 do not use ForkJoinPool
+ protected def makeNewScheduler(): IScheduler = {
+ val sched = if (!ThreadPoolConfig.useForkJoin) {
val s = new ResizableThreadPoolScheduler(true)
s.start()
s
diff --git a/src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala
deleted file mode 100644
index f5eb6c4884..0000000000
--- a/src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors.scheduler
-
-import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue,
- ThreadFactory}
-
-/**
- * The <code>DefaultThreadPoolScheduler</code> class uses a default
- * <code>ThreadPoolExecutor</code> for executing <code>Actor</code>s.
- *
- * It can be configured using the two JVM properties
- * <code>actors.corePoolSize</code> and
- * <code>actors.maxPoolSize</code> that control the initial and
- * maximum size of the thread pool, respectively.
- *
- * @author Philipp Haller
- */
-private[actors] class DefaultThreadPoolScheduler(daemon: Boolean) extends ThreadPoolScheduler(daemon) {
-
- executor = {
- val workQueue = new LinkedBlockingQueue[Runnable]
-
- val threadFactory = new ThreadFactory {
- def newThread(r: Runnable): Thread = {
- val t = new Thread(r)
- t setDaemon daemon
- t
- }
- }
-
- new ThreadPoolExecutor(ThreadPoolConfig.corePoolSize,
- ThreadPoolConfig.maxPoolSize,
- 60000L,
- TimeUnit.MILLISECONDS,
- workQueue,
- threadFactory,
- new ThreadPoolExecutor.CallerRunsPolicy)
- }
-
-}
diff --git a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala b/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
index 434911c48d..193b1e3e2b 100644
--- a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -48,7 +48,7 @@ private[actors] trait DelegatingScheduler extends IScheduler {
}
}
- def newActor(actor: Reactor) = synchronized {
+ def newActor(actor: TrackedReactor) = synchronized {
val createNew = if (sched eq null)
true
else sched.synchronized {
@@ -65,9 +65,9 @@ private[actors] trait DelegatingScheduler extends IScheduler {
}
}
- def terminated(actor: Reactor) = impl.terminated(actor)
+ def terminated(actor: TrackedReactor) = impl.terminated(actor)
- def onTerminate(actor: Reactor)(f: => Unit) = impl.onTerminate(actor)(f)
+ def onTerminate(actor: TrackedReactor)(f: => Unit) = impl.onTerminate(actor)(f)
override def managedBlock(blocker: ManagedBlocker): Unit =
impl.managedBlock(blocker)
diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
index 52ec977b1f..d9194c0edf 100644
--- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
@@ -1,26 +1,66 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package scheduler
-import java.util.concurrent.Callable
+import java.util.concurrent.{Callable, ExecutorService}
import scala.concurrent.ThreadPoolRunner
/**
+ * The <code>ExecutorScheduler</code> object is used to create
+ * <code>ExecutorScheduler</code> instances.
+ *
+ * @author Philipp Haller
+ */
+object ExecutorScheduler {
+
+ private def start(sched: ExecutorScheduler): ExecutorScheduler = {
+ sched.start()
+ sched
+ }
+
+ /** Creates an <code>ExecutorScheduler</code> using the provided
+ * <code>ExecutorService</code>.
+ *
+ * @param exec the executor to use
+ * @return the scheduler
+ */
+ def apply(exec: ExecutorService): ExecutorScheduler =
+ start(new ExecutorScheduler {
+ val executor: ExecutorService = exec
+ })
+
+ /** Creates an <code>ExecutorScheduler</code> using the provided
+ * <code>ExecutorService</code>.
+ *
+ * @param exec the executor to use
+   * @param  term  whether the scheduler should terminate when all actors have terminated
+ * @return the scheduler
+ */
+ def apply(exec: ExecutorService, term: Boolean): ExecutorScheduler =
+ start(new ExecutorScheduler {
+ val executor: ExecutorService = exec
+ override val terminate = term
+ })
+
+}
+
+/**
* The <code>ExecutorScheduler</code> class uses an
* <code>ExecutorService</code> to execute <code>Actor</code>s.
*
* @author Philipp Haller
*/
-private[scheduler] trait ExecutorScheduler extends IScheduler with ThreadPoolRunner {
+trait ExecutorScheduler extends Thread
+ with IScheduler with TerminationService
+ with ThreadPoolRunner {
def execute(task: Runnable) {
super[ThreadPoolRunner].execute(task.asInstanceOf[Task[Unit]])
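
The new companion object replaces the constructor-based setup of the thread-pool schedulers deleted further down. A hedged sketch of wrapping an existing executor with it; the fixed pool of four threads is arbitrary:

    import java.util.concurrent.Executors
    import scala.actors.scheduler.ExecutorScheduler

    object CustomSchedulerDemo {
      def main(args: Array[String]) {
        // Wrap an existing ExecutorService; `true` asks the scheduler to shut the
        // pool down once all actors running on it have terminated.
        val sched = ExecutorScheduler(Executors.newFixedThreadPool(4), true)

        // IScheduler also accepts plain thunks, which is handy for a quick check.
        sched execute {
          println("running on " + Thread.currentThread.getName)
        }

        sched.shutdown()
      }
    }

Actors can be pointed at such a scheduler by overriding the scheduler member they inherit from Reactor, assuming the default Scheduler object is not what is wanted.
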
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index b0198d4879..4aaef16661 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -9,7 +9,8 @@ import scala.concurrent.forkjoin._
*
* @author Philipp Haller
*/
-class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean) extends Runnable with IScheduler with TerminationMonitor {
+class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean, fair: Boolean)
+ extends Runnable with IScheduler with TerminationMonitor {
private var pool = makeNewPool() // guarded by this
private var terminating = false // guarded by this
@@ -21,12 +22,19 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
protected val CHECK_FREQ = 10
+ // this random number generator is only used in fair mode
+ private lazy val random = new java.util.Random // guarded by random
+
+ def this(d: Boolean, f: Boolean) {
+ this(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, d, f)
+ }
+
def this(d: Boolean) {
- this(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, d)
+ this(d, true) // default is fair
}
def this() {
- this(false)
+ this(false) // default is non-daemon
}
private def makeNewPool(): DrainableForkJoinPool = {
@@ -65,12 +73,12 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
}
if (terminating)
- throw new QuitException
+ throw new QuitControl
- if (allTerminated) {
+ if (allActorsTerminated) {
Debug.info(this+": all actors terminated")
terminating = true
- throw new QuitException
+ throw new QuitControl
}
if (!snapshoting) {
@@ -81,12 +89,12 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
Debug.info(this+": drained "+num+" tasks")
drainedTasks = list
terminating = true
- throw new QuitException
+ throw new QuitControl
}
}
}
} catch {
- case _: QuitException =>
+ case _: QuitControl =>
Debug.info(this+": initiating shutdown...")
while (!pool.isQuiescent()) {
try {
@@ -106,11 +114,11 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
}
override def executeFromActor(task: Runnable) {
- // TODO: only pass RecursiveAction (with Runnable), and cast to it
- val recAction = new RecursiveAction {
- def compute() = task.run()
- }
- recAction.fork()
+ // in fair mode: 2% chance of submitting to global task queue
+ if (fair && random.synchronized { random.nextInt(50) == 1 })
+ pool.execute(task)
+ else
+ task.asInstanceOf[RecursiveAction].fork()
}
/** Submits a closure for execution.
diff --git a/src/actors/scala/actors/AbstractReactor.scala b/src/actors/scala/actors/scheduler/QuitControl.scala
index 4ff20eaf3c..b217094c1e 100644
--- a/src/actors/scala/actors/AbstractReactor.scala
+++ b/src/actors/scala/actors/scheduler/QuitControl.scala
@@ -1,27 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+package scala.actors.scheduler
+
+import scala.util.control.ControlThrowable
/**
- * The <code>AbstractReactor</code> trait.
+ * The <code>QuitControl</code> class is used to manage control flow
+ * of certain schedulers.
*
- * @version 0.9.18
* @author Philipp Haller
*/
-package scala.actors
-
-trait AbstractReactor[-T] {
-
- /**
- * Sends <code>msg</code> to this
- * <code>AbstractReactor</code> (asynchronous).
- */
- def !(msg: T): Unit
-
-}
+private[scheduler] class QuitControl extends ControlThrowable
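
QuitControl replaces the QuitException that is deleted further down together with SchedulerService; extending ControlThrowable marks it as a pure control-flow exception, which the scheduler run loops elsewhere in this diff throw to leave their otherwise infinite while(true) loops. A stand-alone sketch of that shape (names and the five-round limit are illustrative):

    import scala.util.control.ControlThrowable

    // illustrative control-flow exception in the same style as QuitControl
    class StopLoop extends ControlThrowable

    object ControlFlowDemo {
      def main(args: Array[String]) {
        var rounds = 0
        try {
          while (true) {
            rounds += 1
            // ... periodic work would go here ...
            if (rounds >= 5) throw new StopLoop   // leave the infinite loop
          }
        } catch {
          case _: StopLoop => println("shutting down after " + rounds + " rounds")
        }
      }
    }
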
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
index e9044ca268..89df10e19c 100644
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id: ThreadPoolScheduler.scala 18948 2009-10-06 17:30:27Z phaller $
package scala.actors.scheduler
@@ -112,7 +111,7 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
}
if (terminating)
- throw new QuitException
+ throw new QuitControl
if (!suspending) {
gc()
@@ -122,26 +121,26 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
if (coreSize - activeBlocked < numCores && coreSize < maxSize) {
coreSize = numCores + activeBlocked
executor.setCorePoolSize(coreSize)
- } else if (terminate && allTerminated) {
+ } else if (terminate && allActorsTerminated) {
// if all worker threads idle terminate
if (executor.getActiveCount() == 0) {
Debug.info(this+": initiating shutdown...")
Debug.info(this+": corePoolSize = "+coreSize+", maxPoolSize = "+maxSize)
terminating = true
- throw new QuitException
+ throw new QuitControl
}
}
} else {
drainedTasks = executor.shutdownNow()
Debug.info(this+": drained "+drainedTasks.size()+" tasks")
terminating = true
- throw new QuitException
+ throw new QuitControl
}
} // sync
}
} catch {
- case _: QuitException =>
+ case _: QuitControl =>
executor.shutdown()
// allow thread to exit
}
diff --git a/src/actors/scala/actors/scheduler/SchedulerService.scala b/src/actors/scala/actors/scheduler/SchedulerService.scala
deleted file mode 100644
index 37cfa5ca59..0000000000
--- a/src/actors/scala/actors/scheduler/SchedulerService.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors
-package scheduler
-
-import scala.util.control.ControlException
-import java.lang.{Runnable, Thread, InterruptedException}
-
-/**
- * The abstract <code>SchedulerService</code> class allows subclasses
- * to implement a custom <code>onShutdown</code> method, which is
- * invoked when the runtime system has detected that all actors have
- * been terminated.
- *
- * @version 0.9.18
- * @author Philipp Haller
- */
-abstract class SchedulerService(daemon: Boolean) extends Thread with IScheduler with ActorGC {
-
- setDaemon(daemon)
-
- def this() =
- this(false)
-
- private var terminating = false
-
- protected val CHECK_FREQ = 100
-
- def onShutdown(): Unit
-
- override def run() {
- try {
- while (true) {
- this.synchronized {
- try {
- wait(CHECK_FREQ)
- } catch {
- case _: InterruptedException =>
- }
- if (terminating)
- throw new QuitException
-
- gc()
-
- if (allTerminated)
- throw new QuitException
- }
- }
- } catch {
- case _: QuitException =>
- Debug.info(this+": initiating shutdown...")
- // invoke shutdown hook
- onShutdown()
- // allow thread to exit
- }
- }
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit = synchronized {
- terminating = true
- }
-}
-
-/**
- * The <code>QuitException</code> class is used to manage control flow
- * of certain schedulers and worker threads.
- *
- * @version 0.9.8
- * @author Philipp Haller
- */
-private[actors] class QuitException extends Throwable with ControlException
diff --git a/src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala b/src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala
deleted file mode 100644
index d77777caae..0000000000
--- a/src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors
-package scheduler
-
-import java.util.concurrent.ExecutorService
-
-/**
- * The <code>SimpleExecutorScheduler</code> class uses an
- * <code>ExecutorService</code> to execute <code>Actor</code>s. It
- * does not start an additional thread.
- *
- * A <code>SimpleExecutorScheduler</code> attempts to shut down
- * the underlying <code>ExecutorService</code> only if
- * <code>terminate</code> is set to true.
- *
- * Otherwise, the <code>ExecutorService</code> must be shut down either
- * directly or by shutting down the
- * <code>SimpleExecutorScheduler</code> instance.
- *
- * @author Philipp Haller
- */
-class SimpleExecutorScheduler(protected var executor: ExecutorService,
- protected var terminate: Boolean)
- extends TerminationService(terminate) with ExecutorScheduler {
-
- /* This constructor (and the var above) is currently only used to work
- * around a bug in scaladoc, which cannot deal with early initializers
- * (to be used in subclasses such as DefaultExecutorScheduler) properly.
- */
- def this() {
- this(null, true)
- }
-
-}
diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
index 0a9c41a0c0..6ee3754d69 100644
--- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
+++ b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package scheduler
@@ -54,11 +53,11 @@ class SingleThreadedScheduler extends IScheduler {
isShutdown = true
}
- def newActor(actor: Reactor) {}
- def terminated(actor: Reactor) {}
+ def newActor(actor: TrackedReactor) {}
+ def terminated(actor: TrackedReactor) {}
// TODO: run termination handlers at end of shutdown.
- def onTerminate(actor: Reactor)(f: => Unit) {}
+ def onTerminate(actor: TrackedReactor)(f: => Unit) {}
def isActive =
!isShutdown
diff --git a/src/actors/scala/actors/scheduler/TerminationMonitor.scala b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
index 875b79a8c1..bb6ae0bde8 100644
--- a/src/actors/scala/actors/scheduler/TerminationMonitor.scala
+++ b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
@@ -1,26 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package scheduler
import scala.collection.mutable.HashMap
-trait TerminationMonitor {
+private[scheduler] trait TerminationMonitor {
+ _: IScheduler =>
protected var activeActors = 0
- protected val terminationHandlers = new HashMap[Reactor, () => Unit]
+ protected val terminationHandlers = new HashMap[TrackedReactor, () => Unit]
private var started = false
/** newActor is invoked whenever a new actor is started. */
- def newActor(a: Reactor) = synchronized {
+ def newActor(a: TrackedReactor) = synchronized {
activeActors += 1
if (!started)
started = true
@@ -32,7 +32,7 @@ trait TerminationMonitor {
* @param a the actor
* @param f the closure to be registered
*/
- def onTerminate(a: Reactor)(f: => Unit): Unit = synchronized {
+ def onTerminate(a: TrackedReactor)(f: => Unit): Unit = synchronized {
terminationHandlers += (a -> (() => f))
}
@@ -40,7 +40,7 @@ trait TerminationMonitor {
*
* @param a the actor that has terminated
*/
- def terminated(a: Reactor) = {
+ def terminated(a: TrackedReactor) = {
// obtain termination handler (if any)
val todo = synchronized {
terminationHandlers.get(a) match {
@@ -61,11 +61,14 @@ trait TerminationMonitor {
}
/** Checks whether all actors have terminated. */
- @deprecated("this method is going to be removed in a future release")
- def allTerminated: Boolean = synchronized {
+ private[actors] def allActorsTerminated: Boolean = synchronized {
started && activeActors <= 0
}
+ /** Deprecated non-actor-private version */
+ @deprecated("this method is going to be removed in a future release")
+ def allTerminated: Boolean = allActorsTerminated
+
/** Checks for actors that have become garbage. */
protected def gc() {}
}
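
From user code these hooks are reached through the scheduler objects (SchedulerAdapter above simply forwards them). A small sketch of registering a termination handler via the default Scheduler object; the worker actor and the messages are illustrative:

    import scala.actors.Scheduler
    import scala.actors.Actor._

    object TerminationDemo {
      def main(args: Array[String]) {
        val worker = actor {
          react {
            case "work" => println("done working")
          }
        }

        // run a callback once the scheduler observes the actor's termination
        Scheduler.onTerminate(worker) {
          println("worker has terminated")
        }

        worker ! "work"
      }
    }
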
diff --git a/src/actors/scala/actors/scheduler/TerminationService.scala b/src/actors/scala/actors/scheduler/TerminationService.scala
index 3a05d57b6c..610a76372f 100644
--- a/src/actors/scala/actors/scheduler/TerminationService.scala
+++ b/src/actors/scala/actors/scheduler/TerminationService.scala
@@ -1,17 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package scheduler
-import java.lang.{Runnable, Thread, InterruptedException}
+import java.lang.{Thread, InterruptedException}
/**
* The <code>TerminationService</code> class starts a new thread
@@ -21,11 +20,16 @@ import java.lang.{Runnable, Thread, InterruptedException}
*
* @author Philipp Haller
*/
-abstract class TerminationService(terminate: Boolean)
- extends Thread with IScheduler with TerminationMonitor {
+private[scheduler] trait TerminationService extends TerminationMonitor {
+ _: Thread with IScheduler =>
private var terminating = false
+ /** Indicates whether the scheduler should terminate when all
+ * actors have terminated.
+ */
+ protected val terminate = true
+
protected val CHECK_FREQ = 50
def onShutdown(): Unit
@@ -39,15 +43,15 @@ abstract class TerminationService(terminate: Boolean)
} catch {
case _: InterruptedException =>
}
- if (terminating)
- throw new QuitException
- if (terminate && allTerminated)
- throw new QuitException
+ if (terminating || (terminate && allActorsTerminated))
+ throw new QuitControl
+
+ gc()
}
}
} catch {
- case _: QuitException =>
+ case _: QuitControl =>
Debug.info(this+": initiating shutdown...")
// invoke shutdown hook
onShutdown()
@@ -60,4 +64,5 @@ abstract class TerminationService(terminate: Boolean)
def shutdown(): Unit = synchronized {
terminating = true
}
+
}
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index 3c2f5ce5da..1c5b9be0d3 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -1,32 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.actors
package scheduler
+import util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
+
/**
* @author Erik Engbrecht
+ * @author Philipp Haller
*/
-object ThreadPoolConfig {
+private[actors] object ThreadPoolConfig {
private val rt = Runtime.getRuntime()
private val minNumThreads = 4
- private def getIntegerProp(propName: String): Option[Int] = {
- try {
- val prop = System.getProperty(propName)
- Some(Integer.parseInt(prop))
- } catch {
- case ace: java.security.AccessControlException => None
- case nfe: NumberFormatException => None
- }
- }
+ private def getIntegerProp(propName: String): Option[Int] =
+ try propOrNone(propName) map (_.toInt)
+ catch { case _: SecurityException | _: NumberFormatException => None }
val corePoolSize = getIntegerProp("actors.corePoolSize") match {
case Some(i) if i > 0 => i
@@ -36,9 +32,22 @@ object ThreadPoolConfig {
}
}
- val maxPoolSize = getIntegerProp("actors.maxPoolSize") match {
- case Some(i) if (i >= corePoolSize) => i
- case Some(i) if (i < corePoolSize) => corePoolSize
- case _ => 256
+ val maxPoolSize = {
+ val preMaxSize = getIntegerProp("actors.maxPoolSize") getOrElse 256
+ if (preMaxSize >= corePoolSize) preMaxSize else corePoolSize
}
+
+ private[actors] def useForkJoin: Boolean =
+ try !propIsSetTo("actors.enableForkJoin", "false") &&
+ (propIsSetTo("actors.enableForkJoin", "true") || {
+ Debug.info(this+": java.version = "+javaVersion)
+ Debug.info(this+": java.vm.vendor = "+javaVmVendor)
+
+ // on IBM J9 1.6 do not use ForkJoinPool
+ // XXX this all needs to go into Properties.
+ isJavaAtLeast("1.6") && ((javaVmVendor contains "Sun") || (javaVmVendor contains "Apple"))
+ })
+ catch {
+ case _: SecurityException => false
+ }
}
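
The pool sizes and the fork/join switch are read from system properties when ThreadPoolConfig is first initialized, so they must be set before anything touches the actors runtime: either on the JVM command line (-Dactors.corePoolSize=..., -Dactors.maxPoolSize=..., -Dactors.enableForkJoin=...) or, as sketched here, programmatically at the very start of main. The particular values are illustrative:

    object TuneActorsDemo {
      def main(args: Array[String]) {
        // must happen before the first actor (and thus ThreadPoolConfig) is loaded
        System.setProperty("actors.corePoolSize", "8")
        System.setProperty("actors.maxPoolSize", "64")
        System.setProperty("actors.enableForkJoin", "false")  // selects ResizableThreadPoolScheduler

        import scala.actors.Actor._
        actor {
          println("scheduled on " + Thread.currentThread.getName)
        }
      }
    }
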
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala
deleted file mode 100644
index 08c842e71b..0000000000
--- a/src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors.scheduler
-
-import java.util.concurrent.ThreadPoolExecutor
-import scala.actors.Debug
-import scala.concurrent.ManagedBlocker
-
-/**
- * The <code>ThreadPoolScheduler</code> class uses a
- * <code>ThreadPoolExecutor</code> to execute <code>Actor</code>s.
- *
- * A <code>ThreadPoolScheduler</code> attempts to shut down
- * the underlying <code>ThreadPoolExecutor</code> only if
- * <code>terminate</code> is set to true.
- *
- * Otherwise, the <code>ThreadPoolExecutor</code> must be shut down
- * either directly or by shutting down the
- * <code>ThreadPoolScheduler</code> instance.
- *
- * @author Philipp Haller
- */
-class ThreadPoolScheduler(protected var executor: ThreadPoolExecutor,
- protected val terminate: Boolean,
- protected val daemon: Boolean)
- extends Thread with ExecutorScheduler with TerminationMonitor {
-
- setDaemon(daemon)
-
- private var terminating = false // guarded by this
- protected val CHECK_FREQ = 10
-
- /* This constructor (and the var above) is currently only used to work
- * around a bug in scaladoc, which cannot deal with early initializers
- * (to be used in subclasses such as DefaultThreadPoolScheduler)
- * properly.
- */
- def this(d: Boolean) {
- this(null, true, d)
- }
-
- override def run() {
- try {
- while (true) {
- this.synchronized {
- try {
- wait(CHECK_FREQ)
- } catch {
- case _: InterruptedException =>
- }
-
- if (terminating || (terminate && allTerminated))
- throw new QuitException
-
- gc()
- }
- }
- } catch {
- case _: QuitException =>
- Debug.info(this+": initiating shutdown...")
- // invoke shutdown hook
- onShutdown()
- // allow thread to exit
- }
- }
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit = synchronized {
- terminating = true
- }
-
-}
diff --git a/src/actors/scala/actors/threadpool/AbstractExecutorService.java b/src/actors/scala/actors/threadpool/AbstractExecutorService.java
index 7953bfe30f..4a12aa3c28 100644
--- a/src/actors/scala/actors/threadpool/AbstractExecutorService.java
+++ b/src/actors/scala/actors/threadpool/AbstractExecutorService.java
@@ -120,7 +120,7 @@ public abstract class AbstractExecutorService implements ExecutorService {
int ntasks = tasks.size();
if (ntasks == 0)
throw new IllegalArgumentException();
- List futures= new ArrayList(ntasks);
+ List<Future> futures = new ArrayList<Future>(ntasks);
ExecutorCompletionService ecs =
new ExecutorCompletionService(this);
@@ -203,10 +203,10 @@ public abstract class AbstractExecutorService implements ExecutorService {
return doInvokeAny(tasks, true, unit.toNanos(timeout));
}
- public List invokeAll(Collection tasks) throws InterruptedException {
+ public List<Future> invokeAll(Collection tasks) throws InterruptedException {
if (tasks == null)
throw new NullPointerException();
- List futures = new ArrayList(tasks.size());
+ List<Future> futures = new ArrayList<Future>(tasks.size());
boolean done = false;
try {
for (Iterator t = tasks.iterator(); t.hasNext();) {
@@ -235,13 +235,13 @@ public abstract class AbstractExecutorService implements ExecutorService {
}
}
- public List invokeAll(Collection tasks,
+ public List<Future> invokeAll(Collection tasks,
long timeout, TimeUnit unit)
throws InterruptedException {
if (tasks == null || unit == null)
throw new NullPointerException();
long nanos = unit.toNanos(timeout);
- List futures = new ArrayList(tasks.size());
+ List<Future> futures = new ArrayList<Future>(tasks.size());
boolean done = false;
try {
for (Iterator t = tasks.iterator(); t.hasNext();)
diff --git a/src/actors/scala/actors/threadpool/BlockingQueue.java b/src/actors/scala/actors/threadpool/BlockingQueue.java
index 880c2580da..1b4e808d84 100644
--- a/src/actors/scala/actors/threadpool/BlockingQueue.java
+++ b/src/actors/scala/actors/threadpool/BlockingQueue.java
@@ -7,9 +7,10 @@
package scala.actors.threadpool;
import java.util.Collection;
+import java.util.Queue;
/**
- * A {@link edu.emory.mathcs.backport.java.util.Queue} that additionally supports operations
+ * A {@link java.util.Queue} that additionally supports operations
* that wait for the queue to become non-empty when retrieving an
* element, and wait for space to become available in the queue when
* storing an element.
@@ -146,8 +147,9 @@ import java.util.Collection;
*
* @since 1.5
* @author Doug Lea
+ * @param <E> the type of elements held in this collection
*/
-public interface BlockingQueue extends Queue {
+public interface BlockingQueue<E> extends Queue<E> {
/**
* Inserts the specified element into this queue if it is possible to do
* so immediately without violating capacity restrictions, returning
@@ -157,7 +159,7 @@ public interface BlockingQueue extends Queue {
* use {@link #offer(Object) offer}.
*
* @param e the element to add
- * @return <tt>true</tt> (as specified by {@link java.util.Collection#add})
+ * @return <tt>true</tt> (as specified by {@link Collection#add})
* @throws IllegalStateException if the element cannot be added at this
* time due to capacity restrictions
* @throws ClassCastException if the class of the specified element
@@ -166,7 +168,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean add(Object e);
+ boolean add(E e);
/**
* Inserts the specified element into this queue if it is possible to do
@@ -185,7 +187,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean offer(Object e);
+ boolean offer(E e);
/**
* Inserts the specified element into this queue, waiting if necessary
@@ -199,7 +201,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- void put(Object e) throws InterruptedException;
+ void put(E e) throws InterruptedException;
/**
* Inserts the specified element into this queue, waiting up to the
@@ -219,7 +221,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean offer(Object e, long timeout, TimeUnit unit)
+ boolean offer(E e, long timeout, TimeUnit unit)
throws InterruptedException;
/**
@@ -229,7 +231,7 @@ public interface BlockingQueue extends Queue {
* @return the head of this queue
* @throws InterruptedException if interrupted while waiting
*/
- Object take() throws InterruptedException;
+ E take() throws InterruptedException;
/**
* Retrieves and removes the head of this queue, waiting up to the
@@ -243,7 +245,7 @@ public interface BlockingQueue extends Queue {
* specified waiting time elapses before an element is available
* @throws InterruptedException if interrupted while waiting
*/
- Object poll(long timeout, TimeUnit unit)
+ E poll(long timeout, TimeUnit unit)
throws InterruptedException;
/**
@@ -313,7 +315,7 @@ public interface BlockingQueue extends Queue {
* queue, or some property of an element of this queue prevents
* it from being added to the specified collection
*/
- int drainTo(Collection c);
+ int drainTo(Collection<? super E> c);
/**
* Removes at most the given number of available elements from
@@ -338,5 +340,5 @@ public interface BlockingQueue extends Queue {
* queue, or some property of an element of this queue prevents
* it from being added to the specified collection
*/
- int drainTo(Collection c, int maxElements);
+ int drainTo(Collection<? super E> c, int maxElements);
}
diff --git a/src/actors/scala/actors/threadpool/Executors.java b/src/actors/scala/actors/threadpool/Executors.java
index e74d665f33..49a127a8db 100644
--- a/src/actors/scala/actors/threadpool/Executors.java
+++ b/src/actors/scala/actors/threadpool/Executors.java
@@ -605,12 +605,12 @@ public class Executors {
public Future submit(Runnable task, Object result) {
return e.submit(task, result);
}
- public List invokeAll(Collection tasks)
+ public List<Future> invokeAll(Collection tasks)
throws InterruptedException {
return e.invokeAll(tasks);
}
- public List invokeAll(Collection tasks,
- long timeout, TimeUnit unit)
+ public List<Future> invokeAll(Collection tasks,
+ long timeout, TimeUnit unit)
throws InterruptedException {
return e.invokeAll(tasks, timeout, unit);
}
diff --git a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java b/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
index 87fecff09c..f434ab0e7b 100644
--- a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
+++ b/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
@@ -6,11 +6,13 @@
package scala.actors.threadpool;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.AbstractQueue;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
-//import edu.emory.mathcs.backport.java.util.*;
-import scala.actors.threadpool.helpers.*;
/**
* An optionally-bounded {@linkplain BlockingQueue blocking queue} based on
@@ -41,10 +43,11 @@ import scala.actors.threadpool.helpers.*;
*
* @since 1.5
* @author Doug Lea
+ * @param <E> the type of elements held in this collection
*
*/
-public class LinkedBlockingQueue extends AbstractQueue
- implements BlockingQueue, java.io.Serializable {
+public class LinkedBlockingQueue<E> extends AbstractQueue<E>
+ implements BlockingQueue<E>, java.io.Serializable {
private static final long serialVersionUID = -6903933977591709194L;
/*
@@ -59,43 +62,87 @@ public class LinkedBlockingQueue extends AbstractQueue
* items have been entered since the signal. And symmetrically for
* takes signalling puts. Operations such as remove(Object) and
* iterators acquire both locks.
+ *
+ * Visibility between writers and readers is provided as follows:
+ *
+ * Whenever an element is enqueued, the putLock is acquired and
+ * count updated. A subsequent reader guarantees visibility to the
+ * enqueued Node by either acquiring the putLock (via fullyLock)
+ * or by acquiring the takeLock, and then reading n = count.get();
+ * this gives visibility to the first n items.
+ *
+ * To implement weakly consistent iterators, it appears we need to
+ * keep all Nodes GC-reachable from a predecessor dequeued Node.
+ * That would cause two problems:
+ * - allow a rogue Iterator to cause unbounded memory retention
+ * - cause cross-generational linking of old Nodes to new Nodes if
+ * a Node was tenured while live, which generational GCs have a
+ * hard time dealing with, causing repeated major collections.
+ * However, only non-deleted Nodes need to be reachable from
+ * dequeued Nodes, and reachability does not necessarily have to
+ * be of the kind understood by the GC. We use the trick of
+ * linking a Node that has just been dequeued to itself. Such a
+ * self-link implicitly means to advance to head.next.
*/
/**
* Linked list node class
*/
- static class Node {
- /** The item, volatile to ensure barrier separating write and read */
- volatile Object item;
- Node next;
- Node(Object x) { item = x; }
+ static class Node<E> {
+ E item;
+
+ /**
+ * One of:
+ * - the real successor Node
+ * - this Node, meaning the successor is head.next
+ * - null, meaning there is no successor (this is the last node)
+ */
+ Node<E> next;
+
+ Node(E x) { item = x; }
}
/** The capacity bound, or Integer.MAX_VALUE if none */
private final int capacity;
/** Current number of elements */
- private volatile int count = 0;
+ private final AtomicInteger count = new AtomicInteger(0);
- /** Head of linked list */
- private transient Node head;
+ /**
+ * Head of linked list.
+ * Invariant: head.item == null
+ */
+ private transient Node<E> head;
- /** Tail of linked list */
- private transient Node last;
+ /**
+ * Tail of linked list.
+ * Invariant: last.next == null
+ */
+ private transient Node<E> last;
/** Lock held by take, poll, etc */
- private final Object takeLock = new SerializableLock();
+ private final ReentrantLock takeLock = new ReentrantLock();
+
+ /** Wait queue for waiting takes */
+ private final Condition notEmpty = takeLock.newCondition();
/** Lock held by put, offer, etc */
- private final Object putLock = new SerializableLock();
+ private final ReentrantLock putLock = new ReentrantLock();
+
+ /** Wait queue for waiting puts */
+ private final Condition notFull = putLock.newCondition();
/**
* Signals a waiting take. Called only from put/offer (which do not
* otherwise ordinarily lock takeLock.)
*/
private void signalNotEmpty() {
- synchronized (takeLock) {
- takeLock.notify();
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ notEmpty.signal();
+ } finally {
+ takeLock.unlock();
}
}
@@ -103,34 +150,69 @@ public class LinkedBlockingQueue extends AbstractQueue
* Signals a waiting put. Called only from take/poll.
*/
private void signalNotFull() {
- synchronized (putLock) {
- putLock.notify();
+ final ReentrantLock putLock = this.putLock;
+ putLock.lock();
+ try {
+ notFull.signal();
+ } finally {
+ putLock.unlock();
}
}
/**
* Creates a node and links it at end of queue.
+ *
* @param x the item
*/
- private void insert(Object x) {
- last = last.next = new Node(x);
+ private void enqueue(E x) {
+ // assert putLock.isHeldByCurrentThread();
+ // assert last.next == null;
+ last = last.next = new Node<E>(x);
}
/**
- * Removes a node from head of queue,
+ * Removes a node from head of queue.
+ *
* @return the node
*/
- private Object extract() {
- Node first = head.next;
+ private E dequeue() {
+ // assert takeLock.isHeldByCurrentThread();
+ // assert head.item == null;
+ Node<E> h = head;
+ Node<E> first = h.next;
+ h.next = h; // help GC
head = first;
- Object x = first.item;
+ E x = first.item;
first.item = null;
return x;
}
+ /**
+ * Lock to prevent both puts and takes.
+ */
+ void fullyLock() {
+ putLock.lock();
+ takeLock.lock();
+ }
/**
- * Creates a <tt>LinkedBlockingQueue</tt> with a capacity of
+ * Unlock to allow both puts and takes.
+ */
+ void fullyUnlock() {
+ takeLock.unlock();
+ putLock.unlock();
+ }
+
+// /**
+// * Tells whether both locks are held by current thread.
+// */
+// boolean isFullyLocked() {
+// return (putLock.isHeldByCurrentThread() &&
+// takeLock.isHeldByCurrentThread());
+// }
+
+ /**
+ * Creates a {@code LinkedBlockingQueue} with a capacity of
* {@link Integer#MAX_VALUE}.
*/
public LinkedBlockingQueue() {
@@ -138,20 +220,20 @@ public class LinkedBlockingQueue extends AbstractQueue
}
/**
- * Creates a <tt>LinkedBlockingQueue</tt> with the given (fixed) capacity.
+ * Creates a {@code LinkedBlockingQueue} with the given (fixed) capacity.
*
* @param capacity the capacity of this queue
- * @throws IllegalArgumentException if <tt>capacity</tt> is not greater
+ * @throws IllegalArgumentException if {@code capacity} is not greater
* than zero
*/
public LinkedBlockingQueue(int capacity) {
if (capacity <= 0) throw new IllegalArgumentException();
this.capacity = capacity;
- last = head = new Node(null);
+ last = head = new Node<E>(null);
}
/**
- * Creates a <tt>LinkedBlockingQueue</tt> with a capacity of
+ * Creates a {@code LinkedBlockingQueue} with a capacity of
* {@link Integer#MAX_VALUE}, initially containing the elements of the
* given collection,
* added in traversal order of the collection's iterator.
@@ -160,11 +242,23 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws NullPointerException if the specified collection or any
* of its elements are null
*/
- public LinkedBlockingQueue(Collection c) {
+ public LinkedBlockingQueue(Collection<? extends E> c) {
this(Integer.MAX_VALUE);
- for (Iterator itr = c.iterator(); itr.hasNext();) {
- Object e = itr.next();
- add(e);
+ final ReentrantLock putLock = this.putLock;
+ putLock.lock(); // Never contended, but necessary for visibility
+ try {
+ int n = 0;
+ for (E e : c) {
+ if (e == null)
+ throw new NullPointerException();
+ if (n == capacity)
+ throw new IllegalStateException("Queue full");
+ enqueue(e);
+ ++n;
+ }
+ count.set(n);
+ } finally {
+ putLock.unlock();
}
}
@@ -177,7 +271,7 @@ public class LinkedBlockingQueue extends AbstractQueue
* @return the number of elements in this queue
*/
public int size() {
- return count;
+ return count.get();
}
// this doc comment is a modified copy of the inherited doc comment,
@@ -186,15 +280,15 @@ public class LinkedBlockingQueue extends AbstractQueue
* Returns the number of additional elements that this queue can ideally
* (in the absence of memory or resource constraints) accept without
* blocking. This is always equal to the initial capacity of this queue
- * less the current <tt>size</tt> of this queue.
+ * less the current {@code size} of this queue.
*
* <p>Note that you <em>cannot</em> always tell if an attempt to insert
- * an element will succeed by inspecting <tt>remainingCapacity</tt>
+ * an element will succeed by inspecting {@code remainingCapacity}
* because it may be the case that another thread is about to
* insert or remove an element.
*/
public int remainingCapacity() {
- return capacity - count;
+ return capacity - count.get();
}
/**
@@ -204,34 +298,33 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws InterruptedException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
- public void put(Object e) throws InterruptedException {
+ public void put(E e) throws InterruptedException {
if (e == null) throw new NullPointerException();
- // Note: convention in all put/take/etc is to preset
- // local var holding count negative to indicate failure unless set.
+ // Note: convention in all put/take/etc is to preset local var
+ // holding count negative to indicate failure unless set.
int c = -1;
- synchronized (putLock) {
+ final ReentrantLock putLock = this.putLock;
+ final AtomicInteger count = this.count;
+ putLock.lockInterruptibly();
+ try {
/*
* Note that count is used in wait guard even though it is
* not protected by lock. This works because count can
* only decrease at this point (all other puts are shut
* out by lock), and we (or some other waiting put) are
- * signalled if it ever changes from
- * capacity. Similarly for all other uses of count in
- * other wait guards.
+ * signalled if it ever changes from capacity. Similarly
+ * for all other uses of count in other wait guards.
*/
- try {
- while (count == capacity)
- putLock.wait();
- } catch (InterruptedException ie) {
- putLock.notify(); // propagate to a non-interrupted thread
- throw ie;
+ while (count.get() == capacity) {
+ notFull.await();
}
- insert(e);
- synchronized (this) { c = count++; }
+ enqueue(e);
+ c = count.getAndIncrement();
if (c + 1 < capacity)
- putLock.notify();
+ notFull.signal();
+ } finally {
+ putLock.unlock();
}
-
if (c == 0)
signalNotEmpty();
}
@@ -240,37 +333,32 @@ public class LinkedBlockingQueue extends AbstractQueue
* Inserts the specified element at the tail of this queue, waiting if
* necessary up to the specified wait time for space to become available.
*
- * @return <tt>true</tt> if successful, or <tt>false</tt> if
+ * @return {@code true} if successful, or {@code false} if
* the specified waiting time elapses before space is available.
* @throws InterruptedException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
- public boolean offer(Object e, long timeout, TimeUnit unit)
+ public boolean offer(E e, long timeout, TimeUnit unit)
throws InterruptedException {
if (e == null) throw new NullPointerException();
long nanos = unit.toNanos(timeout);
int c = -1;
- synchronized (putLock) {
- long deadline = Utils.nanoTime() + nanos;
- for (;;) {
- if (count < capacity) {
- insert(e);
- synchronized (this) { c = count++; }
- if (c + 1 < capacity)
- putLock.notify();
- break;
- }
+ final ReentrantLock putLock = this.putLock;
+ final AtomicInteger count = this.count;
+ putLock.lockInterruptibly();
+ try {
+ while (count.get() == capacity) {
if (nanos <= 0)
return false;
- try {
- TimeUnit.NANOSECONDS.timedWait(putLock, nanos);
- nanos = deadline - Utils.nanoTime();
- } catch (InterruptedException ie) {
- putLock.notify(); // propagate to a non-interrupted thread
- throw ie;
- }
+ nanos = notFull.awaitNanos(nanos);
}
+ enqueue(e);
+ c = count.getAndIncrement();
+ if (c + 1 < capacity)
+ notFull.signal();
+ } finally {
+ putLock.unlock();
}
if (c == 0)
signalNotEmpty();
@@ -280,7 +368,7 @@ public class LinkedBlockingQueue extends AbstractQueue
/**
* Inserts the specified element at the tail of this queue if it is
* possible to do so immediately without exceeding the queue's capacity,
- * returning <tt>true</tt> upon success and <tt>false</tt> if this queue
+ * returning {@code true} upon success and {@code false} if this queue
* is full.
* When using a capacity-restricted queue, this method is generally
* preferable to method {@link BlockingQueue#add add}, which can fail to
@@ -288,18 +376,23 @@ public class LinkedBlockingQueue extends AbstractQueue
*
* @throws NullPointerException if the specified element is null
*/
- public boolean offer(Object e) {
+ public boolean offer(E e) {
if (e == null) throw new NullPointerException();
- if (count == capacity)
+ final AtomicInteger count = this.count;
+ if (count.get() == capacity)
return false;
int c = -1;
- synchronized (putLock) {
- if (count < capacity) {
- insert(e);
- synchronized (this) { c = count++; }
+ final ReentrantLock putLock = this.putLock;
+ putLock.lock();
+ try {
+ if (count.get() < capacity) {
+ enqueue(e);
+ c = count.getAndIncrement();
if (c + 1 < capacity)
- putLock.notify();
+ notFull.signal();
}
+ } finally {
+ putLock.unlock();
}
if (c == 0)
signalNotEmpty();
@@ -307,128 +400,134 @@ public class LinkedBlockingQueue extends AbstractQueue
}
- public Object take() throws InterruptedException {
- Object x;
+ public E take() throws InterruptedException {
+ E x;
int c = -1;
- synchronized (takeLock) {
- try {
- while (count == 0)
- takeLock.wait();
- } catch (InterruptedException ie) {
- takeLock.notify(); // propagate to a non-interrupted thread
- throw ie;
+ final AtomicInteger count = this.count;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lockInterruptibly();
+ try {
+ while (count.get() == 0) {
+ notEmpty.await();
}
-
- x = extract();
- synchronized (this) { c = count--; }
+ x = dequeue();
+ c = count.getAndDecrement();
if (c > 1)
- takeLock.notify();
+ notEmpty.signal();
+ } finally {
+ takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
- public Object poll(long timeout, TimeUnit unit) throws InterruptedException {
- Object x = null;
+ public E poll(long timeout, TimeUnit unit) throws InterruptedException {
+ E x = null;
int c = -1;
long nanos = unit.toNanos(timeout);
- synchronized (takeLock) {
- long deadline = Utils.nanoTime() + nanos;
- for (;;) {
- if (count > 0) {
- x = extract();
- synchronized (this) { c = count--; }
- if (c > 1)
- takeLock.notify();
- break;
- }
+ final AtomicInteger count = this.count;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lockInterruptibly();
+ try {
+ while (count.get() == 0) {
if (nanos <= 0)
return null;
- try {
- TimeUnit.NANOSECONDS.timedWait(takeLock, nanos);
- nanos = deadline - Utils.nanoTime();
- } catch (InterruptedException ie) {
- takeLock.notify(); // propagate to a non-interrupted thread
- throw ie;
- }
+ nanos = notEmpty.awaitNanos(nanos);
}
+ x = dequeue();
+ c = count.getAndDecrement();
+ if (c > 1)
+ notEmpty.signal();
+ } finally {
+ takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
- public Object poll() {
- if (count == 0)
+ public E poll() {
+ final AtomicInteger count = this.count;
+ if (count.get() == 0)
return null;
- Object x = null;
+ E x = null;
int c = -1;
- synchronized (takeLock) {
- if (count > 0) {
- x = extract();
- synchronized (this) { c = count--; }
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ if (count.get() > 0) {
+ x = dequeue();
+ c = count.getAndDecrement();
if (c > 1)
- takeLock.notify();
+ notEmpty.signal();
}
+ } finally {
+ takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
-
- public Object peek() {
- if (count == 0)
+ public E peek() {
+ if (count.get() == 0)
return null;
- synchronized (takeLock) {
- Node first = head.next;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ Node<E> first = head.next;
if (first == null)
return null;
else
return first.item;
+ } finally {
+ takeLock.unlock();
}
}
/**
+ * Unlinks interior Node p with predecessor trail.
+ */
+ void unlink(Node<E> p, Node<E> trail) {
+ // assert isFullyLocked();
+ // p.next is not changed, to allow iterators that are
+ // traversing p to maintain their weak-consistency guarantee.
+ p.item = null;
+ trail.next = p.next;
+ if (last == p)
+ last = trail;
+ if (count.getAndDecrement() == capacity)
+ notFull.signal();
+ }
+
+ /**
* Removes a single instance of the specified element from this queue,
- * if it is present. More formally, removes an element <tt>e</tt> such
- * that <tt>o.equals(e)</tt>, if this queue contains one or more such
+ * if it is present. More formally, removes an element {@code e} such
+ * that {@code o.equals(e)}, if this queue contains one or more such
* elements.
- * Returns <tt>true</tt> if this queue contained the specified element
+ * Returns {@code true} if this queue contained the specified element
* (or equivalently, if this queue changed as a result of the call).
*
* @param o element to be removed from this queue, if present
- * @return <tt>true</tt> if this queue changed as a result of the call
+ * @return {@code true} if this queue changed as a result of the call
*/
public boolean remove(Object o) {
if (o == null) return false;
- boolean removed = false;
- synchronized (putLock) {
- synchronized (takeLock) {
- Node trail = head;
- Node p = head.next;
- while (p != null) {
- if (o.equals(p.item)) {
- removed = true;
- break;
- }
- trail = p;
- p = p.next;
- }
- if (removed) {
- p.item = null;
- trail.next = p.next;
- if (last == p)
- last = trail;
- synchronized (this) {
- if (count-- == capacity)
- putLock.notifyAll();
- }
+ fullyLock();
+ try {
+ for (Node<E> trail = head, p = trail.next;
+ p != null;
+ trail = p, p = p.next) {
+ if (o.equals(p.item)) {
+ unlink(p, trail);
+ return true;
}
}
+ return false;
+ } finally {
+ fullyUnlock();
}
- return removed;
}
/**
@@ -445,15 +544,16 @@ public class LinkedBlockingQueue extends AbstractQueue
* @return an array containing all of the elements in this queue
*/
public Object[] toArray() {
- synchronized (putLock) {
- synchronized (takeLock) {
- int size = count;
- Object[] a = new Object[size];
- int k = 0;
- for (Node p = head.next; p != null; p = p.next)
- a[k++] = p.item;
- return a;
- }
+ fullyLock();
+ try {
+ int size = count.get();
+ Object[] a = new Object[size];
+ int k = 0;
+ for (Node<E> p = head.next; p != null; p = p.next)
+ a[k++] = p.item;
+ return a;
+ } finally {
+ fullyUnlock();
}
}
@@ -467,22 +567,22 @@ public class LinkedBlockingQueue extends AbstractQueue
* <p>If this queue fits in the specified array with room to spare
* (i.e., the array has more elements than this queue), the element in
* the array immediately following the end of the queue is set to
- * <tt>null</tt>.
+ * {@code null}.
*
* <p>Like the {@link #toArray()} method, this method acts as bridge between
* array-based and collection-based APIs. Further, this method allows
* precise control over the runtime type of the output array, and may,
* under certain circumstances, be used to save allocation costs.
*
- * <p>Suppose <tt>x</tt> is a queue known to contain only strings.
+ * <p>Suppose {@code x} is a queue known to contain only strings.
* The following code can be used to dump the queue into a newly
- * allocated array of <tt>String</tt>:
+ * allocated array of {@code String}:
*
* <pre>
* String[] y = x.toArray(new String[0]);</pre>
*
- * Note that <tt>toArray(new Object[0])</tt> is identical in function to
- * <tt>toArray()</tt>.
+ * Note that {@code toArray(new Object[0])} is identical in function to
+ * {@code toArray()}.
*
* @param a the array into which the elements of the queue are to
* be stored, if it is big enough; otherwise, a new array of the
@@ -493,29 +593,32 @@ public class LinkedBlockingQueue extends AbstractQueue
* this queue
* @throws NullPointerException if the specified array is null
*/
- public Object[] toArray(Object[] a) {
- synchronized (putLock) {
- synchronized (takeLock) {
- int size = count;
- if (a.length < size)
- a = (Object[])java.lang.reflect.Array.newInstance
- (a.getClass().getComponentType(), size);
-
- int k = 0;
- for (Node p = head.next; p != null; p = p.next)
- a[k++] = (Object)p.item;
- if (a.length > k)
- a[k] = null;
- return a;
- }
+ @SuppressWarnings("unchecked")
+ public <T> T[] toArray(T[] a) {
+ fullyLock();
+ try {
+ int size = count.get();
+ if (a.length < size)
+ a = (T[])java.lang.reflect.Array.newInstance
+ (a.getClass().getComponentType(), size);
+
+ int k = 0;
+ for (Node<E> p = head.next; p != null; p = p.next)
+ a[k++] = (T)p.item;
+ if (a.length > k)
+ a[k] = null;
+ return a;
+ } finally {
+ fullyUnlock();
}
}
public String toString() {
- synchronized (putLock) {
- synchronized (takeLock) {
- return super.toString();
- }
+ fullyLock();
+ try {
+ return super.toString();
+ } finally {
+ fullyUnlock();
}
}
@@ -524,19 +627,18 @@ public class LinkedBlockingQueue extends AbstractQueue
* The queue will be empty after this call returns.
*/
public void clear() {
- synchronized (putLock) {
- synchronized (takeLock) {
- head.next = null;
- assert head.item == null;
- last = head;
- int c;
- synchronized (this) {
- c = count;
- count = 0;
- }
- if (c == capacity)
- putLock.notifyAll();
+ fullyLock();
+ try {
+ for (Node<E> p, h = head; (p = h.next) != null; h = p) {
+ h.next = h;
+ p.item = null;
}
+ head = last;
+ // assert head.item == null && head.next == null;
+ if (count.getAndSet(0) == capacity)
+ notFull.signal();
+ } finally {
+ fullyUnlock();
}
}
@@ -546,35 +648,8 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
- public int drainTo(Collection c) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- Node first;
- synchronized (putLock) {
- synchronized (takeLock) {
- first = head.next;
- head.next = null;
- assert head.item == null;
- last = head;
- int cold;
- synchronized (this) {
- cold = count;
- count = 0;
- }
- if (cold == capacity)
- putLock.notifyAll();
- }
- }
- // Transfer the elements outside of locks
- int n = 0;
- for (Node p = first; p != null; p = p.next) {
- c.add(p.item);
- p.item = null;
- ++n;
- }
- return n;
+ public int drainTo(Collection<? super E> c) {
+ return drainTo(c, Integer.MAX_VALUE);
}
/**
@@ -583,70 +658,77 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
- public int drainTo(Collection c, int maxElements) {
+ public int drainTo(Collection<? super E> c, int maxElements) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
- synchronized (putLock) {
- synchronized (takeLock) {
- int n = 0;
- Node p = head.next;
- while (p != null && n < maxElements) {
+ boolean signalNotFull = false;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ int n = Math.min(maxElements, count.get());
+ // count.get provides visibility to first n Nodes
+ Node<E> h = head;
+ int i = 0;
+ try {
+ while (i < n) {
+ Node<E> p = h.next;
c.add(p.item);
p.item = null;
- p = p.next;
- ++n;
- }
- if (n != 0) {
- head.next = p;
- assert head.item == null;
- if (p == null)
- last = head;
- int cold;
- synchronized (this) {
- cold = count;
- count -= n;
- }
- if (cold == capacity)
- putLock.notifyAll();
+ h.next = h;
+ h = p;
+ ++i;
}
return n;
+ } finally {
+ // Restore invariants even if c.add() threw
+ if (i > 0) {
+ // assert h.item == null;
+ head = h;
+ signalNotFull = (count.getAndAdd(-i) == capacity);
+ }
}
+ } finally {
+ takeLock.unlock();
+ if (signalNotFull)
+ signalNotFull();
}
}
/**
* Returns an iterator over the elements in this queue in proper sequence.
- * The returned <tt>Iterator</tt> is a "weakly consistent" iterator that
- * will never throw {@link java.util.ConcurrentModificationException},
+ * The returned {@code Iterator} is a "weakly consistent" iterator that
+ * will never throw {@link java.util.ConcurrentModificationException
+ * ConcurrentModificationException},
* and guarantees to traverse elements as they existed upon
* construction of the iterator, and may (but is not guaranteed to)
* reflect any modifications subsequent to construction.
*
* @return an iterator over the elements in this queue in proper sequence
*/
- public Iterator iterator() {
+ public Iterator<E> iterator() {
return new Itr();
}
- private class Itr implements Iterator {
+ private class Itr implements Iterator<E> {
/*
- * Basic weak-consistent iterator. At all times hold the next
+ * Basic weakly-consistent iterator. At all times hold the next
* item to hand out so that if hasNext() reports true, we will
* still have it to return even if lost race with a take etc.
*/
- private Node current;
- private Node lastRet;
- private Object currentElement;
+ private Node<E> current;
+ private Node<E> lastRet;
+ private E currentElement;
Itr() {
- synchronized (putLock) {
- synchronized (takeLock) {
- current = head.next;
- if (current != null)
- currentElement = current.item;
- }
+ fullyLock();
+ try {
+ current = head.next;
+ if (current != null)
+ currentElement = current.item;
+ } finally {
+ fullyUnlock();
}
}
@@ -654,45 +736,56 @@ public class LinkedBlockingQueue extends AbstractQueue
return current != null;
}
- public Object next() {
- synchronized (putLock) {
- synchronized (takeLock) {
- if (current == null)
- throw new NoSuchElementException();
- Object x = currentElement;
- lastRet = current;
- current = current.next;
- if (current != null)
- currentElement = current.item;
- return x;
- }
+ /**
+ * Returns the next live successor of p, or null if no such.
+ *
+ * Unlike other traversal methods, iterators need to handle both:
+ * - dequeued nodes (p.next == p)
+ * - (possibly multiple) interior removed nodes (p.item == null)
+ */
+ private Node<E> nextNode(Node<E> p) {
+ for (;;) {
+ Node<E> s = p.next;
+ if (s == p)
+ return head.next;
+ if (s == null || s.item != null)
+ return s;
+ p = s;
+ }
+ }
+
+ public E next() {
+ fullyLock();
+ try {
+ if (current == null)
+ throw new NoSuchElementException();
+ E x = currentElement;
+ lastRet = current;
+ current = nextNode(current);
+ currentElement = (current == null) ? null : current.item;
+ return x;
+ } finally {
+ fullyUnlock();
}
}
public void remove() {
if (lastRet == null)
throw new IllegalStateException();
- synchronized (putLock) {
- synchronized (takeLock) {
- Node node = lastRet;
- lastRet = null;
- Node trail = head;
- Node p = head.next;
- while (p != null && p != node) {
- trail = p;
- p = p.next;
- }
+ fullyLock();
+ try {
+ Node<E> node = lastRet;
+ lastRet = null;
+ for (Node<E> trail = head, p = trail.next;
+ p != null;
+ trail = p, p = p.next) {
if (p == node) {
- p.item = null;
- trail.next = p.next;
- if (last == p)
- last = trail;
- int c;
- synchronized (this) { c = count--; }
- if (c == capacity)
- putLock.notifyAll();
+ unlink(p, trail);
+ break;
}
}
+ } finally {
+ fullyUnlock();
}
}
}
@@ -701,31 +794,33 @@ public class LinkedBlockingQueue extends AbstractQueue
* Save the state to a stream (that is, serialize it).
*
* @serialData The capacity is emitted (int), followed by all of
- * its elements (each an <tt>Object</tt>) in the proper order,
+ * its elements (each an {@code Object}) in the proper order,
* followed by a null
* @param s the stream
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
- synchronized (putLock) {
- synchronized (takeLock) {
- // Write out any hidden stuff, plus capacity
- s.defaultWriteObject();
+ fullyLock();
+ try {
+ // Write out any hidden stuff, plus capacity
+ s.defaultWriteObject();
- // Write out all elements in the proper order.
- for (Node p = head.next; p != null; p = p.next)
- s.writeObject(p.item);
+ // Write out all elements in the proper order.
+ for (Node<E> p = head.next; p != null; p = p.next)
+ s.writeObject(p.item);
- // Use trailing null as sentinel
- s.writeObject(null);
- }
+ // Use trailing null as sentinel
+ s.writeObject(null);
+ } finally {
+ fullyUnlock();
}
}
/**
* Reconstitute this queue instance from a stream (that is,
* deserialize it).
+ *
* @param s the stream
*/
private void readObject(java.io.ObjectInputStream s)
@@ -733,19 +828,16 @@ public class LinkedBlockingQueue extends AbstractQueue
// Read in capacity, and any hidden stuff
s.defaultReadObject();
- synchronized (this) { count = 0; }
- last = head = new Node(null);
+ count.set(0);
+ last = head = new Node<E>(null);
// Read in all elements and place in queue
for (;;) {
- Object item = (Object)s.readObject();
+ @SuppressWarnings("unchecked")
+ E item = (E)s.readObject();
if (item == null)
break;
add(item);
}
}
-
- private static class SerializableLock implements java.io.Serializable {
- private final static long serialVersionUID = -8856990691138858668L;
- }
}
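
    The rewrite above replaces synchronized/wait/notify with ReentrantLock, Condition and an
    AtomicInteger count. The following is a compact sketch of that two-lock hand-off under the
    same conventions (preset the local count to -1, cascade signals on the same side, cross to
    the other lock only on the empty/full transitions). TwoLockBuffer and its members are
    illustrative names, not the backport's API, and a mutable.Queue guarded by an extra
    synchronized block stands in for the hand-rolled linked list; fullyLock/drainTo are omitted.

    import java.util.concurrent.atomic.AtomicInteger
    import java.util.concurrent.locks.ReentrantLock
    import scala.collection.mutable.Queue

    class TwoLockBuffer[A](capacity: Int) {
      private val items    = new Queue[A]
      private val count    = new AtomicInteger(0)
      private val putLock  = new ReentrantLock
      private val notFull  = putLock.newCondition()
      private val takeLock = new ReentrantLock
      private val notEmpty = takeLock.newCondition()

      // put runs entirely under putLock; it only touches takeLock when the queue
      // goes from empty to non-empty (c == 0), mirroring signalNotEmpty() above.
      def put(x: A): Unit = {
        var c = -1                                  // preset negative, as in the library convention
        putLock.lockInterruptibly()
        try {
          while (count.get == capacity) notFull.await()
          items.synchronized { items.enqueue(x) }   // simplification: Queue is not a two-ended linked list
          c = count.getAndIncrement()
          if (c + 1 < capacity) notFull.signal()    // cascade to the next waiting producer
        } finally putLock.unlock()
        if (c == 0) {
          takeLock.lock()
          try notEmpty.signal() finally takeLock.unlock()
        }
      }

      def take(): A = {
        var c = -1
        var x = null.asInstanceOf[A]
        takeLock.lockInterruptibly()
        try {
          while (count.get == 0) notEmpty.await()
          x = items.synchronized { items.dequeue() }
          c = count.getAndDecrement()
          if (c > 1) notEmpty.signal()              // more items left: wake the next consumer
        } finally takeLock.unlock()
        if (c == capacity) {                        // queue went full -> not full: wake a producer
          putLock.lock()
          try notFull.signal() finally putLock.unlock()
        }
        x
      }
    }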
diff --git a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java b/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
index f41b2790b6..11e35b034c 100644
--- a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
+++ b/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
@@ -791,7 +791,7 @@ public class ThreadPoolExecutor extends AbstractExecutorService {
*/
private List drainQueue() {
BlockingQueue q = workQueue;
- List taskList = new ArrayList();
+ List<Runnable> taskList = new ArrayList<Runnable>();
q.drainTo(taskList);
if (!q.isEmpty()) {
Runnable[] arr = (Runnable[])q.toArray(new Runnable[0]);
diff --git a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java b/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
index 6306faa08f..432b851f3e 100644
--- a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
+++ b/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
@@ -64,7 +64,7 @@ public class FIFOWaitQueue extends WaitQueue implements java.io.Serializable {
}
public Collection getWaitingThreads() {
- List list = new ArrayList();
+ List<Thread> list = new ArrayList<Thread>();
int count = 0;
WaitNode node = head_;
while (node != null) {
diff --git a/src/actors/scala/actors/threadpool/helpers/Utils.java b/src/actors/scala/actors/threadpool/helpers/Utils.java
index df1dbd4960..d12389215d 100644
--- a/src/actors/scala/actors/threadpool/helpers/Utils.java
+++ b/src/actors/scala/actors/threadpool/helpers/Utils.java
@@ -41,9 +41,9 @@ public final class Utils {
static {
NanoTimer timer = null;
try {
- String nanoTimerClassName = (String)
- AccessController.doPrivileged(new PrivilegedAction() {
- public Object run() {
+ String nanoTimerClassName =
+ AccessController.doPrivileged(new PrivilegedAction<String>() {
+ public String run() {
return System.getProperty(providerProp);
}
});
@@ -206,9 +206,9 @@ public final class Utils {
final Perf perf;
final long multiplier, divisor;
SunPerfProvider() {
- perf = (Perf)
- AccessController.doPrivileged(new PrivilegedAction() {
- public Object run() {
+ perf =
+ AccessController.doPrivileged(new PrivilegedAction<Perf>() {
+ public Perf run() {
return Perf.getPerf();
}
});
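
    For the PrivilegedAction change above, the generic form lets doPrivileged return the
    result type directly instead of Object, removing the cast. A tiny sketch of the same
    idiom against the standard java.security API; the property read here is illustrative.

    import java.security.{AccessController, PrivilegedAction}

    object PrivilegedSketch {
      def main(args: Array[String]): Unit = {
        // With PrivilegedAction[String], doPrivileged returns String, so no (String) cast.
        val javaVersion: String =
          AccessController.doPrivileged(new PrivilegedAction[String] {
            def run(): String = System.getProperty("java.version")
          })
        println(javaVersion)
      }
    }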
diff --git a/src/actors/scala/actors/threadpool/locks/CondVar.java b/src/actors/scala/actors/threadpool/locks/CondVar.java
index 132e72fe2a..44df1c0b97 100644
--- a/src/actors/scala/actors/threadpool/locks/CondVar.java
+++ b/src/actors/scala/actors/threadpool/locks/CondVar.java
@@ -17,6 +17,7 @@ import scala.actors.threadpool.*;
import scala.actors.threadpool.helpers.*;
class CondVar implements Condition, java.io.Serializable {
+ private static final long serialVersionUID = -5009898475638427940L;
/** The lock **/
protected final ExclusiveLock lock;
diff --git a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java b/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
index 7495a8a884..144ac54d37 100644
--- a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
+++ b/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
@@ -17,6 +17,7 @@ import scala.actors.threadpool.*;
import scala.actors.threadpool.helpers.*;
class FIFOCondVar extends CondVar implements Condition, java.io.Serializable {
+ private static final long serialVersionUID = -497497271881010475L;
private static final WaitQueue.QueuedSync sync = new WaitQueue.QueuedSync() {
public boolean recheck(WaitQueue.WaitNode node) { return false; }
diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
index 6411bbea01..437af77c7a 100644
--- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
+++ b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
@@ -190,7 +190,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
transient int writeHolds_ = 0;
/** Number of acquires on read lock by any reader thread **/
- transient HashMap readers_ = new HashMap();
+ transient HashMap<Thread, Integer> readers_ = new HashMap<Thread, Integer>();
/** cache/reuse the special Integer value one to speed up readlocks **/
static final Integer IONE = new Integer(1);
@@ -344,7 +344,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
synchronized int getReadHoldCount() {
if (activeReaders_ == 0) return 0;
Thread t = Thread.currentThread();
- Integer i = (Integer)readers_.get(t);
+ Integer i = readers_.get(t);
return (i == null) ? 0 : i.intValue();
}
@@ -363,7 +363,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
// and ensure visibility by synchronizing (all other accesses to
// readers_ are also synchronized on "this")
synchronized (this) {
- readers_ = new HashMap();
+ readers_ = new HashMap<Thread, Integer>();
}
}
}
@@ -372,6 +372,8 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
* Nonfair version of Sync
*/
private static class NonfairSync extends Sync {
+ private static final long serialVersionUID = -2392241841540339773L;
+
NonfairSync() {}
}
diff --git a/src/android-library/scala/ScalaObject.scala b/src/android-library/scala/ScalaObject.scala
index b532f4e55f..10b2306abd 100644
--- a/src/android-library/scala/ScalaObject.scala
+++ b/src/android-library/scala/ScalaObject.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/attic/README b/src/attic/README
new file mode 100644
index 0000000000..9fb600ae57
--- /dev/null
+++ b/src/attic/README
@@ -0,0 +1,2 @@
+This is a holding area for source files which aren't used in
+trunk anymore but which we're keeping available for a time.
\ No newline at end of file
+trunk anymore but which we're keeping available for a time.
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/models/Models.scala b/src/attic/scala/tools/nsc/models/Models.scala
index 1aafc5f658..7365709b25 100644
--- a/src/compiler/scala/tools/nsc/models/Models.scala
+++ b/src/attic/scala/tools/nsc/models/Models.scala
@@ -1,14 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package models
import scala.tools.nsc.Global
-import scala.tools.nsc.util.{Position,NoPosition}
/** This abstract class ...
*
@@ -263,7 +261,6 @@ abstract class Models {
}
abstract class ValOrDefMod(parent0: Composite) extends MemberComposite(parent0) with HasClassObjects {
- def treey = tree.asInstanceOf[ValOrDefDef]
override def replacedBy(tree0: Tree): Boolean =
super.replacedBy(tree0) && tree0.isInstanceOf[ValOrDefDef]
@@ -297,7 +294,6 @@ abstract class Models {
abstract class ImplMod(parent0: Composite)
extends MemberComposite(parent0) with HasClassObjects {
- def treey = tree.asInstanceOf[ImplDef]
override def replacedBy(tree0: Tree): Boolean =
super.replacedBy(tree0) && tree0.isInstanceOf[ImplDef]
override def isMember(tree: Tree): Boolean = (super.isMember(tree) ||
@@ -362,7 +358,6 @@ abstract class Models {
super.replacedBy(tree0) && tree0.isInstanceOf[ModuleDef]
}
class TypeMod(parent0: Composite) extends MemberMod(parent0) {
- def treey = tree.asInstanceOf[TypeDef];
override def replacedBy(tree0 : Tree) : Boolean = (super.replacedBy(tree0) && tree0.isInstanceOf[TypeDef]);
}
def SourceMod(original: CompilationUnit) = new SourceMod(original)
diff --git a/src/compiler/scala/tools/nsc/models/SemanticTokens.scala b/src/attic/scala/tools/nsc/models/SemanticTokens.scala
index ed9ef4f419..5b492ba056 100644
--- a/src/compiler/scala/tools/nsc/models/SemanticTokens.scala
+++ b/src/attic/scala/tools/nsc/models/SemanticTokens.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package models
@@ -14,8 +13,8 @@ import scala.collection.mutable.{HashMap, HashSet}
import scala.tools.nsc.Global
import scala.tools.nsc.symtab.{Flags, Names}
import scala.tools.nsc.symtab.Flags.DEFERRED
-import scala.tools.nsc.util.{BatchSourceFile, NoPosition, Position, SourceFile}
-import scala.util.NameTransformer
+import scala.tools.nsc.util.{BatchSourceFile, SourceFile}
+import scala.reflect.NameTransformer
class SemanticTokens(val compiler: Global) {
import compiler._
@@ -262,7 +261,6 @@ class SemanticTokens(val compiler: Global) {
build(arg.tpt);
}
}
- try {
//TPT=scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] 260 class scala.tools.nsc.ast.Trees$TypeTree scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] class scala.tools.nsc.symtab.Types$$anon$5
if ((tree.tpt eq null) || (tree.tpt.tpe eq null)) {
//Console.err.println("BAD: " + tree.tpt + " in " + tree);
@@ -270,11 +268,6 @@ class SemanticTokens(val compiler: Global) {
//Console.err.println("TPT=" + tree.tpt + " " + tree.tpt.pos + " " + tree.tpt.getClass() + " " + tree.tpt.tpe + " " + tree.tpt.tpe.getClass() + " " + tree.tpt.tpe.getClass().getSuperclass());
build(tree.tpt);
}
- } catch {
- case e: Error =>
- Console.err.println("VALDEF: " + tree + " " + tree.tpt + " " + tree.pos + " " + tree.tpt.pos);
- throw e;
- }
//Console.err.println("RHS: " + tree.rhs + " " + tree.rhs.getClass() + " " + tree.rhs.getClass().getSuperclass());
build(tree.rhs);
}
@@ -554,7 +547,7 @@ class SemanticTokens(val compiler: Global) {
Console.err.println("" + tree + "@" + tree.pos + " not in " +
unit.source.file.name + "[" + buf.length + "]");
Thread.dumpStack()
- throw new Error()
+ abort()
}
return 0
}
@@ -621,7 +614,7 @@ class SemanticTokens(val compiler: Global) {
Console.err.println("OFFSET=" + offset + " " + tok + " " + tok.length);
Console.err.println(" " + cursor.offset + " " + gap.length);
gap.length0 = offset - cursor.offset + tok.length
- //throw new Error();
+ //abort();
}
if (offset == cursor.offset) {
// replace or prepend
diff --git a/src/compiler/scala/tools/nsc/models/Signatures.scala b/src/attic/scala/tools/nsc/models/Signatures.scala
index 98bfa142ff..cca65f7fc9 100644
--- a/src/compiler/scala/tools/nsc/models/Signatures.scala
+++ b/src/attic/scala/tools/nsc/models/Signatures.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package models
@@ -11,7 +10,7 @@ import scala.collection.mutable.{HashMap, HashSet}
import scala.tools.nsc.{Global => Compiler}
import scala.tools.nsc.symtab.{Flags, Names}
import scala.tools.nsc.util.{ Position, SourceFile }
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
/** This class ...
*
@@ -25,14 +24,10 @@ class Signatures(val compiler: Compiler) {
def asString: String = name + "[" + asString0(children) + "]"
}
- def sort(sigs: List[Signature]) =
- sigs.sort((l0,l1) => l0.name.compareTo(l1.name) > 0)
+ def sort(sigs: List[Signature]) = sigs sortBy (_.name) reverse
- def asString0(sigs: List[Signature]): String = {
- var ret = ""
- for (sig <- sort(sigs)) ret = ret + sig.asString
- ret
- }
+ def asString0(sigs: List[Signature]): String =
+ sort(sigs) map (_.asString) mkString
def signature(unit: CompilationUnit): String =
asString0(signature(unit.body, Nil))
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala b/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
index 5f0574b525..8c111875dd 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
+++ b/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
@@ -3,7 +3,6 @@ package symtab
trait SymbolWalker {
val global : Global
- import scala.tools.nsc.util._
import global._
import scala.collection.mutable.LinkedHashSet
trait Visitor {
@@ -20,32 +19,33 @@ trait SymbolWalker {
def apply(pos : Position) : Symbol = map.apply(pos)
}
*/
+ private def validSym(t: Tree) = t.symbol != NoSymbol && t.symbol != null
+ private def validSym(tp: Type) = tp != null && tp.typeSymbol != NoSymbol && tp.typeSymbol != null
+ private def notNull(tp: Type) = tp.typeSymbol != null
+ private def isNoSymbol(t: Tree) = t.symbol eq NoSymbol
+
def walk(tree: Tree, visitor : Visitor)(fid : (util.Position) => Option[String]) : Unit = {
val visited = new LinkedHashSet[Tree]
def f(t : Tree) : Unit = {
if (visited.add(t)) return
- def fs(l : List[Tree]) : Unit = {
- val i = l.iterator
- while (i.hasNext) f(i.next)
- }
- def fss(l : List[List[Tree]]) : Unit = {
- val i = l.iterator
- while (i.hasNext) fs(i.next)
- }
- if (t.isInstanceOf[StubTree]) return
- def asTypeRef = t.tpe.asInstanceOf[TypeRef]
- val sym = (t,t.tpe) match {
- case (Super(_,_),SuperType(_,supertp)) if supertp.typeSymbol != NoSymbol && supertp.typeSymbol != null => supertp.typeSymbol
- case _ if t.symbol != NoSymbol && t.symbol != null => t.symbol
- case (t : TypeTree, tp) if tp != null && tp.typeSymbol != null && tp.typeSymbol != NoSymbol => tp.typeSymbol
- case (t : TypeTree, tp) if tp != null && tp.resultType != null && tp.resultType.typeSymbol != null => tp.resultType.typeSymbol
- case (t, tpe : Type) if tpe != null && (t.symbol eq NoSymbol) && t.isTerm && tpe.termSymbol != null =>
- tpe.termSymbol
- case (t, tpe : Type) if tpe != null && (t.symbol eq NoSymbol) && tpe.typeSymbol != null =>
- if (t.tpe.isInstanceOf[TypeRef]) asTypeRef.sym // XXX: looks like a bug
- else tpe.typeSymbol
- case _ => NoSymbol
+
+ def fs(l: List[Tree]) = l foreach f
+ def fss(l: List[List[Tree]]) = l foreach fs
+
+ val sym = (t, t.tpe) match {
+ case (Super(_,_),SuperType(_,supertp)) if validSym(supertp) => supertp.typeSymbol
+ case _ if validSym(t) => t.symbol
+ case (t: TypeTree, tp) if validSym(tp) => tp.typeSymbol
+ case (t: TypeTree, tp) if validSym(tp.resultType) => tp.resultType.typeSymbol
+ case (t, tpe: Type) if isNoSymbol(t) && tpe.termSymbol != null =>
+ if (t.isTerm) tpe.termSymbol
+ else t.tpe match {
+ case x: TypeRef => x.sym // XXX: looks like a bug
+ case _ => tpe.typeSymbol
+ }
+ case _ => NoSymbol
}
+
if (sym != null && sym != NoSymbol /* && !sym.hasFlag(SYNTHETIC) */) {
var id = fid(t.pos)
val doAdd = if (id.isDefined) {
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 8abbe2d77b..842d235f24 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
/** <p>
* This program generates the <code>ProductN</code>, <code>TupleN</code> <code>FunctionN</code> classes, where
@@ -32,7 +31,8 @@ object genprod {
def productFiles = arities map Product.make
def tupleFiles = arities map Tuple.make
def functionFiles = (0 :: arities) map Function.make
- def allfiles = productFiles ::: tupleFiles ::: functionFiles
+ def absFunctionFiles = (0 :: arities) map AbstractFunction.make
+ def allfiles = productFiles ::: tupleFiles ::: functionFiles ::: absFunctionFiles
trait Arity extends Group {
def i: Int // arity
@@ -48,8 +48,10 @@ object genprod {
def xdefs = to map ("x" + _)
def mdefs = to map ("_" + _)
def invariantArgs = typeArgsString(targs)
- def covariantArgs = typeArgsString(targs map ("+" + _))
- def contraCoArgs = typeArgsString((targs map ("-" + _)) ::: List("+R"))
+ def covariantArgs = typeArgsString(targs map (covariantSpecs + "+" + _))
+ def covariantSpecs = ""
+ def contravariantSpecs = ""
+ def contraCoArgs = typeArgsString((targs map (contravariantSpecs + "-" + _)) ::: List(covariantSpecs + "+R"))
def fields = List.map2(mdefs, targs)(_ + ":" + _) mkString ","
def funArgs = List.map2(vdefs, targs)(_ + ":" + _) mkString ","
@@ -59,22 +61,24 @@ object genprod {
def descriptiveComment = ""
def withFancy = if (descriptiveComment.isEmpty) "" else "(with fancy comment)"
def withMoreMethods = if (moreMethods.isEmpty) "" else "(with extra methods)"
+ def packageDef = "scala"
+ def imports = ""
def header = """
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
%s
-package scala
-""".trim.format(genprodString) + "\n\n"
+package %s
+%s
+""".trim.format(genprodString, packageDef, imports) + "\n\n"
}
def main(args: Array[String]) {
@@ -96,7 +100,7 @@ package scala
w.close
} catch {
case e: java.io.IOException =>
- println(e.getMessage() + ": " + out)
+ println(e.getMessage() + ": " + f)
exit(-1)
}
}
@@ -112,6 +116,7 @@ import genprod._
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */
object FunctionZero extends Function(0) {
+ override def covariantSpecs = "@specialized "
override def descriptiveComment = functionNTemplate.format("currentSeconds", "anonfun0",
""" *
* <b>val</b> currentSeconds = () => System.currentTimeMillis() / 1000L
@@ -126,6 +131,9 @@ object FunctionZero extends Function(0) {
}
object FunctionOne extends Function(1) {
+ override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double) "
+ override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
+
override def descriptiveComment = functionNTemplate.format("succ", "anonfun1",
""" *
* <b>val</b> succ = (x: Int) => x + 1
@@ -149,6 +157,9 @@ object FunctionOne extends Function(1) {
}
object FunctionTwo extends Function(2) {
+ override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Double) "
+ override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
+
override def descriptiveComment = functionNTemplate.format("max", "anonfun2",
""" *
* <b>val</b> max = (x: Int, y: Int) => <b>if</b> (x < y) y <b>else</b> x
@@ -200,34 +211,49 @@ trait {className}{contraCoArgs} extends AnyRef {{ self =>
}}
</file>
}
+
+ private def commaXs = xdefs.mkString("(", ", ", ")")
+
// (x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1,x2,x3,x4)
def shortCurry = {
- val body = xdefs.mkString("apply(", ",", ")")
+ val body = "apply" + commaXs
List.map2(xdefs, targs)("(%s: %s) => ".format(_, _)).mkString("", "", body)
}
- // (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1,x2,x3,x4,x5,x6,x7)).curry
+ // (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1,x2,x3,x4,x5,x6,x7)).curried
def longCurry = (List.map2(xdefs, targs)(_ + ": " + _) drop 1).mkString(
"(x1: T1) => ((",
", ",
- ") => self.apply(%s)).curry".format(xdefs mkString ",")
+ ") => self.apply%s).curried".format(commaXs)
)
- // f(x1,x2,x3,x4,x5,x6) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)
+ // f(x1,x2,x3,x4,x5,x6) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)
def curryComment = { """
- /** f(%s) == (f.curry)%s
+ /** f%s == (f.curried)%s
+ */
+""".format(commaXs, xdefs map ("(" + _ + ")") mkString)
+ }
+
+ def tupleMethod = {
+ def comment = """
+ /* f%s == (f.tupled)(Tuple%d%s)
*/
-""".format(xdefs mkString ",", xdefs map ("(" + _ + ")") mkString)
+""".format(commaXs, i, commaXs)
+ def body = "case Tuple%d%s => apply%s".format(i, commaXs, commaXs)
+
+ comment + " def tupled: Tuple%d%s => R = {\n %s\n }\n".format(i, invariantArgs, body)
}
def curryMethod = {
val body = if (i < 5) shortCurry else longCurry
- " def curry: %s => R = {\n %s\n }\n".format(
+
+ curryComment +
+ " def curried: %s => R = {\n %s\n }\n".format(
targs mkString " => ", body
- )
+ ) + """ @deprecated("Use 'curried' instead")""" + "\n def curry = curried\n"
}
- override def moreMethods = curryComment + curryMethod
+ override def moreMethods = curryMethod + tupleMethod
} // object Function
@@ -239,16 +265,245 @@ object Tuple
{
def make(i: Int) = apply(i)()
def apply(i: Int) = i match {
+ case 1 => TupleOne
case 2 => TupleTwo
+ case 3 => TupleThree
case _ => new Tuple(i)
}
}
+object TupleOne extends Tuple(1)
+{
+ override def covariantSpecs = "@specialized(Int, Long, Double) "
+}
+
object TupleTwo extends Tuple(2)
{
+ override def imports = """
+import scala.collection.{TraversableLike, IterableLike}
+import scala.collection.generic.CanBuildFrom
+"""
+ override def covariantSpecs = "@specialized(Int, Long, Double) "
override def moreMethods = """
/** Swap the elements of the tuple */
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
+
+ def zip[Repr1, El1, El2, To](implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => Iterable[El2],
+ cbf1: CanBuildFrom[Repr1, (El1, El2), To]): To = {
+ val coll1: TraversableLike[El1, Repr1] = _1
+ val coll2: Iterable[El2] = _2
+ val b1 = cbf1(coll1.repr)
+ val elems2 = coll2.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext)
+ b1 += ((el1, elems2.next))
+
+ b1.result
+ }
+
+ /** Wraps a tuple in a `Zipped`, which supports 2-ary generalisations of map, flatMap, filter,...
+ *
+ * @see Zipped
+ * $willNotTerminateInf
+ */
+ def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TraversableLike[El1, Repr1], w2: T2 => IterableLike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
+ = new Zipped[Repr1, El1, Repr2, El2](_1, _2)
+
+ class Zipped[+Repr1, +El1, +Repr2, +El2](coll1: TraversableLike[El1, Repr1], coll2: IterableLike[El2, Repr2]) { // coll2: IterableLike for filter
+ def map[B, To](f: (El1, El2) => B)(implicit cbf: CanBuildFrom[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext)
+ b += f(el1, elems2.next)
+
+ b.result
+ }
+
+ def flatMap[B, To](f: (El1, El2) => Traversable[B])(implicit cbf: CanBuildFrom[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext)
+ b ++= f(el1, elems2.next)
+
+ b.result
+ }
+
+ def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CanBuildFrom[Repr1, El1, To1], cbf2: CanBuildFrom[Repr2, El2, To2]): (To1, To2) = {
+ val b1 = cbf1(coll1.repr)
+ val b2 = cbf2(coll2.repr)
+ val elems2 = coll2.iterator
+
+ for(el1 <- coll1) {
+ if(elems2.hasNext) {
+ val el2 = elems2.next
+ if(f(el1, el2)) {
+ b1 += el1
+ b2 += el2
+ }
+ }
+ }
+
+ (b1.result, b2.result)
+ }
+
+ def exists(f: (El1, El2) => Boolean): Boolean = {
+ var acc = false
+ val elems2 = coll2.iterator
+
+ for(el1 <- coll1)
+ if(!acc && elems2.hasNext)
+ acc = f(el1, elems2.next)
+
+ acc
+ }
+
+ def forall(f: (El1, El2) => Boolean): Boolean = {
+ var acc = true
+ val elems2 = coll2.iterator
+
+ for(el1 <- coll1)
+ if(acc && elems2.hasNext)
+ acc = f(el1, elems2.next)
+
+ acc
+ }
+
+ def foreach[U](f: (El1, El2) => U): Unit = {
+ val elems2 = coll2.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext)
+ f(el1, elems2.next)
+ }
+ }
+"""
+}
+
+object TupleThree extends Tuple(3) {
+ override def imports = """
+import scala.collection.{TraversableLike, IterableLike}
+import scala.collection.generic.CanBuildFrom
+"""
+ override def moreMethods = """
+ def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => Iterable[El2],
+ w3: T3 => Iterable[El3],
+ cbf1: CanBuildFrom[Repr1, (El1, El2, El3), To]): To = {
+ val coll1: TraversableLike[El1, Repr1] = _1
+ val coll2: Iterable[El2] = _2
+ val coll3: Iterable[El3] = _3
+ val b1 = cbf1(coll1.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext && elems3.hasNext)
+ b1 += ((el1, elems2.next, elems3.next))
+
+ b1.result
+ }
+
+ /** Wraps a tuple in a `Zipped`, which supports 3-ary generalisations of map, flatMap, filter,...
+ *
+ * @see Zipped
+ * $willNotTerminateInf
+ */
+ def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => IterableLike[El2, Repr2],
+ w3: T3 => IterableLike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
+ = new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
+
+ class Zipped[+Repr1, +El1, +Repr2, +El2, +Repr3, +El3](coll1: TraversableLike[El1, Repr1],
+ coll2: IterableLike[El2, Repr2],
+ coll3: IterableLike[El3, Repr3]) {
+ def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CanBuildFrom[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext && elems3.hasNext)
+ b += f(el1, elems2.next, elems3.next)
+
+ b.result
+ }
+
+ def flatMap[B, To](f: (El1, El2, El3) => Traversable[B])(implicit cbf: CanBuildFrom[Repr1, B, To]): To = {
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext && elems3.hasNext)
+ b ++= f(el1, elems2.next, elems3.next)
+
+ b.result
+ }
+
+ def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
+ implicit cbf1: CanBuildFrom[Repr1, El1, To1],
+ cbf2: CanBuildFrom[Repr2, El2, To2],
+ cbf3: CanBuildFrom[Repr3, El3, To3]): (To1, To2, To3) = {
+ val b1 = cbf1(coll1.repr)
+ val b2 = cbf2(coll2.repr)
+ val b3 = cbf3(coll3.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for(el1 <- coll1) {
+ if(elems2.hasNext && elems3.hasNext) {
+ val el2 = elems2.next
+ val el3 = elems3.next
+ if(f(el1, el2, el3)) {
+ b1 += el1
+ b2 += el2
+ b3 += el3
+ }
+ }
+ }
+
+ (b1.result, b2.result, b3.result)
+ }
+
+ def exists(f: (El1, El2, El3) => Boolean): Boolean = {
+ var acc = false
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for(el1 <- coll1)
+ if(!acc && elems2.hasNext && elems3.hasNext)
+ acc = f(el1, elems2.next, elems3.next)
+
+ acc
+ }
+
+ def forall(f: (El1, El2, El3) => Boolean): Boolean = {
+ var acc = true
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for(el1 <- coll1)
+ if(acc && elems2.hasNext && elems3.hasNext)
+ acc = f(el1, elems2.next, elems3.next)
+
+ acc
+ }
+
+ def foreach[U](f: (El1, El2, El3) => U): Unit = {
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ for(el1 <- coll1)
+ if(elems2.hasNext && elems3.hasNext)
+ f(el1, elems2.next, elems3.next)
+ }
+ }
"""
}
@@ -256,7 +511,7 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity
{
// prettifies it a little if it's overlong
def mkToString() = {
- def str(xs: List[String]) = xs.mkString(""" + "," + """)
+ def str(xs: List[String]) = xs.mkString(""" + "," + """)
if (i <= MAX_ARITY / 2) str(mdefs)
else {
val s1 = str(mdefs take (i / 2))
@@ -288,7 +543,21 @@ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */
object Product extends Group("Product")
{
def make(i: Int) = apply(i)()
- def apply(i: Int) = new Product(i)
+ def apply(i: Int) = i match {
+ case 1 => ProductOne
+ case 2 => ProductTwo
+ case _ => new Product(i)
+ }
+}
+
+object ProductOne extends Product(1)
+{
+ override def covariantSpecs = "@specialized(Int, Long, Double) "
+}
+
+object ProductTwo extends Product(2)
+{
+ override def covariantSpecs = "@specialized(Int, Long, Double) "
}
class Product(val i: Int) extends Group("Product") with Arity
@@ -339,3 +608,44 @@ trait {className}{covariantArgs} extends Product {{
</file>}
}
+
+/** Abstract functions **/
+
+object AbstractFunctionZero extends AbstractFunction(0) {
+ override def covariantSpecs = FunctionZero.covariantSpecs
+}
+
+object AbstractFunctionOne extends AbstractFunction(1) {
+ override def covariantSpecs = FunctionOne.covariantSpecs
+ override def contravariantSpecs = FunctionOne.contravariantSpecs
+}
+
+object AbstractFunctionTwo extends AbstractFunction(2) {
+ override def covariantSpecs = FunctionTwo.covariantSpecs
+ override def contravariantSpecs = FunctionTwo.contravariantSpecs
+}
+
+class AbstractFunction(val i: Int) extends Group("AbstractFunction") with Arity
+{
+ override def packageDef = "scala.runtime"
+
+ val superTypeArgs = typeArgsString(targs ::: List("R"))
+
+ def apply() = {
+<file name={"runtime/" + fileName}>{header}
+abstract class {className}{contraCoArgs} extends Function{i}{superTypeArgs} {{
+{moreMethods}
+}}
+</file>}
+
+}
+object AbstractFunction
+{
+ def make(i: Int) = apply(i)()
+ def apply(i: Int) = i match {
+ case 0 => AbstractFunctionZero
+ case 1 => AbstractFunctionOne
+ case 2 => AbstractFunctionTwo
+ case _ => new AbstractFunction(i)
+ }
+}
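
    What the regenerated FunctionN/TupleN sources above give user code, in a small sketch:
    curried (with curry kept only as a deprecated alias), the new tupled method, and the
    zipped support added to Tuple2. The values are made up for illustration.

    object GenprodSketch {
      def main(args: Array[String]): Unit = {
        val max = (x: Int, y: Int) => if (x < y) y else x

        println(max.curried(1)(2))            // 2 -- f(x, y) == (f.curried)(x)(y)
        println(max.tupled((3, 4)))           // 4 -- f(x, y) == (f.tupled)(Tuple2(x, y))

        val xs = List(1, 2, 3)
        val ys = List(10, 20, 30)
        println((xs, ys).zipped.map(_ + _))   // List(11, 22, 33), stops at the shorter collection
      }
    }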
diff --git a/src/build/maven/continuations-plugin-pom.xml b/src/build/maven/continuations-plugin-pom.xml
new file mode 100644
index 0000000000..0277b899ed
--- /dev/null
+++ b/src/build/maven/continuations-plugin-pom.xml
@@ -0,0 +1,51 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang.plugins</groupId>
+ <artifactId>continuations</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2010</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ </scm>
+ <issueManagement>
+ <system>trac</system>
+ <url>http://lampsvn.epfl.ch/trac/scala
+ </url>
+ </issueManagement>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+</project>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 4075c25ae0..2d6cffe71f 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -28,7 +28,7 @@
<target name="init.maven" depends="init.properties">
<!-- Add our maven ant tasks -->
- <path id="maven-ant-tasks.classpath" path="maven-ant-tasks-2.0.9.jar" />
+ <path id="maven-ant-tasks.classpath" path="maven-ant-tasks-2.1.1.jar" />
<typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
<!-- simplify fixing pom versions -->
<macrodef name="make-pom">
@@ -45,6 +45,21 @@
<artifact:pom id="@{name}.pom" file="@{name}/@{name}-pom-fixed.xml" />
</sequential>
</macrodef>
+
+ <macrodef name="make-pom-plugin">
+ <attribute name="name" />
+ <attribute name="version" />
+ <sequential>
+ <copy file="plugins/@{name}/@{name}-plugin-pom.xml" tofile="plugins/@{name}/@{name}-pom-fixed.xml" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="@{version}" />
+ <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
+ <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
+ </filterset>
+ </copy>
+ <artifact:pom id="plugin-@{name}.pom" file="plugins/@{name}/@{name}-pom-fixed.xml" />
+ </sequential>
+ </macrodef>
<!-- Simply attaching documentation -->
<macrodef name="attach-doc">
<attribute name="name" />
@@ -72,6 +87,23 @@
</sequential>
</macrodef>
+ <!-- Deploy compiler plugins -->
+ <macrodef name="deploy-local-plugin">
+ <attribute name="name" />
+ <attribute name="version" />
+ <attribute name="repository" />
+ <element name="extra-attachments" optional="yes" />
+ <sequential>
+ <make-pom-plugin name="@{name}" version="@{version}" />
+ <artifact:install file="plugins/@{name}/@{name}.jar">
+ <artifact:pom refid="plugin-@{name}.pom" />
+ <artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
+ <extra-attachments />
+ </artifact:install>
+ </sequential>
+ </macrodef>
+
+
<!-- Deploy all artifacts locally -->
<macrodef name="deploy-local-all">
<attribute name="repository" />
@@ -87,6 +119,7 @@
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
<!-- scala swing api is included in main library api
<extra-attachments>
<artifact:attach type="jar" file="scala-swing/scala-swing-docs.jar" classifier="javadoc" />
@@ -116,6 +149,23 @@
</sequential>
</macrodef>
+
+ <!-- Deploy compiler plugins -->
+ <macrodef name="deploy-remote-plugin">
+ <attribute name="name" />
+ <attribute name="version" />
+ <attribute name="repository" />
+ <element name="extra-attachments" optional="yes" />
+ <sequential>
+ <make-pom-plugin name="@{name}" version="@{version}" />
+ <artifact:deploy file="plugins/@{name}/@{name}.jar" settingsFile="${settings.file}">
+ <artifact:pom refid="plugin-@{name}.pom" />
+ <artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
+ <extra-attachments />
+ </artifact:deploy>
+ </sequential>
+ </macrodef>
+
<!-- Deploy all artifacts locally -->
<macrodef name="deploy-remote-all">
<attribute name="repository" />
@@ -131,6 +181,7 @@
<deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
<!-- scala swing api is included in main library api
<extra-attachments>
<artifact:attach type="jar" file="scala-swing/scala-swing-docs.jar" classifier="javadoc" />
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 3011503527..6e7c5b03ee 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -100,8 +100,10 @@ MAIN DISTRIBUTION SBAZ
link="${sbaz.universe}/scala-devel-${version.number}.sbp">
<binset
dir="${dist.dir}/bin"
- includes="scala,scala.bat,scalac,scalac.bat,scalap,scalap.bat,scaladoc,scaladoc.bat,fsc,fsc.bat"/>
+ includes="scala,scala.bat,scalac,scalac.bat,scaladoc,scaladoc.bat,fsc,fsc.bat"/>
<libset dir="${dist.dir}/lib" includes="scala-compiler.jar,jline.jar"/>
+ <miscset dir="${dist.dir}/misc/scala-devel"
+ includes="plugins/continuations.jar"/>
<manset dir="${dist.dir}/man" includes="**"/>
<srcset dir="${dist.dir}/src" includes="scala-compiler-src.jar"/>
</sbaz>
@@ -118,9 +120,10 @@ MAIN DISTRIBUTION SBAZ
<binset dir="${basedir}/test"
includes="clitest,diff/diff.*,diff/lib*.dll,partest,partest.bat"/>
<miscset dir="${basedir}/test"
- includes="files/**/*.args,files/**/*.check,files/**/*.dll,files/**/*.jar,files/**/*.java,files/**/*.scala,files/**/*.flags,files/cli/**/*.check.*,files/jvm/*.so,files/shootout/*.javaopts,files/shootout/*.runner,files/shootout/*.txt"/>
+ includes="files/**/*.args,files/**/*.check,files/**/*.dll,files/**/*.jar,files/**/*.java,files/**/*.scala,files/**/*.flags,files/cli/**/*.check.*,files/jvm/*.so,files/shootout/*.javaopts,files/shootout/*.runner,files/shootout/*.txt,files/**/*.test"/>
<!-- <srcset dir="${dist.dir}/src" includes="scala-partest-src.jar"/> -->
<libset dir="${dist.dir}/lib" includes="scala-partest.jar"/>
+ <libset dir="${lib.dir}" includes="scalacheck.jar"/>
</sbaz>
</target>
@@ -194,7 +197,25 @@ MAIN DISTRIBUTION SBAZ
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
- <target name="pack-maven.docs" depends="pack-maven.libs">
+ <target name="pack-maven.plugins" depends="pack-maven.start">
+ <macrodef name="mvn-copy-plugin">
+ <attribute name="mvn.artifact.name"/>
+ <sequential>
+ <mkdir dir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}"/>
+ <copy todir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}">
+ <fileset dir="${dist.dir}/misc/scala-devel/plugins/">
+ <filename name="@{mvn.artifact.name}.jar"/>
+ </fileset>
+ <fileset dir="${src.dir}/build/maven/">
+ <filename name="@{mvn.artifact.name}-plugin-pom.xml"/>
+ </fileset>
+ </copy>
+ </sequential>
+ </macrodef>
+ <mvn-copy-plugin mvn.artifact.name="continuations"/>
+ </target>
+
+ <target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
<jar destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
@@ -221,7 +242,7 @@ MAIN DISTRIBUTION SBAZ
<target name="pack-maven.scripts" depends="pack-maven.latest.unix,pack-maven.latest.win">
<copy todir="${dists.dir}/maven/${version.number}"
- file="${lib-ant.dir}/maven-ant-tasks-2.0.9.jar"/>
+ file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar"/>
<copy tofile="${dists.dir}/maven/${version.number}/build.xml"
file="${src.dir}/build/maven/maven-deploy.xml"/>
<!-- export properties for use when deploying -->
diff --git a/src/compiler/scala/tools/ant/FastScalac.scala b/src/compiler/scala/tools/ant/FastScalac.scala
index 074cb42f67..8c8363a321 100644
--- a/src/compiler/scala/tools/ant/FastScalac.scala
+++ b/src/compiler/scala/tools/ant/FastScalac.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index ffee39777e..12a8706a1c 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -71,7 +71,7 @@ class Pack200Task extends MatchingTask {
* is used to remove empty packages and improve pack200 optimization.
* @param keep
* true to retain file ordering.
- * false to optomize directory structure (DEFAULT). */
+ * false to optimize directory structure (DEFAULT). */
def setKeepFileOrder(x: Boolean) { keepFileOrder = x }
/** If false, a single modification time is used for all contained files */
@@ -99,7 +99,7 @@ class Pack200Task extends MatchingTask {
\*============================================================================*/
/** Gets the list of individual JAR files for processing.
- * @returns The list of JAR files */
+ * @return The list of JAR files */
private def getFileList: List[File] = {
var files: List[File] = Nil
val fs = getImplicitFileSet
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index 9d4bebe060..4f7173e68b 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant
@@ -37,7 +36,7 @@ import org.apache.tools.ant.types.Mapper
class Same extends MatchingTask {
/** The unique Ant file utilities instance to use in this task. */
- private val fileUtils = FileUtils.newFileUtils()
+ private val fileUtils = FileUtils.getFileUtils()
/*============================================================================*\
** Ant user-properties **
diff --git a/src/compiler/scala/tools/ant/ScalaBazaar.scala b/src/compiler/scala/tools/ant/ScalaBazaar.scala
index a6e44f95c3..1c1b0ccb46 100644
--- a/src/compiler/scala/tools/ant/ScalaBazaar.scala
+++ b/src/compiler/scala/tools/ant/ScalaBazaar.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant {
@@ -56,7 +55,7 @@ package scala.tools.ant {
class ScalaBazaar extends Task {
/** The unique Ant file utilities instance to use in this task. */
- private val fileUtils = FileUtils.newFileUtils()
+ private val fileUtils = FileUtils.getFileUtils()
/******************************************************************************\
** Ant user-properties **
@@ -177,19 +176,19 @@ package scala.tools.ant {
\******************************************************************************/
/** Gets the value of the file attribute in a Scala-friendly form.
- * @returns The file as a file. */
+ * @return The file as a file. */
private def getName: String =
if (name.isEmpty) error("Name attribute must be defined first.")
else name.get
/** Gets the value of the file attribute in a Scala-friendly form.
- * @returns The file as a file. */
+ * @return The file as a file. */
private def getFile: File =
if (file.isEmpty) error("Member 'file' is empty.")
else getProject().resolveFile(file.get.toString())
/** Gets the value of the adfile attribute in a Scala-friendly form.
- * @returns The adfile as a file. */
+ * @return The adfile as a file. */
private def getAdfile: File =
if (adfile.isEmpty) error("Member 'adfile' is empty.")
else getProject().resolveFile(adfile.get.toString())
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index b64c4481a5..db53224b4f 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant
@@ -72,7 +71,7 @@ class ScalaTool extends MatchingTask {
private var classpathPath: Path = emptyPath
/** Comma-separated Java system properties to pass to the JRE. Properties
- * are formated as name=value. Properties scala.home, scala.tool.name and
+ * are formatted as name=value. Properties scala.home, scala.tool.name and
* scala.tool.version are always set. */
private var properties: List[(String, String)] = Nil
@@ -166,12 +165,12 @@ class ScalaTool extends MatchingTask {
\*============================================================================*/
/** Gets the value of the classpath attribute in a Scala-friendly form.
- * @returns The class path as a list of files. */
+ * @return The class path as a list of files. */
private def getUnixclasspath: String =
transposeVariableMarkup(classpath.mkString("", ":", "").replace('\\', '/'), "${", "}")
/** Gets the value of the classpath attribute in a Scala-friendly form.
- * @returns The class path as a list of files. */
+ * @return The class path as a list of files. */
private def getWinclasspath: String =
transposeVariableMarkup(classpath.mkString("", ";", "").replace('/', '\\'), "%", "%")
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index aaa50cdd08..6cda588419 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant
@@ -14,11 +13,11 @@ import java.io.{File,PrintWriter,BufferedWriter,FileWriter}
import org.apache.tools.ant.{ BuildException, Project, AntClassLoader }
import org.apache.tools.ant.taskdefs.{MatchingTask,Java}
-import org.apache.tools.ant.types.{Path, Reference, FileSet}
+import org.apache.tools.ant.types.{Path, Reference}
import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper,
SourceFileScanner}
-import scala.tools.nsc.{Global, Settings}
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
/** <p>
@@ -69,10 +68,10 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
*
* @author Gilles Dubochet, Stephane Micheloud
*/
-class Scalac extends MatchingTask {
+class Scalac extends MatchingTask with ScalacShared {
/** The unique Ant file utilities instance to use in this task. */
- private val fileUtils = FileUtils.newFileUtils()
+ private val fileUtils = FileUtils.getFileUtils()
/*============================================================================*\
** Ant user-properties **
@@ -105,10 +104,10 @@ class Scalac extends MatchingTask {
/** Defines valid values for the <code>deprecation</code> and
* <code>unchecked</code> properties. */
object Flag extends PermissibleValue {
- val values = List("yes", "no", "on", "off")
+ val values = List("yes", "no", "on", "off", "true", "false")
def toBoolean(flag: String) =
- if (flag == "yes" || flag == "on") Some(true)
- else if (flag == "no" || flag == "off") Some(false)
+ if (flag == "yes" || flag == "on" || flag == "true") Some(true)
+ else if (flag == "no" || flag == "off" || flag == "false") Some(false)
else None
}
@@ -477,6 +476,9 @@ class Scalac extends MatchingTask {
/** Initializes settings and source files */
protected def initialize: (Settings, List[File], Boolean) = {
+ if (scalacDebugging)
+ log("Base directory is `%s`".format(scala.tools.nsc.io.Path("").normalize))
+
// Tests if all mandatory attributes are set and valid.
if (origin.isEmpty) error("Attribute 'srcdir' is not set.")
if (!destination.isEmpty && !destination.get.isDirectory())
@@ -558,11 +560,22 @@ class Scalac extends MatchingTask {
if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get
log("Scalac params = '" + addParams + "'", Project.MSG_DEBUG)
- settings.parseParams(addParams)
- (settings, sourceFiles, javaOnly)
- }
+ // let CompilerCommand processes all params
+ val command = new CompilerCommand(settings.splitParams(addParams), settings)
+
+ // resolve dependenciesFile path from project's basedir, so <ant antfile ...> call from other project works.
+ // the dependenciesFile may be relative path to basedir or absolute path, in either case, the following code
+ // will return correct answer.
+ command.settings.dependenciesFile.value match {
+ case "none" =>
+ case x =>
+ val depFilePath = scala.tools.nsc.io.Path(x)
+ command.settings.dependenciesFile.value = scala.tools.nsc.io.Path(getProject.getBaseDir).normalize resolve depFilePath path
+ }
+ (command.settings, sourceFiles, javaOnly)
+ }
override def execute() {
val (settings, sourceFiles, javaOnly) = initialize
@@ -586,13 +599,13 @@ class Scalac extends MatchingTask {
if (compilerPath.isDefined) path add compilerPath.get
else getClass.getClassLoader match {
case cl: AntClassLoader => path add new Path(getProject, cl.getClasspath)
- case _ => error("Cannot determine default classpath for sclac, please specify one!")
+ case _ => error("Cannot determine default classpath for scalac, please specify one!")
}
path
}
java setClasspath scalacPath
- java setClassname "scala.tools.nsc.Main"
+ java setClassname MainClass
// Write all settings to a temporary file
def writeSettings() : File = {
@@ -602,20 +615,16 @@ class Scalac extends MatchingTask {
val out = new PrintWriter(new BufferedWriter(new FileWriter(file)))
try {
- for (setting <- settings.allSettings ; arg <- setting.unparse)
+ for (setting <- settings.visibleSettings ; arg <- setting.unparse)
out println escapeArgument(arg)
for (file <- sourceFiles)
- out println file.getAbsolutePath
+ out println escapeArgument(file.getAbsolutePath)
}
finally out.close()
file
}
-
- java.createArg() setValue ("@" + writeSettings.getCanonicalPath)
- log(java.getCommandLine.getCommandline.mkString(" "), Project.MSG_VERBOSE)
-
- val res = java.executeJava()
+ val res = execWithArgFiles(java, List(writeSettings.getCanonicalPath))
if (failonerror && res != 0)
error("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
diff --git a/src/compiler/scala/tools/ant/ScalacShared.scala b/src/compiler/scala/tools/ant/ScalacShared.scala
new file mode 100644
index 0000000000..356fb99e56
--- /dev/null
+++ b/src/compiler/scala/tools/ant/ScalacShared.scala
@@ -0,0 +1,25 @@
+/* __ *\
+** ________ ___ / / ___ Scala Ant Tasks **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools.ant
+
+import org.apache.tools.ant.Project
+import org.apache.tools.ant.taskdefs.{ MatchingTask, Java }
+import scala.tools.nsc.io
+
+trait ScalacShared extends MatchingTask {
+ val MainClass = "scala.tools.nsc.Main"
+
+ def execWithArgFiles(java: Java, paths: List[String]) = {
+ paths foreach (p => java.createArg() setValue ("@"+ p))
+
+ val debugString = paths map (x => " (@ = '%s')".format(io.File(x).slurp())) mkString ""
+ log(java.getCommandLine.getCommandline.mkString("", " ", debugString), Project.MSG_VERBOSE)
+ java.executeJava()
+ }
+}
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index 8a85393293..3d96b959f5 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant
@@ -17,8 +16,8 @@ import org.apache.tools.ant.taskdefs.MatchingTask
import org.apache.tools.ant.types.{Path, Reference}
import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper}
-import scala.tools.nsc.{Global, Settings}
-import scala.tools.nsc.doc.DefaultDocDriver
+import scala.tools.nsc.Global
+import scala.tools.nsc.doc.Settings
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
/** <p>
@@ -40,9 +39,7 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* <li>extdirs,</li>
* <li>extdirsref,</li>
* <li>encoding,</li>
- * <li>windowtitle,</li>
* <li>doctitle,</li>
- * <li>stylesheetfile,</li>
* <li>header,</li>
* <li>footer,</li>
* <li>top,</li>
@@ -67,7 +64,7 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
class Scaladoc extends MatchingTask {
/** The unique Ant file utilities instance to use in this task. */
- private val fileUtils = FileUtils.newFileUtils()
+ private val fileUtils = FileUtils.getFileUtils()
/*============================================================================*\
** Ant user-properties **
@@ -103,20 +100,14 @@ class Scaladoc extends MatchingTask {
/** The character encoding of the files to compile. */
private var encoding: Option[String] = None
- /** The window title of the generated HTML documentation. */
- private var windowtitle: Option[String] = None
-
/** The document title of the generated HTML documentation. */
private var doctitle: Option[String] = None
- /** The user-specified stylesheet file. */
- private var stylesheetfile: Option[String] = None
+ /** The document version, to be added to the title. */
+ private var docversion: Option[String] = None
- /** The user-specified header/footer and top/bottom texts. */
- private var pageheader: Option[String] = None
- private var pagefooter: Option[String] = None
- private var pagetop : Option[String] = None
- private var pagebottom: Option[String] = None
+ /** Instruct the compiler to generate links to sources */
+ private var docsourceurl: Option[String] = None
/** Instruct the compiler to use additional parameters */
private var addParams: String = ""
@@ -275,60 +266,28 @@ class Scaladoc extends MatchingTask {
encoding = Some(input)
}
- /** Sets the <code>windowtitle</code> attribute.
- *
- * @param input The value of <code>windowtitle</code>.
- */
- def setWindowtitle(input: String) {
- windowtitle = Some(input)
- }
-
- /** Sets the <code>doctitle</code> attribute.
- *
- * @param input The value of <code>doctitle</code>.
- */
- def setDoctitle(input: String) {
- doctitle = Some(input)
- }
-
- /** Sets the <code>stylesheetfile</code> attribute.
- *
- * @param input The value of <code>stylesheetfile</code>.
- */
- def setStylesheetfile(input: String) {
- stylesheetfile = Some(input)
- }
-
- /** Sets the <code>header</code> attribute.
- *
- * @param input The value of <code>header</code>.
- */
- def setHeader(input: String) {
- pageheader = Some(input)
- }
-
- /** Sets the <code>footer</code> attribute.
+ /** Sets the <code>docversion</code> attribute.
*
- * @param input The value of <code>footer</code>.
+ * @param input The value of <code>docversion</code>.
*/
- def setFooter(input: String) {
- pagefooter = Some(input)
+ def setDocversion(input: String) {
+ docversion = Some(input)
}
- /** Sets the <code>top</code> attribute.
+ /** Sets the <code>docsourceurl</code> attribute.
*
- * @param input The value of <code>top</code>.
+ * @param input The value of <code>docsourceurl</code>.
*/
- def setTop(input: String) {
- pagetop = Some(input)
+ def setDocsourceurl(input: String) {
+ docsourceurl = Some(input)
}
- /** Sets the <code>bottom</code> attribute.
+ /** Sets the <code>doctitle</code> attribute.
*
- * @param input The value of <code>bottom</code>.
+ * @param input The value of <code>doctitle</code>.
*/
- def setBottom(input: String) {
- pagebottom = Some(input)
+ def setDoctitle(input: String) {
+ doctitle = Some(input)
}
/** Set the <code>addparams</code> info attribute.
@@ -492,7 +451,7 @@ class Scaladoc extends MatchingTask {
\*============================================================================*/
/** Initializes settings and source files */
- protected def initialize: Pair[scala.tools.nsc.doc.Settings, List[File]] = {
+ protected def initialize: Pair[Settings, List[File]] = {
// Tests if all mandatory attributes are set and valid.
if (origin.isEmpty) error("Attribute 'srcdir' is not set.")
if (getOrigin.isEmpty) error("Attribute 'srcdir' is not set.")
@@ -542,7 +501,7 @@ class Scaladoc extends MatchingTask {
// Builds-up the compilation settings for Scalac with the existing Ant
// parameters.
- val docSettings = new scala.tools.nsc.doc.Settings(error)
+ val docSettings = new Settings(error)
docSettings.outdir.value = asString(destination.get)
if (!classpath.isEmpty)
docSettings.classpath.value = asString(getClasspath)
@@ -554,55 +513,24 @@ class Scaladoc extends MatchingTask {
docSettings.bootclasspath.value = asString(getBootclasspath)
if (!extdirs.isEmpty) docSettings.extdirs.value = asString(getExtdirs)
if (!encoding.isEmpty) docSettings.encoding.value = encoding.get
- if (!windowtitle.isEmpty) docSettings.windowtitle.value = windowtitle.get
if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get)
- if (!stylesheetfile.isEmpty) docSettings.stylesheetfile.value = stylesheetfile.get
- if (!pageheader.isEmpty) docSettings.pageheader.value = decodeEscapes(pageheader.get)
- if (!pagefooter.isEmpty) docSettings.pagefooter.value = decodeEscapes(pagefooter.get)
- if (!pagetop.isEmpty) docSettings.pagetop.value = decodeEscapes(pagetop.get)
- if (!pagebottom.isEmpty) docSettings.pagebottom.value = decodeEscapes(pagebottom.get)
+ if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get)
+ if (!docsourceurl.isEmpty) docSettings.docsourceurl.value =decodeEscapes(docsourceurl.get)
docSettings.deprecation.value = deprecation
docSettings.unchecked.value = unchecked
log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
- var args =
- if (addParams.trim() == "") Nil
- else addParams.trim().split(" ").toList.map(_.trim())
-
- while (!args.isEmpty) {
- if (args.head startsWith "-") {
- val args0 = args
- args = docSettings.parseParams(args)
- if (args0 eq args) error("Parameter '" + args.head + "' is not recognised by Scaladoc.")
- }
- else if (args.head == "") args = args.tail
- else error("Parameter '" + args.head + "' does not start with '-'.")
- }
+ docSettings processArgumentString addParams
Pair(docSettings, sourceFiles)
}
/** Performs the compilation. */
override def execute() = {
- val Pair(commandSettings, sourceFiles) = initialize
- val reporter = new ConsoleReporter(commandSettings)
-
- // Compiles the actual code
- val compiler = new Global(commandSettings, reporter) {
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.typerFactory
- }
- override def onlyPresentation = true
- }
+ val Pair(docSettings, sourceFiles) = initialize
+ val reporter = new ConsoleReporter(docSettings)
try {
- val run = new compiler.Run
- run.compile(sourceFiles.map (_.toString))
- object generator extends DefaultDocDriver {
- lazy val global: compiler.type = compiler
- lazy val settings = commandSettings
- }
- generator.process(run.units)
+ val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
+ docProcessor.document(sourceFiles.map (_.toString))
if (reporter.ERROR.count > 0)
error(
"Document failed with " +
diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala
index 3a8451e6ef..95bc5cc14e 100644
--- a/src/compiler/scala/tools/ant/sabbus/Break.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Break.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
diff --git a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala b/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala
index f73a482d15..94e2ceffd2 100644
--- a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala
+++ b/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
diff --git a/src/compiler/scala/tools/ant/sabbus/Compiler.scala b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
index 787c6af870..6d8473ef7d 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compiler.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
import java.io.File
import java.net.URL
import java.lang.reflect.InvocationTargetException
-import scala.util.ScalaClassLoader
+import scala.tools.nsc.util.ScalaClassLoader
class Compiler(classpath: Array[URL], val settings: Settings)
{
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index 33a328f140..91a8580475 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
diff --git a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
index 06a429fe2b..697ec44bd0 100644
--- a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
@@ -32,7 +31,7 @@ class ForeignCompiler {
private lazy val nsc: Global = {
try {
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
new Global(command.settings, reporter)
}
catch {
@@ -42,7 +41,7 @@ class ForeignCompiler {
}
def compile(files: Array[File]): Int = {
- val command = new CompilerCommand(files.toList.map(_.toString), settings, error, true)
+ val command = new CompilerCommand(files.toList map (_.toString), settings)
(new nsc.Run) compile command.files
reporter.ERROR.count << 16 | reporter.WARNING.count
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala
index 93f8a60e16..9173745472 100644
--- a/src/compiler/scala/tools/ant/sabbus/Make.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Make.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
@@ -23,6 +22,6 @@ class Make extends Task with TaskArgs {
if (!compilationPath.isEmpty) settings.classpath = compilationPath.get
if (!sourcePath.isEmpty) settings.sourcepath = sourcePath.get
if (!params.isEmpty) settings.more = params.get
- Compilers.make(id.get, (compilerPath.get.list.map{ path => new File(path).toURL }), settings)
+ Compilers.make(id.get, (compilerPath.get.list.map{ path => new File(path).toURI.toURL }), settings)
}
}
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 8be3d4a4e3..689df48a88 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -1,22 +1,27 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala.tools.ant.sabbus
+package scala.tools.ant
+package sabbus
import java.io.File
import java.io.FileWriter
import org.apache.tools.ant.Project
-import org.apache.tools.ant.taskdefs.{MatchingTask, Java}
-import org.apache.tools.ant.util.{GlobPatternMapper, SourceFileScanner}
+import org.apache.tools.ant.taskdefs.{ MatchingTask, Java }
+import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner }
+import scala.tools.nsc.io
+import scala.tools.nsc.util.ScalaClassLoader
+
+class ScalacFork extends MatchingTask with ScalacShared with TaskArgs {
+ private def originOfThis: String =
+ ScalaClassLoader.originOfClass(classOf[ScalacFork]) map (_.toString) getOrElse "<unknown>"
-class ScalacFork extends MatchingTask with TaskArgs {
def setSrcdir(input: File) {
sourceDir = Some(input)
}
@@ -43,67 +48,73 @@ class ScalacFork extends MatchingTask with TaskArgs {
private var jvmArgs: Option[String] = None
private var argfile: Option[File] = None
+ private def createMapper() = {
+ val mapper = new GlobPatternMapper()
+ val extension = if (isMSIL) "*.msil" else "*.class"
+ mapper setTo extension
+ mapper setFrom "*.scala"
+
+ mapper
+ }
+
override def execute() {
- if (compilerPath.isEmpty) error("Mandatory attribute 'compilerpath' is not set.")
- if (sourceDir.isEmpty) error("Mandatory attribute 'srcdir' is not set.")
- if (destinationDir.isEmpty) error("Mandatory attribute 'destdir' is not set.")
+ def plural(x: Int) = if (x > 1) "s" else ""
+
+ log("Executing ant task scalacfork, origin: %s".format(originOfThis), Project.MSG_VERBOSE)
+
+ val compilerPath = this.compilerPath getOrElse error("Mandatory attribute 'compilerpath' is not set.")
+ val sourceDir = this.sourceDir getOrElse error("Mandatory attribute 'srcdir' is not set.")
+ val destinationDir = this.destinationDir getOrElse error("Mandatory attribute 'destdir' is not set.")
val settings = new Settings
- settings.d = destinationDir.get
- if (!compTarget.isEmpty) settings.target = compTarget.get
- if (!compilationPath.isEmpty) settings.classpath = compilationPath.get
- if (!sourcePath.isEmpty) settings.sourcepath = sourcePath.get
- if (compTarget.isDefined && compTarget.get == "msil") settings.sourcedir = sourceDir.get
- if (!params.isEmpty) settings.more = params.get
+ settings.d = destinationDir
- // not yet used: compilerPath, sourcedir (used in mapper), failonerror, timeout
+ compTarget foreach (settings.target = _)
+ compilationPath foreach (settings.classpath = _)
+ sourcePath foreach (settings.sourcepath = _)
+ params foreach (settings.more = _)
+
+ if (isMSIL)
+ settings.sourcedir = sourceDir
+
+ val mapper = createMapper()
- val mapper = new GlobPatternMapper()
- if (compTarget.isDefined && compTarget.get == "msil")
- mapper.setTo("*.msil")
- else
- mapper.setTo("*.class")
- mapper.setFrom("*.scala")
val includedFiles: Array[File] =
new SourceFileScanner(this).restrict(
- getDirectoryScanner(sourceDir.get).getIncludedFiles,
- sourceDir.get,
- destinationDir.get,
+ getDirectoryScanner(sourceDir).getIncludedFiles,
+ sourceDir,
+ destinationDir,
mapper
- ) map (new File(sourceDir.get, _))
- if (includedFiles.size > 0 || argfile.isDefined) {
- if (includedFiles.size > 0)
- log("Compiling "+ includedFiles.size +" file"+
- (if (includedFiles.size > 1) "s" else "") +" to "+ destinationDir.get)
- if (argfile.isDefined)
- log("Using argument file: @"+ argfile.get)
-
- val java = new Java(this) // set this as owner
- java.setFork(true)
- // using 'setLine' creates multiple arguments out of a space-separated string
- if (!jvmArgs.isEmpty) java.createJvmarg().setLine(jvmArgs.get)
- java.setClasspath(compilerPath.get)
- java.setClassname("scala.tools.nsc.Main")
- if (!timeout.isEmpty) java.setTimeout(timeout.get)
-
- //dump the arguments to a file and do "java @file"
- val tempArgFile = File.createTempFile("scalacfork","")
- val outf = new FileWriter(tempArgFile)
- for (arg <- settings.toArgs)
- { outf.write(arg) ; outf.write(" ") }
- for (file <- includedFiles)
- { outf.write(file.getPath) ; outf.write(" ") }
- outf.close
-
- java.createArg().setValue("@"+ tempArgFile.getAbsolutePath)
- if (argfile.isDefined)
- java.createArg().setValue("@"+ argfile.get)
-
- log(java.getCommandLine.getCommandline.mkString("", " ", ""), Project.MSG_VERBOSE)
- val res = java.executeJava()
- if (failOnError && res != 0)
- error("Compilation failed because of an internal compiler error;"+
- " see the error output for details.")
- }
+ ) map (x => new File(sourceDir, x))
+
+ /** Nothing to do. */
+ if (includedFiles.isEmpty && argfile.isEmpty)
+ return
+
+ if (includedFiles.nonEmpty)
+ log("Compiling %d file%s to %s".format(includedFiles.size, plural(includedFiles.size), destinationDir))
+
+ argfile foreach (x => log("Using argfile file: @" + x))
+
+ val java = new Java(this) // set this as owner
+ java setFork true
+ // using 'setLine' creates multiple arguments out of a space-separated string
+ jvmArgs foreach (java.createJvmarg() setLine _)
+ timeout foreach (java setTimeout _)
+
+ java setClasspath compilerPath
+ java setClassname MainClass
+
+ // dump the arguments to a file and do "java @file"
+ val tempArgFile = io.File.makeTemp("scalacfork")
+ val tokens = settings.toArgs ++ (includedFiles map (_.getPath))
+ tempArgFile writeAll (tokens mkString " ")
+
+ val paths = List(Some(tempArgFile.toAbsolute.path), argfile).flatten map (_.toString)
+ val res = execWithArgFiles(java, paths)
+
+ if (failOnError && res != 0)
+ error("Compilation failed because of an internal compiler error;"+
+ " see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index b5f2854ffb..2304c31a0b 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
@@ -14,7 +13,6 @@ import java.io.File
import org.apache.tools.ant.types.{Path, Reference}
-@cloneable
class Settings {
private var gBf: Option[String] = None
diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
index 4583b5dd84..977bfee3d4 100644
--- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
+++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
@@ -14,22 +13,10 @@ import java.io.File
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference}
-trait TaskArgs { this: Task =>
+trait CompilationPathProperty {
+ this: Task =>
- def setId(input: String) {
- id = Some(input)
- }
-
- def setParams(input: String) {
- params = params match {
- case None => Some(input)
- case Some(ps) => Some(ps + " " + input)
- }
- }
-
- def setTarget(input: String) {
- compTarget = Some(input)
- }
+ protected var compilationPath: Option[Path] = None
def setCompilationPath(input: Path) {
if (compilationPath.isEmpty) compilationPath = Some(input)
@@ -44,6 +31,25 @@ trait TaskArgs { this: Task =>
def setCompilationPathRef(input: Reference) {
createCompilationPath.setRefid(input)
}
+}
+
+trait TaskArgs extends CompilationPathProperty {
+ this: Task =>
+
+ def setId(input: String) {
+ id = Some(input)
+ }
+
+ def setParams(input: String) {
+ params = params match {
+ case None => Some(input)
+ case Some(ps) => Some(ps + " " + input)
+ }
+ }
+
+ def setTarget(input: String) {
+ compTarget = Some(input)
+ }
def setSrcPath(input: Path) {
if (sourcePath.isEmpty) sourcePath = Some(input)
@@ -80,8 +86,9 @@ trait TaskArgs { this: Task =>
protected var id: Option[String] = None
protected var params: Option[String] = None
protected var compTarget: Option[String] = None
- protected var compilationPath: Option[Path] = None
protected var sourcePath: Option[Path] = None
protected var compilerPath: Option[Path] = None
protected var destinationDir: Option[File] = None
+
+ def isMSIL = compTarget exists (_ == "msil")
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index 3cb7ed29d1..a466d7b18a 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.ant.sabbus
@@ -61,7 +60,7 @@ class Use extends MatchingTask {
if (errors > 0)
error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".")
else if (warnings > 0)
- log("Compilation suceeded with " + warnings + " warning" + (if (warnings > 1) "s" else "") + ".")
+ log("Compilation succeeded with " + warnings + " warning" + (if (warnings > 1) "s" else "") + ".")
}
catch {
case CompilationFailure(msg, ex) =>
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index 3e22fa6e1e..b87463f0b9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -1,7 +1,7 @@
#!/bin/sh
-
+#
##############################################################################
-# Copyright 2002-2009, LAMP/EPFL
+# Copyright 2002-2010, LAMP/EPFL
#
# This is free software; see the distribution for copying conditions.
# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
@@ -63,6 +63,19 @@ fi
# Reminder: substitution ${JAVA_OPTS:=-Xmx256M -Xms16M} DO NOT work on Solaris
[ -n "$JAVA_OPTS" ] || JAVA_OPTS="@javaflags@"
+# break out -D options and add them to JAVA_OPTS as well so they reach the
+# underlying JVM in time to do some good.
+for i
+do
+ case "$i" in
+ -D*)
+ JAVA_OPTS="$JAVA_OPTS $i" ;;
+ *)
+ ;;
+ esac
+done
+
+
if [ -z "$JAVACMD" -a -n "$JAVA_HOME" -a -x "$JAVA_HOME/bin/java" ]; then
JAVACMD="$JAVA_HOME/bin/java"
fi
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 4bdb6adccd..ad72e3ff64 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -1,7 +1,7 @@
@@echo off
rem ##########################################################################
-rem # Copyright 2002-2009, LAMP/EPFL
+rem # Copyright 2002-2010, LAMP/EPFL
rem #
rem # This is free software; see the distribution for copying conditions.
rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
new file mode 100644
index 0000000000..9b8bef4a9a
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -0,0 +1,91 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import scala.collection.mutable.ListBuffer
+
+trait CommandLineConfig {
+ def enforceArity: Boolean = true
+ def onlyKnownOptions: Boolean = true
+}
+
+/** An instance of a command line, parsed according to a Spec.
+ */
+class CommandLine(val spec: Reference, val originalArgs: List[String]) extends CommandLineConfig {
+ def this(spec: Reference, line: String) = this(spec, Parser tokenize line)
+ def this(spec: Reference, args: Array[String]) = this(spec, args.toList)
+
+ import spec.{ isAnyOption, isUnaryOption, isBinaryOption, isExpandOption }
+
+ val Terminator = "--"
+ val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
+
+ def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
+ def errorFn(msg: String) = println(msg)
+
+ /** argMap is option -> argument (or "" if it is a unary argument)
+ * residualArgs are what is left after removing the options and their args.
+ */
+ lazy val (argMap, residualArgs) = {
+ val residualBuffer = new ListBuffer[String]
+
+ def loop(args: List[String]): Map[String, String] = {
+ def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
+
+ /** Returns Some(List(args)) if this option expands to an
+ * argument list and it's not returning only the same arg.
+ */
+ def expand(s1: String) = {
+ if (isExpandOption(s1)) {
+ val s2 = spec expandArg s1
+ if (s2 == List(s1)) None
+ else Some(s2)
+ }
+ else None
+ }
+
+ /** Assumes known options have all been ruled out already. */
+ def isUnknown(opt: String) =
+ onlyKnownOptions && (opt startsWith "-") && {
+ errorFn("Option '%s' not recognized.".format(opt))
+ true
+ }
+
+ args match {
+ case Nil => Map()
+ case Terminator :: xs => residual(xs)
+ case x :: Nil =>
+ expand(x) foreach (exp => return loop(exp))
+ if (isBinaryOption(x) && enforceArity)
+ errorFn("Option '%s' requires argument, found EOF instead.".format(x))
+
+ if (isUnaryOption(x)) mapForUnary(x)
+ else if (isUnknown(x)) Map()
+ else residual(args)
+
+ case x1 :: x2 :: xs =>
+ expand(x1) foreach (exp => return loop(exp ++ args.tail))
+
+ if (x2 == Terminator) mapForUnary(x1) ++ residual(xs)
+ else if (isUnaryOption(x1)) mapForUnary(x1) ++ loop(args.tail)
+ else if (isBinaryOption(x1)) Map(x1 -> x2) ++ loop(xs)
+ else if (isUnknown(x1)) loop(args.tail)
+ else residual(List(x1)) ++ loop(args.tail)
+ }
+ }
+
+ (loop(originalArgs), residualBuffer map stripQuotes toList)
+ }
+
+ def apply(arg: String) = argMap(arg)
+ def get(arg: String) = argMap get arg
+ def isSet(arg: String) = argMap contains arg
+
+ def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
+
+ override def toString() = argMap.toString + " " + residualArgs.toString
+}
diff --git a/src/compiler/scala/tools/cmd/Demo.scala b/src/compiler/scala/tools/cmd/Demo.scala
new file mode 100644
index 0000000000..22cf50bd58
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Demo.scala
@@ -0,0 +1,84 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+/** A sample command specification for illustrative purposes.
+ * First take advantage of the meta-options:
+ *
+ * // this command creates an executable runner script "demo"
+ * % scala scala.tools.cmd.Demo --self-update demo
+ *
+ * // this one creates and sources a completion file - note backticks
+ * % `./demo --bash`
+ *
+ * // and now you have a runner with working completion
+ * % ./demo --<tab>
+ * --action --defint --int
+ * --bash --defstr --str
+ * --defenv --self-update --unary
+ *
+ * The normal option configuration is plausibly self-explanatory.
+ */
+trait DemoSpec extends Spec with Meta.StdOpts with Interpolation {
+ lazy val referenceSpec = DemoSpec
+ lazy val programInfo = Spec.Info("demo", "Usage: demo [<options>]", "scala.tools.cmd.Demo")
+
+ help("""Usage: demo [<options>]""")
+ heading("Unary options:")
+
+ val optIsUnary = "unary" / "a unary option" --? ;
+ ("action" / "a body which may be run") --> println("Hello, I am the --action body.")
+
+ heading("Binary options:")
+ val optopt = "str" / "an optional String" --|
+ val optoptInt = ("int" / "an optional Int") . --^[Int]
+ val optEnv = "defenv" / "an optional String" defaultToEnv "PATH"
+ val optDefault = "defstr" / "an optional String" defaultTo "default"
+ val optDefaultInt = "defint" / "an optional Int" defaultTo -1
+ val optExpand = "alias" / "an option which expands" expandTo ("--int", "15")
+}
+
+object DemoSpec extends DemoSpec with Property {
+ lazy val propMapper = new PropertyMapper(DemoSpec)
+
+ type ThisCommandLine = SpecCommandLine
+ def creator(args: List[String]) =
+ new SpecCommandLine(args) {
+ override def errorFn(msg: String) = { println("Error: " + msg) ; System.exit(0) }
+ }
+}
+
+class Demo(args: List[String]) extends {
+ val parsed = DemoSpec(args: _*)
+} with DemoSpec with Instance {
+ import java.lang.reflect._
+
+ def helpMsg = DemoSpec.helpMsg
+ def demoSpecMethods = this.getClass.getMethods.toList
+ private def isDemo(m: Method) = (m.getName startsWith "opt") && !(m.getName contains "$") && (m.getParameterTypes.isEmpty)
+
+ def demoString(ms: List[Method]) = {
+ val longest = ms map (_.getName.length) max
+ val formatStr = " %-" + longest + "s: %s"
+ val xs = ms map (m => formatStr.format(m.getName, m.invoke(this)))
+
+ xs mkString ("Demo(\n ", "\n ", "\n)\n")
+ }
+
+ override def toString = demoString(demoSpecMethods filter isDemo)
+}
+
+object Demo {
+ def main(args: Array[String]): Unit = {
+ val runner = new Demo(args.toList)
+
+ if (args.isEmpty)
+ println(runner.helpMsg)
+
+ println(runner)
+ }
+}
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
new file mode 100644
index 0000000000..81454e7a30
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -0,0 +1,72 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import nsc.io.{ Path, File, Directory }
+import reflect.OptManifest
+
+/** A general mechanism for defining how a command line argument
+ * (always a String) is transformed into an arbitrary type. A few
+ * example instances are in the companion object, but in general
+ * either IntFromString will suffice or you'll want custom transformers.
+ */
+abstract class FromString[+T](implicit m: OptManifest[T]) extends PartialFunction[String, T] {
+ def apply(s: String): T
+ def isDefinedAt(s: String): Boolean = true
+ def zero: T = apply("")
+
+ def targetString: String = m.toString
+}
+
+object FromString {
+ // We need these because we clash with the String => Path implicits.
+ private def toFile(s: String) = new File(new java.io.File(s))
+ private def toDir(s: String) = new Directory(new java.io.File(s))
+
+ /** Path related stringifiers.
+ */
+ val ExistingFile: FromString[File] = new FromString[File] {
+ override def isDefinedAt(s: String) = toFile(s).isFile
+ def apply(s: String): File =
+ if (isDefinedAt(s)) toFile(s)
+ else cmd.runAndExit(println("'%s' is not an existing file." format s))
+ }
+ val ExistingDir: FromString[Directory] = new FromString[Directory] {
+ override def isDefinedAt(s: String) = toDir(s).isDirectory
+ def apply(s: String): Directory =
+ if (isDefinedAt(s)) toDir(s)
+ else cmd.runAndExit(println("'%s' is not an existing directory." format s))
+ }
+ def ExistingDirRelativeTo(root: Directory) = new FromString[Directory] {
+ private def resolve(s: String) = toDir(s) toAbsoluteWithRoot root toDirectory
+ override def isDefinedAt(s: String) = resolve(s).isDirectory
+ def apply(s: String): Directory =
+ if (isDefinedAt(s)) resolve(s)
+ else cmd.runAndExit(println("'%s' is not an existing directory." format resolve(s)))
+ }
+
+ /** Argument expander, i.e. turns single argument "foo bar baz" into argument
+ * list "foo", "bar", "baz".
+ */
+ val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]] {
+ def apply(s: String) = toArgs(s)
+ }
+
+ /** Identity.
+ */
+ implicit val StringFromString: FromString[String] = new FromString[String] {
+ def apply(s: String): String = s
+ }
+
+ /** Implicit as the most likely to be useful as-is.
+ */
+ implicit val IntFromString: FromString[Int] = new FromString[Int] {
+ override def isDefinedAt(s: String) = safeToInt(s).isDefined
+ def apply(s: String) = safeToInt(s).get
+ def safeToInt(s: String): Option[Int] = try Some(java.lang.Integer.parseInt(s)) catch { case _: NumberFormatException => None }
+ }
+}
diff --git a/src/compiler/scala/tools/cmd/Instance.scala b/src/compiler/scala/tools/cmd/Instance.scala
new file mode 100644
index 0000000000..3c0dbbaa1f
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Instance.scala
@@ -0,0 +1,24 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+/** The trait mixed into each instance of a specification.
+ *
+ * @see Reference
+ */
+trait Instance extends Spec {
+ def parsed: CommandLine
+
+ protected def help(str: => String): Unit = ()
+
+ def isSet(s: String) = parsed isSet toOpt(s)
+ def originalArgs = parsed.originalArgs // the full original list
+ def residualArgs = parsed.residualArgs // only args which were not options or args to options
+
+ type OptionMagic = Opt.Instance
+ protected implicit def optionMagicAdditions(name: String) = new Opt.Instance(programInfo, parsed, name)
+}
diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/compiler/scala/tools/cmd/Interpolation.scala
new file mode 100644
index 0000000000..a326d48f64
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Interpolation.scala
@@ -0,0 +1,57 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+/** Interpolation logic for generated files. The idea is to be
+ * able to write in terms of @@THIS@@ and @@THAT@@ and the reference
+ * specification knows enough to perform the substitutions. Warrants
+ * expansion.
+ */
+trait Interpolation {
+ self: Spec =>
+
+ private lazy val reference = referenceSpec
+ import reference._
+
+ object interpolate {
+ def mapper: Map[String, () => String] = Map(
+ "PROGRAM" -> (() => programInfo.runner),
+ "ALLOPTIONS" -> (() => options.all mkString " "),
+ "MAINCLASS" -> (() => programInfo.mainClass)
+ )
+
+ private def mark(key: String) = "@@" + key + "@@"
+ def apply(template: String) = mapper.foldLeft(template) { case (s, (key, f)) => s.replaceAll(mark(key), f()) }
+ }
+}
+
+object Interpolation {
+ /** A simple template for generating bash completion functions.
+ */
+ lazy val bashTemplate = """
+ |_@@PROGRAM@@()
+ |{
+ | local cur opts base
+ | COMPREPLY=()
+ | cur="${COMP_WORDS[COMP_CWORD]}"
+ | opts="@@ALLOPTIONS@@"
+ |
+ | COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+ | _filedir
+ | return 0
+ |} && complete -F _@@PROGRAM@@ @@PROGRAM@@
+ """.stripMargin
+
+ /** A simple template for generating a runner script.
+ */
+ val runnerTemplate = """
+ |#!/bin/sh
+ |#
+ |
+ |scala @@MAINCLASS@@ "$@"
+ |""".stripMargin.trim + "\n"
+}
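A standalone sketch, not part of the patch, of the substitution step performed by the interpolate object above: each @@KEY@@ marker is replaced by the result of its thunk.

    val mapper = Map[String, () => String]("PROGRAM" -> (() => "partest"))
    val out = mapper.foldLeft("complete -F _@@PROGRAM@@ @@PROGRAM@@") {
      case (s, (key, f)) => s.replaceAll("@@" + key + "@@", f())
    }
    // out == "complete -F _partest partest"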
diff --git a/src/compiler/scala/tools/cmd/Meta.scala b/src/compiler/scala/tools/cmd/Meta.scala
new file mode 100644
index 0000000000..8609db3d50
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Meta.scala
@@ -0,0 +1,67 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import nsc.io.File
+import Interpolation._
+
+/** Meta-options for command line tools. We could have all kinds
+ * of additional goodness here, but for now it's completion and script
+ * generation. See Demo for example usage.
+ */
+object Meta {
+ trait Opt {
+ def name: String
+ def action: () => Unit
+ }
+
+ trait StdOpts {
+ self: Spec with Interpolation =>
+
+ Bash.name --> runAndExit(Bash.action())
+ val selfUpdateName = SelfUpdate.name --| ;
+
+ if (selfUpdateName.isDefined)
+ runAndExit(SelfUpdate.action())
+
+ /** I think we're as close as we can get to bundling completion with
+ * the program given the constraints imposed by bash. This outputs
+ * the completion function to a tempfile and echoes ". /path/to/file"
+ * to the console. Place it inside backticks like `partest --bash`
+ * and voila, you have absorbed command completion.
+ */
+ object Bash extends Opt {
+ val name = "bash"
+ val action = () => {
+ val file = File.makeTemp("scala.cmd.bash")
+ file writeAll interpolate(bashTemplate)
+
+ // Would be nice to print something like this but comments are
+ // not always comments in bash, and breaking it is worse.
+ // Console println ("# Run the following line, or issue the --bash command in `backticks`.")
+ Console println (". " + file.normalize.path)
+ }
+ }
+
+ /** Generates a very basic runner script. It's called SelfUpdate
+ * because once it exists you can do something like
+ *
+ * tools/scmp --self-update tools/scmp
+ *
+ * and it will overwrite itself with the current version.
+ */
+ object SelfUpdate extends Opt {
+ val name = "self-update"
+ val action = () => {
+ val file = File(selfUpdateName.get)
+ file writeAll interpolate(runnerTemplate)
+ file setExecutable true
+ ()
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala
new file mode 100644
index 0000000000..beea590492
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Opt.scala
@@ -0,0 +1,91 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import nsc.Properties.envOrElse
+import Spec.Info
+
+/** Machinery for what amounts to a command line specification DSL.
+ * It is designed so the same specification trait can be used for
+ * two different purposes: generating a singleton specification object
+ * (trait Reference) and providing well-typed vals for every configurable
+ * option in response to any given set of arguments (trait Instance).
+ */
+object Opt {
+ trait Error {
+ self: Implicit =>
+
+ protected def fail(msg: String) = runAndExit(println(programInfo.runner + ": " + msg))
+ protected def failOption(arg: String, why: String) = fail("%s: '%s' is %s".format(opt, arg, why))
+ }
+
+ trait Implicit {
+ def name: String
+ def programInfo: Info
+ protected def opt = toOpt(name)
+
+ def --? : Boolean // --opt is set
+ def --> (body: => Unit): Unit // if --opt is set, execute body
+ def --| : Option[String] // --opt <arg: String> is optional, result is Option[String]
+ def --^[T: FromString] : Option[T] // --opt <arg: T> is optional, result is Option[T]
+
+ def optMap[T](f: String => T) = --| map f
+
+ /** Names.
+ */
+ def defaultTo[T: FromString](default: T): T
+ def defaultToEnv(envVar: String): String
+ def choiceOf[T: FromString](choices: T*): Option[T]
+ def expandTo(args: String*): Unit
+
+ /** Help.
+ */
+ def /(descr: String): String // --opt has help description 'descr'
+ }
+
+ class Reference(val programInfo: Info, val options: Reference.Accumulators, val name: String) extends Implicit {
+ import options._
+
+ def --? = { addUnary(opt) ; false }
+ def --> (body: => Unit) = { addUnary(opt) }
+ def --| = { addBinary(opt) ; None }
+ def --^[T: FromString] = { addBinary(opt) ; None }
+
+ def defaultTo[T: FromString](default: T) = { addBinary(opt) ; addHelpDefault(() => default.toString) ; default }
+ def defaultToEnv(envVar: String) = { addBinary(opt) ; addHelpEnvDefault(envVar) ; "" }
+ def choiceOf[T: FromString](choices: T*) = { addBinary(opt) ; None }
+ def expandTo(args: String*) = { addExpand(name, args.toList) ; addHelpAlias(() => args mkString " ") }
+
+ def /(descr: String) = returning(name)(_ => addHelp(() => helpFormatStr.format(opt, descr)))
+ }
+
+ class Instance(val programInfo: Info, val parsed: CommandLine, val name: String) extends Implicit with Error {
+ def --? = parsed isSet opt
+ def --> (body: => Unit) = if (parsed isSet opt) body
+ def --| = parsed get opt
+ def --^[T: FromString] = {
+ val fs = implicitly[FromString[T]]
+ --| map { arg =>
+ if (fs isDefinedAt arg) fs(arg)
+ else failOption(arg, "not a " + fs.targetString)
+ }
+ }
+
+ def defaultTo[T: FromString](default: T) = --^[T] getOrElse default
+ def defaultToEnv(envVar: String) = --| getOrElse envOrElse(envVar, "")
+ def expandTo(args: String*) = ()
+
+ def choiceOf[T: FromString](choices: T*) = {
+ --^[T] map { arg =>
+ if (choices contains arg) arg
+ else failOption(arg.toString, "not a valid choice from " + choices)
+ }
+ }
+
+ def /(descr: String) = name
+ }
+}
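A hedged sketch of a specification fragment written against these operators; the option names are hypothetical and the abstract Spec members (referenceSpec, programInfo, help) are left to whatever mixes the trait in, as with the DemoSpec referenced in Spec.scala below.

    trait MyToolSpec extends Spec {
      heading("Example options:")
      val optVerbose = "verbose" / "be more chatty"    --? ;   // Boolean flag
      val optOutDir  = "out"     / "output directory"  --| ;   // Option[String]
      val optJobs    = "jobs"    / "number of workers" defaultTo 1
    }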
diff --git a/src/compiler/scala/tools/cmd/Parser.scala b/src/compiler/scala/tools/cmd/Parser.scala
new file mode 100644
index 0000000000..f4bce745e2
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Parser.scala
@@ -0,0 +1,52 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import scala.util.parsing.combinator._
+import scala.util.parsing.input.CharArrayReader.EofCh
+
+/** A simple (overly so) command line parser.
+ * !!! This needs a thorough test suite to make sure quoting is
+ * done correctly and portably.
+ */
+trait ParserUtil extends Parsers {
+ class ParserPlus[+T](underlying: Parser[T]) {
+ def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
+ def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
+ }
+ protected implicit def parser2parserPlus[T](p: Parser[T]): ParserPlus[T] = new ParserPlus(p)
+}
+
+object Parser extends RegexParsers with ParserUtil {
+ override def skipWhitespace = false
+
+ def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
+ def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
+ def escaped(ch: Char): Parser[String] = "\\" + ch
+ def mkQuoted(ch: Char): Parser[String] = (
+ elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
+ | failure("Unmatched %s in input." format ch)
+ )
+
+ /** Apparently Windows can't deal with the quotes sticking around. */
+ lazy val squoted: Parser[String] = mkQuoted('\'') // ^^ (x => "'%s'" format x)
+ lazy val dquoted: Parser[String] = mkQuoted('"') // ^^ (x => "\"" + x + "\"")
+ lazy val token: Parser[String] = """\S+""".r
+
+ lazy val argument: Parser[String] = squoted | dquoted | token
+ lazy val commandLine: Parser[List[String]] = phrase(repsep(argument, whiteSpace))
+
+ class ParseException(msg: String) extends RuntimeException(msg)
+
+ def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
+ def tokenize(line: String, errorFn: String => Unit): List[String] = {
+ parse(commandLine, line.trim) match {
+ case Success(args, _) => args
+ case NoSuccess(msg, rest) => errorFn(msg) ; Nil
+ }
+ }
+}
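A hedged example of what the tokenizer should produce for a quoted argument; per the grammar above, the surrounding quotes are dropped while the quoted whitespace is preserved.

    import scala.tools.cmd.Parser
    Parser tokenize """--p1 '-no-specialization -Ydebug' scalac Function1.scala"""
    // expected: List("--p1", "-no-specialization -Ydebug", "scalac", "Function1.scala")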
diff --git a/src/compiler/scala/tools/cmd/Property.scala b/src/compiler/scala/tools/cmd/Property.scala
new file mode 100644
index 0000000000..009e7e6142
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Property.scala
@@ -0,0 +1,71 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import nsc.io._
+import java.util.Properties
+import java.io.FileInputStream
+
+/** Contains logic for translating a property key/value pair into
+ * equivalent command line arguments. The default settings will
+ * translate, given programInfo.runner == "foo":
+ *
+ * foo.bar=true to --bar // if --bar is unary
+ * foo.bar=quux to --bar quux // if --bar is binary
+ */
+class PropertyMapper(reference: Reference) extends (((String, String)) => List[String]) {
+ import reference._
+ lazy val RunnerName = programInfo.runner
+
+ // e.g. "partest.shootout" -> "--shootout"
+ def propNameToOptionName(key: String): Option[String] = (key split '.').toList match {
+ case List(RunnerName, name) => Some(name)
+ case _ => None
+ }
+
+ def isPassThrough(key: String): Boolean = false // e.g. "partest.options"
+ def onError(key: String, value: String): Unit = () // called when translate fails
+
+ def translate(key: String, value: String): List[String] = {
+ val opt = toOpt(key)
+
+ if (isUnaryOption(key) && isTrue(value)) List(opt)
+ else if (isBinaryOption(key)) List(opt, value)
+ else returning(Nil)(_ => onError(key, value))
+ }
+ def isTrue(value: String) = List("yes", "on", "true") contains value.toLowerCase
+
+ def apply(kv: (String, String)): List[String] = {
+ val (k, v) = kv
+
+ if (isPassThrough(k)) toArgs(v)
+ else propNameToOptionName(k) match {
+ case Some(optName) => translate(optName, v)
+ case _ => Nil
+ }
+ }
+}
+
+trait Property extends Reference {
+ def propMapper: PropertyMapper
+ override def propertyArgs: List[String] = systemPropertiesToOptions
+
+ def loadProperties(file: File): Properties =
+ returning(new Properties)(_ load new FileInputStream(file.path))
+
+ def systemPropertiesToOptions: List[String] =
+ propertiesToOptions(System.getProperties)
+
+ def propertiesToOptions(file: File): List[String] =
+ propertiesToOptions(loadProperties(file))
+
+ def propertiesToOptions(props: java.util.Properties): List[String] = {
+ import collection.JavaConversions._
+ propertiesToOptions(props.toList)
+ }
+ def propertiesToOptions(props: List[(String, String)]) = props flatMap propMapper
+}
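A hedged sketch of the mapping, assuming a reference spec (PartestSpec here is hypothetical) whose programInfo.runner is "partest" and which declares --verbose as a unary and --srcdir as a binary option.

    val mapper = new PropertyMapper(PartestSpec)
    mapper(("partest.verbose", "true"))    // List("--verbose")
    mapper(("partest.srcdir",  "files"))   // List("--srcdir", "files")
    mapper(("other.verbose",   "true"))    // Nil -- prefix doesn't match the runner name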
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
new file mode 100644
index 0000000000..3f3712766b
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -0,0 +1,99 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+import collection.mutable.ListBuffer
+import nsc.Properties.envOrNone
+
+/** Mixes in the specification trait and uses the vals therein to
+ * side-effect private accumulators. From this emerges formatted help,
+ * lists of unary and binary arguments, an apply which creates
+ * instances of the specification, and so on.
+ *
+ * @see Instance
+ */
+trait Reference extends Spec {
+ lazy val options = new Reference.Accumulators()
+ import options._
+
+ def helpMsg = options.helpMsg
+ def propertyArgs: List[String] = Nil
+
+ def isUnaryOption(s: String) = unary contains toOpt(s)
+ def isBinaryOption(s: String) = binary contains toOpt(s)
+ def isExpandOption(s: String) = expansionMap contains toOpt(s)
+ def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s)
+
+ def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg))
+
+ protected def help(str: => String) = addHelp(() => str)
+
+ type ThisCommandLine <: CommandLine
+
+ class SpecCommandLine(args: List[String]) extends CommandLine(Reference.this, args) { }
+ protected def creator(args: List[String]): ThisCommandLine
+ final def apply(args: String*): ThisCommandLine = creator(propertyArgs ++ args flatMap expandArg)
+
+ type OptionMagic = Opt.Reference
+ protected implicit def optionMagicAdditions(name: String) = new Opt.Reference(programInfo, options, name)
+}
+
+object Reference {
+ val MaxLine = 80
+
+ class Accumulators() {
+ private var _help = new ListBuffer[() => String]
+ private var _unary = List[String]()
+ private var _binary = List[String]()
+ private var _expand = Map[String, List[String]]()
+
+ def helpFormatStr = " %-" + longestArg + "s %s"
+ def defaultFormatStr = (" " * (longestArg + 7)) + "%s"
+
+ def addUnary(s: String) = _unary +:= s
+ def addBinary(s: String) = _binary +:= s
+
+ def addExpand(opt: String, expanded: List[String]) =
+ _expand += (opt -> expanded)
+
+ def mapHelp(g: String => String) = {
+ val idx = _help.length - 1
+ val f = _help(idx)
+
+ _help(idx) = () => g(f())
+ }
+
+ def addHelp(f: () => String) = _help += f
+ def addHelpAlias(f: () => String) = mapHelp { s =>
+ val str = "alias for '%s'" format f()
+ def noHelp = (helpFormatStr.format("", "")).length == s.length
+ val str2 = if (noHelp) str else " (" + str + ")"
+
+ s + str2
+ }
+ def addHelpDefault(f: () => String) = mapHelp { s =>
+ val str = "(default: %s)" format f()
+
+ if (s.length + str.length < MaxLine) s + " " + str
+ else defaultFormatStr.format(s, str)
+ }
+ def addHelpEnvDefault(name: String) = mapHelp { s =>
+ val line1 = "%s (default: %s)".format(s, name)
+ val envNow = envOrNone(name) map ("'" + _ + "'") getOrElse "unset"
+ val line2 = defaultFormatStr.format("Currently " + envNow)
+
+ line1 + "\n" + line2
+ }
+
+ lazy val unary = (_unary ++ _expand.keys).distinct
+ lazy val binary = _binary.distinct
+ lazy val all = unary ++ binary
+ lazy val expansionMap = _expand
+ lazy val helpMsg = _help map (f => f() + "\n") mkString
+ lazy val longestArg = all map (_.length) max
+ }
+}
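A hedged sketch of the expansion step, assuming a hypothetical reference spec value `spec` that declared "quick" via expandTo("--verbose", "--jobs", "4"); apply rewrites expand options before the CommandLine is built.

    val line = spec("--quick", "Foo.scala")
    // equivalent to spec("--verbose", "--jobs", "4", "Foo.scala")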
diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/compiler/scala/tools/cmd/Spec.scala
new file mode 100644
index 0000000000..794bb3303f
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/Spec.scala
@@ -0,0 +1,52 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+
+/** This trait works together with others in scala.tools.cmd to allow
+ * declaratively specifying a command line program, with many attendant
+ * benefits. See scala.tools.cmd.DemoSpec for an example.
+ */
+trait Spec {
+ def referenceSpec: Reference
+ def programInfo: Spec.Info
+
+ protected def help(str: => String): Unit
+ protected def heading(str: => String): Unit = help("\n " + str)
+
+ type OptionMagic <: Opt.Implicit
+ protected implicit def optionMagicAdditions(s: String): OptionMagic
+}
+
+object Spec {
+ class Info(
+ val runner: String,
+ val usage: String,
+ val mainClass: String
+ )
+ object Info {
+ def apply(runner: String, help: String, mainClass: String): Info = new Info(runner, help, mainClass)
+ }
+
+ class Accumulator[T: FromString]() {
+ private var _buf: List[T] = Nil
+
+ def convert(s: String) = implicitly[FromString[T]] apply s
+ def apply(s: String): T = returning(convert(s))(_buf +:= _)
+
+ lazy val get = _buf
+ }
+
+ class Choices[T: FromString](val xs: List[T]) {
+ def fs: FromString[T] = implicitly[FromString[T]]
+ def contains(x: T) = xs contains x
+ override def toString = xs.mkString("{ ", ", ", " }")
+ }
+
+ class EnvironmentVar(val name: String) {
+ override def toString = "${%s}" format name
+ }
+}
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
new file mode 100644
index 0000000000..33d3892077
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -0,0 +1,28 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+
+package object cmd {
+ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+
+ private[cmd] def debug(msg: String) = println(msg)
+
+ def runAndExit(body: => Unit): Nothing = {
+ body
+ System exit 0
+ error("unreachable")
+ }
+
+ def toOpt(s: String) = if (s startsWith "--") s else "--" + s
+ def fromOpt(s: String) = s stripPrefix "--"
+ def toArgs(line: String) = Parser tokenize line
+ def fromArgs(args: List[String]) = args mkString " "
+
+ def stripQuotes(s: String) = {
+ def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
+ if (List('"', '\'') exists isQuotedBy) s.tail.init else s
+ }
+}
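A few hedged, REPL-style examples of the helpers above; the results follow directly from the definitions.

    toOpt("verbose")       // "--verbose"
    toOpt("--verbose")     // "--verbose" (already prefixed, left alone)
    fromOpt("--verbose")   // "verbose"
    stripQuotes("'a b'")   // "a b"
    stripQuotes("plain")   // "plain"
    returning(new StringBuilder)(_ append "x").toString   // "x"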
diff --git a/src/compiler/scala/tools/cmd/program/Scmp.scala b/src/compiler/scala/tools/cmd/program/Scmp.scala
new file mode 100644
index 0000000000..ff4fa11eaf
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/program/Scmp.scala
@@ -0,0 +1,59 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+package program
+
+import nsc.io._
+
+object Scmp {
+ private val scmpUsage = """
+ |Usage: scmp [options] <cmd line>
+ |Example: scmp --p1 '-no-specialization -Ydebug' scalac src/library/scala/Function1.scala
+ |
+ |Note: the command line must start with a path to scalac.
+ |""".stripMargin
+ private val scmpOptions = List(
+ "p1" -> "options for the first run only",
+ "p2" -> "options for the second run only"
+ )
+ private val scmpInfo = Simple.scalaProgramInfo("scmp", scmpUsage)
+ lazy val ScmpSpec = Simple(scmpInfo, Nil, scmpOptions, x => returning(x)(_.onlyKnownOptions = false))
+
+ def main(args0: Array[String]): Unit = {
+ if (args0.isEmpty)
+ return println(scmpUsage)
+
+ val runner = ScmpSpec instance args0
+ import runner._
+
+ val p1args = parsed.getOrElse("--p1", "")
+ val p2args = parsed.getOrElse("--p2", "")
+
+ if (p1args.isEmpty && p2args.isEmpty)
+ return println("At least one of --p1 and --p2 must be given.")
+ if (residualArgs.isEmpty)
+ return println("There is no command to run.")
+
+ def createCmd(extras: String) =
+ fromArgs(residualArgs.patch(1, toArgs(extras), 0))
+
+ def runCmd(cmd: String) = {
+ val output = Process(cmd, redirect = true).slurp()
+
+ returning(File.makeTemp())(_ writeAll output)
+ }
+
+ val cmds = List(p1args, p2args) map createCmd
+ println(cmds.mkString("Running command lines:\n ", "\n ", ""))
+
+ val files = cmds map runCmd map (_.path)
+ val diff = Process("diff %s %s".format(files: _*)).slurp()
+
+ if (diff.isEmpty) println("No differences.")
+ else println(diff)
+ }
+}
diff --git a/src/compiler/scala/tools/cmd/program/Simple.scala b/src/compiler/scala/tools/cmd/program/Simple.scala
new file mode 100644
index 0000000000..641be31c9e
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/program/Simple.scala
@@ -0,0 +1,81 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+package program
+
+import Spec.Info
+
+/** A boilerplate reducer for commands with simple requirements. For examples,
+ * see Scmp and Tokens in this package.
+ */
+object Simple {
+ type CommandLineTransform = SimpleCommandLine => SimpleCommandLine
+
+ abstract class SimpleSpec(val programInfo: Info) extends Spec with Meta.StdOpts with Interpolation
+
+ trait SimpleInstance extends SimpleSpec with Instance {
+ val parsed: CommandLine
+ }
+
+ class SimpleReference(
+ programInfo: Info,
+ unary: List[(String, String)] = Nil,
+ binary: List[(String, String)] = Nil,
+ postCreation: CommandLineTransform = null
+ ) extends SimpleSpec(programInfo) with Reference {
+
+ spec =>
+
+ if (programInfo.usage != "") help(programInfo.usage)
+ unary foreach { case (option, help) => option / help --? }
+ binary foreach { case (option, help) => option / help --| }
+
+ type ThisCommandLine = SimpleCommandLine
+
+ def creator(args: List[String]) = new SimpleCommandLine(spec, args)
+ def instance(args: Array[String]): SimpleInstance = instance(args.toList)
+ def instance(args: List[String]): SimpleInstance =
+ new {
+ val parsed = spec(args: _*)
+ } with SimpleSpec(programInfo) with SimpleInstance {
+ lazy val referenceSpec = spec
+ }
+
+ lazy val referenceSpec = spec
+ }
+
+ def apply(info: Info, unary: List[(String, String)], binary: List[(String, String)], postCreation: CommandLineTransform): SimpleReference = {
+ new SimpleReference(info, unary, binary, postCreation) {
+ override def creator(args: List[String]) = {
+ val obj = super.creator(args)
+ if (postCreation == null) obj
+ else postCreation(obj)
+ }
+ }
+ }
+
+ def scalaProgramInfo(name: String, help: String) =
+ Spec.Info(name, help, "scala.tools.cmd.program." + name.capitalize)
+
+ /** You can't override a def with a var unless a setter exists. We cleverly
+ * sidestep this by mixing in a trait with dummy setters which will be
+ * inaccessible due to the overriding var.
+ */
+ trait Ticket2338WontFixWorkaround {
+ def enforceArity_=(x: Boolean): Unit = error("unreachable")
+ def onlyKnownOptions_=(x: Boolean): Unit = error("unreachable")
+ }
+
+ /** Configurability is kept simple by turning defs into vars and letting
+ * the spec creator apply a transformation. This way there's no need to create
+ * custom subclasses of CommandLine.
+ */
+ class SimpleCommandLine(spec: Reference, args: List[String]) extends CommandLine(spec, args) with Ticket2338WontFixWorkaround {
+ override var enforceArity: Boolean = true
+ override var onlyKnownOptions: Boolean = true
+ }
+}
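A hedged sketch of a small tool declared with Simple, in the same shape as Scmp above and Tokens below; the tool name and option are hypothetical, and the object is assumed to live alongside them in scala.tools.cmd.program.

    object MyTool {
      private val usage       = "Usage: mytool [options] <files>"
      private val info        = Simple.scalaProgramInfo("mytool", usage)
      private lazy val MySpec = Simple(info, List("verbose" -> "be chatty"), Nil, null)

      def main(args: Array[String]): Unit = {
        val runner = MySpec instance args
        import runner._
        if (parsed isSet "--verbose") println("Processing: " + residualArgs)
      }
    }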
diff --git a/src/compiler/scala/tools/cmd/program/Tokens.scala b/src/compiler/scala/tools/cmd/program/Tokens.scala
new file mode 100644
index 0000000000..36786aa2b7
--- /dev/null
+++ b/src/compiler/scala/tools/cmd/program/Tokens.scala
@@ -0,0 +1,100 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package cmd
+package program
+
+import nsc._
+import util.Chars.char2uescape
+import io._
+import ast.parser.Tokens._
+
+/** Given paths on the command line, tokenizes any scala files found
+ * and prints one token per line.
+ */
+object Tokens {
+ private val tokensUsage = "Usage: tokens [options] <path1 path2 ...>\n\nOptions:"
+ private val tokensUnary = List(
+ "verbose" -> "be more verbose",
+ "freq" -> "combine token lists and sort by frequency",
+ "stats" -> "output some stats"
+ )
+ private val tokensBinary = List(
+ "sliding" -> "print tokens in groups of given size"
+ )
+ private val tokensInfo = Simple.scalaProgramInfo("tokens", tokensUsage)
+ private lazy val TokensSpec = Simple(tokensInfo, tokensUnary, tokensBinary, null)
+
+ def sanitize(x: Any): String = sanitize(x.toString)
+ def sanitize(str: String): String = str flatMap (x => if (x.isControl) char2uescape(x) else x.toString)
+
+ def main(args0: Array[String]): Unit = {
+ if (args0.isEmpty)
+ return println(TokensSpec.helpMsg)
+
+ val runner = TokensSpec instance args0
+ import runner._
+
+ val files = (residualArgs flatMap walk).distinct
+ if (parsed isSet "--verbose")
+ println("Tokenizing: " + (files map (_.name) mkString " "))
+
+ if (parsed isSet "--stats")
+ println("Stats not yet implemented.")
+
+ def raw = files flatMap fromScalaSource
+ def tokens: List[Any] =
+ if (parsed isSet "--sliding") raw sliding parsed("--sliding").toInt map (_ map sanitize mkString " ") toList
+ else raw
+
+ def output =
+ if (parsed isSet "--freq")
+ (tokens groupBy (x => x) mapValues (_.length)).toList sortBy (-_._2) map (x => x._2 + " " + x._1)
+ else
+ tokens
+
+ output foreach println
+ }
+
+ def fromPaths(paths: String*): List[Any] =
+ (paths.toList flatMap walk).distinct flatMap fromScalaSource
+
+ /** Given a path, returns all .scala files underneath it.
+ */
+ private def walk(arg: String): List[File] = {
+ def traverse = Path(arg) ifDirectory (_.deepList()) getOrElse Iterator(File(arg))
+
+ Path onlyFiles traverse filter (_ hasExtension "scala") toList
+ }
+
+ /** Tokenizes a single scala file.
+ */
+ def fromScalaSource(file: Path): List[Any] = fromScalaSource(file.path)
+ def fromScalaSource(file: String): List[Any] = {
+ val global = new Global(new Settings())
+ import global._
+ import syntaxAnalyzer.{ UnitScanner, token2string }
+
+ val in = new UnitScanner(new CompilationUnit(getSourceFile(file)))
+ in.init()
+
+ Iterator continually {
+ val token = in.token match {
+ case IDENTIFIER | BACKQUOTED_IDENT => in.name
+ case CHARLIT | INTLIT | LONGLIT => in.intVal
+ case DOUBLELIT | FLOATLIT => in.floatVal
+ case STRINGLIT => "\"" + in.strVal + "\""
+ case SEMI | NEWLINE => ";"
+ case NEWLINES => ";;"
+ case COMMA => ","
+ case EOF => null
+ case x => token2string(x)
+ }
+ in.nextToken()
+ token
+ } takeWhile (_ != null) toList
+ }
+}
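A hedged sketch of driving the public entry points above programmatically instead of via the command line.

    // Tokenize one file; prints roughly what `tokens <path>` would print.
    Tokens.fromScalaSource("src/library/scala/Function1.scala") foreach println

    // Or collect tokens from every .scala file found under a directory.
    val all = Tokens.fromPaths("src/compiler/scala/tools/cmd")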
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index af5eb88ebe..de48ff9931 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -1,14 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
-import scala.tools.nsc.util.{FreshNameCreator,OffsetPosition,Position,NoPosition,SourceFile}
-import scala.tools.nsc.io.AbstractFile
-import scala.collection.mutable.{LinkedHashSet, HashSet, HashMap, ListBuffer}
+import util.{ FreshNameCreator,Position,NoPosition,SourceFile }
+import scala.collection.mutable.{ LinkedHashSet, HashSet, HashMap, ListBuffer }
trait CompilationUnits { self: Global =>
@@ -31,6 +29,16 @@ trait CompilationUnits { self: Global =>
/** all comments found in this compilation unit */
val comments = new ListBuffer[Comment]
+// def parseSettings() = {
+// val argsmarker = "SCALAC_ARGS"
+// if(comments nonEmpty) {
+// val pragmas = comments find (_.text.startsWith("//#")) // only parse first one
+// pragmas foreach { p =>
+// val i = p.text.indexOf(argsmarker)
+// if(i > 0)
+// }
+// }
+// }
/** Note: depends now contains toplevel classes.
* To get their sourcefiles, you need to dereference with .sourcefile
*/
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 7c639e013b..c3307cba77 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -1,13 +1,15 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
-import java.io.{BufferedReader, File, InputStreamReader, PrintWriter}
+import java.io.{ BufferedReader, File, InputStreamReader, PrintWriter }
import Properties.fileEndings
+import scala.tools.util.PathResolver
+import io.Path
+import util.ClassPath
/** The client part of the fsc offline compiler. Instead of compiling
* things itself, it send requests to a CompileServer.
@@ -26,11 +28,7 @@ class StandardCompileClient {
/** Convert a sequence of filenames, separated by <code>File.pathSeparator</code>,
* into absolute filenames.
*/
- def absFileNames(paths: String) = {
- val sep = File.pathSeparator
- val pathsList = paths.split(sep).toList
- pathsList map absFileName mkString sep
- }
+ def absFileNames(paths: String) = ClassPath.map(paths, absFileName)
protected def normalize(args: Array[String]): (String, String) = {
var i = 0
@@ -40,7 +38,7 @@ class StandardCompileClient {
while (i < args.length) {
val arg = args(i)
if (fileEndings exists(arg endsWith _)) {
- args(i) = absFileName(arg)
+ args(i) = Path(arg).toAbsolute.path
} else if (arg startsWith "-J") {
//see http://java.sun.com/j2se/1.5.0/docs/tooldocs/solaris/javac.html#J
vmArgs append " "+arg.substring(2)
@@ -57,7 +55,7 @@ class StandardCompileClient {
if (i < args.length) {
arg match {
case "-classpath" | "-sourcepath" | "-bootclasspath" | "-extdirs" | "-d" =>
- args(i) = absFileNames(args(i))
+ args(i) = PathResolver.makeAbsolute(args(i))
i += 1
case "-server" =>
serverAdr = args(i)
@@ -76,36 +74,41 @@ class StandardCompileClient {
val (vmArgs, serverAdr) = normalize(args)
if (version) {
- Console.println(versionMsg)
+ Console println versionMsg
return 0
}
if (verbose) {
- Console.println("[Server arguments: " + args.mkString("", " ", "]"))
- Console.println("[VM arguments: " + vmArgs + "]")
+ Console println args.mkString("[Server arguments: ", " ", "]")
+ Console println "[VM arguments: %s]".format(vmArgs)
}
- val socket = if (serverAdr == "") compileSocket.getOrCreateSocket(vmArgs, !shutdown)
- else compileSocket.getSocket(serverAdr)
- var sawerror = false
- if (socket eq null) {
- if (shutdown) {
- Console.println("[No compilation server running.]")
- } else {
- Console.println("Compilation failed.")
- sawerror = true
- }
- } else {
- val out = new PrintWriter(socket.getOutputStream(), true)
- val in = new BufferedReader(new InputStreamReader(socket.getInputStream()))
- out.println(compileSocket.getPassword(socket.getPort()))
- out.println(args.mkString("", "\0", ""))
- var fromServer = in.readLine()
- while (fromServer ne null) {
- if (compileSocket.errorPattern.matcher(fromServer).matches)
- sawerror = true
- Console.println(fromServer)
- fromServer = in.readLine()
- }
- in.close ; out.close ; socket.close
+ val socket =
+ if (serverAdr == "") compileSocket.getOrCreateSocket(vmArgs, !shutdown)
+ else Some(compileSocket.getSocket(serverAdr))
+
+ val sawerror: Boolean = socket match {
+ case None =>
+ val msg = if (shutdown) "[No compilation server running.]" else "Compilation failed."
+ Console println msg
+ !shutdown
+
+ case Some(sock) =>
+ var wasError = false
+
+ sock.applyReaderAndWriter { (in, out) =>
+ out println compileSocket.getPassword(sock.getPort())
+ out println args.mkString("\0")
+ def loop: Unit = in.readLine() match {
+ case null => ()
+ case fromServer =>
+ if (compileSocket.errorPattern matcher fromServer matches)
+ wasError = true
+
+ Console println fromServer
+ loop
+ }
+ loop
+ }
+ wasError
}
if (sawerror) 1 else 0
}
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 426562509d..832f1e5aef 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -70,75 +69,78 @@ class StandardCompileServer extends SocketServer
(totalMemory - freeMemory).toDouble / maxMemory.toDouble > MaxCharge
}
- protected def newOfflineCompilerCommand(
- arguments: List[String],
- settings: Settings,
- error: String => Unit,
- interactive: Boolean
- ) = new OfflineCompilerCommand(arguments, settings, error, interactive)
+ protected def newOfflineCompilerCommand(arguments: List[String], settings: Settings) =
+ new OfflineCompilerCommand(arguments, settings)
def session() {
printMemoryStats()
val password = compileSocket getPassword port
val guessedPassword = in.readLine()
val input = in.readLine()
- if ((input ne null) && password == guessedPassword) {
- val args = input.split("\0",-1).toList
- if (args contains "-shutdown") {
- out.println("[Compile server exited]")
- shutDown = true
- return
- }
- if (args contains "-reset") {
- out.println("[Compile server was reset]")
- compiler = null
- return
- }
- def error(msg: String) {
- out.println(/*new Position*/ FakePos("fsc"),
- msg + "\n fsc -help gives more information")
- }
- val command = newOfflineCompilerCommand(args, new Settings(error), error, false)
- reporter = new ConsoleReporter(command.settings, in, out) {
- // disable prompts, so that compile server cannot block
- override def displayPrompt = ()
- }
+ if (input == null || password != guessedPassword)
+ return
- if (command.shouldStopWithInfo) {
- reporter.info(null,
- command.getInfoMessage(newGlobal(command.settings, reporter)), true)
- } else if (command.files.isEmpty)
- reporter.info(null, command.usageMsg, true)
- else {
- try {
- if ((compiler ne null) && settingsAreCompatible(command.settings, compiler.settings)) {
- compiler.settings = command.settings
- compiler.reporter = reporter
- } else {
- if (args contains "-verbose")
- out.println("[Starting new compile server instance]")
- compiler = newGlobal(command.settings, reporter)
- }
- val c = compiler
- val run = new c.Run()
- run compile command.files
- } catch {
- case ex @ FatalError(msg) =>
- if (command.settings.debug.value)
- ex.printStackTrace(out);
- reporter.error(null, "fatal error: " + msg)
- compiler = null
- case ex: Throwable =>
- ex.printStackTrace(out);
- reporter.error(null, "fatal error (server aborted): " + ex.getMessage())
- shutDown = true
- }
- reporter.printSummary()
- if (isMemoryFullEnough)
- compiler = null
+ val args = input.split("\0", -1).toList
+ if (args contains "-shutdown") {
+ out.println("[Compile server exited]")
+ shutDown = true
+ return
+ }
+ if (args contains "-reset") {
+ out.println("[Compile server was reset]")
+ compiler = null
+ return
+ }
+
+ def error(msg: String) {
+ out.println(FakePos("fsc"), msg + "\n fsc -help gives more information")
+ }
+
+ val command = newOfflineCompilerCommand(args, new Settings(error))
+
+ reporter = new ConsoleReporter(command.settings, in, out) {
+ // disable prompts, so that compile server cannot block
+ override def displayPrompt = ()
+ }
+
+ if (command.shouldStopWithInfo)
+ reporter.info(null, command.getInfoMessage(newGlobal(command.settings, reporter)), true)
+ else if (command.files.isEmpty)
+ reporter.info(null, command.usageMsg, true)
+ else {
+ try {
+ if (compiler != null && command.settings == compiler.settings) {
+ compiler.settings = command.settings
+ compiler.reporter = reporter
+ }
+ else {
+ if (args contains "-verbose") {
+ val reason = if (compiler == null) "compiler is null" else "settings not equal"
+ out.println("[Starting new compile server instance because %s]".format(reason))
}
+ compiler = newGlobal(command.settings, reporter)
+ }
+ val c = compiler
+ val run = new c.Run()
+ run compile command.files
+ }
+ catch {
+ case ex @ FatalError(msg) =>
+ if (command.settings.debug.value)
+ ex.printStackTrace(out);
+ reporter.error(null, "fatal error: " + msg)
+ compiler = null
+ case ex: Throwable =>
+ ex.printStackTrace(out);
+ reporter.error(null, "fatal error (server aborted): " + ex.getMessage())
+ shutDown = true
+ }
}
+
+ reporter.printSummary()
+ if (isMemoryFullEnough)
+ compiler = null
}
/** A directory holding redirected output */
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 03c86ec962..d57016c4e9 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -12,10 +11,9 @@ import java.util.regex.Pattern
import java.net._
import java.security.SecureRandom
-import io.{ File, Path }
+import io.{ File, Path, Process, Socket }
import scala.util.control.Exception.catching
-
-// class CompileChannel { }
+import scala.tools.util.StringOps.splitWhere
/** This class manages sockets for the fsc offline compiler. */
class CompileSocket {
@@ -24,13 +22,12 @@ class CompileSocket {
/** The prefix of the port identification file, which is followed
* by the port number.
*/
- protected def dirName = "scalac-compile-server-port" //todo: lazy val
-
- protected def cmdName = Properties.cmdName //todo: lazy val
+ protected lazy val dirName = "scalac-compile-server-port"
+ protected lazy val cmdName = Properties.scalaCmd
/** The vm part of the command to start a new scala compile server */
protected val vmCommand = Properties.scalaHome match {
- case null => cmdName
+ case "" => cmdName
case dirname =>
val trial = File(dirname) / "bin" / cmdName
if (trial.canRead) trial.path
@@ -80,23 +77,16 @@ class CompileSocket {
/** The command which starts the compile server, given vm arguments.
*
* @param vmArgs the argument string to be passed to the java or scala command
- * the string must be either empty or start with a ' '.
*/
- private def serverCommand(vmArgs: String): String =
- vmCommand + vmArgs + " " + serverClass
+ private def serverCommand(vmArgs: Seq[String]): Seq[String] =
+ Seq(vmCommand) ++ vmArgs ++ Seq(serverClass) filterNot (_ == "")
/** Start a new server; returns true iff it succeeds */
private def startNewServer(vmArgs: String) {
- val cmd = serverCommand(vmArgs)
- info("[Executed command: " + cmd + "]")
- try {
- Runtime.getRuntime().exec(cmd)
-// val exitVal = proc.waitFor()
-// info("[Exit value: " + exitVal + "]")
- } catch {
- case ex: IOException =>
- fatal("Cannot start compilation daemon." +
- "\ntried command: " + cmd)
+ val cmd = serverCommand(vmArgs split " " toSeq)
+ info("[Executed command: %s]" format cmd)
+ try Process exec cmd catch {
+ case ex: IOException => fatal("Cannot start compilation daemon.\ntried command: %s" format cmd)
}
}
@@ -104,13 +94,13 @@ class CompileSocket {
def portFile(port: Int) = portsDir / File(port.toString)
/** Poll for a server port number; return -1 if none exists yet */
- private def pollPort(): Int =
- portsDir.list.toList match {
- case Nil => -1
- case p :: xs =>
- xs forall (_.delete())
- p.name.toInt
- }
+ private def pollPort(): Int = portsDir.list match {
+ case it if !it.hasNext => -1
+ case it =>
+ val ret = it.next.name.toInt
+ it foreach (_.delete())
+ ret
+ }
/** Get the port number to which a scala compile server is connected;
* If no server is running yet, then create one.
@@ -138,7 +128,7 @@ class CompileSocket {
val file = portFile(port)
val secret = new SecureRandom().nextInt.toString
- try file writeAll List(secret) catch {
+ try file writeAll secret catch {
case e @ (_: FileNotFoundException | _: SecurityException) =>
fatal("Cannot create file: %s".format(file.path))
}
@@ -148,38 +138,36 @@ class CompileSocket {
def deletePort(port: Int) = portFile(port).delete()
/** Get a socket connected to a daemon. If create is true, then
- * create a new daemon if necessary. Returns null if the connection
+ * create a new daemon if necessary. Returns None if the connection
* cannot be established.
*/
- def getOrCreateSocket(vmArgs: String, create: Boolean = true): Socket = {
- val nAttempts = 49 // try for about 5 seconds
- def getsock(attempts: Int): Socket =
- if (attempts == 0) {
- error("Unable to establish connection to compilation daemon")
- null
- } else {
- val port = if(create) getPort(vmArgs) else pollPort()
- if(port < 0) return null
- val hostAdr = InetAddress.getLocalHost()
- try {
- val result = new Socket(hostAdr, port)
- info("[Connected to compilation daemon at port " + port + "]")
- result
- } catch {
- case e: /*IO+Security*/Exception =>
- info(e.toString)
- info("[Connecting to compilation daemon at port " +
- port + " failed; re-trying...]")
+ def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
+ // try for 5 seconds
+ val retryDelay = 100
+ val maxAttempts = (5 * 1000) / retryDelay
+
+ def getsock(attempts: Int): Option[Socket] = attempts match {
+ case 0 => error("Unable to establish connection to compilation daemon") ; None
+ case num =>
+ val port = if (create) getPort(vmArgs) else pollPort()
+ if (port < 0) return None
+
+ Socket(InetAddress.getLocalHost(), port).either match {
+ case Right(socket) =>
+ info("[Connected to compilation daemon at port %d]" format port)
+ Some(socket)
+ case Left(err) =>
+ info(err.toString)
+ info("[Connecting to compilation daemon at port %d failed; re-trying...]" format port)
if (attempts % 2 == 0)
- portFile(port).delete // 50% chance to stop trying on this port
-
- Thread.sleep(100) // delay before retrying
+ deletePort(port) // 50% chance to stop trying on this port
+ Thread sleep retryDelay // delay before retrying
getsock(attempts - 1)
}
- }
- getsock(nAttempts)
+ }
+ getsock(maxAttempts)
}
// XXX way past time for this to be central
@@ -187,24 +175,13 @@ class CompileSocket {
try { Some(x.toInt) }
catch { case _: NumberFormatException => None }
- def getSocket(serverAdr: String): Socket = {
- def fail = fatal("Malformed server address: %s; exiting" format serverAdr)
- (serverAdr indexOf ':') match {
- case -1 => fail
- case cpos =>
- val hostName: String = serverAdr take cpos
- parseInt(serverAdr drop (cpos + 1)) match {
- case Some(port) => getSocket(hostName, port)
- case _ => fail
- }
- }
- }
+ def getSocket(serverAdr: String): Socket = (
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield
+ getSocket(name, port)
+ ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
def getSocket(hostName: String, port: Int): Socket =
- try new Socket(hostName, port) catch {
- case e @ (_: IOException | _: SecurityException) =>
- fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
- }
+ Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
def getPassword(port: Int): String = {
val ff = portFile(port)
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index ec564f17db..54ef6bb8be 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -1,57 +1,44 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
-import Settings.Setting
import java.io.IOException
+import scala.collection.mutable.ListBuffer
+import io.File
/** A class representing command line info for scalac */
-class CompilerCommand(
- arguments: List[String],
- val settings: Settings,
- error: String => Unit,
- interactive: Boolean,
- shouldProcessArguments: Boolean)
-{
- def this(arguments: List[String], settings: Settings, error: String => Unit, interactive: Boolean) =
- this(arguments, settings, error, interactive, true)
+class CompilerCommand(arguments: List[String], val settings: Settings) {
+ def this(arguments: List[String], error: String => Unit) = this(arguments, new Settings(error))
+ type Setting = Settings#Setting
/** file extensions of files that the compiler can process */
lazy val fileEndings = Properties.fileEndings
- /** Private buffer for accumulating files to compile */
- private var fs: List[String] = List()
-
- /** Public list of files to compile */
- def files: List[String] = fs.reverse
-
/** The name of the command */
- val cmdName = "scalac"
+ def cmdName = "scalac"
+ private def isFsc = cmdName == "fsc"
private val helpSyntaxColumnWidth: Int =
- (settings.allSettings map (_.helpSyntax.length)) max
+ (settings.visibleSettings map (_.helpSyntax.length)) max
- private def format(s: String): String = {
- val buf = new StringBuilder(s)
- var i = s.length
- while (i < helpSyntaxColumnWidth) { buf.append(' '); i += 1 }
- buf.toString()
- }
+ private def format(s: String): String =
+ if (s.length >= helpSyntaxColumnWidth) s
+ else s + (" " * (helpSyntaxColumnWidth - s.length))
/** Creates a help message for a subset of options based on cond */
def createUsageMsg(label: String, cond: (Setting) => Boolean): String =
- settings.allSettings .
+ settings.visibleSettings .
filter(cond) .
map(s => format(s.helpSyntax) + " " + s.helpDescription) .
- mkString("Usage: %s <options> <source files>\n%s options include:\n " .
+ toList.sorted.mkString("Usage: %s <options> <source files>\n%s options include:\n " .
format(cmdName, label), "\n ", "\n")
/** Messages explaining usage and options */
def usageMsg = createUsageMsg("where possible standard", _.isStandard)
+ def fscUsageMsg = createUsageMsg("where possible standard", ( st => st.isStandard || st.name == "-shutdown"))
def xusageMsg = createUsageMsg("Possible advanced", _.isAdvanced)
def yusageMsg = createUsageMsg("Possible private", _.isPrivate)
@@ -59,6 +46,7 @@ class CompilerCommand(
// an informative message of some sort should be printed instead.
// (note: do not add "files.isEmpty" do this list)
val stopSettings = List[(() => Boolean, (Global) => String)](
+ ((() => (settings.help.value _)() && isFsc), fscUsageMsg + _.pluginOptionsHelp),
(settings.help.value _, usageMsg + _.pluginOptionsHelp),
(settings.Xhelp.value _, _ => xusageMsg),
(settings.Yhelp.value _, _ => yusageMsg),
@@ -73,52 +61,33 @@ class CompilerCommand(
case None => ""
}
- /** Whether the command was processed okay */
- var ok = true
-
- /** Process the arguments and update the settings accordingly.
- This method is called only once, during initialization. */
- protected def processArguments() {
- // initialization
- var args = arguments
- def errorAndNotOk(msg: String) = { error(msg) ; ok = false }
-
- // given a @ argument expands it out
- def doExpand(x: String) =
- try { args = util.ArgumentsExpander.expandArg(x) ::: args.tail }
- catch { case ex: IOException => errorAndNotOk(ex.getMessage) }
-
- // true if it's a legit looking source file
- def isSourceFile(x: String) =
- (settings.script.value != "") ||
- (fileEndings exists (x endsWith _))
-
- // given an option for scalac finds out what it is
- def doOption(x: String): Unit = {
- if (interactive)
- return errorAndNotOk("no options can be given in interactive mode")
-
- val argsLeft = settings.parseParams(args)
- if (args != argsLeft) args = argsLeft
- else errorAndNotOk("bad option: '" + x + "'")
- }
+ /**
+ * Expands all arguments starting with @ to the contents of the
+ * file named like each argument.
+ */
+ def expandArg(arg: String): List[String] = {
+ def stripComment(s: String) = s takeWhile (_ != '#')
+ val file = File(arg stripPrefix "@")
+ if (!file.exists)
+ throw new java.io.FileNotFoundException("argument file %s could not be found" format file.name)
+
+ settings splitParams (file.lines() map stripComment mkString " ")
+ }
+
+ // override this if you don't want arguments processed here
+ def shouldProcessArguments: Boolean = true
- // cycle through args until empty or error
- while (!args.isEmpty && ok) args.head match {
- case x if x startsWith "@" => doExpand(x)
- case x if x startsWith "-" => doOption(x)
- case x if isSourceFile(x) => fs = x :: fs ; args = args.tail
- case "" => args = args.tail // quick fix [martin: for what?]
- case x => errorAndNotOk("don't know what to do with " + x)
+ def processArguments: (Boolean, List[String]) = {
+ // expand out @filename to the contents of that filename
+ val expandedArguments = arguments flatMap {
+ case x if x startsWith "@" => expandArg(x)
+ case x => List(x)
}
- ok &&= settings.checkDependencies
+ settings.processArguments(expandedArguments, true)
}
- // CompilerCommand needs processArguments called at the end of its constructor,
- // as does its subclass GenericRunnerCommand, but it cannot be called twice as it
- // accumulates arguments. The fact that it's called from within the constructors
- // makes initialization order an obstacle to simplicity.
- if (shouldProcessArguments)
- processArguments()
+ val (ok, files) =
+ if (shouldProcessArguments) processArguments
+ else (true, Nil)
}
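A hedged sketch of the @-file expansion introduced above, assuming an argument file build.args exists with the contents shown in the comment.

    // build.args:
    //   -deprecation -unchecked   # anything after '#' is stripped per line
    //   src/Foo.scala
    val cmd = new CompilerCommand(List("@build.args", "-verbose"), new Settings())
    // expandArg("@build.args") splices in List("-deprecation", "-unchecked", "src/Foo.scala")
    // before Settings.processArguments ever sees the arguments.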
diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala
index de36b07096..9cac12d896 100644
--- a/src/compiler/scala/tools/nsc/CompilerRun.scala
+++ b/src/compiler/scala/tools/nsc/CompilerRun.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
index f77f2541a5..3c7d10767c 100644
--- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala
+++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2009 LAMP/EPFL
+ * Copyright 2006-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index 12e9154f0f..d8aa7799cb 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -1,27 +1,25 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
-trait EvalLoop {
+import annotation.tailrec
+import java.io.EOFException
+trait EvalLoop {
def prompt: String
def loop(action: (String) => Unit) {
- Console.print(prompt)
- try {
- val line = Console.readLine
- if (line.length() > 0) {
+ @tailrec def inner() {
+ Console.print(prompt)
+ val line = try Console.readLine catch { case _: EOFException => null }
+ if (line != null && line != "") {
action(line)
- loop(action)
+ inner()
}
}
- catch {
- case _: java.io.EOFException => //nop
- }
+ inner()
}
-
}
diff --git a/src/compiler/scala/tools/nsc/FatalError.scala b/src/compiler/scala/tools/nsc/FatalError.scala
index bded46ffdd..145eb4c9ee 100644
--- a/src/compiler/scala/tools/nsc/FatalError.scala
+++ b/src/compiler/scala/tools/nsc/FatalError.scala
@@ -1,12 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
-case class FatalError(msg: String) extends Exception(msg)
+import scala.util.control.ControlThrowable
+
+case class FatalError(msg: String) extends Throwable(msg)
class MissingRequirementError(val req: String) extends FatalError(req + " not found.")
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index bde623b5d7..c088bb9303 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -3,77 +3,59 @@
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
/** A command for ScriptRunner */
class GenericRunnerCommand(
- allargs: List[String],
- override val settings: GenericRunnerSettings,
- error: String => Unit)
-extends CompilerCommand(allargs, settings, error, false, false)
-{
- def this(allargs: List[String], error: String=>Unit) =
- this(allargs, new GenericRunnerSettings(error), error)
+ args: List[String],
+ override val settings: GenericRunnerSettings)
+extends CompilerCommand(args, settings) {
- def this(allargs: List[String]) =
- this(allargs, str => Console.println("Error: " + str))
+ def this(args: List[String], error: String => Unit) =
+ this(args, new GenericRunnerSettings(error))
+
+ def this(args: List[String]) =
+ this(args, str => Console.println("Error: " + str))
/** name of the associated compiler command */
override val cmdName = "scala"
val compCmdName = "scalac"
- /** What to run. If it is None, then the interpreter should be started */
- var thingToRun: Option[String] = None
+ // change CompilerCommand behavior
+ override def shouldProcessArguments: Boolean = false
- /** Arguments to pass to the object or script to run */
- var arguments: List[String] = Nil
+ /** thingToRun: What to run. If it is None, then the interpreter should be started
+ * arguments: Arguments to pass to the object or script to run
+ */
+ val (thingToRun, arguments) = settings.processArguments(args, false)._2 match {
+ case Nil => (None, Nil)
+ case hd :: tl => (Some(hd), tl)
+ }
- override protected def processArguments() {
- var args = allargs
+ override def usageMsg = """
+%s [ <option> ]... [<torun> <arguments>]
- while (!args.isEmpty && ok && args.head.startsWith("-")) {
- val args0 = args
- args = settings parseParams args
- if (args eq args0) {
- error("bad option: '" + args.head + "'")
- ok = false
- }
- }
+All options to %s are allowed. See %s -help.
- if (!args.isEmpty) {
- thingToRun = Some(args.head)
- arguments = args.tail
- }
- }
+<torun>, if present, is an object or script file to run.
+If no <torun> is present, run an interactive shell.
- // we can safely call processArguments since we passed the superclass shouldProcessArguments=false
- processArguments()
-
- override def usageMsg = {
- cmdName + " [ <option> ]... [<torun> <arguments>]\n" +
- "\n" +
- "All options to "+compCmdName+" are allowed. See "+compCmdName+" -help.\n" +
- "\n" +
- "<torun>, if present, is an object or script file to run.\n" +
- "If no <torun> is present, run an interactive shell.\n" +
- "\n" +
- "Option -howtorun allows explicitly specifying how to run <torun>:\n" +
- " script: it is a script file\n" +
- " object: it is an object name\n" +
- " guess: (the default) try to guess\n" +
- "\n" +
- "Option -i requests that a file be pre-loaded. It is only\n" +
- "meaningful for interactive shells.\n" +
- "\n" +
- "Option -e requests that its argument be executed as Scala code.\n" +
- "\n" +
- "Option -savecompiled requests that the compiled script be saved\n" +
- "for future use.\n" +
- "\n" +
- "Option -nocompdaemon requests that the fsc offline compiler not be used.\n" +
- "\n" +
- "Option -Dproperty=value sets a Java system property.\n"
- }
+Option -howtorun allows explicitly specifying how to run <torun>:
+ script: it is a script file
+ object: it is an object name
+ guess: (the default) try to guess
+
+Option -i requests that a file be pre-loaded. It is only
+meaningful for interactive shells.
+
+Option -e requests that its argument be executed as Scala code.
+
+Option -savecompiled requests that the compiled script be saved
+for future use.
+
+Option -nocompdaemon requests that the fsc offline compiler not be used.
+
+Option -Dproperty=value sets a Java system property.
+""".format(cmdName, compCmdName, compCmdName)
}
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index 0e23681ed6..5d272b06ae 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2009 LAMP/EPFL
+ * Copyright 2006-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
@@ -38,6 +37,4 @@ extends Settings(error) {
BooleanSetting(
"-nocompdaemon",
"do not use the fsc compilation daemon")
-
- val defines = DefinesSetting
}
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index bae0d624c6..e1be5e39a6 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -1,35 +1,33 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
-import java.io.{File, FileOutputStream, PrintWriter}
-import java.io.{IOException, FileNotFoundException}
-import java.nio.charset._
+import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.nio.charset.{ Charset, IllegalCharsetNameException, UnsupportedCharsetException }
import compat.Platform.currentTime
-import scala.tools.nsc.io.{SourceReader, AbstractFile}
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.util.{ClassPath, MsilClassPath, JavaClassPath, SourceFile, BatchSourceFile, OffsetPosition, RangePosition}
-import scala.collection.mutable.{HashSet, HashMap, ListBuffer}
+import io.{ SourceReader, AbstractFile, Path }
+import reporters.{ Reporter, ConsoleReporter }
+import util.{ ClassPath, SourceFile, Statistics, BatchSourceFile, ScriptSourceFile, returning }
+import collection.mutable.{ HashSet, HashMap, ListBuffer }
+import reflect.generic.{ PickleBuffer }
-import symtab._
-import symtab.classfile.{PickleBuffer, Pickler}
-import dependencies.{DependencyAnalysis}
-import util.Statistics
+import symtab.{ Flags, SymbolTable, SymbolLoaders }
+import symtab.classfile.Pickler
+import dependencies.DependencyAnalysis
import plugins.Plugins
import ast._
import ast.parser._
import typechecker._
import transform._
-import backend.icode.{ICodes, GenICode, Checkers}
-import backend.ScalaPrimitives
+
+import backend.icode.{ ICodes, GenICode, Checkers }
+import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
import backend.jvm.GenJVM
-import backend.msil.GenMSIL
-import backend.opt.{Inliners, ClosureElimination, DeadCodeElimination}
+import backend.opt.{ Inliners, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
@@ -46,7 +44,16 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
- //def this() = this(new Settings, new ConsoleReporter)
+ // platform specific elements
+
+ type ThisPlatform = Platform[_] { val global: Global.this.type }
+
+ lazy val platform: ThisPlatform =
+ if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
+ else new { val global: Global.this.type = Global.this } with JavaPlatform
+
+ def classPath: ClassPath[_] = platform.classPath
+ def rootLoader: LazyType = platform.rootLoader
// sub-components --------------------------------------------------
@@ -115,34 +122,13 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val treeBrowser = treeBrowsers.create()
+ // ------------ Hooks for interactive mode-------------------------
-// val copy = new LazyTreeCopier()
-
- /** A map of all doc comments, indexed by symbols.
- * Only active in onlyPresentation mode
- */
- val comments =
- if (onlyPresentation) new HashMap[Symbol,String]
- else null
-
- /** A map of all doc comments source file offsets,
- * indexed by symbols.
- * Only active in onlyPresentation mode
- */
- val commentOffsets =
- if (onlyPresentation) new HashMap[Symbol,Int]
- else null
-
- /** A map of argument names for methods
- * !!! can be dropped once named method arguments are in !!!
+ /** Called from parser, which signals hereby that a method definition has been parsed.
*/
- val methodArgumentNames =
- if (onlyPresentation) new HashMap[Symbol,List[List[Symbol]]]
- else null
-
- // ------------ Hooks for interactive mode-------------------------
+ def signalParseProgress(pos: Position) {}
- /** Called every time an AST node is succesfully typedchecked in typerPhase.
+ /** Called every time an AST node is successfully typechecked in typerPhase.
*/
def signalDone(context: analyzer.Context, old: Tree, result: Tree) {}
@@ -150,12 +136,17 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
*/
def registerContext(c: analyzer.Context) {}
+ /** Register top level class (called on entering the class)
+ */
+ def registerTopLevelSym(sym: Symbol) {}
+
// ------------------ Reporting -------------------------------------
- import util.NoPosition
def error(msg: String) = reporter.error(NoPosition, msg)
- def warning(msg: String) = reporter.warning(NoPosition, msg)
- def inform(msg: String) = Console.err.println(msg)
+ def warning(msg: String) =
+ if (settings.Xwarnfatal.value) reporter.error(NoPosition, msg)
+ else reporter.warning(NoPosition, msg)
+ def inform(msg: String) = reporter.info(NoPosition, msg, true)
def inform[T](msg: String, value: T): T = { inform(msg+value); value }
//reporter.info(null, msg, true)
@@ -170,29 +161,26 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
if (settings.log contains phase.name) inform("[log " + phase + "] " + msg)
}
- class ErrorWithPosition(val pos: Int, val error: Throwable) extends Error
+ class ThrowableWithPosition(val pos: Int, val error: Throwable) extends Throwable
- def tryWith[T](pos: Int, body: => T): T = try {
- body
- } catch {
- case e : ErrorWithPosition => throw e
- case te: TypeError => throw te
- case e : Error => throw new ErrorWithPosition(pos, e)
- case e : RuntimeException => throw new ErrorWithPosition(pos, e)
- }
+ def tryWith[T](pos: Int, body: => T): T =
+ try body
+ catch {
+ case e : ThrowableWithPosition => throw e
+ case te: TypeError => throw te
+ case e : RuntimeException => throw new ThrowableWithPosition(pos, e)
+ }
- def catchWith[T](source : SourceFile, body : => T) : T = try {
- body
- } catch {
- case e : ErrorWithPosition =>
- logError("POS: " + source.dbg(e.pos), e)
- throw e.error
- }
+ def catchWith[T](source : SourceFile, body : => T) : T =
+ try body
+ catch {
+ case e : ThrowableWithPosition =>
+ logError("POS: " + source.dbg(e.pos), e)
+ throw e.error
+ }
def logError(msg: String, t: Throwable): Unit = ()
- def abort(msg: String) = throw new Error(msg)
-
// ------------ File interface -----------------------------------------
private val reader: SourceReader = {
@@ -227,49 +215,25 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
settings.dependenciesFile.value match {
case "none" => ()
case x =>
- val jfile = new java.io.File(x)
- if (!jfile.exists) jfile.createNewFile
- else {
- // This logic moved here from scala.tools.nsc.dependencies.File.
- // Note that it will trip an assertion in lookupPathUnchecked
- // if the path being looked at is absolute.
-
- /** The directory where file lookup should start at. */
- val rootDirectory: AbstractFile = {
- AbstractFile.getDirectory(".")
-// val roots = java.io.File.listRoots()
-// assert(roots.length > 0)
-// new PlainFile(roots(0))
- }
-
- def toFile(path: String) = {
- val file = rootDirectory.lookupPathUnchecked(path, false)
- assert(file ne null, path)
- file
- }
-
- dependencyAnalysis.loadFrom(AbstractFile.getFile(jfile), toFile)
+ val depFilePath = Path(x)
+ if (depFilePath.exists) {
+ /** The directory where file lookup should start */
+ val rootPath = depFilePath.parent
+ def toFile(path: String) = AbstractFile.getFile(rootPath resolve Path(path))
+ dependencyAnalysis.loadFrom(AbstractFile.getFile(depFilePath), toFile)
}
}
- lazy val classPath = {
- ClassPath.XO = settings.XO.value
- if (forMSIL)
- new MsilClassPath(settings.assemextdirs.value, settings.assemrefs.value,
- settings.sourcepath.value)
- else
- new JavaClassPath(settings.bootclasspath.value, settings.extdirs.value,
- settings.classpath.value, settings.sourcepath.value,
- settings.Xcodebase.value)
- }
+ if (settings.verbose.value || settings.Ylogcp.value)
+ inform("[Classpath = " + classPath.asClasspathString + "]")
- if (settings.verbose.value) {
- inform("[Classpath = " + classPath + "]")
- if (forMSIL) inform("[AssemRefs = " + settings.assemrefs.value + "]")
- }
+ /** True if -Xscript has been set, indicating a script run.
+ */
+ def isScriptRun = settings.script.value != ""
def getSourceFile(f: AbstractFile): BatchSourceFile =
- new BatchSourceFile(f, reader.read(f))
+ if (isScriptRun) ScriptSourceFile(f, reader read f)
+ else new BatchSourceFile(f, reader read f)
def getSourceFile(name: String): SourceFile = {
val f = AbstractFile.getFile(name)
@@ -282,10 +246,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val global: Global.this.type = Global.this
}
- def rootLoader: LazyType =
- if (forMSIL) new loaders.NamespaceLoader(classPath.asInstanceOf[MsilClassPath])
- else new loaders.JavaPackageLoader(classPath.asInstanceOf[JavaClassPath])
-
// ------------ Phases -------------------------------------------}
var globalPhase: Phase = NoPhase
@@ -307,19 +267,25 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
override def flatClasses: Boolean = isFlat
private val isDevirtualized = prev.name == "devirtualize" || prev.devirtualized
override def devirtualized: Boolean = isDevirtualized // (part of DEVIRTUALIZE)
+ private val isSpecialized = prev.name == "specialize" || prev.specialized
+ override def specialized: Boolean = isSpecialized
+ private val isRefChecked = prev.name == "refchecks" || prev.refChecked
+ override def refChecked: Boolean = isRefChecked
/** Is current phase cancelled on this unit? */
- def cancelled(unit: CompilationUnit) =
- reporter.cancelled ||
- unit.isJava && this.id > currentRun.namerPhase.id
+ def cancelled(unit: CompilationUnit) = {
+ // run the typer only if in `createJavadoc` mode
+ val maxJavaPhase = if (createJavadoc) currentRun.typerPhase.id else currentRun.namerPhase.id
+ reporter.cancelled || unit.isJava && this.id > maxJavaPhase
+ }
final def applyPhase(unit: CompilationUnit) {
if (settings.debug.value) inform("[running phase " + name + " on " + unit + "]")
val unit0 = currentRun.currentUnit
try {
currentRun.currentUnit = unit
- reporter.setSource(unit.source)
- if (!cancelled(unit)) apply(unit)
+ if (!cancelled(unit))
+ reporter.withSource(unit.source) { apply(unit) }
currentRun.advanceUnit
} finally {
//assert(currentRun.currentUnit == unit)
@@ -391,10 +357,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val runsRightAfter = None
} with TailCalls
- // object checkDefined extends {
- // val global: Global.this.type = Global.this
- // } with CheckDefined
-
// phaseName = "explicitouter"
object explicitOuter extends {
val global: Global.this.type = Global.this
@@ -518,13 +480,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val runsRightAfter = None
} with DependencyAnalysis
- // phaseName = "msil"
- object genMSIL extends {
- val global: Global.this.type = Global.this
- val runsAfter = List[String]("dce")
- val runsRightAfter = None
- } with GenMSIL
-
// phaseName = "terminal"
object terminal extends {
val global: Global.this.type = Global.this
@@ -564,56 +519,46 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/* Add the internal compiler phases to the phases set
*/
protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer // The parser
- phasesSet += analyzer.namerFactory // note: types are there because otherwise
- phasesSet += analyzer.typerFactory // consistency check after refchecks would fail.
+ phasesSet += syntaxAnalyzer // The parser
+ phasesSet += analyzer.namerFactory // note: types are there because otherwise
+ phasesSet += analyzer.packageObjects // consistency check after refchecks would fail.
+ phasesSet += analyzer.typerFactory
phasesSet += superAccessors // add super accessors
phasesSet += pickler // serialize symbol tables
phasesSet += refchecks // perform reference and override checking, translate nested objects
-// if (false && settings.Xexperimental.value)
+// if (false && settings.YvirtClasses)
// phasesSet += devirtualize // Desugar virtual classes4
- phasesSet += uncurry // uncurry, translate function values to anonymous classes
- phasesSet += tailCalls // replace tail calls by jumps
- if (settings.specialize.value)
- phasesSet += specializeTypes
- phasesSet += explicitOuter // replace C.this by explicit outer pointers, eliminate pattern matching
- phasesSet += erasure // erase generic types to Java 1.4 types, add interfaces for traits
- phasesSet += lazyVals //
- phasesSet += lambdaLift // move nested functions to top level
-// if (forJVM && settings.Xdetach.value)
-// phasesSet += detach // convert detached closures
- phasesSet += constructors // move field definitions into constructors
- phasesSet += mixer // do mixin composition
- phasesSet += cleanup // some platform-specific cleanups
- phasesSet += genicode // generate portable intermediate code
- phasesSet += inliner // optimization: do inlining
- phasesSet += closureElimination // optimization: get rid of uncalled closures
- phasesSet += deadCode // optimization: get rid of dead cpde
- phasesSet += terminal // The last phase in the compiler chain
-
- if (! forMSIL) {
- phasesSet += flatten // get rid of inner classes
- }
- if (forJVM) {
- phasesSet += liftcode // generate reified trees
- phasesSet += genJVM // generate .class files
- if (settings.make.value != "all")
- phasesSet += dependencyAnalysis
- }
- if (forMSIL) {
- phasesSet += genMSIL // generate .msil files
- }
+ phasesSet += uncurry // uncurry, translate function values to anonymous classes
+ phasesSet += tailCalls // replace tail calls by jumps
+ phasesSet += specializeTypes
+ phasesSet += explicitOuter // replace C.this by explicit outer pointers, eliminate pattern matching
+ phasesSet += erasure // erase types, add interfaces for traits
+ phasesSet += lazyVals
+ phasesSet += lambdaLift // move nested functions to top level
+ // if (forJVM && settings.Xdetach.value)
+ // phasesSet += detach // convert detached closures
+
+ phasesSet += constructors // move field definitions into constructors
+ phasesSet += mixer // do mixin composition
+ phasesSet += cleanup // some platform-specific cleanups
+ phasesSet += genicode // generate portable intermediate code
+ phasesSet += inliner // optimization: do inlining
+ phasesSet += closureElimination // optimization: get rid of uncalled closures
+ phasesSet += deadCode // optimization: get rid of dead code
+ phasesSet += terminal // The last phase in the compiler chain
}
+ protected def computePlatformPhases() = platform.platformPhases foreach (phasesSet += _)
/* Helper method for sequencing the phase assembly
*/
private def computePhaseDescriptors: List[SubComponent] = {
- computeInternalPhases() // Global.scala
- computePluginPhases() // plugins/Plugins.scala
- buildCompilerFromPhasesSet() // PhaseAssembly.scala
+ computeInternalPhases() // Global.scala
+ computePlatformPhases() // backend/Platform.scala
+ computePluginPhases() // plugins/Plugins.scala
+ buildCompilerFromPhasesSet() // PhaseAssembly.scala
}
/* The phase descriptor list */
@@ -622,17 +567,26 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/* The set of phase objects that is the basis for the compiler phase chain */
protected val phasesSet : HashSet[SubComponent] = new HashSet[SubComponent]
- /** A description of the phases that will run */
- def phaseDescriptions: String = {
+ /** The names of the phases. */
+ lazy val phaseNames = {
new Run // force some initialization
- val messages =
- for (phase <- phaseDescriptors)
- yield phase.phaseName //todo: + " - " + phase.description
- messages.mkString("\n")
+ phaseDescriptors map (_.phaseName)
}
+ /** A description of the phases that will run */
+ def phaseDescriptions: String =
+ phaseNames mkString "\n" // todo: + " - " + phase.description
+
// ----------- Runs ---------------------------------------
+ /** Remove the current run when not needed anymore. Used by the build
+ * manager to save on the memory foot print. The current run holds on
+ * to all compilation units, which in turn hold on to trees.
+ */
+ private [nsc] def dropRun() {
+ curRun = null
+ }
+
private var curRun: Run = null
private var curRunId = 0
@@ -657,7 +611,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
curRun = this
//Console.println("starting run: " + id)
- // Can not take the phaseDescriptors.head even though its the syntaxAnalyser, this will implicitly
+ // Cannot take the phaseDescriptors.head even though it's the syntaxAnalyzer, this will implicitly
// call definitions.init which uses phase and needs it to be != NoPhase
val phase1 = syntaxAnalyzer.newPhase(NoPhase)
phase = phase1
@@ -701,6 +655,12 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
private var phasec: Int = 0
private var unitc: Int = 0
+ /**
+ * For subclasses to override. Called when `phase` is about to be run on `unit`.
+ * Variables are passed explicitly to indicate that `globalPhase` and `currentUnit` have been set.
+ */
+ def informUnitStarting(phase: Phase, unit: CompilationUnit) { }
+
/** take note that phase is completed
* (for progress reporting)
*/
@@ -717,9 +677,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
refreshProgress
}
private def refreshProgress =
- if (fileset.size > 0)
- progress((phasec * fileset.size) + unitc,
- (phaseDescriptors.length-1) * fileset.size) // terminal phase not part of the progress display
+ if (compiledFiles.size > 0)
+ progress((phasec * compiledFiles.size) + unitc,
+ (phaseDescriptors.length-1) * compiledFiles.size) // terminal phase not part of the progress display
// ----- finding phases --------------------------------------------
@@ -733,6 +693,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val typerPhase = phaseNamed("typer")
val picklerPhase = phaseNamed("pickler")
val refchecksPhase = phaseNamed("refchecks")
+ val uncurryPhase = phaseNamed("uncurry")
val explicitOuterPhase = phaseNamed("explicitouter")
val erasurePhase = phaseNamed("erasure")
@@ -748,12 +709,13 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// ----------- Units and top-level classes and objects --------
private var unitbuf = new ListBuffer[CompilationUnit]
- private var fileset = new HashSet[AbstractFile]
+ var compiledFiles = new HashSet[String]
/** add unit to be compiled in this run */
private def addUnit(unit: CompilationUnit) {
+// unit.parseSettings()
unitbuf += unit
- fileset += unit.source.file
+ compiledFiles += unit.source.file.path
}
/* An iterator returning all the units being compiled in this run */
@@ -773,12 +735,18 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
+ /** Is this run allowed to redefine the given symbol? Usually this is true
+ * if the run does not already compile `sym`, but for interactive mode
+ * we have a more liberal interpretation.
+ */
+ def canRedefine(sym: Symbol) = !compiles(sym)
+
// --------------- Compilation methods ----------------------------
/** Compile list of source files */
def compileSources(_sources: List[SourceFile]) {
- val depSources = dependencyAnalysis.filter(_sources.removeDuplicates) // bug #1268, scalac confused by duplicated filenames
- val sources = pkgObjectsFirst(depSources)
+ val depSources = dependencyAnalysis.filter(_sources.distinct) // bug #1268, scalac confused by duplicated filenames
+ val sources = coreClassesFirst(depSources)
if (reporter.hasErrors)
return // there is a problem already, e.g. a
// plugin was passed a bad option
@@ -791,7 +759,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val startTime = currentTime
phase = globalPhase
globalPhase.run
- if (settings.print contains globalPhase.name)
+ if (settings.Xprint contains globalPhase.name)
if (settings.writeICode.value && globalPhase.id >= icodePhase.id) writeICode()
else if (settings.Xshowtrees.value) nodePrinters.printAll()
else printAllUnits()
@@ -807,9 +775,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
phase = globalPhase
if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes
else checker.checkTrees
- }
- else if (!settings.check.doAllPhases) {
- warning("It is not possible to check the result of the "+globalPhase.name+" phase")
+ }
+ else if (!settings.check.doAllPhases) {
+ warning("It is not possible to check the result of the "+globalPhase.name+" phase")
}
}
if (settings.Ystatistics.value) statistics.print(phase)
@@ -841,57 +809,51 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
informTime("total", startTime)
if (!dependencyAnalysis.off) {
-
- def fromFile(file: AbstractFile): String = {
- val path = file.path
- if (path.startsWith("./"))
- path.substring(2, path.length)
- else path
+ settings.dependenciesFile.value match {
+ case "none" =>
+ case x =>
+ val depFilePath = Path(x)
+ if (!depFilePath.exists)
+ dependencyAnalysis.dependenciesFile = AbstractFile.getFile(depFilePath.createFile())
+
+ /** The directory where file lookup should start */
+ val rootPath = depFilePath.parent.normalize
+ def fromFile(file: AbstractFile): String =
+ rootPath.relativize(Path(file.file).normalize).path
+
+ dependencyAnalysis.saveDependencies(fromFile)
}
-
- dependencyAnalysis.saveDependencies(fromFile)
}
}
/** Compile list of abstract files */
def compileFiles(files: List[AbstractFile]) {
- try {
- compileSources(files map getSourceFile)
- } catch {
- case ex: IOException => error(ex.getMessage())
- }
+ try compileSources(files map getSourceFile)
+ catch { case ex: IOException => error(ex.getMessage()) }
}
/** Compile list of files given by their names */
def compile(filenames: List[String]) {
try {
- val scriptMain = settings.script.value
- // Are we compiling a script?
- if (scriptMain != "") {
- if(filenames.length != 1)
- error("can only compile one script at a time")
- val scriptFile =
- ScriptRunner.wrappedScript(scriptMain, filenames.head, getSourceFile)
- compileSources(List(scriptFile))
- // No we are compiling regular source files
- } else {
- compileSources(filenames map getSourceFile)
- }
- } catch {
- case ex: IOException => error(ex.getMessage())
+ val sources: List[SourceFile] =
+ if (isScriptRun && filenames.size > 1) returning(Nil)(_ => error("can only compile one script at a time"))
+ else filenames map getSourceFile
+
+ compileSources(sources)
}
+ catch { case ex: IOException => error(ex.getMessage()) }
}
/** Compile abstract file until `globalPhase`, but at least
* to phase "namer".
*/
def compileLate(file: AbstractFile) {
- if (fileset eq null) {
+ if (compiledFiles eq null) {
val msg = "No class file for " + file +
" was found\n(This file cannot be loaded as a source file)"
inform(msg)
throw new FatalError(msg)
- } else if (!(fileset contains file)) {
+ } else if (!(compiledFiles contains file.path)) {
compileLate(new CompilationUnit(getSourceFile(file)))
}
}
@@ -902,17 +864,31 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def compileLate(unit: CompilationUnit) {
addUnit(unit)
var localPhase = firstPhase.asInstanceOf[GlobalPhase]
- while (localPhase != null && (localPhase.id < globalPhase.id || localPhase.id <= namerPhase.id)/* && !reporter.hasErrors*/) {
+ while (localPhase != null && (localPhase.id < globalPhase.id || localPhase.id < typerPhase.id)/* && !reporter.hasErrors*/) {
val oldSource = reporter.getSource
- reporter.setSource(unit.source)
- atPhase(localPhase)(localPhase.applyPhase(unit))
+ reporter.withSource(unit.source) {
+ atPhase(localPhase)(localPhase.applyPhase(unit))
+ }
val newLocalPhase = localPhase.next.asInstanceOf[GlobalPhase]
localPhase = if (localPhase == newLocalPhase) null else newLocalPhase
- reporter.setSource(oldSource)
}
refreshProgress
}
+ /**
+ * Attempt to locate a source file providing the given name as a top-level
+ * definition in the given context, and add it to the run via compileLate
+ * if found.
+ */
+ def compileSourceFor(context : analyzer.Context, name : Name) = false
+
+ /**
+ * Attempt to locate a source file providing the given name as a top-level
+ * definition with the given prefix, and add it to the run via compileLate
+ * if found.
+ */
+ def compileSourceFor(qual : Tree, name : Name) = false
+
/** Reset package class to state at typer (not sure what this
* is needed for?)
*/
@@ -923,18 +899,44 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
if (!pclazz.isRoot) resetPackageClass(pclazz.owner)
}
- private def pkgObjectsFirst(files: List[SourceFile]) = {
+ /**
+ * Re-orders the source files to
+ * 1. ScalaObject
+ * 2. LowPriorityImplicits / StandardEmbeddings (i.e. parents of Predef)
+ * 3. the rest
+ *
+ * 1 is to avoid cyclic reference errors.
+ * 2 is due to the following. When completing "Predef" (*), typedIdent is called
+ * for its parents (e.g. "LowPriorityImplicits"). typedIdent checks whether
+ * the symbol reallyExists, which tests if the type of the symbol after running
+ * its completer is != NoType.
+ * If the "namer" phase has not yet run for "LowPriorityImplicits", the symbol
+ * has a SourcefileLoader as type. Calling "doComplete" on it does nothing at
+ * all, because the source file is part of the files to be compiled anyway.
+ * So the "reallyExists" test will return "false".
+ * Only after the namer, the symbol has a lazy type which actually computes
+ * the info, and "reallyExists" behaves as expected.
+ * So we need to make sure that the "namer" phase is run on predef's parents
+ * before running it on predef.
+ *
+ * (*) Predef is completed early when calling "mkAttributedRef" during the
+ * addition of "import Predef._" to sourcefiles. So this situation can't
+ * happen for user classes.
+ *
+ */
+ private def coreClassesFirst(files: List[SourceFile]) = {
def inScalaFolder(f: SourceFile) =
f.file.container.name == "scala"
- val res = new ListBuffer[SourceFile]
var scalaObject: Option[SourceFile] = None
+ val res = new ListBuffer[SourceFile]
for (file <- files) file.file.name match {
case "ScalaObject.scala" if inScalaFolder(file) => scalaObject = Some(file)
- case "package.scala" => file +=: res // prepend package objects
- case _ => res += file // append all others
+ case "LowPriorityImplicits.scala" if inScalaFolder(file) => file +=: res
+ case "StandardEmbeddings.scala" if inScalaFolder(file) => file +=: res
+ case _ => res += file
}
- scalaObject.map(res.+=:(_)) // ScalaObject 1st
- res.toList // then package objects, then others
+ for (so <- scalaObject) so +=: res
+ res.toList
}
} // class Run
@@ -966,7 +968,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def getFile(clazz: Symbol, suffix: String): File = {
val outdirname = settings.outputDirs.outputDirFor(clazz.sourceFile)
var outdir = new File(if (outdirname.path == "") "." else outdirname.path)
- val filename = clazz.fullNameString('.')
+ val filename = clazz.fullName
var start = 0
var end = filename.indexOf('.', start)
while (end >= start) {
@@ -980,7 +982,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
private def writeICode() {
val printer = new icodes.TextPrinter(null, icodes.linearizer)
- icodes.classes.valuesIterator.foreach((cls) => {
+ icodes.classes.values.foreach((cls) => {
val suffix = if (cls.symbol hasFlag Flags.MODULE) "$.icode" else ".icode"
var file = getFile(cls.symbol, suffix)
// if (file.exists())
@@ -1000,5 +1002,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def forJVM : Boolean = settings.target.value startsWith "jvm"
def forMSIL: Boolean = settings.target.value == "msil"
+ def forInteractive = onlyPresentation
+ def forScaladoc = onlyPresentation
+ @deprecated("Use forInteractive or forScaladoc, depending on what you're after")
def onlyPresentation = false
+ def createJavadoc = false
}
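The platform abstraction added to Global.scala above leans on the early-definition idiom that recurs throughout this file: a sub-component declares an abstract `global`, and the enclosing compiler instantiates it as `new { val global: Global.this.type = Global.this } with SomePlatform`, so the path-dependent value is in place before the trait body runs. A self-contained sketch of the idiom, using made-up names (Compiler, Component):

trait Component {
  val global: Compiler           // abstract; supplied by whoever mixes the trait in
  val name = global.toString     // evaluated during trait init; needs global already set
}

class Compiler {
  // early definition: `global` is assigned before Component's initializer runs,
  // so `name` above does not hit a null reference
  object phase extends {
    val global: Compiler.this.type = Compiler.this
  } with Component
}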
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala
index 489ab1a3e0..f456039fa6 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/compiler/scala/tools/nsc/Interpreter.scala
@@ -1,28 +1,33 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
+import Predef.{ println => _, _ }
import java.io.{ File, PrintWriter, StringWriter, Writer }
+import File.pathSeparator
import java.lang.{ Class, ClassLoader }
import java.net.{ MalformedURLException, URL }
import java.lang.reflect
import reflect.InvocationTargetException
-import scala.collection.immutable.ListSet
+import scala.PartialFunction.{ cond, condOpt }
+import scala.tools.util.PathResolver
+import scala.reflect.Manifest
import scala.collection.mutable
-import scala.collection.mutable.{ ListBuffer, HashSet, ArrayBuffer }
-import scala.util.{ ScalaClassLoader, URLClassLoader }
+import scala.collection.mutable.{ ListBuffer, HashSet, HashMap, ArrayBuffer }
+import scala.collection.immutable.Set
+import scala.tools.nsc.util.ScalaClassLoader
+import ScalaClassLoader.URLClassLoader
import scala.util.control.Exception.{ Catcher, catching, ultimately, unwrapping }
import io.{ PlainFile, VirtualDirectory }
import reporters.{ ConsoleReporter, Reporter }
import symtab.{ Flags, Names }
-import util.{ SourceFile, BatchSourceFile, ClassPath }
-import scala.util.NameTransformer
+import util.{ SourceFile, BatchSourceFile, ScriptSourceFile, ClassPath, Chars, stringFromWriter }
+import scala.reflect.NameTransformer
import scala.tools.nsc.{ InterpreterResults => IR }
import interpreter._
import Interpreter._
@@ -52,7 +57,7 @@ import Interpreter._
* all variables defined by that code. To extract the result of an
* interpreted line to show the user, a second "result object" is created
* which imports the variables exported by the above object and then
- * exports a single member named "result". To accomodate user expressions
+ * exports a single member named "scala_repl_result". To accommodate user expressions
* that read from variables or methods defined in previous statements, "import"
* statements are used.
* </p>
@@ -67,27 +72,93 @@ import Interpreter._
* @author Moez A. Abdel-Gawad
* @author Lex Spoon
*/
-class Interpreter(val settings: Settings, out: PrintWriter)
-{
+class Interpreter(val settings: Settings, out: PrintWriter) {
+ repl =>
+
+ def println(x: Any) = {
+ out.println(x)
+ out.flush()
+ }
+
+ /** construct an interpreter that reports to Console */
+ def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ def this() = this(new Settings())
+
/** directory to save .class files to */
val virtualDirectory = new VirtualDirectory("(memory)", None)
- /** the compiler to compile expressions with */
- val compiler: Global = newCompiler(settings, reporter)
+ /** reporter */
+ object reporter extends ConsoleReporter(settings, null, out) {
+ override def printMessage(msg: String) {
+ out println clean(msg)
+ out.flush()
+ }
+ }
+
+ /** We're going to go to some trouble to initialize the compiler asynchronously.
+ * It's critical that nothing call into it until it's been initialized or we will
+ * run into unrecoverable issues, but the perceived repl startup time goes
+ * through the roof if we wait for it. So we initialize it with a future and
+ * use a lazy val to ensure that any attempt to use the compiler object waits
+ * on the future.
+ */
+ private val _compiler: Global = newCompiler(settings, reporter)
+ private def _initialize(): Boolean = {
+ val source = """
+ |// this is assembled to force the loading of approximately the
+ |// classes which will be loaded on the first expression anyway.
+ |class $repl_$init {
+ | val x = "abc".reverse.length + (5 max 5)
+ | scala.runtime.ScalaRunTime.stringOf(x)
+ |}
+ |""".stripMargin
+
+ try {
+ new _compiler.Run() compileSources List(new BatchSourceFile("<init>", source))
+ if (isReplDebug || settings.debug.value)
+ println("Repl compiler initialized.")
+ true
+ }
+ catch {
+ case MissingRequirementError(msg) => println("""
+ |Failed to initialize compiler: %s not found.
+ |** Note that as of 2.8 scala does not assume use of the java classpath.
+ |** For the old behavior pass -usejavacp to scala, or if using a Settings
+ |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(msg)
+ )
+ false
+ }
+ }
+
+ // set up initialization future
+ private var _isInitialized: () => Boolean = null
+ def initialize() = synchronized {
+ if (_isInitialized == null)
+ _isInitialized = scala.concurrent.ops future _initialize()
+ }
+
+ /** the public compiler reference; goes through the initialization future */
+ lazy val compiler: Global = {
+ initialize()
- import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type }
+ // blocks until it is initialized; false means catastrophic failure
+ if (_isInitialized()) _compiler
+ else null
+ }
+
+ import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type, TypeRef, PolyType }
import compiler.{
Tree, TermTree, ValOrDefDef, ValDef, DefDef, Assign, ClassDef,
- ModuleDef, Ident, Select, TypeDef, Import, MemberDef, DocDef }
- import compiler.{ nme, newTermName }
+ ModuleDef, Ident, Select, TypeDef, Import, MemberDef, DocDef,
+ ImportSelector, EmptyTree, NoType }
+ import compiler.{ nme, newTermName, newTypeName }
import nme.{
INTERPRETER_VAR_PREFIX, INTERPRETER_SYNTHVAR_PREFIX, INTERPRETER_LINE_PREFIX,
INTERPRETER_IMPORT_WRAPPER, INTERPRETER_WRAPPER_SUFFIX, USCOREkw
}
- /** construct an interpreter that reports to Console */
- def this(settings: Settings) =
- this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ import compiler.definitions
+ import definitions.{ EmptyPackage, getMember }
/** whether to print out result lines */
private[nsc] var printResults: Boolean = true
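The long comment in the hunk above explains the repl's strategy of building the compiler in the background and routing every use through a lazy val that waits on the result. A rough standalone sketch of that shape, using plain java.util.concurrent rather than the scala.concurrent.ops call in the patch; SlowService and Holder are made-up names:

import java.util.concurrent.{ Callable, Executors }

class SlowService { def answer = 42 }   // stand-in for the expensive-to-build compiler

object Holder {
  private val exec = Executors.newSingleThreadExecutor()
  // start construction on a worker thread; submit returns immediately
  private val pending = exec.submit(new Callable[SlowService] {
    def call() = new SlowService
  })
  // first access blocks until construction completes; later accesses are free
  lazy val service: SlowService = pending.get()
}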
@@ -101,15 +172,21 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
}
- /** interpreter settings */
- lazy val isettings = new InterpreterSettings(this)
+ /** whether to bind the lastException variable */
+ private var bindLastException = true
- object reporter extends ConsoleReporter(settings, null, out) {
- override def printMessage(msg: String) {
- out.print(clean(msg) + "\n"); out.flush()
+ /** Temporarily stop binding lastException */
+ def withoutBindingLastException[T](operation: => T): T = {
+ val wasBinding = bindLastException
+ ultimately(bindLastException = wasBinding) {
+ bindLastException = false
+ operation
}
}
+ /** interpreter settings */
+ lazy val isettings = new InterpreterSettings(this)
+
/** Instantiate a compiler. Subclasses can override this to
* change the compiler class used by this interpreter. */
protected def newCompiler(settings: Settings, reporter: Reporter) = {
@@ -118,14 +195,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
/** the compiler's classpath, as URL's */
- val compilerClasspath: List[URL] = {
- import scala.net.Utility.parseURL
- val classpathPart =
- ClassPath.expandPath(compiler.settings.classpath.value).map(s => new File(s).toURL)
-
- val codebasePart = (compiler.settings.Xcodebase.value.split(" ")).toList flatMap parseURL
- classpathPart ::: codebasePart
- }
+ lazy val compilerClasspath: List[URL] = new PathResolver(settings) asURLs
/* A single class loader is used for all commands interpreted by this Interpreter.
It would also be possible to create a new class loader for each command
@@ -140,8 +210,15 @@ class Interpreter(val settings: Settings, out: PrintWriter)
shadow the old ones, and old code objects refer to the old
definitions.
*/
- private var classLoader: ScalaClassLoader = makeClassLoader()
- private def makeClassLoader(): ScalaClassLoader = {
+ private var _classLoader: AbstractFileClassLoader = null
+ def resetClassLoader() = _classLoader = makeClassLoader()
+ def classLoader: AbstractFileClassLoader = {
+ if (_classLoader == null)
+ resetClassLoader()
+
+ _classLoader
+ }
+ private def makeClassLoader(): AbstractFileClassLoader = {
val parent =
if (parentClassLoader == null) ScalaClassLoader fromURLs compilerClasspath
else new URLClassLoader(compilerClasspath, parentClassLoader)
@@ -152,28 +229,82 @@ class Interpreter(val settings: Settings, out: PrintWriter)
private def methodByName(c: Class[_], name: String): reflect.Method =
c.getMethod(name, classOf[Object])
- protected def parentClassLoader: ClassLoader = this.getClass.getClassLoader()
+ protected def parentClassLoader: ClassLoader =
+ settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
+
+ def getInterpreterClassLoader() = classLoader
// Set the current Java "context" class loader to this interpreter's class loader
def setContextClassLoader() = classLoader.setAsContext()
/** the previous requests this interpreter has processed */
private val prevRequests = new ArrayBuffer[Request]()
- val prevImports = new ListBuffer[Import]()
+ private val usedNameMap = new HashMap[Name, Request]()
+ private val boundNameMap = new HashMap[Name, Request]()
+ private def allHandlers = prevRequests.toList flatMap (_.handlers)
+ private def allReqAndHandlers = prevRequests.toList flatMap (req => req.handlers map (req -> _))
+
+ def printAllTypeOf = {
+ prevRequests foreach { req =>
+ req.typeOf foreach { case (k, v) => Console.println(k + " => " + v) }
+ }
+ }
+
+ /** Most recent tree handled which wasn't wholly synthetic. */
+ private def mostRecentlyHandledTree: Option[Tree] = {
+ for {
+ req <- prevRequests.reverse
+ handler <- req.handlers.reverse
+ name <- handler.generatesValue
+ if !isSynthVarName(name)
+ } return Some(handler.member)
+
+ None
+ }
- private def allUsedNames = prevRequests.toList.flatMap(_.usedNames).removeDuplicates
- private def allBoundNames = prevRequests.toList.flatMap(_.boundNames).removeDuplicates
- // private def allImportedNames = prevImports.toList.flatMap(_.importedNames).removeDuplicates
+ def recordRequest(req: Request) {
+ def tripart[T](set1: Set[T], set2: Set[T]) = {
+ val intersect = set1 intersect set2
+ List(set1 -- intersect, intersect, set2 -- intersect)
+ }
+
+ prevRequests += req
+ req.usedNames foreach (x => usedNameMap(x) = req)
+ req.boundNames foreach (x => boundNameMap(x) = req)
+
+ // XXX temporarily putting this here because of tricky initialization order issues
+ // so right now it's not bound until after you issue a command.
+ if (prevRequests.size == 1)
+ quietBind("settings", "scala.tools.nsc.InterpreterSettings", isettings)
+
+ // println("\n s1 = %s\n s2 = %s\n s3 = %s".format(
+ // tripart(usedNameMap.keysIterator.toSet, boundNameMap.keysIterator.toSet): _*
+ // ))
+ }
+
+ private def keyList[T](x: collection.Map[T, _]): List[T] = x.keys.toList sortBy (_.toString)
+ def allUsedNames = keyList(usedNameMap)
+ def allBoundNames = keyList(boundNameMap)
+ def allSeenTypes = prevRequests.toList flatMap (_.typeOf.values.toList) distinct
+ def allValueGeneratingNames = allHandlers flatMap (_.generatesValue)
+ def allImplicits = partialFlatMap(allHandlers) {
+ case x: MemberHandler if x.definesImplicit => x.boundNames
+ }
/** Generates names pre0, pre1, etc. via calls to apply method */
class NameCreator(pre: String) {
private var x = -1
+ var mostRecent: String = null
+
def apply(): String = {
x += 1
val name = pre + x.toString
// make sure we don't overwrite their unwisely named res3 etc.
- if (allBoundNames exists (_.toString == name)) apply()
- else name
+ mostRecent =
+ if (allBoundNames exists (_.toString == name)) apply()
+ else name
+
+ mostRecent
}
def reset(): Unit = x = -1
def didGenerate(name: String) =
@@ -181,28 +312,23 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
/** allocate a fresh line name */
- private val lineNameCreator = new NameCreator(INTERPRETER_LINE_PREFIX)
+ private lazy val lineNameCreator = new NameCreator(INTERPRETER_LINE_PREFIX)
/** allocate a fresh var name */
- private val varNameCreator = new NameCreator(INTERPRETER_VAR_PREFIX)
+ private lazy val varNameCreator = new NameCreator(INTERPRETER_VAR_PREFIX)
/** allocate a fresh internal variable name */
- private def synthVarNameCreator = new NameCreator(INTERPRETER_SYNTHVAR_PREFIX)
+ private lazy val synthVarNameCreator = new NameCreator(INTERPRETER_SYNTHVAR_PREFIX)
/** Check if a name looks like it was generated by varNameCreator */
private def isGeneratedVarName(name: String): Boolean = varNameCreator didGenerate name
private def isSynthVarName(name: String): Boolean = synthVarNameCreator didGenerate name
+ private def isSynthVarName(name: Name): Boolean = synthVarNameCreator didGenerate name.toString
- /** generate a string using a routine that wants to write on a stream */
- private def stringFrom(writer: PrintWriter => Unit): String = {
- val stringWriter = new StringWriter()
- val stream = new NewLinePrintWriter(stringWriter)
- writer(stream)
- stream.close
- stringWriter.toString
- }
+ def getVarName = varNameCreator()
+ def getSynthVarName = synthVarNameCreator()
- /** Truncate a string if it is longer than settings.maxPrintString */
+ /** Truncate a string if it is longer than isettings.maxPrintString */
private def truncPrintString(str: String): String = {
val maxpr = isettings.maxPrintString
val trailer = "..."
@@ -212,19 +338,28 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
/** Clean up a string for output */
- private def clean(str: String) = truncPrintString(stripWrapperGunk(str))
+ private def clean(str: String) = truncPrintString(
+ if (isettings.unwrapStrings) stripWrapperGunk(str)
+ else str
+ )
/** Indent some code by the width of the scala> prompt.
* This way, compiler error messages read better.
*/
private final val spaces = List.fill(7)(" ").mkString
- def indentCode(code: String) =
- stringFrom(str =>
+ def indentCode(code: String) = {
+ /** Heuristic to avoid indenting and thereby corrupting """-strings and XML literals. */
+ val noIndent = (code contains "\n") && (List("\"\"\"", "</", "/>") exists (code contains _))
+ stringFromWriter(str =>
for (line <- code.lines) {
- str.print(spaces)
+ if (!noIndent)
+ str.print(spaces)
+
str.print(line + "\n")
str.flush()
})
+ }
+ def indentString(s: String) = s split "\n" map (spaces + _ + "\n") mkString
implicit def name2string(name: Name) = name.toString
@@ -257,41 +392,38 @@ class Interpreter(val settings: Settings, out: PrintWriter)
* should be taken. Removes requests which cannot contribute
* useful imports for the specified set of wanted names.
*/
- case class ReqAndHandler(req: Request, handler: MemberHandler)
+ case class ReqAndHandler(req: Request, handler: MemberHandler) { }
+
def reqsToUse: List[ReqAndHandler] = {
/** Loop through a list of MemberHandlers and select which ones to keep.
* 'wanted' is the set of names that need to be imported.
*/
def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
val isWanted = wanted contains _
- def keepHandler(handler: MemberHandler): Boolean = {
- import handler._
- // Single symbol imports might be implicits! See bug #1752. Rather than
- // try to finesse this, we will mimic all imports for now.
- def isImport = handler.isInstanceOf[ImportHandler]
- definesImplicit || isImport || (importedNames ++ boundNames).exists(isWanted)
+ // Single symbol imports might be implicits! See bug #1752. Rather than
+ // try to finesse this, we will mimic all imports for now.
+ def keepHandler(handler: MemberHandler) = handler match {
+ case _: ImportHandler => true
+ case x => x.definesImplicit || (x.boundNames exists isWanted)
}
reqs match {
case Nil => Nil
case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
case rh :: rest =>
+ val importedNames = rh.handler match { case x: ImportHandler => x.importedNames ; case _ => Nil }
import rh.handler._
val newWanted = wanted ++ usedNames -- boundNames -- importedNames
rh :: select(rest, newWanted)
}
}
- val rhpairs = for {
- req <- prevRequests.toList.reverse
- handler <- req.handlers
- } yield ReqAndHandler(req, handler)
-
- select(rhpairs, wanted).reverse
+ /** Flatten the handlers out and pair each with the original request */
+ select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
}
val code, trailingBraces, accessPath = new StringBuffer
- val currentImps = mutable.Set.empty[Name]
+ val currentImps = HashSet[Name]()
// add code for a new object to hold some imports
def addWrapper() {
@@ -307,32 +439,33 @@ class Interpreter(val settings: Settings, out: PrintWriter)
// loop through previous requests, adding imports for each one
for (ReqAndHandler(req, handler) <- reqsToUse) {
- import handler._
- // If the user entered an import, then just use it; add an import wrapping
- // level if the import might conflict with some other import
- if (importsWildcard || currentImps.exists(importedNames.contains))
- addWrapper()
-
- if (member.isInstanceOf[Import])
- code append (member.toString + "\n")
-
- // give wildcard imports a import wrapper all to their own
- if (importsWildcard) addWrapper()
- else currentImps ++= importedNames
-
- // For other requests, import each bound variable.
- // import them explicitly instead of with _, so that
- // ambiguity errors will not be generated. Also, quote
- // the name of the variable, so that we don't need to
- // handle quoting keywords separately.
- for (imv <- boundNames) {
- if (currentImps contains imv) addWrapper()
-
- code append ("import " + req.fullPath(imv))
- currentImps += imv
+ handler match {
+ // If the user entered an import, then just use it; add an import wrapping
+ // level if the import might conflict with some other import
+ case x: ImportHandler =>
+ if (x.importsWildcard || (currentImps exists (x.importedNames contains _)))
+ addWrapper()
+
+ code append (x.member.toString + "\n")
+
+ // give wildcard imports an import wrapper all their own
+ if (x.importsWildcard) addWrapper()
+ else currentImps ++= x.importedNames
+
+ // For other requests, import each bound variable.
+ // import them explicitly instead of with _, so that
+ // ambiguity errors will not be generated. Also, quote
+ // the name of the variable, so that we don't need to
+ // handle quoting keywords separately.
+ case x =>
+ for (imv <- x.boundNames) {
+ if (currentImps contains imv) addWrapper()
+
+ code append ("import %s\n" format (req fullPath imv))
+ currentImps += imv
+ }
}
}
-
// add one extra wrapper, to prevent warnings in the common case of
// redefining the value bound in the last interpreter request.
addWrapper()
@@ -360,7 +493,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
/** Compile an nsc SourceFile. Returns true if there are
- * no compilation errors, or false othrewise.
+ * no compilation errors, or false otherwise.
*/
def compileSources(sources: SourceFile*): Boolean = {
reporter.reset
@@ -374,11 +507,23 @@ class Interpreter(val settings: Settings, out: PrintWriter)
def compileString(code: String): Boolean =
compileSources(new BatchSourceFile("<script>", code))
+ def compileAndSaveRun(label: String, code: String) = {
+ if (isReplDebug) {
+ parse(code) match {
+ case Some(trees) => trees foreach (t => DBG(compiler.asCompactString(t)))
+ case _ => DBG("Parse error:\n\n" + code)
+ }
+ }
+ val run = new compiler.Run()
+ run.compileSources(List(new BatchSourceFile(label, code)))
+ run
+ }
+
/** Build a request from the user. <code>trees</code> is <code>line</code>
* after being parsed.
*/
- private def buildRequest(trees: List[Tree], line: String, lineName: String): Request =
- new Request(line, lineName)
+ private def buildRequest(line: String, lineName: String, trees: List[Tree]): Request =
+ new Request(line, lineName, trees)
private def chooseHandler(member: Tree): MemberHandler = member match {
case member: DefDef => new DefHandler(member)
@@ -392,6 +537,29 @@ class Interpreter(val settings: Settings, out: PrintWriter)
case member => new GenericHandler(member)
}
+ private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ val trees = parse(indentCode(line)) match {
+ case None => return Left(IR.Incomplete)
+ case Some(Nil) => return Left(IR.Error) // parse error or empty input
+ case Some(trees) => trees
+ }
+
+ // use synthetic vars to avoid filling up the resXX slots
+ def varName = if (synthetic) getSynthVarName else getVarName
+
+ // Treat a single bare expression specially. This is necessary due to it being hard to
+ // modify code at a textual level, and it being hard to submit an AST to the compiler.
+ if (trees.size == 1) trees.head match {
+ case _:Assign => // we don't want to include assignments
+ case _:TermTree | _:Ident | _:Select => // ... but do want these as valdefs.
+ return requestFromLine("val %s =\n%s".format(varName, line), synthetic)
+ case _ =>
+ }
+
+ // figure out what kind of request
+ Right(buildRequest(line, lineNameCreator(), trees))
+ }
+
/** <p>
* Interpret one line of input. All feedback, including parse errors
* and evaluation results, are printed via the supplied compiler's
@@ -406,45 +574,34 @@ class Interpreter(val settings: Settings, out: PrintWriter)
* @param line ...
* @return ...
*/
- def interpret(line: String): IR.Result = {
- // initialize the compiler
- if (prevRequests.isEmpty) new compiler.Run()
-
- // parse
- val trees = parse(indentCode(line)) match {
- case None => return IR.Incomplete
- case Some(Nil) => return IR.Error // parse error or empty input
- case Some(trees) => trees
+ def interpret(line: String): IR.Result = interpret(line, false)
+ def interpret(line: String, synthetic: Boolean): IR.Result = {
+ def loadAndRunReq(req: Request) = {
+ val (result, succeeded) = req.loadAndRun
+ if (printResults || !succeeded)
+ out print clean(result)
+
+ // book-keeping
+ if (succeeded && !synthetic)
+ recordRequest(req)
+
+ if (succeeded) IR.Success
+ else IR.Error
}
- // Treat a single bare expression specially. This is necessary due to it being hard to
- // modify code at a textual level, and it being hard to submit an AST to the compiler.
- if (trees.size == 1) trees.head match {
- case _:Assign => // we don't want to include assignments
- case _:TermTree | _:Ident | _:Select =>
- return interpret("val %s =\n%s".format(varNameCreator(), line))
- case _ =>
- }
-
- // figure out what kind of request
- val req = buildRequest(trees, line, lineNameCreator())
- // null is a disallowed statement type; otherwise compile and fail if false (implying e.g. a type error)
- if (req == null || !req.compile)
- return IR.Error
-
- val (result, succeeded) = req.loadAndRun
- if (printResults || !succeeded)
- out print clean(result)
-
- if (succeeded) {
- prevRequests += req // book-keeping
- IR.Success
+ if (compiler == null) IR.Error
+ else requestFromLine(line, synthetic) match {
+ case Left(result) => result
+ case Right(req) =>
+ // null indicates a disallowed statement type; otherwise compile and
+ // fail if false (implying e.g. a type error)
+ if (req == null || !req.compile) IR.Error
+ else loadAndRunReq(req)
}
- else IR.Error
}
/** A name creator used for objects created by <code>bind()</code>. */
- private val newBinder = new NameCreator("binder")
+ private lazy val newBinder = new NameCreator("binder")
/** Bind a specified name to a specified value. The name may
* later be used by expressions passed to interpret.
@@ -455,29 +612,29 @@ class Interpreter(val settings: Settings, out: PrintWriter)
* @return an indication of whether the binding succeeded
*/
def bind(name: String, boundType: String, value: Any): IR.Result = {
- val binderName = newBinder() // "binder" + binderNum()
+ val binderName = newBinder()
compileString("""
- | object %s {
- | var value: %s = _
- | def set(x: Any) = value = x.asInstanceOf[%s]
- | }
+ |object %s {
+ | var value: %s = _
+ | def set(x: Any) = value = x.asInstanceOf[%s]
+ |}
""".stripMargin.format(binderName, boundType, boundType))
val binderObject = loadByName(binderName)
val setterMethod = methodByName(binderObject, "set")
- // this roundabout approach is to ensure the value is boxed
- var argsHolder: Array[Any] = null
- argsHolder = List(value).toArray
- setterMethod.invoke(null, argsHolder.asInstanceOf[Array[AnyRef]]: _*)
+ setterMethod.invoke(null, value.asInstanceOf[AnyRef])
interpret("val %s = %s.value".format(name, binderName))
}
+ def quietBind(name: String, boundType: String, value: Any): IR.Result =
+ beQuietDuring { bind(name, boundType, value) }
+
/** Reset this interpreter, forgetting all user-specified requests. */
def reset() {
virtualDirectory.clear
- classLoader = makeClassLoader
+ resetClassLoader()
lineNameCreator.reset()
varNameCreator.reset()
prevRequests.clear
@@ -495,12 +652,14 @@ class Interpreter(val settings: Settings, out: PrintWriter)
/** A traverser that finds all mentioned identifiers, i.e. things
* that need to be imported. It might return extra names.
*/
- private class ImportVarsTraverser(definedVars: List[Name]) extends Traverser {
+ private class ImportVarsTraverser extends Traverser {
val importVars = new HashSet[Name]()
override def traverse(ast: Tree) = ast match {
- case Ident(name) => importVars += name
- case _ => super.traverse(ast)
+ // XXX this is obviously inadequate but it's going to require some effort
+ // to get right.
+ case Ident(name) if !(name.toString startsWith "x$") => importVars += name
+ case _ => super.traverse(ast)
}
}
@@ -508,23 +667,21 @@ class Interpreter(val settings: Settings, out: PrintWriter)
* in a single interpreter request.
*/
private sealed abstract class MemberHandler(val member: Tree) {
- val usedNames: List[Name] = {
- val ivt = new ImportVarsTraverser(boundNames)
- ivt.traverseTrees(List(member))
+ lazy val usedNames: List[Name] = {
+ val ivt = new ImportVarsTraverser()
+ ivt traverse member
ivt.importVars.toList
}
def boundNames: List[Name] = Nil
- def valAndVarNames: List[Name] = Nil
- def defNames: List[Name] = Nil
- val importsWildcard = false
- val importedNames: Seq[Name] = Nil
- val definesImplicit = member match {
- case tree: MemberDef => tree.mods hasFlag Flags.IMPLICIT
- case _ => false
+ val definesImplicit = cond(member) {
+ case tree: MemberDef => tree.mods hasFlag Flags.IMPLICIT
}
+ def generatesValue: Option[Name] = None
def extraCodeToEvaluate(req: Request, code: PrintWriter) { }
def resultExtractionCode(req: Request, code: PrintWriter) { }
+
+ override def toString = "%s(used = %s)".format(this.getClass.toString split '.' last, usedNames)
}
private class GenericHandler(member: Tree) extends MemberHandler(member)
@@ -535,34 +692,30 @@ class Interpreter(val settings: Settings, out: PrintWriter)
lazy val isLazy = mods hasFlag Flags.LAZY
override lazy val boundNames = List(vname)
- override def valAndVarNames = boundNames
+ override def generatesValue = Some(vname)
override def resultExtractionCode(req: Request, code: PrintWriter) {
val isInternal = isGeneratedVarName(vname) && req.typeOfEnc(vname) == "Unit"
if (!mods.isPublic || isInternal) return
- lazy val extractor = """
- | {
- | val s = scala.runtime.ScalaRunTime.stringOf(%s)
- | val nl = if (s.contains('\n')) "\n" else ""
- | nl + s + "\n"
- | }
- """.stripMargin.format(req fullPath vname)
+ lazy val extractor = "scala.runtime.ScalaRunTime.stringOf(%s)".format(req fullPath vname)
// if this is a lazy val we avoid evaluating it here
val resultString = if (isLazy) codegenln(false, "<lazy>") else extractor
val codeToPrint =
- """ + "%s: %s = " + %s""" .
- format(prettyName, string2code(req.typeOf(vname)), resultString)
+ """ + "%s: %s = " + %s""".format(prettyName, string2code(req typeOf vname), resultString)
code print codeToPrint
}
}
private class DefHandler(defDef: DefDef) extends MemberHandler(defDef) {
- lazy val DefDef(mods, name, _, _, _, _) = defDef
+ lazy val DefDef(mods, name, _, vparamss, _, _) = defDef
override lazy val boundNames = List(name)
- override def defNames = boundNames
+ // true if 0-arity
+ override def generatesValue =
+ if (vparamss.isEmpty || vparamss.head.isEmpty) Some(name)
+ else None
override def resultExtractionCode(req: Request, code: PrintWriter) =
if (mods.isPublic) code print codegenln(name, ": ", req.typeOf(name))
@@ -571,7 +724,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
private class AssignHandler(member: Assign) extends MemberHandler(member) {
val lhs = member.lhs.asInstanceOf[Ident] // an unfortunate limitation
val helperName = newTermName(synthVarNameCreator())
- override val valAndVarNames = List(helperName)
+ override def generatesValue = Some(helperName)
override def extraCodeToEvaluate(req: Request, code: PrintWriter) =
code println """val %s = %s""".format(helperName, lhs)
@@ -589,6 +742,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
private class ModuleHandler(module: ModuleDef) extends MemberHandler(module) {
lazy val ModuleDef(mods, name, _) = module
override lazy val boundNames = List(name)
+ override def generatesValue = Some(name)
override def resultExtractionCode(req: Request, code: PrintWriter) =
code println codegenln("defined module ", name)
@@ -613,28 +767,39 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
private class ImportHandler(imp: Import) extends MemberHandler(imp) {
+ lazy val Import(expr, selectors) = imp
+ def targetType = stringToCompilerType(expr.toString) match {
+ case NoType => None
+ case x => Some(x)
+ }
+
+ private def selectorWild = selectors filter (_.name == USCOREkw) // wildcard imports, e.g. import foo._
+ private def selectorMasked = selectors filter (_.rename == USCOREkw) // masking imports, e.g. import foo.{ bar => _ }
+ private def selectorNames = selectors map (_.name)
+ private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
+
/** Whether this import includes a wildcard import */
- override val importsWildcard = imp.selectors.map(_.name) contains USCOREkw
+ val importsWildcard = selectorWild.nonEmpty
- /** The individual names imported by this statement */
- override val importedNames: Seq[Name] = for {
- sel <- imp.selectors
- if (sel.rename != null && sel.rename != USCOREkw)
- name <- List(sel.rename.toTypeName, sel.rename.toTermName)
- }
- yield name
+ /** Complete list of names imported by a wildcard */
+ def wildcardImportedNames: List[Name] = (
+ for (tpe <- targetType ; if importsWildcard) yield
+ tpe.nonPrivateMembers filter (x => x.isMethod && x.isPublic) map (_.name) distinct
+ ).toList.flatten
- // record the import
- prevImports += imp
+ /** The individual names imported by this statement */
+ /** XXX come back to this and see what can be done with wildcards now that
+ * we know how to enumerate the identifiers.
+ */
+ val importedNames: List[Name] =
+ selectorRenames filterNot (_ == USCOREkw) flatMap (x => List(x.toTypeName, x.toTermName))
override def resultExtractionCode(req: Request, code: PrintWriter) =
code println codegenln(imp.toString)
}
/** One line of code submitted by the user for interpretation */
- private class Request(val line: String, val lineName: String) {
- val trees = parse(line) getOrElse Nil
-
+ private class Request(val line: String, val lineName: String, val trees: List[Tree]) {
/** name to use for the object that will compute "line" */
def objectName = lineName + INTERPRETER_WRAPPER_SUFFIX
@@ -645,18 +810,27 @@ class Interpreter(val settings: Settings, out: PrintWriter)
val handlers: List[MemberHandler] = trees map chooseHandler
/** all (public) names defined by these statements */
- val boundNames = (ListSet() ++ handlers.flatMap(_.boundNames)).toList
+ val boundNames = handlers flatMap (_.boundNames)
/** list of names used by this expression */
- val usedNames: List[Name] = handlers.flatMap(_.usedNames)
+ val usedNames: List[Name] = handlers flatMap (_.usedNames)
+
+ /** def and val names */
+ def defNames = partialFlatMap(handlers) { case x: DefHandler => x.boundNames }
+ def valueNames = partialFlatMap(handlers) {
+ case x: AssignHandler => List(x.helperName)
+ case x: ValHandler => boundNames
+ case x: ModuleHandler => List(x.name)
+ }
/** Code to import bound names from previous lines - accessPath is code to
- * append to objectName to access anything bound by request. */
+ * append to objectName to access anything bound by request.
+ */
val ComputedImports(importsPreamble, importsTrailer, accessPath) =
importsCode(Set.empty ++ usedNames)
/** Code to access a variable with the specified name */
- def fullPath(vname: String): String = "%s.`%s`\n".format(objectName + accessPath, vname)
+ def fullPath(vname: String): String = "%s.`%s`".format(objectName + accessPath, vname)
/** Code to access a variable with the specified name */
def fullPath(vname: Name): String = fullPath(vname.toString)
@@ -665,41 +839,48 @@ class Interpreter(val settings: Settings, out: PrintWriter)
def toCompute = line
/** generate the source code for the object that computes this request */
- def objectSourceCode: String = stringFrom { code =>
- // whitespace compatible with interpreter.scala
- val preamble = """object %s {
- | %s%s
+ def objectSourceCode: String = stringFromWriter { code =>
+ val preamble = """
+ |object %s {
+ | %s%s
""".stripMargin.format(objectName, importsPreamble, indentCode(toCompute))
- // val preamble = """
- // | object %s {
- // | %s %s
- // """.stripMargin.format(objectName, importsPreamble, indentCode(toCompute))
- val postamble = importsTrailer + "; }"
+ val postamble = importsTrailer + "\n}"
code println preamble
handlers foreach { _.extraCodeToEvaluate(this, code) }
code println postamble
}
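Concretely, for a first line such as `val x = 3` with nothing to import from earlier requests, the preamble and postamble above produce a wrapper of roughly this shape (line1$object follows the lineName + "$object" convention visible elsewhere in this patch; importsPreamble and importsTrailer are empty here):

    object line1$object {
      val x = 3
    }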
- /** Types of variables defined by this request. They are computed
- after compilation of the main object */
- var typeOf: Map[Name, String] = _
- def typeOfEnc(vname: Name) = typeOf(compiler encode vname)
-
/** generate source code for the object that retrieves the result
from objectSourceCode */
- def resultObjectSourceCode: String = stringFrom { code =>
+ def resultObjectSourceCode: String = stringFromWriter { code =>
+ /** We only want to generate this code when the result
+ * is a value which can be referred to as-is.
+ */
+ val valueExtractor = handlers.last.generatesValue match {
+ case Some(vname) if typeOf contains vname =>
+ """
+ |lazy val scala_repl_value = {
+ | scala_repl_result
+ | %s
+ |}""".stripMargin.format(fullPath(vname))
+ case _ => ""
+ }
+
+ // first line evaluates object to make sure constructor is run
+ // initial "" so later code can uniformly be: + etc
val preamble = """
- | object %s {
- | val result: String = {
- | %s // evaluate object to make sure constructor is run
- | ("" // an initial "" so later code can uniformly be: + etc
- """.stripMargin.format(resultObjectName, objectName + accessPath)
+ |object %s {
+ | %s
+ | val scala_repl_result: String = {
+ | %s
+ | (""
+ """.stripMargin.format(resultObjectName, valueExtractor, objectName + accessPath)
val postamble = """
- | )
- | }
- | }
+ | )
+ | }
+ |}
""".stripMargin
code println preamble
@@ -707,6 +888,23 @@ class Interpreter(val settings: Settings, out: PrintWriter)
code println postamble
}
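Continuing that sketch, the result-extraction object for the same line comes out roughly as below; line1$result stands in for the real resultObjectName, and the string appended after "x: Int = " stands in for the handler's resultString:

    object line1$object { val x = 3 }   // wrapper from the previous sketch, repeated so this compiles alone

    object line1$result {
      lazy val scala_repl_value = {
        scala_repl_result      // force evaluation of the wrapper first
        line1$object.`x`       // then expose the bound value as-is
      }
      val scala_repl_result: String = {
        line1$object           // evaluate the wrapper so its constructor runs
        ("" + "x: Int = " + line1$object.`x` + "\n")
      }
    }

    object ResultDemo {
      def main(args: Array[String]): Unit = print(line1$result.scala_repl_result)   // x: Int = 3
    }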
+ // compile the object containing the user's code
+ lazy val objRun = compileAndSaveRun("<console>", objectSourceCode)
+
+ // compile the result-extraction object
+ lazy val extractionObjectRun = compileAndSaveRun("<console>", resultObjectSourceCode)
+
+ lazy val loadedResultObject = loadByName(resultObjectName)
+
+ def extractionValue(): Option[AnyRef] = {
+ // ensure it has run
+ extractionObjectRun
+
+ // load it and retrieve the value
+ try Some(loadedResultObject getMethod "scala_repl_value" invoke loadedResultObject)
+ catch { case _: Exception => None }
+ }
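The reflection involved can be seen in isolation: a top-level object compiles to a mirror class with static forwarders, so loading the class by name and invoking the accessor works without instantiating anything (ExtractionDemo and the hard-coded value are illustrative only):

    // Stand-in for a generated result object.
    object line1$result {
      lazy val scala_repl_value: AnyRef = "x: Int = 3"
    }

    object ExtractionDemo {
      def main(args: Array[String]): Unit = {
        // loadByName in the repl resolves the class through the repl's class
        // loader; Class.forName is the closest stand-alone equivalent.
        val clazz  = Class.forName("line1$result")
        val method = clazz getMethod "scala_repl_value"
        // The forwarder is static, so the receiver argument is ignored
        // (the repl passes the Class object; null behaves the same).
        println(method invoke null)   // x: Int = 3
      }
    }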
+
/** Compile the object file. Returns whether the compilation succeeded.
* If all goes well, the "types" map is computed. */
def compile(): Boolean = {
@@ -714,211 +912,347 @@ class Interpreter(val settings: Settings, out: PrintWriter)
reporter.reset
// compile the main object
- val objRun = new compiler.Run()
- objRun.compileSources(List(new BatchSourceFile("<console>", objectSourceCode)))
+ objRun
+
+ // bail on error
if (reporter.hasErrors)
return false
// extract and remember types
- typeOf = findTypes(objRun)
+ typeOf
// compile the result-extraction object
- new compiler.Run().compileSources(List(new BatchSourceFile("<console>", resultObjectSourceCode)))
+ extractionObjectRun
// success
!reporter.hasErrors
}
- /** Dig the types of all bound variables out of the compiler run.
- *
- * @param objRun ...
- * @return ...
- */
- def findTypes(objRun: compiler.Run): Map[Name, String] = {
- import compiler.definitions.{ EmptyPackage, getMember }
- def valAndVarNames = handlers flatMap { _.valAndVarNames }
- def defNames = handlers flatMap { _.defNames }
+ def atNextPhase[T](op: => T): T = compiler.atPhase(objRun.typerPhase.next)(op)
- def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
- def atNextPhase[T](op: => T): T = compiler.atPhase(objRun.typerPhase.next)(op)
+ /** The outermost wrapper object */
+ lazy val outerResObjSym: Symbol = getMember(EmptyPackage, newTermName(objectName))
- /** the outermost wrapper object */
- val outerResObjSym: Symbol = getMember(EmptyPackage, newTermName(objectName))
+ /** The innermost object inside the wrapper, found by
+ * following accessPath into the outer one. */
+ lazy val resObjSym =
+ accessPath.split("\\.").foldLeft(outerResObjSym) { (sym, name) =>
+ if (name == "") sym else
+ atNextPhase(sym.info member newTermName(name))
+ }
- /** the innermost object inside the wrapper, found by
- * following accessPath into the outer one. */
- val resObjSym =
- accessPath.split("\\.").foldLeft(outerResObjSym) { (sym, name) =>
- if (name == "") sym else
- atNextPhase(sym.info member newTermName(name))
- }
+ /* typeOf lookup with encoding */
+ def typeOfEnc(vname: Name) = typeOf(compiler encode vname)
+ /** Types of variables defined by this request. */
+ lazy val typeOf: Map[Name, String] = {
+ def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
names.foldLeft(Map.empty[Name, String]) { (map, name) =>
- val rawType = atNextPhase(resObjSym.info.member(name).tpe)
+ val tp1 = atNextPhase(resObjSym.info.nonPrivateDecl(name).tpe)
// the types are all =>T; remove the =>
- val cleanedType = rawType match {
- case compiler.PolyType(Nil, rt) => rt
- case rawType => rawType
+ val tp2 = tp1 match {
+ case PolyType(Nil, tp) => tp
+ case tp => tp
}
+ // normalize non-public types so we don't see protected aliases like Self
+ val tp3 = compiler.atPhase(objRun.typerPhase)(tp2 match {
+ case TypeRef(_, sym, _) if !sym.isPublic => tp2.normalize.toString
+ case tp => tp.toString
+ })
- map + (name -> atNextPhase(cleanedType.toString))
+ map + (name -> tp3)
}
}
- val names1 = getTypes(valAndVarNames, nme.getterToLocal(_))
- val names2 = getTypes(defNames, identity)
- names1 ++ names2
+ getTypes(valueNames, nme.getterToLocal(_)) ++ getTypes(defNames, identity)
}
/** load and run the code using reflection */
def loadAndRun: (String, Boolean) = {
- val resultObject: Class[_] = loadByName(resultObjectName)
- val resultValMethod: reflect.Method = resultObject getMethod "result"
+ val resultValMethod: reflect.Method = loadedResultObject getMethod "scala_repl_result"
// XXX if wrapperExceptions isn't type-annotated we crash scalac
val wrapperExceptions: List[Class[_ <: Throwable]] =
List(classOf[InvocationTargetException], classOf[ExceptionInInitializerError])
- def onErr: Catcher[(String, Boolean)] = { case t: Throwable =>
- beQuietDuring { bind("lastException", "java.lang.Throwable", t) }
- (stringFrom(t.printStackTrace(_)), false)
+ /** We turn off the binding to accommodate ticket #2817 */
+ def onErr: Catcher[(String, Boolean)] = {
+ case t: Throwable if bindLastException =>
+ withoutBindingLastException {
+ quietBind("lastException", "java.lang.Throwable", t)
+ (stringFromWriter(t.printStackTrace(_)), false)
+ }
}
catching(onErr) {
unwrapping(wrapperExceptions: _*) {
- (resultValMethod.invoke(resultObject).toString, true)
+ (resultValMethod.invoke(loadedResultObject).toString, true)
}
}
}
+
+ override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
}
- /** These methods are exposed so REPL commands can access them.
- * The command infrastructure is in InterpreterLoop.
+ /** A container class for methods to be injected into the repl
+ * in power mode.
*/
- def dumpState(xs: List[String]): String = {
- // println("Imports for " + req + " => " + req.importsPreamble)
- // req.handlers foreach { h => println("Handler " + h + " used names: " + h.usedNames) }
- // req.trees foreach { x => println("Tree: " + x) }
- // xs foreach { x => println("membersOfIdentifier(" + x + ") = " + membersOfIdentifier(x)) }
- List(
- "allUsedNames = " + allUsedNames,
- "allBoundNames = " + allBoundNames,
- prevRequests.toList.map(req => " \"" + req.line + "\" => " + req.objectSourceCode)
- ).mkString("", "\n", "\n")
- }
+ object power {
+ lazy val compiler: repl.compiler.type = repl.compiler
+ import compiler.{ phaseNames, atPhase, currentRun }
+
+ def mkContext(code: String = "") = compiler.analyzer.rootContext(mkUnit(code))
+ def mkAlias(name: String, what: String) = interpret("type %s = %s".format(name, what))
+ def mkSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def mkUnit(code: String) = new CompilationUnit(mkSourceFile(code))
+
+ def mkTree(code: String): Tree = mkTrees(code).headOption getOrElse EmptyTree
+ def mkTrees(code: String): List[Tree] = parse(code) getOrElse Nil
+ def mkTypedTrees(code: String*): List[compiler.Tree] = {
+ class TyperRun extends compiler.Run {
+ override def stopPhase(name: String) = name == "superaccessors"
+ }
- // very simple right now, will get more interesting
- def dumpTrees(xs: List[String]): String = {
- val treestrs = (
- for (x <- xs ; name <- nameOfIdent(x) ; req <- requestForName(name))
- yield req.trees
- ).flatten
+ reporter.reset
+ val run = new TyperRun
+ run compileSources (code.toList.zipWithIndex map {
+ case (s, i) => new BatchSourceFile("<console %d>".format(i), s)
+ })
+ run.units.toList map (_.body)
+ }
+ def mkTypedTree(code: String) = mkTypedTrees(code).head
+ def mkType(id: String): compiler.Type = stringToCompilerType(id)
+
+ def dump(): String = (
+ ("Names used: " :: allUsedNames) ++
+ ("\nIdentifiers: " :: unqualifiedIds)
+ ) mkString " "
+
+ lazy val allPhases: List[Phase] = phaseNames map (currentRun phaseNamed _)
+ def atAllPhases[T](op: => T): List[(String, T)] = allPhases map (ph => (ph.name, atPhase(ph)(op)))
+ def showAtAllPhases(op: => Any): Unit =
+ atAllPhases(op.toString) foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op take 240)) }
+ }
- if (treestrs.isEmpty) "No trees found."
- else treestrs.map(t => t.toString + " (" + t.getClass.getSimpleName + ")\n").mkString
+ def unleash(): Unit = beQuietDuring {
+ interpret("import scala.tools.nsc._")
+ repl.bind("repl", "scala.tools.nsc.Interpreter", this)
+ interpret("val global: repl.compiler.type = repl.compiler")
+ interpret("val power: repl.power.type = repl.power")
+ // interpret("val replVars = repl.replVars")
}
- def powerUser(): String = {
- beQuietDuring {
- val mkTypeCmd =
- """def mkType(name: String, what: String) = interpreter.interpret("type " + name + " = " + what)"""
+ /** Artificial object demonstrating completion */
+ // lazy val replVars = CompletionAware(
+ // Map[String, CompletionAware](
+ // "ids" -> CompletionAware(() => unqualifiedIds, completionAware _),
+ // "synthVars" -> CompletionAware(() => allBoundNames filter isSynthVarName map (_.toString)),
+ // "types" -> CompletionAware(() => allSeenTypes map (_.toString)),
+ // "implicits" -> CompletionAware(() => allImplicits map (_.toString))
+ // )
+ // )
+
+ /** Returns the name of the most recent interpreter result.
+ * Mostly this exists so you can conveniently invoke methods on
+ * the previous result.
+ */
+ def mostRecentVar: String =
+ if (mostRecentlyHandledTree.isEmpty) ""
+ else mostRecentlyHandledTree.get match {
+ case x: ValOrDefDef => x.name
+ case Assign(Ident(name), _) => name
+ case ModuleDef(_, name, _) => name
+ case _ => onull(varNameCreator.mostRecent)
+ }
+
+ private def requestForName(name: Name): Option[Request] =
+ prevRequests.reverse find (_.boundNames contains name)
+
+ private def requestForIdent(line: String): Option[Request] = requestForName(newTermName(line))
- this.bind("interpreter", "scala.tools.nsc.Interpreter", this)
- interpret(mkTypeCmd)
+ def stringToCompilerType(id: String): compiler.Type = {
+ // if it's a recognized identifier, use its type; otherwise treat the
+ // string as naming a module (e.g. scala.collection.Map).
+ def findType = typeForIdent(id) match {
+ case Some(x) => definitions.getClass(newTermName(x)).tpe
+ case _ => definitions.getModule(newTermName(id)).tpe
}
- """** Power User mode enabled - BEEP BOOP **
- |** New vals! Try interpreter.<tab> **
- |** New defs! Try mkType("T", "String") **
- |** New cmds! :help to discover them **""".stripMargin
+ try findType catch { case _: MissingRequirementError => NoType }
}
- def nameOfIdent(line: String): Option[Name] = {
- parse(line) match {
- case Some(List(Ident(x))) => Some(x)
- case _ => None
- }
+ def typeForIdent(id: String): Option[String] =
+ requestForIdent(id) flatMap (x => x.typeOf get newTermName(id))
+
+ def methodsOf(name: String) =
+ evalExpr[List[String]](methodsCode(name)) map (x => NameTransformer.decode(getOriginalName(x)))
+
+ def completionAware(name: String) = {
+ // XXX working around "object is not a value" crash, i.e.
+ // import java.util.ArrayList ; ArrayList.<tab>
+ clazzForIdent(name) flatMap (_ => evalExpr[Option[CompletionAware]](asCompletionAwareCode(name)))
}
- private def requestForName(name: Name): Option[Request] = {
- for (req <- prevRequests.toList.reverse) {
- if (req.handlers.exists(_.boundNames contains name))
- return Some(req)
- }
- None
+ def extractionValueForIdent(id: String): Option[AnyRef] =
+ requestForIdent(id) flatMap (_.extractionValue)
+
+ /** Executes code looking for a manifest of type T.
+ */
+ def manifestFor[T: Manifest] =
+ evalExpr[Manifest[T]]("""manifest[%s]""".format(manifest[T]))
+
+ /** Executes code looking for an implicit value of type T.
+ */
+ def implicitFor[T: Manifest] = {
+ val s = manifest[T].toString
+ evalExpr[Option[T]]("{ def f(implicit x: %s = null): %s = x ; Option(f) }".format(s, s))
+ // We avoid implicitly so a missing implicit yields None instead of a compile error.
+ // evalExpr[T]("""implicitly[%s]""".format(manifest[T]))
+ }
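The default-argument trick behind implicitFor can be reproduced in plain Scala: when no implicit of the requested type is in scope, the null default is taken and the probe answers None rather than failing to compile (Show, showInt and probe are hypothetical names for this sketch):

    object ImplicitProbeDemo {
      trait Show[A] { def show(a: A): String }
      implicit val showInt: Show[Int] = new Show[Int] { def show(a: Int) = a.toString }

      // Missing implicit => default null is used => None, never a compile error.
      def probe[T >: Null](implicit x: T = null): Option[T] = Option(x)

      def main(args: Array[String]): Unit = {
        println(probe[Show[Int]].isDefined)      // true
        println(probe[Show[String]].isDefined)   // false
      }
    }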
+ /** Executes code looking for an implicit conversion from the type
+ * of the given identifier to CompletionAware.
+ */
+ def completionAwareImplicit[T](id: String) = {
+ val f1string = "%s => %s".format(typeForIdent(id).get, classOf[CompletionAware].getName)
+ val code = """{
+ | def f(implicit x: (%s) = null): %s = x
+ | val f1 = f
+ | if (f1 == null) None else Some(f1(%s))
+ |}""".stripMargin.format(f1string, f1string, id)
+
+ evalExpr[Option[CompletionAware]](code)
}
- // XXX at the moment this is imperfect because scala's protected semantics
- // differ from java's, so protected methods appear public via reflection;
- // yet scala enforces the protection. The result is that protected members
- // appear in completion yet cannot actually be called. Fixing this
- // properly requires a scala.reflect.* API. Fixing it uglily is possible
- // too (cast to structural type!) but I deem poor use of energy.
- private val filterFlags: Int = {
- import java.lang.reflect.Modifier._
- STATIC | PRIVATE | PROTECTED
- }
- private val methodsCode = """ .
- | asInstanceOf[AnyRef].getClass.getMethods .
- | filter(x => (x.getModifiers & %d) == 0) .
- | map(_.getName) .
- | mkString(" ")""".stripMargin.format(filterFlags)
-
- /** The main entry point for tab-completion. When the user types x.<tab>
- * this method is called with "x" as an argument, and it discovers the
- * fields and methods of x via reflection and returns their names to jline.
+ def clazzForIdent(id: String): Option[Class[_]] =
+ extractionValueForIdent(id) flatMap (x => Option(x) map (_.getClass))
+
+ private def methodsCode(name: String) =
+ "%s.%s(%s)".format(classOf[ReflectionCompletion].getName, "methodsOf", name)
+
+ private def asCompletionAwareCode(name: String) =
+ "%s.%s(%s)".format(classOf[CompletionAware].getName, "unapply", name)
+
+ private def getOriginalName(name: String): String =
+ nme.originalName(newTermName(name)).toString
+
+ case class InterpreterEvalException(msg: String) extends Exception(msg)
+ def evalError(msg: String) = throw InterpreterEvalException(msg)
+
+ /** The user-facing eval in :power mode wraps an Option.
*/
- def membersOfIdentifier(line: String): List[String] = {
- import Completion.{ isValidCompletion }
- import NameTransformer.{ decode, encode } // e.g. $plus$plus => ++
-
- val res = beQuietDuring {
- for (name <- nameOfIdent(line) ; req <- requestForName(name)) yield {
- if (interpret("val " + synthVarNameCreator() + " = " + name + methodsCode) != IR.Success) Nil
- else {
- val result = prevRequests.last.resultObjectName
- val resultObj = (classLoader tryToInitializeClass result).get
- val valMethod = resultObj getMethod "result"
- val str = valMethod.invoke(resultObj).toString
-
- str.substring(str.indexOf('=') + 1).trim .
- split(" ").toList .
- map(decode) .
- filter(isValidCompletion) .
- removeDuplicates
- }
- }
+ def eval[T: Manifest](line: String): Option[T] =
+ try Some(evalExpr[T](line))
+ catch { case InterpreterEvalException(msg) => out println indentString(msg) ; None }
+
+ def evalExpr[T: Manifest](line: String): T = {
+ // Nothing means the type could not be inferred.
+ if (manifest[T] eq Manifest.Nothing)
+ evalError("Could not infer type: try 'eval[SomeType](%s)' instead".format(line))
+
+ val lhs = getSynthVarName
+ beQuietDuring { interpret("val " + lhs + " = { " + line + " } ") }
+
+ // TODO - can we meaningfully compare the inferred type T with
+ // the internal compiler Type assigned to lhs?
+ // def assignedType = prevRequests.last.typeOf(newTermName(lhs))
+
+ val req = requestFromLine(lhs, true) match {
+ case Left(result) => evalError(result.toString)
+ case Right(req) => req
}
+ if (req == null || !req.compile || req.handlers.size != 1)
+ evalError("Eval error.")
- res getOrElse Nil
+ try req.extractionValue.get.asInstanceOf[T] catch {
+ case e: Exception => evalError(e.getMessage)
+ }
+ }
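A sketch of how a host application might drive this, assuming the scala-compiler and scala-library jars are on the JVM classpath; EvalDemo is a made-up host class:

    import java.io.PrintWriter
    import scala.tools.nsc.{ Interpreter, Settings }

    class EvalDemo   // anchor class for embeddedDefaults

    object EvalDemo {
      def main(args: Array[String]): Unit = {
        val settings = new Settings(Console.println)
        settings.embeddedDefaults[EvalDemo]      // same classpath trick break[T] uses below
        val repl = new Interpreter(settings, new PrintWriter(Console.out))

        println(repl.evalExpr[Int]("21 * 2"))    // 42
      }
    }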
+
+ def interpretExpr[T: Manifest](code: String): Option[T] = beQuietDuring {
+ interpret(code) match {
+ case IR.Success =>
+ try prevRequests.last.extractionValue map (_.asInstanceOf[T])
+ catch { case e: Exception => out println e ; None }
+ case _ => None
+ }
}
/** Another entry point for tab-completion, ids in scope */
- def unqualifiedIds(): List[String] =
- allBoundNames .
- map(_.toString) .
- filter(!isSynthVarName(_))
+ private def unqualifiedIdNames() = partialFlatMap(allHandlers) {
+ case x: AssignHandler => List(x.helperName)
+ case x: ValHandler => List(x.vname)
+ case x: ModuleHandler => List(x.name)
+ case x: DefHandler => List(x.name)
+ case x: ImportHandler => x.importedNames
+ } filterNot isSynthVarName
+
+ /** Types which have been wildcard imported, such as:
+ * val x = "abc" ; import x._ // type java.lang.String
+ * import java.lang.String._ // object java.lang.String
+ *
+ * Used by tab completion.
+ *
+ * XXX right now this gets import x._ and import java.lang.String._,
+ * but doesn't figure out import String._. There's a lot of ad hoc
+ * scope twiddling which should be swept away in favor of digging
+ * into the compiler scopes.
+ */
+ def wildcardImportedTypes(): List[Type] = {
+ val xs = allHandlers collect { case x: ImportHandler if x.importsWildcard => x.targetType }
+ xs.flatten.reverse.distinct
+ }
+
+ /** Another entry point for tab-completion, ids in scope */
+ def unqualifiedIds() = (unqualifiedIdNames() map (_.toString)).distinct.sorted
/** For static/object method completion */
def getClassObject(path: String): Option[Class[_]] = classLoader tryToLoadClass path
+ /** Parse the ScalaSig to find type aliases */
+ def aliasForType(path: String) = ByteCode.aliasForType(path)
+
+ // Coming soon
+ // implicit def string2liftedcode(s: String): LiftedCode = new LiftedCode(s)
+ // case class LiftedCode(code: String) {
+ // val lifted: String = {
+ // beQuietDuring { interpret(code) }
+ // eval2[String]("({ " + code + " }).toString")
+ // }
+ // def >> : String = lifted
+ // }
+
// debugging
- private var debuggingOutput = false
- def DBG(s: String) = if (debuggingOutput) out println s else ()
+ def isReplDebug = settings.Yrepldebug.value
+ def isCompletionDebug = settings.Ycompletion.value
+ def DBG(s: String) = if (isReplDebug) out println s else ()
}
/** Utility methods for the Interpreter. */
object Interpreter {
+
+ import scala.collection.generic.CanBuildFrom
+ def partialFlatMap[A, B, CC[X] <: Traversable[X]]
+ (coll: CC[A])
+ (pf: PartialFunction[A, CC[B]])
+ (implicit bf: CanBuildFrom[CC[A], B, CC[B]]) =
+ {
+ val b = bf(coll)
+ for (x <- coll collect pf)
+ b ++= x
+
+ b.result
+ }
+
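partialFlatMap is essentially collect followed by flattening into one builder; a stand-alone illustration (the local copy mirrors the definition above so the snippet runs without scala-compiler on the classpath):

    object PartialFlatMapDemo {
      import scala.collection.generic.CanBuildFrom

      // same shape as Interpreter.partialFlatMap above
      def partialFlatMap[A, B, CC[X] <: Traversable[X]]
          (coll: CC[A])
          (pf: PartialFunction[A, CC[B]])
          (implicit bf: CanBuildFrom[CC[A], B, CC[B]]): CC[B] = {
        val b = bf(coll)
        for (x <- coll collect pf) b ++= x
        b.result
      }

      def main(args: Array[String]): Unit = {
        val xs: List[Any] = List(1, "two", 3, "four")
        // keep only the strings, exploding each into its characters
        println(partialFlatMap(xs) { case s: String => s.toList map (_.toString) })
        // List(t, w, o, f, o, u, r)
      }
    }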
object DebugParam {
- implicit def tuple2debugparam[T](x: (String, T))(implicit m: scala.reflect.Manifest[T]): DebugParam[T] =
+ implicit def tuple2debugparam[T](x: (String, T))(implicit m: Manifest[T]): DebugParam[T] =
DebugParam(x._1, x._2)
- implicit def any2debugparam[T](x: T)(implicit m: scala.reflect.Manifest[T]): DebugParam[T] =
+ implicit def any2debugparam[T](x: T)(implicit m: Manifest[T]): DebugParam[T] =
DebugParam("p" + getCount(), x)
private var counter = 0
def getCount() = { counter += 1; counter }
}
- case class DebugParam[T](name: String, param: T)(implicit m: scala.reflect.Manifest[T]) {
+ case class DebugParam[T](name: String, param: T)(implicit m: Manifest[T]) {
val manifest = m
val typeStr = {
val str = manifest.toString
@@ -934,13 +1268,16 @@ object Interpreter {
}
}
}
- def breakIf(assertion: => Boolean, args: DebugParam[_]*): Unit =
- if (assertion) break(args.toList)
+ // provide the enclosing type T
+ // in order to set up the interpreter's classpath and parent class loader properly
+ def breakIf[T: Manifest](assertion: => Boolean, args: DebugParam[_]*): Unit =
+ if (assertion) break[T](args.toList)
// start a repl, binding supplied args
- def break(args: List[DebugParam[_]]): Unit = {
+ def break[T: Manifest](args: List[DebugParam[_]]): Unit = {
val intLoop = new InterpreterLoop
intLoop.settings = new Settings(Console.println)
+ intLoop.settings.embeddedDefaults[T]
intLoop.createInterpreter
intLoop.in = InteractiveReader.createDefault(intLoop.interpreter)
@@ -949,48 +1286,32 @@ object Interpreter {
intLoop.interpreter.interpret("""def exit = println("Type :quit to resume program execution.")""")
for (p <- args) {
intLoop.interpreter.bind(p.name, p.typeStr, p.param)
- println("%s: %s".format(p.name, p.typeStr))
+ Console println "%s: %s".format(p.name, p.typeStr)
}
}
intLoop.repl()
intLoop.closeInterpreter
}
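A sketch of calling the reworked breakIf from application code, assuming scala-compiler is on the runtime classpath; BreakDemo is a made-up host class:

    import scala.tools.nsc.Interpreter
    import scala.tools.nsc.Interpreter.DebugParam._   // tuple2debugparam / any2debugparam

    class BreakDemo   // the type argument below lets embeddedDefaults find our classpath

    object BreakDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3)
        // Drops into a nested repl with "xs" bound when the assertion holds.
        Interpreter.breakIf[BreakDemo](xs.sum > 5, ("xs", xs))
      }
    }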
- /** Heuristically strip interpreter wrapper prefixes
- * from an interpreter output string.
- */
- def stripWrapperGunk(str: String): String = {
- val wrapregex = """(line[0-9]+\$object[$.])?(\$iw[$.])*"""
- str.replaceAll(wrapregex, "")
- }
-
def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
def codegenln(xs: String*): String = codegenln(true, xs: _*)
+
def codegen(xs: String*): String = codegen(true, xs: _*)
def codegen(leadingPlus: Boolean, xs: String*): String = {
val front = if (leadingPlus) "+ " else ""
- xs.map("\"" + string2code(_) + "\"").mkString(front, " + ", "")
+ front + (xs map string2codeQuoted mkString " + ")
}
+ def string2codeQuoted(str: String) = "\"" + string2code(str) + "\""
+
/** Convert a string into code that can recreate the string.
* This requires replacing all special characters by escape
* codes. It does not add the surrounding " marks. */
def string2code(str: String): String = {
- /** Convert a character to a backslash-u escape */
- def char2uescape(c: Char): String = {
- var rest = c.toInt
- val buf = new StringBuilder
- for (i <- 1 to 4) {
- buf ++= (rest % 16).toHexString
- rest = rest / 16
- }
- "\\u" + buf.toString.reverse
- }
-
val res = new StringBuilder
for (c <- str) c match {
case '"' | '\'' | '\\' => res += '\\' ; res += c
- case _ if c.isControl => res ++= char2uescape(c)
+ case _ if c.isControl => res ++= Chars.char2uescape(c)
case _ => res += c
}
res.toString
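For example, with the Interpreter object on the classpath the escaping helpers behave like this (EscapeDemo is illustrative):

    import scala.tools.nsc.Interpreter.{ string2code, string2codeQuoted }

    object EscapeDemo {
      def main(args: Array[String]): Unit = {
        println(string2code("say \"hi\""))   // say \"hi\"
        println(string2codeQuoted("a\tb"))   // quoted form; the tab becomes a \u escape
      }
    }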
diff --git a/src/compiler/scala/tools/nsc/InterpreterCommand.scala b/src/compiler/scala/tools/nsc/InterpreterCommand.scala
index 3ed91c156c..0ac2ef9617 100644
--- a/src/compiler/scala/tools/nsc/InterpreterCommand.scala
+++ b/src/compiler/scala/tools/nsc/InterpreterCommand.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -11,8 +10,7 @@ package scala.tools.nsc
* @author Lex Spoon
* @version 1.0
*/
-class InterpreterCommand(arguments: List[String], error: String => Unit)
-extends CompilerCommand(arguments, new Settings(error), error, false) {
+class InterpreterCommand(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) {
override val cmdName = "scala"
override lazy val fileEndings = List(".scalaint")
}
diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/compiler/scala/tools/nsc/InterpreterLoop.scala
index 4d2228b6fd..bdcd7b9f58 100644
--- a/src/compiler/scala/tools/nsc/InterpreterLoop.scala
+++ b/src/compiler/scala/tools/nsc/InterpreterLoop.scala
@@ -1,61 +1,67 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Alexander Spoon
*/
-// $Id$
package scala.tools.nsc
-import java.io.{BufferedReader, File, FileReader, PrintWriter}
+import Predef.{ println => _, _ }
+import java.io.{ BufferedReader, FileReader, PrintWriter }
import java.io.IOException
-import scala.tools.nsc.{InterpreterResults => IR}
-import scala.tools.nsc.interpreter._
+import scala.tools.nsc.{ InterpreterResults => IR }
+import scala.annotation.tailrec
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.ops
+import util.{ ClassPath }
+import interpreter._
+import io.{ File, Process }
// Classes to wrap up interpreter commands and their results
// You can add new commands by adding entries to val commands
// inside InterpreterLoop.
-object InterpreterControl {
+trait InterpreterControl {
+ self: InterpreterLoop =>
+
// the default result means "keep running, and don't record that line"
val defaultResult = Result(true, None)
// a single interpreter command
sealed abstract class Command extends Function1[List[String], Result] {
- val name: String
- val help: String
+ def name: String
+ def help: String
def error(msg: String) = {
- println(":" + name + " " + msg + ".")
+ out.println(":" + name + " " + msg + ".")
Result(true, None)
}
- def getHelp(): String = ":" + name + " " + help + "."
+ def usage(): String
}
case class NoArgs(name: String, help: String, f: () => Result) extends Command {
+ def usage(): String = ":" + name
def apply(args: List[String]) = if (args.isEmpty) f() else error("accepts no arguments")
}
case class LineArg(name: String, help: String, f: (String) => Result) extends Command {
- def apply(args: List[String]) =
- if (args.size == 1) f(args.head)
- else error("requires a line of input")
+ def usage(): String = ":" + name + " <line>"
+ def apply(args: List[String]) = f(args mkString " ")
}
case class OneArg(name: String, help: String, f: (String) => Result) extends Command {
+ def usage(): String = ":" + name + " <arg>"
def apply(args: List[String]) =
if (args.size == 1) f(args.head)
else error("requires exactly one argument")
}
case class VarArgs(name: String, help: String, f: (List[String]) => Result) extends Command {
+ def usage(): String = ":" + name + " [arg]"
def apply(args: List[String]) = f(args)
}
// the result of a single command
case class Result(keepRunning: Boolean, lineToRecord: Option[String])
}
-import InterpreterControl._
-
-// import scala.concurrent.ops.defaultRunner
/** The
* <a href="http://scala-lang.org/" target="_top">Scala</a>
@@ -71,7 +77,7 @@ import InterpreterControl._
* @author Lex Spoon
* @version 1.2
*/
-class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
+class InterpreterLoop(in0: Option[BufferedReader], protected val out: PrintWriter) extends InterpreterControl {
def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out)
def this() = this(None, new PrintWriter(Console.out))
@@ -83,19 +89,18 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
var settings: Settings = _ // set by main()
var interpreter: Interpreter = _ // set by createInterpreter()
- def isettings = interpreter.isettings
- // XXX
- var addedClasspath: List[String] = Nil
+ // classpath entries added via :cp
+ var addedClasspath: String = ""
/** A reverse list of commands to replay if the user requests a :replay */
- var replayCommandsRev: List[String] = Nil
+ var replayCommandStack: List[String] = Nil
/** A list of commands to replay if the user requests a :replay */
- def replayCommands = replayCommandsRev.reverse
+ def replayCommands = replayCommandStack.reverse
/** Record a command for replay should the user request a :replay */
- def addReplay(cmd: String) = replayCommandsRev = cmd :: replayCommandsRev
+ def addReplay(cmd: String) = replayCommandStack ::= cmd
/** Close the interpreter and set the var to <code>null</code>. */
def closeInterpreter() {
@@ -108,27 +113,24 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
/** Create a new interpreter. */
def createInterpreter() {
- if (!addedClasspath.isEmpty)
- settings.classpath.value += addedClasspath.map(File.pathSeparator + _).mkString
+ if (addedClasspath != "")
+ settings.classpath append addedClasspath
interpreter = new Interpreter(settings, out) {
- override protected def parentClassLoader = classOf[InterpreterLoop].getClassLoader
+ override protected def parentClassLoader =
+ settings.explicitParentLoader.getOrElse( classOf[InterpreterLoop].getClassLoader )
}
interpreter.setContextClassLoader()
- }
-
- /** Bind the settings so that evaluated code can modify them */
- def bindSettings() {
- interpreter.beQuietDuring {
- interpreter.compileString(InterpreterSettings.sourceCodeForClass)
- interpreter.bind("settings", "scala.tools.nsc.InterpreterSettings", isettings)
- }
+ // interpreter.quietBind("settings", "scala.tools.nsc.InterpreterSettings", interpreter.isettings)
}
/** print a friendly help message */
def printHelp() = {
- out println "All commands can be abbreviated - for example :h or :he instead of :help.\n"
- commands foreach { c => out println c.getHelp }
+ out println "All commands can be abbreviated - for example :he instead of :help.\n"
+ val cmds = commands map (x => (x.usage, x.help))
+ val width: Int = cmds map { case (x, _) => x.length } max
+ val formatStr = "%-" + width + "s %s"
+ cmds foreach { case (usage, help) => out println formatStr.format(usage, help) }
}
/** Print a welcome message */
@@ -140,8 +142,42 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
|Type :help for more information.""" .
stripMargin.format(versionString, javaVmName, javaVersion)
- out println welcomeMsg
- out.flush
+ plushln(welcomeMsg)
+ }
+
+ /** Show the history */
+ def printHistory(xs: List[String]) {
+ val defaultLines = 20
+
+ if (in.history.isEmpty)
+ return println("No history available.")
+
+ val current = in.history.get.index
+ val count = try xs.head.toInt catch { case _: Exception => defaultLines }
+ val lines = in.historyList takeRight count
+ val offset = current - lines.size + 1
+
+ for ((line, index) <- lines.zipWithIndex)
+ println("%d %s".format(index + offset, line))
+ }
+
+ /** Some print conveniences */
+ def println(x: Any) = out println x
+ def plush(x: Any) = { out print x ; out.flush() }
+ def plushln(x: Any) = { out println x ; out.flush() }
+
+ /** Search the history */
+ def searchHistory(_cmdline: String) {
+ val cmdline = _cmdline.toLowerCase
+
+ if (in.history.isEmpty)
+ return println("No history available.")
+
+ val current = in.history.get.index
+ val offset = current - in.historyList.size + 1
+
+ for ((line, index) <- in.historyList.zipWithIndex ; if line.toLowerCase contains cmdline)
+ println("%d %s".format(index + offset, line))
}
/** Prompt to print when awaiting input */
@@ -161,27 +197,29 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
val standardCommands: List[Command] = {
import CommandImplicits._
List(
- NoArgs("help", "prints this help message", printHelp),
- OneArg("jar", "add a jar to the classpath", addJar),
- OneArg("load", "followed by a filename loads a Scala file", load),
+ OneArg("cp", "add an entry (jar or directory) to the classpath", addClasspath),
+ NoArgs("help", "print this help message", printHelp),
+ VarArgs("history", "show the history (optional arg: lines to show)", printHistory),
+ LineArg("h?", "search the history", searchHistory),
+ OneArg("load", "load and interpret a Scala file", load),
NoArgs("power", "enable power user mode", power),
- NoArgs("quit", "exits the interpreter", () => Result(false, None)),
- NoArgs("replay", "resets execution and replays all previous commands", replay),
+ NoArgs("quit", "exit the interpreter", () => Result(false, None)),
+ NoArgs("replay", "reset execution and replay all previous commands", replay),
+ LineArg("sh", "fork a shell and run a command", runShellCmd),
NoArgs("silent", "disable/enable automatic printing of results", verbosity)
)
}
/** Power user commands */
- // XXX - why does a third argument like "interpreter dumpState(_)" throw an NPE
- // while the version below works?
var powerUserOn = false
val powerCommands: List[Command] = {
import CommandImplicits._
List(
- VarArgs("dump", "displays a view of the interpreter's internal state",
- (xs: List[String]) => interpreter dumpState xs),
- VarArgs("tree", "displays ASTs for specified identifiers",
- (xs: List[String]) => interpreter dumpTrees xs)
+ OneArg("completions", "generate list of completions for a given String", completions),
+ NoArgs("dump", "displays a view of the interpreter's internal state", () => interpreter.power.dump())
+
+ // VarArgs("tree", "displays ASTs for specified identifiers",
+ // (xs: List[String]) => interpreter dumpTrees xs)
// LineArg("meta", "given code which produces scala code, executes the results",
// (xs: List[String]) => )
)
@@ -208,39 +246,22 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
case _ => true
}
- /* For some reason, the first interpreted command always takes
- * a second or two. So, wait until the welcome message
- * has been printed before calling bindSettings. That way,
- * the user can read the welcome message while this
- * command executes.
- */
- val futLine = scala.concurrent.ops.future(readOneLine)
- bindSettings()
- if (!processLine(futLine()))
- return
-
- // loops until false, then returns
while (processLine(readOneLine)) { }
}
/** interpret all lines from a specified file */
- def interpretAllFrom(filename: String) {
- val fileIn =
- try { new FileReader(filename) }
- catch { case _:IOException => return out.println("Error opening file: " + filename) }
-
+ def interpretAllFrom(file: File) {
val oldIn = in
- val oldReplay = replayCommandsRev
- try {
- val inFile = new BufferedReader(fileIn)
- in = new SimpleReader(inFile, out, false)
- out.println("Loading " + filename + "...")
- out.flush
- repl
- } finally {
+ val oldReplay = replayCommandStack
+
+ try file applyReader { reader =>
+ in = new SimpleReader(reader, out, false)
+ plushln("Loading " + file + "...")
+ repl()
+ }
+ finally {
in = oldIn
- replayCommandsRev = oldReplay
- fileIn.close
+ replayCommandStack = oldReplay
}
}
@@ -249,16 +270,31 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
closeInterpreter()
createInterpreter()
for (cmd <- replayCommands) {
- out.println("Replaying: " + cmd)
- out.flush() // because maybe cmd will have its own output
+ plushln("Replaying: " + cmd) // flush because maybe cmd will have its own output
command(cmd)
out.println
}
}
- def withFile(filename: String)(action: String => Unit) {
- if (! new File(filename).exists) out.println("That file does not exist")
- else action(filename)
+ /** fork a shell and run a command */
+ def runShellCmd(line: String) {
+ // we assume if they're using :sh they'd appreciate being able to pipeline
+ interpreter.beQuietDuring {
+ interpreter.interpret("import _root_.scala.tools.nsc.io.Process.Pipe._")
+ }
+ val p = Process(line)
+ // only bind non-empty streams
+ def add(name: String, it: Iterator[String]) =
+ if (it.hasNext) interpreter.bind(name, "scala.List[String]", it.toList)
+
+ List(("stdout", p.stdout), ("stderr", p.stderr)) foreach (add _).tupled
+ }
+
+ def withFile(filename: String)(action: File => Unit) {
+ val f = File(filename)
+
+ if (f.exists) action(f)
+ else out.println("That file does not exist")
}
def load(arg: String) = {
@@ -270,21 +306,37 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
Result(true, shouldReplay)
}
-
- def addJar(arg: String): Unit = {
- val f = new java.io.File(arg)
- if (!f.exists) {
- out.println("The file '" + f + "' doesn't seem to exist.")
- return
+ def addClasspath(arg: String): Unit = {
+ val f = File(arg).normalize
+ if (f.exists) {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
+ println("Added '%s'. Your new classpath is:\n%s".format(f.path, totalClasspath))
+ replay()
}
- addedClasspath = addedClasspath ::: List(f.getCanonicalPath)
- println("Added " + f.getCanonicalPath + " to your classpath.")
- replay()
+ else out.println("The path '" + f + "' doesn't seem to exist.")
}
- def power() = {
+ def completions(arg: String): Unit = {
+ val comp = in.completion getOrElse { return println("Completion unavailable.") }
+ val xs = comp completions arg
+
+ injectAndName(xs)
+ }
+
+ def power() {
+ val powerUserBanner =
+ """** Power User mode enabled - BEEP BOOP **
+ |** scala.tools.nsc._ has been imported **
+ |** New vals! Try repl, global, power **
+ |** New cmds! :help to discover them **
+ |** New defs! Type power.<tab> to reveal **""".stripMargin
+
powerUserOn = true
- interpreter.powerUser()
+ interpreter.unleash()
+ injectOne("history", in.historyList)
+ in.completion foreach (x => injectOne("completion", x))
+ out println powerUserBanner
}
def verbosity() = {
@@ -304,10 +356,13 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
def ambiguous(cmds: List[Command]) = "Ambiguous: did you mean " + cmds.map(":" + _.name).mkString(" or ") + "?"
// not a command
- if (!line.startsWith(":"))
- return Result(true, interpretStartingWith(line))
+ if (!line.startsWith(":")) {
+ // Notice failure to create compiler
+ if (interpreter.compiler == null) return Result(false, None)
+ else return Result(true, interpretStartingWith(line))
+ }
- val tokens = line.substring(1).split("""\s+""").toList
+ val tokens = (line drop 1 split """\s+""").toList
if (tokens.isEmpty)
return withError(ambiguous(commands))
@@ -321,14 +376,100 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
}
}
+ private val CONTINUATION_STRING = " | "
+ private val PROMPT_STRING = "scala> "
+
+ /** If it looks like they're pasting in a scala interpreter
+ * transcript, remove all the formatting we inserted so we
+ * can make some sense of it.
+ */
+ private var pasteStamp: Long = 0
+
+ /** Returns true if enough time has elapsed to consider the paste finished. */
+ def updatePasteStamp(): Boolean = {
+ /* Enough milliseconds between readLines to call it a day. */
+ val PASTE_FINISH = 1000
+
+ val prevStamp = pasteStamp
+ pasteStamp = System.currentTimeMillis
+
+ (pasteStamp - prevStamp > PASTE_FINISH)
+
+ }
+ /** TODO - we could look for the usage of resXX variables in the transcript.
+ * Right now backreferences to auto-named variables will break.
+ */
+
+ /** The trailing lines complication was an attempt to work around the introduction
+ * of newlines in e.g. email messages of repl sessions. It doesn't work because
+ * an unlucky newline can always leave you with a syntactically valid first line,
+ * which is executed before the next line is considered. So this doesn't actually
+ * accomplish anything, but I'm leaving it in case I decide to try harder.
+ */
+ case class PasteCommand(cmd: String, trailing: ListBuffer[String] = ListBuffer[String]())
+
+ /** Commands start on lines beginning with "scala>" and each successive
+ * line which begins with the continuation string is appended to that command.
+ * Everything else is discarded. When the end of the transcript is spotted,
+ * all the commands are replayed.
+ */
+ @tailrec private def cleanTranscript(lines: List[String], acc: List[PasteCommand]): List[PasteCommand] = lines match {
+ case Nil => acc.reverse
+ case x :: xs if x startsWith PROMPT_STRING =>
+ val first = x stripPrefix PROMPT_STRING
+ val (xs1, xs2) = xs span (_ startsWith CONTINUATION_STRING)
+ val rest = xs1 map (_ stripPrefix CONTINUATION_STRING)
+ val result = (first :: rest).mkString("", "\n", "\n")
+
+ cleanTranscript(xs2, PasteCommand(result) :: acc)
+
+ case ln :: lns =>
+ val newacc = acc match {
+ case Nil => Nil
+ case PasteCommand(cmd, trailing) :: accrest =>
+ PasteCommand(cmd, trailing :+ ln) :: accrest
+ }
+ cleanTranscript(lns, newacc)
+ }
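As a concrete example, a pasted session like the string below is split into two PasteCommands, "val x = 1\n" and the three-line map expression, while result and blank lines survive only as trailing text that may be appended if a parse comes back incomplete (TranscriptDemo and the '#' margin marker are just for this sketch):

    object TranscriptDemo {
      val pasted = """
        #scala> val x = 1
        #x: Int = 1
        #
        #scala> List(1, 2, 3) map { n =>
        #     |   n * 2
        #     | }
        #res0: List[Int] = List(2, 4, 6)
        #""".stripMargin('#').trim

      def main(args: Array[String]): Unit = pasted.lines foreach println
    }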
+
+ /** The timestamp is for safety so it doesn't hang looking for the end
+ * of a transcript. Ad hoc parsing can't be too demanding. You can
+ * also use ctrl-D to start it parsing.
+ */
+ @tailrec private def interpretAsPastedTranscript(lines: List[String]) {
+ val line = in.readLine("")
+ val finished = updatePasteStamp()
+
+ if (line == null || finished || line.trim == PROMPT_STRING.trim) {
+ val xs = cleanTranscript(lines.reverse, Nil)
+ println("Replaying %d commands from interpreter transcript." format xs.size)
+ for (PasteCommand(cmd, trailing) <- xs) {
+ out.flush()
+ def runCode(code: String, extraLines: List[String]) {
+ (interpreter interpret code) match {
+ case IR.Incomplete if extraLines.nonEmpty =>
+ runCode(code + "\n" + extraLines.head, extraLines.tail)
+ case _ => ()
+ }
+ }
+ runCode(cmd, trailing.toList)
+ }
+ }
+ else
+ interpretAsPastedTranscript(line :: lines)
+ }
+
/** Interpret expressions starting with the first line.
* Read lines until a complete compilation unit is available
* or until a syntax error has been seen. If a full unit is
* read, go ahead and interpret it. Return the full string
* to be recorded for replay, if any.
*/
- def interpretStartingWith(code: String): Option[String] =
- interpreter.interpret(code) match {
+ def interpretStartingWith(code: String): Option[String] = {
+ // signal the Completion object that non-completion input has been received
+ in.completion foreach (_.resetVerbosity())
+
+ def reallyInterpret = interpreter.interpret(code) match {
case IR.Error => None
case IR.Success => Some(code)
case IR.Incomplete =>
@@ -336,19 +477,53 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
out.println("You typed two blank lines. Starting a new command.")
None
}
- else in.readLine(" | ") match {
- case null => None // end of file
+ else in.readLine(CONTINUATION_STRING) match {
+ case null =>
+ // we know compilation is going to fail since we're at EOF and the
+ // parser thinks the input is still incomplete, but since this is
+ // a file being read non-interactively we want to fail. So we send
+ // it straight to the compiler for the nice error message.
+ interpreter.compileString(code)
+ None
+
case line => interpretStartingWith(code + "\n" + line)
}
}
+ /** Here we place ourselves between the user and the interpreter and examine
+ * the input they are ostensibly submitting. We intervene in several cases:
+ *
+ * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
+ * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
+ * on the previous result.
+ * 3) If the Completion object's execute returns Some(_), we inject that value
+ * and avoid the interpreter, as it's likely not valid scala code.
+ */
+ if (code == "") None
+ else if (code startsWith PROMPT_STRING) {
+ updatePasteStamp()
+ interpretAsPastedTranscript(List(code))
+ None
+ }
+ else if (Completion.looksLikeInvocation(code) && interpreter.mostRecentVar != "") {
+ interpretStartingWith(interpreter.mostRecentVar + code)
+ }
+ else {
+ val result = for (comp <- in.completion ; res <- comp execute code) yield res
+ result match {
+ case Some(res) => injectAndName(res) ; None // completion took responsibility, so do not parse
+ case _ => reallyInterpret
+ }
+ }
+ }
+
// runs :load <file> on any files passed via -i
def loadFiles(settings: Settings) = settings match {
case settings: GenericRunnerSettings =>
for (filename <- settings.loadfiles.value) {
val cmd = ":load " + filename
command(cmd)
- replayCommandsRev = cmd :: replayCommandsRev
+ addReplay(cmd)
out.println()
}
case _ =>
@@ -362,10 +537,8 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
in = in0 match {
case Some(in0) => new SimpleReader(in0, out, true)
case None =>
- val emacsShell = System.getProperty("env.emacs", "") != ""
-
- // the interpeter is passed as an argument to expose tab completion info
- if (settings.Xnojline.value || emacsShell) new SimpleReader
+ // the interpreter is passed as an argument to expose tab completion info
+ if (settings.Xnojline.value || Properties.isEmacsShell) new SimpleReader
else if (settings.noCompletion.value) InteractiveReader.createDefault()
else InteractiveReader.createDefault(interpreter)
}
@@ -376,21 +549,43 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
if (interpreter.reporter.hasErrors) return
printWelcome()
+
+ // this is about the illusion of snappiness. We call initialize()
+ // which spins off a separate thread, then print the prompt and try
+ // our best to look ready. Ideally the user will spend a
+ // couple seconds saying "wow, it starts so fast!" and by the time
+ // they type a command the compiler is ready to roll.
+ interpreter.initialize()
repl()
- } finally {
- closeInterpreter()
}
+ finally closeInterpreter()
+ }
+
+ private def objClass(x: Any) = x.asInstanceOf[AnyRef].getClass
+ private def objName(x: Any) = {
+ val clazz = objClass(x)
+ val typeParams = clazz.getTypeParameters
+ val basename = clazz.getName
+ val tpString = if (typeParams.isEmpty) "" else "[%s]".format(typeParams map (_ => "_") mkString ", ")
+
+ basename + tpString
}
// injects one value into the repl; returns pair of name and class
def injectOne(name: String, obj: Any): Tuple2[String, String] = {
- val className = obj.asInstanceOf[AnyRef].getClass.getName
+ val className = objName(obj)
+ interpreter.quietBind(name, className, obj)
+ (name, className)
+ }
+ def injectAndName(obj: Any): Tuple2[String, String] = {
+ val name = interpreter.getVarName
+ val className = objName(obj)
interpreter.bind(name, className, obj)
(name, className)
}
// injects list of values into the repl; returns summary string
- def inject(args: List[Any]): String = {
+ def injectDebug(args: List[Any]): String = {
val strs =
for ((arg, i) <- args.zipWithIndex) yield {
val varName = "p" + (i + 1)
@@ -413,7 +608,7 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
// if they asked for no help and command is valid, we call the real main
neededHelp() match {
case "" => if (command.ok) main(command.settings) // else nothing
- case help => out print help ; out flush
+ case help => plush(help)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/InterpreterResults.scala b/src/compiler/scala/tools/nsc/InterpreterResults.scala
index 37cf75663b..fad3fb653f 100644
--- a/src/compiler/scala/tools/nsc/InterpreterResults.scala
+++ b/src/compiler/scala/tools/nsc/InterpreterResults.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/InterpreterSettings.scala b/src/compiler/scala/tools/nsc/InterpreterSettings.scala
index e274294082..b53a6f6955 100644
--- a/src/compiler/scala/tools/nsc/InterpreterSettings.scala
+++ b/src/compiler/scala/tools/nsc/InterpreterSettings.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Alexander Spoon
*/
-// $Id$
package scala.tools.nsc
@@ -20,7 +19,17 @@ class InterpreterSettings(repl: Interpreter) {
* more than this number of characters, then the printout is
* truncated.
*/
- var maxPrintString = 2400
+ var maxPrintString = 800
+
+ /** The maximum number of completion candidates to print for tab
+ * completion without requiring confirmation.
+ */
+ var maxAutoprintCompletion = 250
+
+ /** String unwrapping can be disabled if it is causing issues.
+ * Setting this to false means you will see Strings like "$iw.$iw.".
+ */
+ var unwrapStrings = true
def deprecation_=(x: Boolean) = {
val old = repl.settings.deprecation.value
@@ -30,14 +39,21 @@ class InterpreterSettings(repl: Interpreter) {
}
def deprecation: Boolean = repl.settings.deprecation.value
- override def toString =
- "InterpreterSettings {\n" +
-// " loadPath = " + loadPath + "\n" +
- " maxPrintString = " + maxPrintString + "\n" +
- "}"
-}
+ def allSettings = Map(
+ "maxPrintString" -> maxPrintString,
+ "maxAutoprintCompletion" -> maxAutoprintCompletion,
+ "unwrapStrings" -> unwrapStrings,
+ "deprecation" -> deprecation
+ )
+ private def allSettingsString =
+ allSettings.toList sortBy (_._1) map { case (k, v) => " " + k + " = " + v + "\n" } mkString
+ override def toString = """
+ | InterpreterSettings {
+ | %s
+ | }""".stripMargin.format(allSettingsString)
+}
/* Utilities for the InterpreterSettings class
*
@@ -48,6 +64,10 @@ object InterpreterSettings {
/** Source code for the InterpreterSettings class. This is
* used so that the interpreter is sure to have the code
* available.
+ *
+ * XXX I'm not seeing why this degree of defensiveness is necessary.
+ * If files are missing the repl's not going to work, it's not as if
+ * we have string source backups for anything else.
*/
val sourceCodeForClass =
"""
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 53a8773ef1..1e6699079d 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -1,19 +1,18 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
import java.io.File
-
-import scala.concurrent.SyncVar
+import File.pathSeparator
import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.tools.nsc.util.{ BatchSourceFile, FakePos } //{Position}
+import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
/** The main class for NSC, a compiler for the programming
* language Scala.
@@ -21,10 +20,10 @@ import scala.tools.nsc.util.{ BatchSourceFile, FakePos } //{Position}
object Main extends AnyRef with EvalLoop {
val versionMsg = "Scala compiler " +
- Properties.versionString + " -- " +
- Properties.copyrightString
+ versionString + " -- " +
+ copyrightString
- val prompt = Properties.residentPromptString
+ val prompt = residentPromptString
var reporter: ConsoleReporter = _
@@ -39,7 +38,8 @@ object Main extends AnyRef with EvalLoop {
def resident(compiler: Global) {
loop { line =>
val args = line.split(' ').toList
- val command = new CompilerCommand(args, new Settings(error), error, true)
+ val command = new CompilerCommand(args, new Settings(error))
+ compiler.reporter.reset
new compiler.Run() compile command.files
}
}
@@ -47,7 +47,7 @@ object Main extends AnyRef with EvalLoop {
def process(args: Array[String]) {
val settings = new Settings(error)
reporter = new ConsoleReporter(settings)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
reporter.info(null, versionMsg, true)
else if (command.settings.Yidedebug.value) {
@@ -57,7 +57,7 @@ object Main extends AnyRef with EvalLoop {
import compiler.{ reporter => _, _ }
val sfs = command.files.map(getSourceFile(_))
- val reloaded = new SyncVar[Either[Unit, Throwable]]
+ val reloaded = new interactive.Response[Unit]
askReload(sfs, reloaded)
reloaded.get.right.toOption match {
case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
@@ -77,16 +77,13 @@ object Main extends AnyRef with EvalLoop {
// enter resident mode
loop { line =>
val args = line.split(' ').toList
- val command = new CompilerCommand(args.toList, settings, error, true)
+ val command = new CompilerCommand(args.toList, settings)
buildManager.update(fileSet(command.files), Set.empty)
}
} else {
- if (command.settings.target.value == "msil") {
- val libpath = System.getProperty("msil.libpath")
- if (libpath != null)
- command.settings.assemrefs.value =
- command.settings.assemrefs.value + File.pathSeparator + libpath
- }
+ if (command.settings.target.value == "msil")
+ msilLibPath foreach (x => command.settings.assemrefs.value += (pathSeparator + x))
+
try {
val compiler = if (command.settings.Yrangepos.value) new interactive.Global(command.settings, reporter)
else new Global(command.settings, reporter)
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
index 9cd9cdbd43..936ee3c1db 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
@@ -1,20 +1,19 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2009 LAMP/EPFL
+ * Copyright 2006-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
-import java.io.{ File, IOException }
+import java.io.IOException
import java.lang.{ClassNotFoundException, NoSuchMethodException}
import java.lang.reflect.InvocationTargetException
import java.net.{ URL, MalformedURLException }
-import scala.util.ScalaClassLoader
+import scala.tools.util.PathResolver
-import util.ClassPath
-import File.pathSeparator
+import io.{ File, Process }
+import util.{ ClassPath, ScalaClassLoader }
import Properties.{ versionString, copyrightString }
/** An object that runs Scala code. It has three possible
@@ -22,110 +21,40 @@ import Properties.{ versionString, copyrightString }
* or interactive entry.
*/
object MainGenericRunner {
- /** Append jars found in ${scala.home}/lib to
- * a specified classpath. Also append "." if the
- * input classpath is empty; otherwise do not.
- *
- * @param classpath
- * @return the new classpath
- */
- private def addClasspathExtras(classpath: String): String = {
- val scalaHome = Properties.scalaHome
-
- def listDir(name: String): List[File] = {
- val libdir = new File(new File(scalaHome), name)
- if (!libdir.exists || libdir.isFile) Nil else libdir.listFiles.toList
- }
- lazy val jarsInLib = listDir("lib") filter (_.getName endsWith ".jar")
- lazy val dirsInClasses = listDir("classes") filter (_.isDirectory)
- val cpScala =
- if (scalaHome == null) {
- // this is to make the interpreter work when running without the scala script
- // (e.g. from eclipse). Before, "java.class.path" was added to the user classpath
- // in Settings; this was changed to match the behavior of Sun's javac.
- val javacp = System.getProperty("java.class.path")
- if (javacp == null) Nil
- else ClassPath.expandPath(javacp)
- }
- else (jarsInLib ::: dirsInClasses) map (_.toString)
-
- // either prepend existing classpath or append "."
- (if (classpath == "") cpScala ::: List(".") else classpath :: cpScala) mkString pathSeparator
- }
-
def main(args: Array[String]) {
def errorFn(str: String) = Console println str
-
- val command = new GenericRunnerCommand(args.toList, errorFn)
- val settings = command.settings
- def sampleCompiler = new Global(settings)
-
- if (!command.ok)
- return errorFn("%s\n%s".format(command.usageMsg, sampleCompiler.pluginOptionsHelp))
-
- settings.classpath.value = addClasspathExtras(settings.classpath.value)
- settings.defines.applyToCurrentJVM
-
- if (settings.version.value)
- return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
-
- if (command.shouldStopWithInfo)
- return errorFn(command getInfoMessage sampleCompiler)
-
def exitSuccess: Nothing = exit(0)
def exitFailure(msg: Any = null): Nothing = {
if (msg != null) errorFn(msg.toString)
exit(1)
}
- def exitCond(b: Boolean): Nothing =
- if (b) exitSuccess else exitFailure(null)
-
- def fileToURL(f: File): Option[URL] =
- try { Some(f.toURL) }
- catch { case e => Console.println(e); None }
-
- def paths(str: String): List[URL] =
- for (
- file <- ClassPath.expandPath(str) map (new File(_)) if file.exists;
- val url = fileToURL(file); if !url.isEmpty
- ) yield url.get
-
- def jars(dirs: String): List[URL] =
- for (
- libdir <- ClassPath.expandPath(dirs) map (new File(_)) if libdir.isDirectory;
- jarfile <- libdir.listFiles if jarfile.isFile && jarfile.getName.endsWith(".jar");
- val url = fileToURL(jarfile); if !url.isEmpty
- ) yield url.get
-
- def specToURL(spec: String): Option[URL] =
- try { Some(new URL(spec)) }
- catch { case e: MalformedURLException => Console.println(e); None }
-
- def urls(specs: String): List[URL] =
- if (specs == null || specs.length == 0) Nil
- else for (
- spec <- specs.split(" ").toList;
- val url = specToURL(spec); if !url.isEmpty
- ) yield url.get
-
- val classpath: List[URL] =
- paths(settings.bootclasspath.value) :::
- paths(settings.classpath.value) :::
- jars(settings.extdirs.value) :::
- urls(settings.Xcodebase.value)
-
- def createLoop(): InterpreterLoop = {
- val loop = new InterpreterLoop
- loop main settings
- loop
- }
+ def exitCond(b: Boolean): Nothing = if (b) exitSuccess else exitFailure(null)
+
+ val command = new GenericRunnerCommand(args.toList, errorFn _)
+ import command.settings
+    def sampleCompiler = new Global(settings) // def so it's not created unless needed
+
+ if (!command.ok) return errorFn("%s\n%s".format(command.usageMsg, sampleCompiler.pluginOptionsHelp))
+ else if (settings.version.value) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
+ else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler)
+ def isE = !settings.execute.isDefault
def dashe = settings.execute.value
+
+ def isI = !settings.loadfiles.isDefault
def dashi = settings.loadfiles.value
- def slurp = dashi map (file => io.File(file).slurp()) mkString "\n"
+
+ def combinedCode = {
+ val files = if (isI) dashi map (file => File(file).slurp()) else Nil
+ val str = if (isE) List(dashe) else Nil
+
+ files ++ str mkString "\n\n"
+ }
+
+ val classpath: List[URL] = new PathResolver(settings) asURLs
/** Was code given in a -e argument? */
- if (!settings.execute.isDefault) {
+ if (isE) {
/** If a -i argument was also given, we want to execute the code after the
* files have been included, so they are read into strings and prepended to
* the code given in -e. The -i option is documented to only make sense
@@ -134,14 +63,14 @@ object MainGenericRunner {
* This all needs a rewrite though.
*/
val fullArgs = command.thingToRun.toList ::: command.arguments
- val code =
- if (settings.loadfiles.isDefault) dashe
- else slurp + "\n" + dashe
- exitCond(ScriptRunner.runCommand(settings, code, fullArgs))
+ exitCond(ScriptRunner.runCommand(settings, combinedCode, fullArgs))
}
else command.thingToRun match {
- case None => createLoop()
+ case None =>
+ // Questionably, we start the interpreter when there are no arguments.
+ new InterpreterLoop main settings
+
case Some(thingToRun) =>
val isObjectName =
settings.howtorun.value match {
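
The rewritten MainGenericRunner above slurps the -i files and appends the -e snippet in a single combinedCode value. Roughly the same combination, sketched with only the standard library and hypothetical inputs:

    import scala.io.Source

    object CombinedCodeDemo {
      // Same shape as combinedCode above: loaded files first, then the -e snippet.
      def combinedCode(loadFiles: List[String], execute: Option[String]): String = {
        val files = loadFiles map { path =>
          val src = Source.fromFile(path)
          try src.mkString finally src.close()    // slurp each -i file
        }
        (files ++ execute.toList) mkString "\n\n"
      }

      def main(args: Array[String]) {
        println(combinedCode(Nil, Some("println(\"hi from -e\")")))
      }
    }
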
diff --git a/src/compiler/scala/tools/nsc/MainInterpreter.scala b/src/compiler/scala/tools/nsc/MainInterpreter.scala
index 6e5d9eebcf..0a0d1e4a33 100644
--- a/src/compiler/scala/tools/nsc/MainInterpreter.scala
+++ b/src/compiler/scala/tools/nsc/MainInterpreter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 760ab12822..fc0ea67051 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -37,7 +36,7 @@ object MainTokenMetric {
def process(args: Array[String]) {
val settings = new Settings(error)
reporter = new ConsoleReporter(settings)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
try {
val compiler = new Global(command.settings, reporter)
tokenMetric(compiler, command.files)
diff --git a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala
index f7a297bfef..4ced491a8d 100644
--- a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala
+++ b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
import java.io.{Writer, PrintWriter}
diff --git a/src/compiler/scala/tools/nsc/NoPhase.scala b/src/compiler/scala/tools/nsc/NoPhase.scala
index 010c778910..8f5e3e440c 100644
--- a/src/compiler/scala/tools/nsc/NoPhase.scala
+++ b/src/compiler/scala/tools/nsc/NoPhase.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index e4e0826d32..f2ddc84445 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -1,22 +1,20 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
import java.net.URL
-import scala.util.ScalaClassLoader
+import util.ScalaClassLoader
/** An object that runs another object specified by name.
*
* @author Lex Spoon
* @version 1.1, 2007/7/13
*/
-object ObjectRunner
-{
+object ObjectRunner {
/** Check whether a class with the specified name
* exists on the specified class path. */
def classExists(urls: List[URL], objectName: String): Boolean =
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 0bd436018c..ae56941633 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -12,11 +11,8 @@ package scala.tools.nsc
*/
class OfflineCompilerCommand(
arguments: List[String],
- settings: Settings,
- error: String => Unit,
- interactive: Boolean)
-extends CompilerCommand(arguments, new Settings(error), error, false)
-{
+ settings: Settings)
+extends CompilerCommand(arguments, settings) {
override val cmdName = "fsc"
import settings._
diff --git a/src/compiler/scala/tools/nsc/Phase.scala b/src/compiler/scala/tools/nsc/Phase.scala
index 89b4e1c670..6ff932acc2 100644
--- a/src/compiler/scala/tools/nsc/Phase.scala
+++ b/src/compiler/scala/tools/nsc/Phase.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -25,7 +24,7 @@ abstract class Phase(val prev: Phase) {
def flagMask: Long = fmask
private var nx: Phase = this
- if (prev ne null) prev.nx = this
+ if ((prev ne null) && (prev ne NoPhase)) prev.nx = this
def next: Phase = nx
@@ -34,9 +33,11 @@ abstract class Phase(val prev: Phase) {
// Will running with -Ycheck:name work?
def checkable: Boolean = true
def devirtualized: Boolean = false
+ def specialized: Boolean = false
def erasedTypes: Boolean = false
def flatClasses: Boolean = false
- def keepsTypeParams = false
+ def refChecked: Boolean = false
+ def keepsTypeParams = true
def run: Unit
override def toString() = name
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index 918b56fcfb..e0cc91cb80 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Anders Bach Nielsen
* @version 1.0
*/
-// $Id$
package scala.tools.nsc
@@ -12,7 +11,7 @@ import java.io.{BufferedWriter, FileWriter}
/**
* PhaseAssembly
- * Trait made to seperate the constraint solving of the phase order from
+ * Trait made to separate the constraint solving of the phase order from
* the rest of the compiler. See SIP 00002
*
*/
@@ -42,8 +41,8 @@ trait PhaseAssembly { self: Global =>
var level = 0
def allPhaseNames(): String = phaseobj match {
- case None => phasename
- case Some(lst) => lst.map(_.phaseName).reduceLeft(_+","+_)
+ case None => phasename
+ case Some(lst) => lst.map(_.phaseName).reduceLeft(_+","+_)
}
}
@@ -56,9 +55,9 @@ trait PhaseAssembly { self: Global =>
def getNodeByPhase(phs: SubComponent): Node = {
var node: Node = getNodeByPhase(phs.phaseName)
node.phaseobj match {
- case None =>
- node.phaseobj = Some(List[SubComponent](phs))
- case _ =>
+ case None =>
+ node.phaseobj = Some(List[SubComponent](phs))
+ case _ =>
}
node
}
@@ -66,13 +65,8 @@ trait PhaseAssembly { self: Global =>
/* Given the name of a phase object, get the node for that name. If the
* node object does not exits, then create it.
*/
- def getNodeByPhase(name: String): Node = {
- nodes.get(name).getOrElse {
- val node = new Node(name)
- nodes += (name -> node)
- node
- }
- }
+ def getNodeByPhase(name: String): Node =
+ nodes.getOrElseUpdate(name, new Node(name))
/* Connect the frm and to nodes with an edge and make it soft.
* Also add the edge object to the set of edges, and to the dependency
@@ -101,28 +95,15 @@ trait PhaseAssembly { self: Global =>
/* Given the entire graph, collect the phase objects at each level, where the phase
* names are sorted alphabetical at each level, into the compiler phase list
*/
- def compilerPhaseList(): List[SubComponent] = {
- var chain: List[SubComponent] = Nil
-
- var lvl = 1
- var nds = nodes.valuesIterator.filter(_.level == lvl).toList
- while(nds.size > 0) {
- nds = nds.sort((n1,n2) => (n1.phasename compareTo n2.phasename) < 0)
- for (n <- nds) {
- chain = chain ::: n.phaseobj.get
- }
- lvl += 1
- nds = nodes.valuesIterator.filter(_.level == lvl).toList
- }
- chain
- }
+ def compilerPhaseList(): List[SubComponent] =
+ nodes.values.toList filter (_.level > 0) sortBy (x => (x.level, x.phasename)) flatMap (_.phaseobj) flatten
/* Test if there are cycles in the graph, assign levels to the nodes
* and collapse hard links into nodes
*/
def collapseHardLinksAndLevels(node: Node, lvl: Int) {
if (node.visited) {
- throw new FatalError(
+ throw new FatalError(
"Cycle in compiler phase dependencies detected, phase " +
node.phasename + " reacted twice!")
}
@@ -131,19 +112,19 @@ trait PhaseAssembly { self: Global =>
var hls = Nil ++ node.before.filter(_.hard)
while (hls.size > 0) {
- for (hl <- hls) {
- node.phaseobj = Some(node.phaseobj.get ++ hl.frm.phaseobj.get)
- node.before = hl.frm.before
- nodes -= hl.frm.phasename
- edges -= hl
- for (edge <- node.before) edge.to = node
- }
- hls = Nil ++ node.before.filter(_.hard)
+ for (hl <- hls) {
+ node.phaseobj = Some(node.phaseobj.get ++ hl.frm.phaseobj.get)
+ node.before = hl.frm.before
+ nodes -= hl.frm.phasename
+ edges -= hl
+ for (edge <- node.before) edge.to = node
+ }
+ hls = Nil ++ node.before.filter(_.hard)
}
node.visited = true
for (edge <- node.before) {
- collapseHardLinksAndLevels( edge.frm, lvl + 1)
+ collapseHardLinksAndLevels( edge.frm, lvl + 1)
}
node.visited = false
@@ -156,44 +137,44 @@ trait PhaseAssembly { self: Global =>
def validateAndEnforceHardlinks() {
var hardlinks = edges.filter(_.hard)
for (hl <- hardlinks) {
- if (hl.frm.after.size > 1) {
- throw new FatalError("phase " + hl.frm.phasename + " want to run right after " + hl.to.phasename + ", but some phase has declared to run before " + hl.frm.phasename + ". Re-run with -Xgenerate-phase-graph <filename> to better see the problem.")
- }
+ if (hl.frm.after.size > 1) {
+ throw new FatalError("phase " + hl.frm.phasename + " want to run right after " + hl.to.phasename + ", but some phase has declared to run before " + hl.frm.phasename + ". Re-run with -Xgenerate-phase-graph <filename> to better see the problem.")
+ }
}
var rerun = true
while (rerun) {
- rerun = false
- hardlinks = edges.filter(_.hard)
- for (hl <- hardlinks) {
- var sanity = Nil ++ hl.to.before.filter(_.hard)
- if (sanity.length == 0) {
- throw new FatalError("There is no runs right after dependency, where there should be one! This is not supposed to happen!")
- } else if (sanity.length > 1) {
- var msg = "Multiple phases want to run right after the phase " + sanity.head.to.phasename + "\n"
- msg += "Phases: "
- sanity = sanity.sort((e1,e2) => (e1.frm.phasename compareTo e2.frm.phasename) < 0)
- for (edge <- sanity) {
- msg += edge.frm.phasename + ", "
- }
- msg += "\nRe-run with -Xgenerate-phase-graph <filename> to better see the problem."
- throw new FatalError(msg)
-
- } else {
-
- var promote = hl.to.before.filter(e => (!e.hard))
- hl.to.before.clear
- sanity foreach (edge => hl.to.before += edge)
- for (edge <- promote) {
- rerun = true
- informProgress(
+ rerun = false
+ hardlinks = edges.filter(_.hard)
+ for (hl <- hardlinks) {
+ var sanity = Nil ++ hl.to.before.filter(_.hard)
+ if (sanity.length == 0) {
+ throw new FatalError("There is no runs right after dependency, where there should be one! This is not supposed to happen!")
+ } else if (sanity.length > 1) {
+ var msg = "Multiple phases want to run right after the phase " + sanity.head.to.phasename + "\n"
+ msg += "Phases: "
+ sanity = sanity sortBy (_.frm.phasename)
+ for (edge <- sanity) {
+ msg += edge.frm.phasename + ", "
+ }
+ msg += "\nRe-run with -Xgenerate-phase-graph <filename> to better see the problem."
+ throw new FatalError(msg)
+
+ } else {
+
+ var promote = hl.to.before.filter(e => (!e.hard))
+ hl.to.before.clear
+ sanity foreach (edge => hl.to.before += edge)
+ for (edge <- promote) {
+ rerun = true
+ informProgress(
"promote the dependency of " + edge.frm.phasename +
": " + edge.to.phasename + " => " + hl.frm.phasename)
- edge.to = hl.frm
- hl.frm.before += edge
- }
- }
- }
+ edge.to = hl.frm
+ hl.frm.before += edge
+ }
+ }
+ }
}
}
@@ -205,17 +186,17 @@ trait PhaseAssembly { self: Global =>
def removeDanglingNodes() {
var dnodes = nodes.valuesIterator filter (_.phaseobj.isEmpty)
for (node <- dnodes) {
- val msg = "dropping dependency on node with no phase object: "+node.phasename
+ val msg = "dropping dependency on node with no phase object: "+node.phasename
informProgress(msg)
- nodes -= node.phasename
- for (edge <- node.before) {
- edges -= edge
- edge.frm.after -= edge
- edge.frm.phaseobj match {
- case Some(lsc) => if (! lsc.head.internal) warning(msg)
- case _ =>
- }
- }
+ nodes -= node.phasename
+ for (edge <- node.before) {
+ edges -= edge
+ edge.frm.after -= edge
+ edge.frm.phaseobj match {
+ case Some(lsc) => if (! lsc.head.internal) warning(msg)
+ case _ =>
+ }
+ }
}
}
@@ -268,30 +249,30 @@ trait PhaseAssembly { self: Global =>
var fromnode = graph.getNodeByPhase(phs)
phs.runsRightAfter match {
- case None =>
- for (phsname <- phs.runsAfter) {
- if (phsname != "terminal") {
- val tonode = graph.getNodeByPhase(phsname)
- graph.softConnectNodes(fromnode, tonode)
- } else {
- error("[phase assembly, after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
- }
- }
- for (phsname <- phs.runsBefore) {
- if (phsname != "parser") {
- val tonode = graph.getNodeByPhase(phsname)
- graph.softConnectNodes(tonode, fromnode)
- } else {
- error("[phase assembly, before dependency on parser phase not allowed: " + phsname + " => "+ fromnode.phasename + "]")
- }
- }
- case Some(phsname) =>
- if (phsname != "terminal") {
- val tonode = graph.getNodeByPhase(phsname)
+ case None =>
+ for (phsname <- phs.runsAfter) {
+ if (phsname != "terminal") {
+ val tonode = graph.getNodeByPhase(phsname)
+ graph.softConnectNodes(fromnode, tonode)
+ } else {
+ error("[phase assembly, after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
+ }
+ }
+ for (phsname <- phs.runsBefore) {
+ if (phsname != "parser") {
+ val tonode = graph.getNodeByPhase(phsname)
+ graph.softConnectNodes(tonode, fromnode)
+ } else {
+ error("[phase assembly, before dependency on parser phase not allowed: " + phsname + " => "+ fromnode.phasename + "]")
+ }
+ }
+ case Some(phsname) =>
+ if (phsname != "terminal") {
+ val tonode = graph.getNodeByPhase(phsname)
graph.hardConnectNodes(fromnode, tonode)
- } else {
- error("[phase assembly, right after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
- }
+ } else {
+ error("[phase assembly, right after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
+ }
}
}
graph
@@ -309,14 +290,14 @@ trait PhaseAssembly { self: Global =>
for (edge <- graph.edges) {
sbuf.append("\"" + edge.frm.allPhaseNames + "(" + edge.frm.level + ")" + "\"->\"" + edge.to.allPhaseNames + "(" + edge.to.level + ")" + "\"")
if (! edge.frm.phaseobj.get.head.internal) {
- extnodes += edge.frm
+ extnodes += edge.frm
}
edge.frm.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.frm }
edge.to.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.to }
if (edge.hard) {
- sbuf.append(" [color=\"#0000ff\"]\n")
+ sbuf.append(" [color=\"#0000ff\"]\n")
} else {
- sbuf.append(" [color=\"#000000\"]\n")
+ sbuf.append(" [color=\"#000000\"]\n")
}
}
for (node <- extnodes) {
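
compilerPhaseList above collapses the old level-by-level while loop into one pass that sorts the nodes by (level, name) and concatenates their phase objects. The same idea on a toy graph; Node and its fields here are simplified stand-ins, not the compiler's types:

    object PhaseOrderDemo {
      // Simplified stand-in for PhaseAssembly's Node: a name, a level, its phases.
      case class Node(name: String, level: Int, phases: List[String])

      def phaseList(nodes: List[Node]): List[String] =
        nodes filter (_.level > 0) sortBy (n => (n.level, n.name)) flatMap (_.phases)

      def main(args: Array[String]) {
        val nodes = List(
          Node("typer", 2, List("namer", "typer")),
          Node("parser", 1, List("parser")),
          Node("orphan", 0, Nil))             // level 0 nodes are dropped
        println(phaseList(nodes))             // List(parser, namer, typer)
      }
    }
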
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index 4f1e9553dd..4d488e65cf 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2009 LAMP/EPFL
+ * Copyright 2006-2010 LAMP/EPFL
* @author Stephane Micheloud
*/
-// $Id$
package scala.tools.nsc
@@ -13,12 +12,14 @@ object Properties extends scala.util.PropertiesTrait {
protected def pickJarBasedOn = classOf[Global]
// settings based on jar properties
- val fileEndingString = prop("file.ending", ".scala|.java")
- val residentPromptString = prop("resident.prompt", "\nnsc> ")
- val shellPromptString = prop("shell.prompt", "\nscala> ")
+ def fileEndingString = scalaPropOrElse("file.ending", ".scala|.java")
+ def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
+ def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
- // derived values
- val cmdName = if (isWin) "scala.bat" else "scala"
- val fileEndings = fileEndingString.split("""\|""").toList
+ // settings based on system properties
+ def msilLibPath = propOrNone("msil.libpath")
+ // derived values
+ def isEmacsShell = propOrEmpty("env.emacs") != ""
+ def fileEndings = fileEndingString.split("""\|""").toList
}
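
The Properties change above replaces eagerly initialised vals with defs backed by Option-returning lookups, so the values track the live system properties. A small sketch of those helpers; propOrNone and propOrElse are illustrative reimplementations, not the trait's actual code:

    object PropsDemo {
      // Illustrative versions of the Option-based lookups used above.
      def propOrNone(name: String): Option[String] = Option(System.getProperty(name))
      def propOrElse(name: String, alt: String): String = propOrNone(name) getOrElse alt

      // defs, so each use re-reads the property instead of capturing it at load time
      def fileEndingString = propOrElse("file.ending", ".scala|.java")
      def fileEndings      = fileEndingString.split("""\|""").toList
      def msilLibPath      = propOrNone("msil.libpath")

      def main(args: Array[String]) {
        println(fileEndings)     // List(.scala, .java) unless file.ending is set
        println(msilLibPath)     // None unless msil.libpath is set
      }
    }
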
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
index 6f8c258cc2..4354608e10 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala
@@ -1,96 +1,85 @@
/* scaladoc, a documentation generator for Scala
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
* @author Geoffrey Washburn
*/
-// $Id$
package scala.tools.nsc
import java.io.File
-import scala.tools.nsc.doc.DefaultDocDriver
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.tools.nsc.util.FakePos //{Position}
-
+import Properties.msilLibPath
+import File.pathSeparator
/** The main class for scaladoc, a front-end for the Scala compiler
* that generates documentation from source files.
*/
object ScalaDoc {
- val versionMsg = "Scala documentation generator " +
+ val versionMsg: String =
+ "Scaladoc " +
Properties.versionString + " -- " +
Properties.copyrightString
var reporter: ConsoleReporter = _
- def error(msg: String) {
- reporter.error(/*new Position */FakePos("scalac"),
- msg + "\n scalac -help gives more information")
+ def error(msg: String): Unit = {
+ reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information")
}
- def process(args: Array[String]) {
- val docSettings : doc.Settings = new doc.Settings(error)
- reporter = new ConsoleReporter(docSettings)
- val command = new CompilerCommand(args.toList, docSettings, error, false)
- if (command.settings.version.value)
- reporter.info(null, versionMsg, true)
- else {
- if (command.settings.target.value == "msil") {
- val libpath = System.getProperty("msil.libpath")
- if (libpath != null)
- command.settings.assemrefs.value =
- command.settings.assemrefs.value + File.pathSeparator + libpath
+ def process(args: Array[String]): Unit = {
+
+ val docSettings: doc.Settings =
+ new doc.Settings(error)
+
+ reporter = new ConsoleReporter(docSettings) {
+ override def hasErrors = false // need to do this so that the Global instance doesn't trash all the symbols just because there was an error
+ }
+
+ val command =
+ new CompilerCommand(args.toList, docSettings)
+
+ if (!reporter.hasErrors) { // No need to continue if reading the command generated errors
+
+ if (docSettings.version.value)
+ reporter.info(null, versionMsg, true)
+ else if (docSettings.help.value) {
+ reporter.info(null, command.usageMsg, true)
}
- try {
- object compiler extends Global(command.settings, reporter) {
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.typerFactory
- }
- override def onlyPresentation = true
- }
- if (reporter.hasErrors) {
- reporter.flush()
- return
- }
-
- if (command.settings.help.value || command.settings.Xhelp.value || command.settings.Yhelp.value) {
- if (command.settings.help.value) {
- reporter.info(null, command.usageMsg, true)
- reporter.info(null, compiler.pluginOptionsHelp, true)
- }
- if (command.settings.Xhelp.value)
- reporter.info(null, command.xusageMsg, true)
- if (command.settings.Yhelp.value)
- reporter.info(null, command.yusageMsg, true)
- } else if (command.settings.showPlugins.value)
- reporter.info(null, compiler.pluginDescriptions, true)
- else if (command.settings.showPhases.value)
- reporter.info(null, compiler.phaseDescriptions, true)
- else {
- val run = new compiler.Run()
- run compile command.files
- val generator = new DefaultDocDriver {
- lazy val global: compiler.type = compiler
- lazy val settings = docSettings
- }
- generator.process(run.units)
- reporter.printSummary()
- }
- } catch {
+ else if (docSettings.Xhelp.value)
+ reporter.info(null, command.xusageMsg, true)
+ else if (docSettings.Yhelp.value)
+ reporter.info(null, command.yusageMsg, true)
+ else if (docSettings.showPlugins.value)
+ reporter.warning(null, "Plugins are not available when using Scaladoc")
+ else if (docSettings.showPhases.value)
+ reporter.warning(null, "Phases are restricted when using Scaladoc")
+ else try {
+
+ if (docSettings.target.value == "msil")
+ msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
+
+ val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
+ docProcessor.document(command.files)
+
+ }
+ catch {
case ex @ FatalError(msg) =>
- if (command.settings.debug.value)
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ if (docSettings.debug.value) ex.printStackTrace();
+ reporter.error(null, "fatal error: " + msg)
+ }
+ finally {
+ reporter.printSummary()
}
}
+
}
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
process(args)
exit(if (reporter.hasErrors) 1 else 0)
}
+
}
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 392fce46a3..032e8d0bb8 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -17,10 +16,9 @@ import io.{ Directory, File, Path, PlainFile }
import java.lang.reflect.InvocationTargetException
import java.net.URL
import java.util.jar.{ JarEntry, JarOutputStream }
-import java.util.regex.Pattern
+import scala.tools.util.PathResolver
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
-import scala.tools.nsc.util.{ClassPath, CompoundSourceFile, BatchSourceFile, SourceFile, SourceFileFragment}
/** An object that runs Scala code in script files.
*
@@ -48,8 +46,7 @@ import scala.tools.nsc.util.{ClassPath, CompoundSourceFile, BatchSourceFile, Sou
* @todo It would be better if error output went to stderr instead
* of stdout...
*/
-object ScriptRunner
-{
+object ScriptRunner {
/* While I'm chasing down the fsc and script bugs. */
def DBG(msg: Any) {
System.err.println(msg.toString)
@@ -68,6 +65,8 @@ object ScriptRunner
case x => x
}
+ def isScript(settings: Settings) = settings.script.value != ""
+
/** Choose a jar filename to hold the compiled version of a script. */
private def jarFileFor(scriptFile: String): File = {
val name =
@@ -111,29 +110,13 @@ object ScriptRunner
jar.close
}
catch {
- case _: Error => jarFile.delete() // XXX what errors to catch?
+ case _: Exception => jarFile.delete()
}
}
/** Read the entire contents of a file as a String. */
private def contentsOfFile(filename: String) = File(filename).slurp()
- /** Find the length of the header in the specified file, if
- * there is one. The header part starts with "#!" or "::#!"
- * and ends with a line that begins with "!#" or "::!#".
- */
- private def headerLength(filename: String): Int = {
- val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE)
- val fileContents = contentsOfFile(filename)
- def isValid = List("#!", "::#!") exists (fileContents startsWith _)
-
- if (!isValid) 0 else {
- val matcher = headerPattern matcher fileContents
- if (matcher.find) matcher.end
- else throw new IOException("script file does not close its header with !# or ::!#")
- }
- }
-
/** Split a fully qualified object name into a
* package and an unqualified object name */
private def splitObjectName(fullname: String): (Option[String], String) =
@@ -142,49 +125,7 @@ object ScriptRunner
case idx => (Some(fullname take idx), fullname drop (idx + 1))
}
- /** Code that is added to the beginning of a script file to make
- * it a complete Scala compilation unit.
- */
- protected def preambleCode(objectName: String): String = {
- val (maybePack, objName) = splitObjectName(objectName)
- val packageDecl = maybePack map ("package %s\n" format _) getOrElse ("")
-
- return """|
- | object %s {
- | def main(argv: Array[String]): Unit = {
- | val args = argv
- | new AnyRef {
- |""".stripMargin.format(objName)
- }
-
- /** Code that is added to the end of a script file to make
- * it a complete Scala compilation unit.
- */
- val endCode = """
- | }
- | }
- | }
- |""".stripMargin
-
- /** Wrap a script file into a runnable object named
- * <code>scala.scripting.Main</code>.
- */
- def wrappedScript(
- objectName: String,
- filename: String,
- getSourceFile: PlainFile => BatchSourceFile): SourceFile =
- {
- val preamble = new BatchSourceFile("<script preamble>", preambleCode(objectName).toCharArray)
- val middle = {
- val bsf = getSourceFile(PlainFile fromPath filename)
- new SourceFileFragment(bsf, headerLength(filename), bsf.length)
- }
- val end = new BatchSourceFile("<script trailer>", endCode.toCharArray)
-
- new CompoundSourceFile(preamble, middle, end)
- }
-
- /** Compile a script using the fsc compilation deamon.
+ /** Compile a script using the fsc compilation daemon.
*
* @param settings ...
* @param scriptFileIn ...
@@ -194,31 +135,15 @@ object ScriptRunner
settings: GenericRunnerSettings,
scriptFileIn: String): Boolean =
{
- val scriptFile = CompileClient absFileName scriptFileIn
-
- {
- import settings._
- for (setting <- List(classpath, sourcepath, bootclasspath, extdirs, outdir)) {
- // DBG("%s = %s".format(setting.name, setting.value))
- setting.value = CompileClient absFileName setting.value
- }
- }
-
- val compSettingNames = new Settings(error).allSettings map (_.name)
- val compSettings = settings.allSettings filter (compSettingNames contains _.name)
+ val scriptFile = Path(scriptFileIn).toAbsolute.path
+ val compSettingNames = new Settings(error).visibleSettings.toList map (_.name)
+ val compSettings = settings.visibleSettings.toList filter (compSettingNames contains _.name)
val coreCompArgs = compSettings flatMap (_.unparse)
val compArgs = coreCompArgs ::: List("-Xscript", scriptMain(settings), scriptFile)
var compok = true
- // XXX temporary as I started using ManagedResource not remembering it wasn't checked in.
- def ManagedResource[T](x: => T) = Some(x)
-
- for {
- socket <- ManagedResource(CompileSocket getOrCreateSocket "")
- val _ = if (socket == null) return false
- out <- ManagedResource(new PrintWriter(socket.getOutputStream(), true))
- in <- ManagedResource(new BufferedReader(new InputStreamReader(socket.getInputStream())))
- } {
+ val socket = CompileSocket getOrCreateSocket "" getOrElse (return false)
+ socket.applyReaderAndWriter { (in, out) =>
out println (CompileSocket getPassword socket.getPort)
out println (compArgs mkString "\0")
@@ -227,8 +152,7 @@ object ScriptRunner
if (CompileSocket.errorPattern matcher fromServer matches)
compok = false
}
- // XXX temp until managed resource is available
- in.close() ; out.close() ; socket.close()
+ socket.close()
}
compok
@@ -240,7 +164,7 @@ object ScriptRunner
/** Compile a script and then run the specified closure with
* a classpath for the compiled script.
*
- * @returns true if compilation and the handler succeeds, false otherwise.
+ * @return true if compilation and the handler succeeds, false otherwise.
*/
private def withCompiledScript(
settings: GenericRunnerSettings,
@@ -259,12 +183,15 @@ object ScriptRunner
settings.outdir.value = compiledPath.path
if (settings.nocompdaemon.value) {
+ /** Setting settings.script.value informs the compiler this is not a
+ * self contained compilation unit.
+ */
+ settings.script.value = scriptMain(settings)
val reporter = new ConsoleReporter(settings)
val compiler = newGlobal(settings, reporter)
val cr = new compiler.Run
- val wrapped = wrappedScript(scriptMain(settings), scriptFile, compiler getSourceFile _)
- cr compileSources List(wrapped)
+ cr compile List(scriptFile)
if (reporter.hasErrors) None else Some(compiledPath)
}
else if (compileWithDaemon(settings, scriptFile)) Some(compiledPath)
@@ -300,33 +227,18 @@ object ScriptRunner
/** Run a script after it has been compiled
*
- * @returns true if execution succeeded, false otherwise
+ * @return true if execution succeeded, false otherwise
*/
private def runCompiled(
settings: GenericRunnerSettings,
compiledLocation: String,
scriptArgs: List[String]): Boolean =
{
- def fileToURL(f: JFile): Option[URL] =
- try Some(f.toURL) catch { case _: Exception => None }
-
- def paths(str: String, expandStar: Boolean): List[URL] =
- for {
- file <- ClassPath.expandPath(str, expandStar) map (new JFile(_))
- if file.exists
- url <- fileToURL(file)
- } yield url
-
- val classpath =
- (paths(settings.bootclasspath.value, true) :::
- paths(compiledLocation, false) :::
- paths(settings.classpath.value, true))
+ val pr = new PathResolver(settings)
+ val classpath = File(compiledLocation).toURL +: pr.asURLs
try {
- ObjectRunner.run(
- classpath,
- scriptMain(settings),
- scriptArgs)
+ ObjectRunner.run(classpath, scriptMain(settings), scriptArgs)
true
}
catch {
@@ -342,7 +254,7 @@ object ScriptRunner
/** Run a script file with the specified arguments and compilation
* settings.
*
- * @returns true if compilation and execution succeeded, false otherwise.
+ * @return true if compilation and execution succeeded, false otherwise.
*/
def runScript(
settings: GenericRunnerSettings,
@@ -357,7 +269,7 @@ object ScriptRunner
/** Run a command
*
- * @returns true if compilation and execution succeeded, false otherwise.
+ * @return true if compilation and execution succeeded, false otherwise.
*/
def runCommand(
settings: GenericRunnerSettings,
@@ -366,7 +278,7 @@ object ScriptRunner
{
val scriptFile = File.makeTemp("scalacmd", ".scala")
// save the command to the file
- scriptFile writeAll List(command)
+ scriptFile writeAll command
try withCompiledScript(settings, scriptFile.path) { runCompiled(settings, _, scriptArgs) }
finally scriptFile.delete() // in case there was a compilation error
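
runCommand above writes the -e snippet into a temporary .scala file and deletes it in a finally, so a compilation error cannot leave the file behind. The same write/use/cleanup shape using plain JDK temp files; scala.tools.nsc.io.File's makeTemp and writeAll are replaced by java.io calls here:

    import java.io.{ File, FileWriter }

    object TempScriptDemo {
      def withTempScript[T](code: String)(body: File => T): T = {
        val file = File.createTempFile("scalacmd", ".scala")
        try {
          val out = new FileWriter(file)
          try out.write(code) finally out.close()   // save the command to the file
          body(file)                                // e.g. compile and run it
        }
        finally file.delete()                       // removed even on a compile error
      }

      def main(args: Array[String]) {
        withTempScript("println(\"hello\")") { f => println("wrote " + f.getPath) }
      }
    }
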
diff --git a/src/compiler/scala/tools/nsc/Settings.scala b/src/compiler/scala/tools/nsc/Settings.scala
index 0f59d9a3d9..8765ad3686 100644
--- a/src/compiler/scala/tools/nsc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/Settings.scala
@@ -1,858 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
-import java.io.File
-import io.AbstractFile
-import util.SourceFile
-import Settings._
-import annotation.elidable
+import settings.MutableSettings
-class Settings(errorFn: String => Unit) extends ScalacSettings {
+/** A compatibility stub.
+ */
+class Settings(errorFn: String => Unit) extends MutableSettings(errorFn) {
def this() = this(Console.println)
-
- // optionizes a system property
- private def syspropopt(name: String): Option[String] = onull(System.getProperty(name))
- private def sysenvopt(name: String): Option[String] = onull(System.getenv(name))
-
- // given any number of possible path segments, flattens down to a
- // :-separated style path
- private def concatPath(segments: Option[String]*): String =
- segments.toList.flatMap(x => x) mkString File.pathSeparator
-
- protected def classpathDefault =
- sysenvopt("CLASSPATH") getOrElse "."
-
- protected def bootclasspathDefault =
- concatPath(syspropopt("sun.boot.class.path"), guessedScalaBootClassPath)
- // syspropopt("sun.boot.class.path") getOrElse ""
- // XXX scala-library.jar was being added to both boot and regular classpath until 8/18/09
- // Removing from boot classpath caused build/quick/bin/scala to fail.
- // Note to self, figure out how/why the bootclasspath is tied up with the locker/quick/pack.
-
- protected def extdirsDefault =
- concatPath(syspropopt("java.ext.dirs"), guessedScalaExtDirs)
-
- protected def assemExtdirsDefault =
- concatPath(guessedScalaExtDirs)
-
- protected def pluginsDirDefault =
- guess(List("misc", "scala-devel", "plugins"), _.isDirectory) getOrElse ""
-
- def onull[T <: AnyRef](x: T): Option[T] = if (x eq null) None else Some(x)
- def mkPath(base: String, segments: String*) = new File(base, segments.mkString(File.separator))
- def scalaHome: Option[String] = onull(Properties.scalaHome)
-
- // examine path relative to scala home and return Some(path) if it meets condition
- private def guess(xs: List[String], cond: (File) => Boolean): Option[String] = {
- if (scalaHome.isEmpty) return None
- val f = mkPath(scalaHome.get, xs: _*)
- if (cond(f)) Some(f.getAbsolutePath) else None
- }
-
- private def guessedScalaBootClassPath: Option[String] =
- guess(List("lib", "scala-library.jar"), _.isFile) orElse
- guess(List("classes", "library"), _.isDirectory)
-
- private def guessedScalaExtDirs: Option[String] =
- guess(List("lib"), _.isDirectory)
-
- override def hashCode() = allSettings.hashCode
- override def equals(that: Any) = that match {
- case s: Settings => this.allSettings == s.allSettings
- case _ => false
- }
-
- def checkDependencies: Boolean = {
- def hasValue(s: Setting, value: String): Boolean = s match {
- case bs: BooleanSetting => bs.value
- case ss: StringSetting => ss.value == value
- case cs: ChoiceSetting => cs.value == value
- case _ => "" == value
- }
-
- for (setting <- allSettings ; (dep, value) <- setting.dependency)
- if (!setting.isDefault && !hasValue(dep, value)) {
- errorFn("incomplete option " + setting.name + " (requires " + dep.name + ")")
- return false
- }
-
- true
- }
-
-
- /** A list pairing source directories with their output directory.
- * This option is not available on the command line, but can be set by
- * other tools (IDEs especially). The command line specifies a single
- * output directory that is used for all source files, denoted by a
- * '*' in this list.
- */
- lazy val outputDirs = new OutputDirs
-
-
- /** Try to add additional command line parameters.
- * Returns unconsumed arguments.
- */
- def parseParams(line: String): List[String] =
- parseParams(line.trim.split("""\s+""").toList)
-
- def parseParams(args: List[String]): List[String] = {
- // verify command exists and call setter
- def tryToSetIfExists(
- cmd: String,
- args: List[String],
- setter: (Setting) => (List[String] => Option[List[String]])
- ): Option[List[String]] =
- lookupSetting(cmd) match {
- case None => errorFn("Parameter '" + cmd + "' is not recognised by Scalac.") ; None
- case Some(cmd) =>
- val res = setter(cmd)(args)
- cmd.postSetHook()
- res
- }
-
- // if arg is of form -Xfoo:bar,baz,quux
- def parseColonArg(s: String): Option[List[String]] = {
- val idx = s indexWhere (_ == ':')
- val (p, args) = (s.substring(0, idx), s.substring(idx+1).split(",").toList)
-
- // any non-Nil return value means failure and we return s unmodified
- tryToSetIfExists(p, args, (s: Setting) => s.tryToSetColon _)
- }
- // if arg is of form -Dfoo=bar or -Dfoo (name = "-D")
- def isPropertyArg(s: String) = lookupSetting(s.substring(0, 2)) match {
- case Some(x: DefinesSetting) => true
- case _ => false
- }
- def parsePropertyArg(s: String): Option[List[String]] = {
- val (p, args) = (s.substring(0, 2), s.substring(2))
-
- tryToSetIfExists(p, List(args), (s: Setting) => s.tryToSetProperty _)
- }
-
- // if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
- def parseNormalArg(p: String, args: List[String]): Option[List[String]] =
- tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _)
-
- def doArgs(args: List[String]): List[String] = {
- if (args.isEmpty) return Nil
- val arg :: rest = args
- if (arg == "") {
- // it looks like Ant passes "" sometimes
- rest
- }
- else if (!arg.startsWith("-")) {
- errorFn("Argument '" + arg + "' does not start with '-'.")
- args
- }
- else if (arg == "-") {
- errorFn("'-' is not a valid argument.")
- args
- }
- else
- // we dispatch differently based on the appearance of p:
- // 1) If it has a : it is presumed to be -Xfoo:bar,baz
- // 2) If the first two chars are the name of a command, -Dfoo=bar
- // 3) Otherwise, the whole string should be a command name
- //
- // Internally we use Option[List[String]] to discover error,
- // but the outside expects our arguments back unchanged on failure
- if (arg contains ":") parseColonArg(arg) match {
- case Some(_) => rest
- case None => args
- }
- else if (isPropertyArg(arg)) parsePropertyArg(arg) match {
- case Some(_) => rest
- case None => args
- }
- else parseNormalArg(arg, rest) match {
- case Some(xs) => xs
- case None => args
- }
- }
-
- doArgs(args)
- }
-
- // checks both name and any available abbreviations
- def lookupSetting(cmd: String): Option[Setting] =
- settingSet.find(x => x.name == cmd || (x.abbreviations contains cmd))
-
- // The *Setting classes used to be case classes defined inside of Settings.
- // The choice of location was poor because it tied the type of each setting
- // to its enclosing instance, which broke equality, so I moved the class
- // definitions into the companion object. The one benefit it was getting
- // out of this was using its knowledge of the enclosing instance to add
- // itself to the list of settings in the Setting constructor. However,
- // this was dicey and not working predictably, as illustrated in the comment
- // in GenericRunnerSettings:
- //
- // For some reason, "object defines extends Setting(...)"
- // does not work here. The object is present but the setting
- // is not added to allsettings.
- //
- // To capture similar semantics, I created instance methods on setting
- // which call a factory method for the right kind of object and then add
- // the newly constructed instance to allsettings. The constructors are
- // private to force all creation to go through these methods.
- //
- // The usage of case classes was becoming problematic (due to custom
- // equality, case class inheritance, and the need to control object
- // creation without a synthetic apply method getting in the way) and
- // it was providing little benefit, so they are no longer cases.
-
- // a wrapper for all Setting creators to keep our list up to date
- // and tell them how to announce errors
- private def add[T <: Setting](s: T): T = {
- s setErrorHandler errorFn
- allsettings += s
- s
- }
-
- /**
- * The canonical creators for Setting objects.
- */
- import Function.{ tupled, untupled }
- import Setting._
-
- // A bit too clever, but I haven't found any other way to compose
- // functions with arity 2+ without having to annotate parameter types
- lazy val IntSetting = untupled(tupled(sint _) andThen add[IntSetting])
- lazy val BooleanSetting = untupled(tupled(bool _) andThen add[BooleanSetting])
- lazy val StringSetting = untupled(tupled(str _) andThen add[StringSetting])
- lazy val MultiStringSetting = untupled(tupled(multi _) andThen add[MultiStringSetting])
- lazy val ChoiceSetting = untupled(tupled(choice _) andThen add[ChoiceSetting])
- lazy val DebugSetting = untupled(tupled(sdebug _) andThen add[DebugSetting])
- lazy val PhasesSetting = untupled(tupled(phase _) andThen add[PhasesSetting])
- lazy val DefinesSetting = add(defines())
- lazy val OutputSetting = untupled(tupled(output _) andThen add[OutputSetting])
-
- override def toString() =
- "Settings(\n%s)" format (settingSet filter (s => !s.isDefault) map (" " + _ + "\n") mkString)
-}
-
-object Settings {
- // basically this is a value which remembers if it's been modified
- trait SettingValue {
- type T <: Any
- protected var v: T
- private var setByUser: Boolean = false
- def isDefault: Boolean = !setByUser
- def value: T = v
- def value_=(arg: T) = { setByUser = true ; v = arg }
- val choices : List[T] = Nil
- }
-
- /** A class for holding mappings from source directories to
- * their output location. This functionality can be accessed
- * only programmatically. The command line compiler uses a
- * single output location, but tools may use this functionality
- * to set output location per source directory.
- */
- class OutputDirs {
- /** Pairs of source directory - destination directory. */
- private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil
-
- /** If this is not None, the output location where all
- * classes should go.
- */
- private var singleOutDir: Option[AbstractFile] = None
-
- /** Add a destination directory for sources found under srcdir.
- * Both directories should exits.
- */
- def add(srcDir: String, outDir: String): Unit =
- add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
- checkDir(AbstractFile.getDirectory(outDir), outDir))
-
- /** Check that dir is exists and is a directory. */
- private def checkDir(dir: AbstractFile, name: String): AbstractFile = {
- if ((dir eq null) || !dir.isDirectory)
- throw new FatalError(name + " does not exist or is not a directory")
- dir
- }
-
- /** Set the single output directory. From now on, all files will
- * be dumped in there, regardless of previous calls to 'add'.
- */
- def setSingleOutput(outDir: String) {
- val dst = AbstractFile.getDirectory(outDir)
- setSingleOutput(checkDir(dst, outDir))
- }
-
- /** Set the single output directory. From now on, all files will
- * be dumped in there, regardless of previous calls to 'add'.
- */
- def setSingleOutput(dir: AbstractFile) {
- singleOutDir = Some(dir)
- }
-
- def add(src: AbstractFile, dst: AbstractFile) {
- singleOutDir = None
- outputDirs ::= (src, dst)
- }
-
- /** Return the list of source-destination directory pairs. */
- def outputs: List[(AbstractFile, AbstractFile)] = outputDirs
-
- /** Return the output directory for the given file.
- */
- def outputDirFor(src: AbstractFile): AbstractFile = {
- def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
- src.path.startsWith(srcDir.path)
-
- singleOutDir match {
- case Some(d) => d
- case None =>
- (outputs find Function.tupled(isBelow)) match {
- case Some((_, d)) => d
- case _ =>
- throw new FatalError("Could not find an output directory for "
- + src.path + " in " + outputs)
- }
- }
- }
- }
-
- // The Setting companion object holds all the factory methods
- object Setting {
- def bool(name: String, descr: String) =
- new BooleanSetting(name, descr)
-
- def str(name: String, arg: String, descr: String, default: String) =
- new StringSetting(name, arg, descr, default)
-
- def sint(
- name: String,
- descr: String,
- default: Int,
- range: Option[(Int, Int)] = None,
- parser: String => Option[Int] = _ => None
- ) =
- new IntSetting(name, descr, default, range, parser)
-
- def multi(name: String, arg: String, descr: String) =
- new MultiStringSetting(name, arg, descr)
-
- def choice(name: String, descr: String, choices: List[String], default: String): ChoiceSetting =
- new ChoiceSetting(name, descr, choices, default)
-
- def sdebug(name: String, descr: String, choices: List[String], default: String, defaultEmpty: String) =
- new DebugSetting(name, descr, choices, default, defaultEmpty)
-
- def phase(name: String, descr: String) =
- new PhasesSetting(name, descr)
-
- def defines() = new DefinesSetting()
-
- def output(outputDirs: OutputDirs, default: String) =
- new OutputSetting(outputDirs, default)
- }
-
- implicit val SettingOrdering : Ordering[Setting] = Ordering.ordered;
- /** A base class for settings of all types.
- * Subclasses each define a `value' field of the appropriate type.
- */
- abstract class Setting(descr: String) extends Ordered[Setting] with SettingValue {
- /** The name of the option as written on the command line, '-' included. */
- def name: String
-
- /** Error handling function, set after creation by enclosing Settings instance */
- private var _errorFn: String => Unit = _
- private[Settings] def setErrorHandler(e: String => Unit) = _errorFn = e
- def errorFn(msg: String) = _errorFn(msg)
- def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
-
- /** Will be called after this Setting is set, for any cases where the
- * Setting wants to perform extra work. */
- private var _postSetHook: () => Unit = () => ()
- def postSetHook(): Unit = _postSetHook()
- def withPostSetHook(f: () => Unit): this.type = { _postSetHook = f ; this }
-
- /** After correct Setting has been selected, tryToSet is called with the
- * remainder of the command line. It consumes any applicable arguments and
- * returns the unconsumed ones.
- */
- private[Settings] def tryToSet(args: List[String]): Option[List[String]]
-
- /** Commands which can take lists of arguments in form -Xfoo:bar,baz override
- * this method and accept them as a list. It returns List[String] for
- * consistency with tryToSet, and should return its incoming arguments
- * unmodified on failure, and Nil on success.
- */
- private[Settings] def tryToSetColon(args: List[String]): Option[List[String]] =
- errorAndValue("'" + name + "' does not accept multiple arguments", None)
-
- /** Commands which take properties in form -Dfoo=bar or -Dfoo
- */
- private[Settings] def tryToSetProperty(args: List[String]): Option[List[String]] =
- errorAndValue("'" + name + "' does not accept property style arguments", None)
-
- /**
- * Attempt to set from a properties file style property value.
- */
- def tryToSetFromPropertyValue(s : String) {
- tryToSet(s :: Nil)
- }
-
- /** The syntax defining this setting in a help string */
- private var _helpSyntax = name
- def helpSyntax: String = _helpSyntax
- def withHelpSyntax(s: String): this.type = { _helpSyntax = s ; this }
-
- /** Abbreviations for this setting */
- private var _abbreviations: List[String] = Nil
- def abbreviations = _abbreviations
- def withAbbreviation(s: String): this.type = { _abbreviations ++= List(s) ; this }
-
- /** A description of the purpose of this setting in a help string */
- def helpDescription = descr
-
- /** A list of Strings which can recreate this setting. */
- def unparse: List[String]
-
- /** Optional dependency on another setting */
- protected[Settings] var dependency: Option[(Setting, String)] = None
- def dependsOn(s: Setting, value: String): this.type = { dependency = Some((s, value)); this }
- def dependsOn(s: Setting): this.type = dependsOn(s, "")
-
- def isStandard: Boolean = !isAdvanced && !isPrivate && name != "-Y"
- def isAdvanced: Boolean = (name startsWith "-X") && name != "-X"
- def isPrivate: Boolean = (name == "-P") || ((name startsWith "-Y") && name != "-Y")
-
- // Ordered (so we can use TreeSet)
- def compare(that: Setting): Int = name compare that.name
- def compareLists[T <% Ordered[T]](xs: List[T], ys: List[T]): Boolean = xs.sort(_ < _) == ys.sort(_ < _)
-
- // Equality
- def eqValues: List[Any] = List(name, value)
- def isEq(other: Setting) = eqValues == other.eqValues
- override def hashCode() = name.hashCode
- override def toString() = "%s = %s".format(name, value)
- }
-
- /** A setting represented by an integer */
- class IntSetting private[Settings](
- val name: String,
- val descr: String,
- val default: Int,
- val range: Option[(Int, Int)],
- parser: String => Option[Int])
- extends Setting(descr) {
- type T = Int
- protected var v = default
-
- // not stable values!
- val IntMin = Int.MinValue
- val IntMax = Int.MaxValue
- def min = range map (_._1) getOrElse IntMin
- def max = range map (_._2) getOrElse IntMax
-
- override def value_=(s: Int) =
- if (isInputValid(s)) super.value_=(s) else errorMsg
-
- // Validate that min and max are consistent
- assert(min <= max)
-
- // Helper to validate an input
- private def isInputValid(k: Int): Boolean = (min <= k) && (k <= max)
-
- // Helper to generate a textual explaination of valid inputs
- private def getValidText: String = (min, max) match {
- case (IntMin, IntMax) => "can be any integer"
- case (IntMin, x) => "must be less than or equal to "+x
- case (x, IntMax) => "must be greater than or equal to "+x
- case _ => "must be between %d and %d".format(min, max)
- }
-
- // Ensure that the default value is actually valid
- assert(isInputValid(default))
-
- def parseArgument(x: String): Option[Int] = {
- parser(x) orElse {
- try { Some(x.toInt) }
- catch { case _: NumberFormatException => None }
- }
- }
-
- def errorMsg = errorFn("invalid setting for -"+name+" "+getValidText)
-
- def tryToSet(args: List[String]) =
- if (args.isEmpty) errorAndValue("missing argument", None)
- else parseArgument(args.head) match {
- case Some(i) => value = i ; Some(args.tail)
- case None => errorMsg ; None
- }
-
- def unparse: List[String] =
- if (value == default) Nil
- else List(name, value.toString)
-
- override def equals(that: Any) = that match {
- case x: IntSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a boolean flag (false, unless set) */
- class BooleanSetting private[Settings](
- val name: String,
- val descr: String)
- extends Setting(descr) {
- type T = Boolean
- protected var v = false
-
- def tryToSet(args: List[String]) = { value = true ; Some(args) }
- def unparse: List[String] = if (value) List(name) else Nil
- override def tryToSetFromPropertyValue(s : String) {
- value = s.equalsIgnoreCase("true")
- }
- override def equals(that: Any) = that match {
- case x: BooleanSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a string, (`default' unless set) */
- class StringSetting private[Settings](
- val name: String,
- val arg: String,
- val descr: String,
- val default: String)
- extends Setting(descr) {
- type T = String
- protected var v = default
-
- def tryToSet(args: List[String]) = args match {
- case Nil => errorAndValue("missing argument", None)
- case x :: xs => value = x ; Some(xs)
- }
- def unparse: List[String] = if (value == default) Nil else List(name, value)
-
- withHelpSyntax(name + " <" + arg + ">")
-
- override def equals(that: Any) = that match {
- case x: StringSetting => this isEq x
- case _ => false
- }
- }
-
- /** Set the output directory. */
- class OutputSetting private[Settings](
- outputDirs: OutputDirs,
- default: String)
- extends StringSetting("-d", "directory", "Specify where to place generated class files", default) {
- value = default
- override def value_=(str: String) {
- super.value_=(str)
- outputDirs.setSingleOutput(str)
- }
- }
-
- /** A setting that accumulates all strings supplied to it,
- * until it encounters one starting with a '-'. */
- class MultiStringSetting private[Settings](
- val name: String,
- val arg: String,
- val descr: String)
- extends Setting(descr) {
- type T = List[String]
- protected var v: List[String] = Nil
- def appendToValue(str: String) { value ++= List(str) }
-
- def tryToSet(args: List[String]) = {
- val (strings, rest) = args span (x => !x.startsWith("-"))
- strings foreach appendToValue
-
- Some(rest)
- }
- override def tryToSetColon(args: List[String]) = tryToSet(args)
- def unparse: List[String] = value map { name + ":" + _ }
-
- withHelpSyntax(name + ":<" + arg + ">")
- override def equals(that: Any) = that match {
- case x: MultiStringSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a string in a given set of <code>choices</code>,
- * (<code>default</code> unless set).
- */
- class ChoiceSetting private[Settings](
- val name: String,
- val descr: String,
- override val choices: List[String],
- val default: String)
- extends Setting(descr + choices.mkString(" (", ",", ")")) {
- type T = String
- protected var v: String = default
- protected def argument: String = name.substring(1)
-
- def tryToSet(args: List[String]) = { value = default ; Some(args) }
- override def tryToSetColon(args: List[String]) = args match {
- case Nil => errorAndValue("missing " + argument, None)
- case List(x) if choices contains x => value = x ; Some(Nil)
- case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None)
- case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None)
- }
- def unparse: List[String] =
- if (value == default) Nil else List(name + ":" + value)
-
- withHelpSyntax(name + ":<" + argument + ">")
- override def equals(that: Any) = that match {
- case x: ChoiceSetting => this isEq x
- case _ => false
- }
- }
-
- /** Same as ChoiceSetting but have a <code>level</code> int which tells the
- * index of the selected choice. The <code>defaultEmpty</code> is used when
- * this setting is used without specifying any of the available choices.
- */
- class DebugSetting private[Settings](
- name: String,
- descr: String,
- choices: List[String],
- default: String,
- val defaultEmpty: String)
- extends ChoiceSetting(name, descr, choices, default) {
- def indexOf[T](xs: List[T], e: T): Option[Int] = xs.indexOf(e) match {
- case -1 => None
- case x => Some(x)
- }
- var level: Int = indexOf(choices, default).get
-
- override def value_=(choice: String) = {
- super.value_=(choice)
- level = indexOf(choices, choice).get
- }
-
- override def tryToSet(args: List[String]) =
- if (args.isEmpty) { value = defaultEmpty ; Some(Nil) }
- else super.tryToSet(args)
- override def equals(that: Any) = that match {
- case x: DebugSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a list of strings which should be prefixes of
- * phase names. This is not checked here, however. Alternatively the string
- * "all" can be used to represent all phases.
- * (the empty list, unless set)
- */
- class PhasesSetting private[Settings](
- val name: String,
- val descr: String)
- extends Setting(descr + " <phase> or \"all\"") {
- type T = List[String]
- protected var v: List[String] = Nil
-
- def tryToSet(args: List[String]) = errorAndValue("missing phase", None)
- override def tryToSetColon(args: List[String]) = args match {
- case Nil => errorAndValue("missing phase", None)
- case xs => value ++= xs ; Some(Nil)
- }
- // we slightly abuse the usual meaning of "contains" here by returning
- // true if our phase list contains "all", regardless of the incoming argument
- def contains(phasename: String): Boolean =
- doAllPhases || (value exists { phasename startsWith _ } )
-
- def doAllPhases() = value contains "all"
- def unparse: List[String] = value map { name + ":" + _ }
-
- override def equals(that: Any) = that match {
- case ps: PhasesSetting if name == ps.name =>
- (doAllPhases && ps.doAllPhases) || compareLists(value, ps.value)
- case _ => false
- }
-
- withHelpSyntax(name + ":<phase>")
- }
-
- /** A setting for a -D style property definition */
- class DefinesSetting private[Settings] extends Setting("set a Java property") {
- type T = List[(String, String)]
- protected var v: T = Nil
- def name = "-D"
- withHelpSyntax(name + "<prop>")
-
- // given foo=bar returns Some(foo, bar), or None if parse fails
- def parseArg(s: String): Option[(String, String)] = {
- if (s == "") return None
- val regexp = """^(.*)?=(.*)$""".r
-
- regexp.findAllIn(s).matchData.toList match {
- case Nil => Some(s, "")
- case List(md) => md.subgroups match { case List(a,b) => Some(a,b) }
- }
- }
-
- def tryToSet(args: List[String]) =
- if (args.isEmpty) None
- else parseArg(args.head) match {
- case None => None
- case Some((a, b)) => value ++= List((a, b)) ; Some(args.tail)
- }
-
- /** Apply the specified properties to the current JVM */
- def applyToCurrentJVM =
- value foreach { case (k, v) => System.getProperties.setProperty(k, v) }
-
- def unparse: List[String] =
- value map { case (k,v) => "-D" + k + (if (v == "") "" else "=" + v) }
- override def equals(that: Any) = that match {
- case x: DefinesSetting => this isEq x
- case _ => false
- }
- }
-
-}
-
-trait ScalacSettings {
- self: Settings =>
-
- import collection.immutable.TreeSet
-
- /** A list of all settings */
- protected var allsettings: Set[Setting] = TreeSet[Setting]()
- def settingSet: Set[Setting] = allsettings
- def allSettings: List[Setting] = settingSet.toList
-
- /** Disable a setting */
- def disable(s: Setting) = allsettings -= s
-
- /**
- * Temporary Settings
- */
- val suppressVTWarn = BooleanSetting ("-Ysuppress-vt-typer-warnings", "Suppress warnings from the typer when testing the virtual class encoding, NOT FOR FINAL!")
-
- /**
- * Standard settings
- */
- // argfiles is only for the help message
- val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
- val bootclasspath = StringSetting ("-bootclasspath", "path", "Override location of bootstrap class files", bootclasspathDefault)
- val classpath = StringSetting ("-classpath", "path", "Specify where to find user class files", classpathDefault).withAbbreviation("-cp")
- val outdir = OutputSetting (outputDirs, ".")
- val dependenciesFile = StringSetting ("-dependencyfile", "file", "Specify the file in which dependencies are tracked", ".scala_dependencies")
- val deprecation = BooleanSetting ("-deprecation", "Output source locations where deprecated APIs are used")
- val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files", Properties.sourceEncoding)
- val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail")
- val extdirs = StringSetting ("-extdirs", "dirs", "Override location of installed extensions", extdirsDefault)
- val debuginfo = DebugSetting ("-g", "Specify level of generated debugging info", List("none", "source", "line", "vars", "notailcalls"), "vars", "vars")
- val help = BooleanSetting ("-help", "Print a synopsis of standard options")
- val make = ChoiceSetting ("-make", "Specify recompilation detection strategy", List("all", "changed", "immediate", "transitive"), "all") .
- withHelpSyntax("-make:<strategy>")
- val nowarnings = BooleanSetting ("-nowarn", "Generate no warnings")
- val XO = BooleanSetting ("-optimise", "Generates faster bytecode by applying optimisations to the program").withAbbreviation("-optimize")
- val printLate = BooleanSetting ("-print", "Print program with all Scala-specific features removed")
- val sourcepath = StringSetting ("-sourcepath", "path", "Specify where to find input source files", "")
- val target = ChoiceSetting ("-target", "Specify for which target object files should be built", List("jvm-1.5", "msil"), "jvm-1.5")
- val unchecked = BooleanSetting ("-unchecked", "Enable detailed unchecked warnings")
- val uniqid = BooleanSetting ("-uniqid", "Print identifiers with unique names for debugging")
- val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing")
- val version = BooleanSetting ("-version", "Print product version and exit")
-
- /**
- * -X "Advanced" settings
- */
- val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options")
- val assemname = StringSetting ("-Xassem-name", "file", "Name of the output assembly (only relevant with -target:msil)", "").dependsOn(target, "msil")
- val assemrefs = StringSetting ("-Xassem-path", "path", "List of assemblies referenced by the program (only relevant with -target:msil)", ".").dependsOn(target, "msil")
- val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "List of directories containing assemblies, defaults to `lib'", assemExtdirsDefault).dependsOn(target, "msil")
- val sourcedir = StringSetting ("-Xsourcedir", "directory", "When -target:msil, the source folder structure is mirrored in output directory.", ".").dependsOn(target, "msil")
- val checkInit = BooleanSetting ("-Xcheckinit", "Add runtime checks on field accessors. Uninitialized accesses result in an exception being thrown.")
- val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions and assumptions")
- val elideLevel = IntSetting ("-Xelide-level", "Generate calls to @elidable-marked methods only method priority is greater than argument.",
- elidable.ASSERTION, None, elidable.byName.get(_))
- val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions")
- val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes")
- val future = BooleanSetting ("-Xfuture", "Turn on future language features")
- val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot", "")
- val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more info on why some implicits are not applicable")
- val nouescape = BooleanSetting ("-Xno-uescape", "Disables handling of \\u unicode escapes")
- val XnoVarargsConversion = BooleanSetting("-Xno-varargs-conversion", "disable varags conversion")
- val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing")
- val plugin = MultiStringSetting("-Xplugin", "file", "Load a plugin from a file")
- val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable a plugin")
- val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins")
- val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless a plugin is available")
- val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Location to find compiler plugins", pluginsDirDefault)
- val print = PhasesSetting ("-Xprint", "Print out program after")
- val writeICode = BooleanSetting ("-Xprint-icode", "Log internal icode to *.icode files")
- val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions (as offsets)")
- val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option)")
- val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option)")
- val resident = BooleanSetting ("-Xresident", "Compiler stays resident, files to compile are read from standard input")
- val script = StringSetting ("-Xscript", "object", "Compile as a script, wrapping the code into object.main()", "")
- val Xshowcls = StringSetting ("-Xshow-class", "class", "Show class info", "")
- val Xshowobj = StringSetting ("-Xshow-object", "object", "Show object info", "")
- val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases")
- val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files", "scala.tools.nsc.io.SourceReader")
- val newArrays = BooleanSetting ("-Ynewarrays", "Generate code for new array scheme")
-
- /**
- * -Y "Private" settings
- */
- val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options")
- val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
- val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
- val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination")
- val Xcodebase = StringSetting ("-Ycodebase", "codebase", "Specify the URL containing the Scala libraries", "")
- val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL")
- val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination")
- val debug = BooleanSetting ("-Ydebug", "Output debugging messages")
- val Xdetach = BooleanSetting ("-Ydetach", "Perform detaching of remote closures")
- // val doc = BooleanSetting ("-Ydoc", "Generate documentation")
- val inline = BooleanSetting ("-Yinline", "Perform inlining when possible")
- val Xlinearizer = ChoiceSetting ("-Ylinearizer", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo") .
- withHelpSyntax("-Ylinearizer:<which>")
- val log = PhasesSetting ("-Ylog", "Log operations in")
- val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java")
- val noimports = BooleanSetting ("-Yno-imports", "Compile without any implicit imports")
- val nopredefs = BooleanSetting ("-Yno-predefs", "Compile without any implicit predefined values")
- val Yrecursion = IntSetting ("-Yrecursion", "Recursion depth used when locking symbols", 0, Some(0, Int.MaxValue), _ => None)
- val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations")
- val Xshowtrees = BooleanSetting ("-Yshow-trees", "Show detailed trees when used in connection with -print:phase")
- val skip = PhasesSetting ("-Yskip", "Skip")
- val Xsqueeze = ChoiceSetting ("-Ysqueeze", "if on, creates compact code in matching", List("on","off"), "on") .
- withHelpSyntax("-Ysqueeze:<enabled>")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics")
- val stop = PhasesSetting ("-Ystop", "Stop after phase")
- val refinementMethodDispatch =
- ChoiceSetting ("-Ystruct-dispatch", "Selects dispatch method for structural refinement method calls",
- List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache") .
- withHelpSyntax("-Ystruct-dispatch:<method>")
- val specialize = BooleanSetting ("-Yspecialize", "Specialize generic code on types.")
- val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
- val Yidedebug = BooleanSetting ("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "Compile using the specified build manager", List("none", "refined", "simple"), "none")
- val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignements")
- val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
- val Ytailrec = BooleanSetting ("-Ytailrecommend", "Alert methods which would be tail-recursive if private or final.")
- val YhigherKindedRaw = BooleanSetting ("-Yhigher-kinded-raw", "(temporary!) Treat raw Java types as higher-kinded types.")
- val Yjenkins = BooleanSetting ("-Yjenkins-hashCodes", "Use jenkins hash algorithm for case class generated hashCodes.")
-
- // Warnings
- val Xwarninit = BooleanSetting ("-Xwarninit", "Warn about possible changes in initialization semantics")
- val Xchecknull = BooleanSetting ("-Xcheck-null", "Emit warning on selection of nullable reference")
- val Xwarndeadcode = BooleanSetting ("-Ywarn-dead-code", "Emit warnings for dead code")
- val YwarnShadow = BooleanSetting ("-Ywarn-shadowing", "Emit warnings about possible variable shadowing.")
- val YwarnCatches = BooleanSetting ("-Ywarn-catches", "Emit warnings about catch blocks which catch everything.")
- val Xwarnings = BooleanSetting ("-Xstrict-warnings", "Emit warnings about lots of things.") .
- withPostSetHook(() =>
- List(YwarnShadow, YwarnCatches, Xwarndeadcode, Xwarninit) foreach (_.value = true)
- )
-
- /**
- * -P "Plugin" settings
- */
- val pluginOptions = MultiStringSetting("-P", "plugin:opt", "Pass an option to a plugin") .
- withHelpSyntax("-P:<plugin>:<opt>")
}
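
The MultiStringSetting removed above accumulates every argument up to the next dash-prefixed token (args span (x => !x.startsWith("-"))). A standalone sketch of that split follows; MultiArgDemo and its names are illustrative only, not compiler API.

object MultiArgDemo {
  // Consume tokens until the next one that looks like an option (starts with '-').
  def consume(args: List[String]): (List[String], List[String]) =
    args span (a => !a.startsWith("-"))

  def main(args: Array[String]): Unit = {
    val (files, rest) = consume(List("a.jar", "b.jar", "-verbose", "x.scala"))
    println(files) // List(a.jar, b.jar)
    println(rest)  // List(-verbose, x.scala)
  }
}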
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index 42008b58d2..445021f22a 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
new file mode 100755
index 0000000000..79ac89e360
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -0,0 +1,431 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package ast
+
+import symtab._
+import util.{Position, NoPosition}
+import util.DocStrings._
+import util.Chars._
+import scala.collection.mutable.{HashMap, ListBuffer, StringBuilder}
+
+/*
+ * @author Martin Odersky
+ * @version 1.0
+ */
+trait DocComments { self: SymbolTable =>
+
+ /** The raw doc comment map */
+ val docComments = new HashMap[Symbol, DocComment]
+
+ /** Associate comment with symbol `sym` at position `pos`. */
+ def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) =
+ if ((sym ne null) && (sym ne NoSymbol))
+ docComments += (sym -> DocComment(docStr, pos))
+
+ /** The raw doc comment of symbol `sym`, as it appears in the source text, "" if missing.
+ */
+ def rawDocComment(sym: Symbol): String =
+ docComments get sym map (_.raw) getOrElse ""
+
+ /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the position of the doc comment of the overridden version is returned instead.
+ */
+ def docCommentPos(sym: Symbol): Position =
+ getDocComment(sym) map (_.pos) getOrElse NoPosition
+
+ /** The raw doc comment of symbol `sym`, minus @usecase and @define sections, augmented by
+ * missing sections of an inherited doc comment.
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the doc comment of the overridden version is copied instead.
+ */
+ def cookedDocComment(sym: Symbol, docStr: String = ""): String = {
+ val ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse ""
+ else DocComment(docStr).template
+ superComment(sym) match {
+ case None =>
+ ownComment
+ case Some(sc) =>
+ if (ownComment == "") sc
+ else merge(sc, ownComment, sym)
+ }
+ }
+
+ /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
+ *
+ * @param sym The symbol for which doc comment is returned
+ * @param site The class for which doc comments are generated
+ * @throws ExpansionLimitExceeded when more than 10 successive expansions
+ * of the same string are done, which is
+ * interpreted as a recursive variable definition.
+ */
+ def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = ""): String = {
+ // when parsing a top level class or module, use the (module-)class itself to look up variable definitions
+ val site1 = if ((sym.isModule || sym.isClass) && (site hasFlag Flags.PACKAGE)) sym
+ else site
+ expandVariables(cookedDocComment(sym, docStr), sym, site1)
+ }
+
+ /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
+ * @param sym The symbol for which doc comment is returned (site is always the containing class)
+ */
+ def expandedDocComment(sym: Symbol): String = expandedDocComment(sym, sym.enclClass)
+
+ /** The list of use cases of doc comment of symbol `sym` seen as a member of class
+ * `site`. Each use case consists of a synthetic symbol (which is entered nowhere else),
+ * of an expanded doc comment string, and of its position.
+ *
+ * @param sym The symbol for which use cases are returned
+ * @param site The class for which doc comments are generated
+ * @throws ExpansionLimitExceeded when more than 10 successive expansions
+ * of the same string are done, which is
+ * interpreted as a recursive variable definition.
+ */
+ def useCases(sym: Symbol, site: Symbol): List[(Symbol, String, Position)] = {
+ def getUseCases(dc: DocComment) = {
+ for (uc <- dc.useCases; defn <- uc.expandedDefs(site)) yield
+ (defn,
+ expandVariables(merge(cookedDocComment(sym), uc.comment.raw, defn, copyFirstPara = true), sym, site),
+ uc.pos)
+ }
+ getDocComment(sym) map getUseCases getOrElse List()
+ }
+
+ def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass)
+
+ /** Returns the javadoc format of doc comment string `s`, including wiki expansion
+ */
+ def toJavaDoc(s: String): String = expandWiki(s)
+
+ private val wikiReplacements = List(
+ ("""(\n\s*\*?)(\s*\n)""" .r, """$1 <p>$2"""),
+ ("""<([^\w/])""" .r, """&lt;$1"""),
+ ("""([^\w/])>""" .r, """$1&gt;"""),
+ ("""\{\{\{(.*(?:\n.*)*)\}\}\}""".r, """<pre>$1</pre>"""),
+ ("""`([^`]*)`""" .r, """<code>$1</code>"""),
+ ("""__([^_]*)__""" .r, """<u>$1</u>"""),
+ ("""''([^']*)''""" .r, """<i>$1</i>"""),
+ ("""'''([^']*)'''""" .r, """<b>$1</b>"""),
+ ("""\^([^^]*)\^""" .r, """<sup>$1</sup>"""),
+ (""",,([^,]*),,""" .r, """<sub>$1</sub>"""))
+
+ /** Returns just the wiki expansion (this would correspond to
+ * a comment in the input format of the JavaDoc tool, modulo differences
+ * in tags.)
+ */
+ def expandWiki(str: String): String =
+ (str /: wikiReplacements) { (str1, regexRepl) => regexRepl._1 replaceAllIn(str1, regexRepl._2) }
+
+
+ private def getDocComment(sym: Symbol): Option[DocComment] = docComments get sym match {
+ case None => mapFind(sym.allOverriddenSymbols)(docComments get)
+ case some => some
+ }
+
+ /** The cooked doc comment of an overridden symbol */
+ protected def superComment(sym: Symbol): Option[String] =
+ sym.allOverriddenSymbols.view map { cookedDocComment(_) } find ("" !=)
+
+ private def mapFind[A, B](xs: Iterable[A])(f: A => Option[B]): Option[B] = {
+ var res: Option[B] = None
+ val it = xs.iterator
+ while (res.isEmpty && it.hasNext) {
+ res = f(it.next())
+ }
+ res
+ }
+
+ private def isMovable(str: String, sec: (Int, Int)): Boolean =
+ startsWithTag(str, sec, "@param") ||
+ startsWithTag(str, sec, "@tparam") ||
+ startsWithTag(str, sec, "@return")
+
+ /** Merge elements of doccomment `src` into doc comment `dst` for symbol `sym`.
+ * In detail:
+ * 1. If `copyFirstPara` is true, copy first paragraph
+ * 2. For all parameters of `sym` if there is no @param section
+ * in `dst` for that parameter name, but there is one on `src`, copy that section.
+ * 3. If there is no @return section in `dst` but there is one in `src`, copy it.
+ */
+ def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
+ val srcSections = tagIndex(src)
+ val dstSections = tagIndex(dst)
+ val srcParams = paramDocs(src, "@param", srcSections)
+ val dstParams = paramDocs(dst, "@param", dstSections)
+ val srcTParams = paramDocs(src, "@tparam", srcSections)
+ val dstTParams = paramDocs(dst, "@tparam", dstSections)
+ val out = new StringBuilder
+ var copied = 0
+ var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
+
+ if (copyFirstPara) {
+ val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
+ (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
+ out append src.substring(0, eop).trim
+ copied = 3
+ tocopy = 3
+ }
+
+ def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match {
+ case Some((start, end)) =>
+ if (end > tocopy) tocopy = end
+ case None =>
+ srcSec match {
+ case Some((start1, end1)) =>
+ out append dst.substring(copied, tocopy).trim
+ copied = tocopy
+ out append src.substring(start1, end1).trim
+ case None =>
+ }
+ }
+
+ for (params <- sym.paramss; param <- params)
+ mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
+ for (tparam <- sym.typeParams)
+ mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)
+ mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
+
+ if (out.length == 0) dst
+ else {
+ out append dst.substring(copied)
+ out.toString
+ }
+ }
+
+ /** Maps symbols to the variable -> replacement maps that are defined
+ * in their doc comments
+ */
+ private val defs = new HashMap[Symbol, Map[String, String]] {
+ override def default(key: Symbol) = Map()
+ }
+
+ /** Lookup definition of variable.
+ *
+ * @param vble The variable for which a definition is searched
+ * @param owner The current owner in which variable definitions are searched.
+ * @param site The class for which doc comments are generated
+ */
+ def lookupVariable(vble: String, site: Symbol): Option[String] =
+ if (site == NoSymbol)
+ None
+ else {
+ def lookInBaseClasses = mapFind(site.info.baseClasses)(defs(_).get(vble)) match {
+ case None => lookupVariable(vble, site.owner)
+ case someStr => someStr
+ }
+ if (site.isModule)
+ defs(site).get(vble) match {
+ case Some(str) => return Some(str)
+ case None => lookInBaseClasses
+ }
+ else lookInBaseClasses
+ }
+
+ private var expandCount = 0
+ private final val expandLimit = 10
+
+ /** Expand variable occurrences in string `str', until a fixed point is reached or
+ * expandLimit is exceeded.
+ *
+ * @param str The string to be expanded
+ * @param sym The symbol for which doc comments are generated
+ * @param site The class for which doc comments are generated
+ * @return Expanded string
+ */
+ protected def expandVariables(str: String, sym: Symbol, site: Symbol): String =
+ if (expandCount < expandLimit) {
+ try {
+ val out = new StringBuilder
+ var copied = 0
+ var idx = 0
+ while (idx < str.length) {
+ if ((str charAt idx) == '$') {
+ val vstart = idx
+ idx = skipVariable(str, idx + 1)
+ def replaceWith(repl: String) {
+ out append str.substring(copied, vstart)
+ out append repl
+ copied = idx
+ }
+ val vname = variableName(str.substring(vstart + 1, idx))
+ if (vname == "super") {
+ superComment(sym) match {
+ case Some(sc) =>
+ val superSections = tagIndex(sc)
+ replaceWith(sc.substring(3, startTag(sc, superSections)))
+ for (sec @ (start, end) <- superSections)
+ if (!isMovable(sc, sec)) out append sc.substring(start, end)
+ case None =>
+ }
+ } else if (vname.length > 0) {
+ lookupVariable(vname, site) match {
+ case Some(replacement) => replaceWith(replacement)
+ case None => //println("no replacement for "+vname) // DEBUG
+ }
+ } else idx += 1
+ } else idx += 1
+ }
+ if (out.length == 0) str
+ else {
+ out append str.substring(copied)
+ expandVariables(out.toString, sym, site)
+ }
+ } finally {
+ expandCount -= 1
+ }
+ } else throw new ExpansionLimitExceeded(str)
+
+
+ // !!! todo: inherit from Comment?
+ case class DocComment(raw: String, pos: Position = NoPosition) {
+
+ /** Returns:
+ * template: the doc comment minus all @define and @usecase sections
+ * defines : all define sections (as strings)
+ * useCases: all usecase sections (as instances of class UseCase)
+ */
+ lazy val (template, defines, useCases) = {
+ val sections = tagIndex(raw, idx =>
+ startsWithTag(raw, idx, "@define") || startsWithTag(raw, idx, "@usecase"))
+ val (defines, usecases) = sections partition (startsWithTag(raw, _, "@define"))
+ val end = startTag(raw, sections)
+ /*
+ println("processing doc comment:")
+ println(raw)
+ println("===========>")
+ println(raw.substring(0, end))
+ println("++++++++++++++++")
+ println(sections map { case (s, e) => raw.substring(s, e) })
+ */
+ (if (end == raw.length - 2) raw else raw.substring(0, end) + "*/",
+ defines map { case (start, end) => raw.substring(start, end) },
+ usecases map { case (start, end) => decomposeUseCase(start, end) })
+ }
+
+ private def decomposeUseCase(start: Int, end: Int): UseCase = {
+ val codeStart = skipWhitespace(raw, start + "@usecase".length)
+ val codeEnd = skipToEol(raw, codeStart)
+ val code = raw.substring(codeStart, codeEnd)
+ val codePos = subPos(codeStart, codeEnd)
+ val commentStart = skipLineLead(raw, codeEnd + 1) min end
+ val comment = "/** " + raw.substring(commentStart, end) + "*/"
+ val commentPos = subPos(commentStart, end)
+ UseCase(DocComment(comment, commentPos), code, codePos)
+ }
+
+ private def subPos(start: Int, end: Int) =
+ if (pos == NoPosition) NoPosition
+ else {
+ val start1 = pos.start + start
+ val end1 = pos.end + end
+ pos withStart start1 withPoint start1 withEnd end1
+ }
+
+ def defineVariables(sym: Symbol) {
+ for (str <- defines) {
+ val start = skipWhitespace(str, "@define".length)
+ var idx = skipVariable(str, start)
+ val vble = variableName(str.substring(start, idx))
+ if (idx < str.length && isWhitespace(str charAt idx)) idx += 1
+ var end = str.lastIndexOf('\n')
+ if (end == -1) end = str.length
+ defs(sym) += vble -> str.substring(idx, end)
+ }
+ //if (defs(sym).nonEmpty) println("vars of "+sym+" = "+defs(sym)) // !!!
+ }
+ }
+
+ case class UseCase(comment: DocComment, body: String, pos: Position) {
+ var defined: List[Symbol] = List() // initialized by Typer
+ var aliases: List[Symbol] = List() // initialized by Typer
+
+ def expandedDefs(site: Symbol): List[Symbol] = {
+
+ def select(site: Type, name: Name, orElse: => Type): Type = {
+ val member = site.nonPrivateMember(name)
+ if (member.isTerm) SingleType(site, member)
+ else if (member.isType) site.memberType(member)
+ else orElse
+ }
+
+ def getSite(name: Name): Type = {
+ def findIn(sites: List[Symbol]): Type = sites match {
+ case List() => NoType
+ case site :: sites1 => select(site.thisType, name, findIn(sites1))
+ }
+ val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
+ findIn(classes ::: List(pkgs.head, definitions.RootClass))
+ }
+
+ def getType(str: String): Type = {
+ def getParts(start: Int): List[String] = {
+ val end = skipIdent(str, start)
+ if (end == start) List()
+ else str.substring (start, end) :: {
+ if (end < str.length && (str charAt end) == '.') getParts(end + 1)
+ else List()
+ }
+ }
+ val parts = getParts(0)
+ assert(parts.length > 0, "parts is empty '" + str + "' in site " + site)
+ val partnames = (parts.init map newTermName) ::: List(newTypeName(parts.last))
+ val (start, rest) =
+ if (parts.head == "this")
+ (site.thisType, partnames.tail)
+ else if (parts.tail.nonEmpty && parts(1) == "this")
+ site.ownerChain.find(_.name.toString == parts.head) match {
+ case Some(clazz) => (clazz.thisType, partnames.drop(2))
+ case None => (NoType, List())
+ }
+ else
+ (getSite(partnames.head), partnames.tail)
+ (start /: rest)(select(_, _, NoType))
+ }
+
+ val aliasExpansions: List[Type] =
+ for (alias <- aliases) yield
+ lookupVariable(alias.name.toString.substring(1), site) match {
+ case Some(repl) =>
+ val tpe = getType(repl.trim)
+ if (tpe != NoType) tpe
+ else {
+ val alias1 = alias.cloneSymbol(definitions.RootClass)
+ alias1.name = repl.toTypeName
+ TypeRef(NoPrefix, alias1, List())
+ }
+ case None =>
+ TypeRef(NoPrefix, alias, List())
+ }
+
+ def subst(sym: Symbol, from: List[Symbol], to: List[Type]): Type =
+ if (from.isEmpty) sym.tpe
+ else if (from.head == sym) to.head
+ else subst(sym, from.tail, to.tail)
+
+ val substAliases = new TypeMap {
+ def apply(tp: Type) = mapOver(tp) match {
+ case tp1 @ TypeRef(pre, sym, args) if (sym.name.length > 1 && sym.name(0) == '$') =>
+ subst(sym, aliases, aliasExpansions) match {
+ case TypeRef(pre1, sym1, _) =>
+ TypeRef(pre1, sym1, args)
+ case _ =>
+ tp1
+ }
+ case tp1 =>
+ tp1
+ }
+ }
+
+ for (defn <- defined) yield {
+ defn.cloneSymbol.setFlag(Flags.SYNTHETIC).setInfo(
+ substAliases(defn.info).asSeenFrom(site.thisType, defn.owner))
+ }
+ }
+ }
+
+ class ExpansionLimitExceeded(str: String) extends Exception
+} \ No newline at end of file
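
expandWiki above folds the raw comment through the (regex, replacement) pairs in wikiReplacements. A minimal standalone sketch of the same left-fold, using two illustrative rules rather than the full table (WikiDemo and its rules are not part of the compiler):

object WikiDemo {
  import scala.util.matching.Regex

  // Two of the wiki rules, for illustration: `code` and '''bold'''.
  val rules: List[(Regex, String)] = List(
    ("""`([^`]*)`""".r,     "<code>$1</code>"),
    ("""'''([^']*)'''""".r, "<b>$1</b>"))

  // Apply each rule in turn, threading the partially rewritten string through.
  def expand(str: String): String =
    (str /: rules) { case (s, (re, repl)) => re.replaceAllIn(s, repl) }

  def main(args: Array[String]): Unit = {
    // prints: use <code>foo</code> when <b>really</b> needed
    println(expand("use `foo` when '''really''' needed"))
  }
}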
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index 6d4d5f99fd..111bf48a45 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast
@@ -76,12 +75,11 @@ abstract class NodePrinters {
if (sym hasFlag MUTABLE ) buf.append(" | MUTABLE")
if (sym hasFlag PARAM ) buf.append(" | PARAM")
if (sym hasFlag PACKAGE ) buf.append(" | PACKAGE")
- if (sym hasFlag DEPRECATED ) buf.append(" | DEPRECATED")
if (sym hasFlag COVARIANT ) buf.append(" | COVARIANT")
if (sym hasFlag CAPTURED ) buf.append(" | CAPTURED")
if (sym hasFlag BYNAMEPARAM ) buf.append(" | BYNAMEPARAM")
- if (sym hasFlag CONTRAVARIANT) buf.append(" | CONTRVARIANT")
+ if (sym hasFlag CONTRAVARIANT) buf.append(" | CONTRAVARIANT")
if (sym hasFlag LABEL ) buf.append(" | LABEL")
if (sym hasFlag INCONSTRUCTOR) buf.append(" | INCONSTRUCTOR")
if (sym hasFlag ABSOVERRIDE ) buf.append(" | ABSOVERRIDE")
@@ -159,46 +157,27 @@ abstract class NodePrinters {
}
def nodeinfo2(tree: Tree): String =
(if (comma) "," else "") + nodeinfo(tree)
+
+ def applyCommon(name: String, tree: Tree, fun: Tree, args: List[Tree]) {
+ println(name + "(" + nodeinfo(tree))
+ traverse(fun, level + 1, true)
+ if (args.isEmpty)
+ println(" Nil // no argument")
+ else {
+ val n = args.length
+ println(" List( // " + n + " arguments(s)")
+ for (i <- 0 until n)
+ traverse(args(i), level + 2, i < n-1)
+ println(" )")
+ }
+ printcln(")")
+ }
+
tree match {
- case AppliedTypeTree(tpt, args) =>
- println("AppliedTypeTree(" + nodeinfo(tree))
- traverse(tpt, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " arguments(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
- case Apply(fun, args) =>
- println("Apply(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " argument(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
- case ApplyDynamic(fun, args) =>
- println("ApplyDynamic(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " argument(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
+ case AppliedTypeTree(tpt, args) => applyCommon("AppliedTypeTree", tree, tpt, args)
+ case Apply(fun, args) => applyCommon("Apply", tree, fun, args)
+ case ApplyDynamic(fun, args) => applyCommon("ApplyDynamic", tree, fun, args)
+
case Block(stats, expr) =>
println("Block(" + nodeinfo(tree))
if (stats.isEmpty)
@@ -356,16 +335,11 @@ abstract class NodePrinters {
def printUnit(unit: CompilationUnit) {
print("// Scala source: " + unit.source + "\n")
- if (unit.body ne null) {
- print(nodeToString(unit.body)); println()
- } else {
- print("<null>")
- }
- println()
+ println(Option(unit.body) map (x => nodeToString(x) + "\n") getOrElse "<null>")
}
def printAll() {
print("[[syntax trees at end of " + phase + "]]")
- for (unit <- global.currentRun.units) printUnit(unit)
+ global.currentRun.units foreach printUnit
}
}
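
The printUnit change above swaps an explicit null check for the Option(...) factory: wrapping a possibly-null value gives None for null and Some otherwise. A toy sketch of the same idiom (OptionNullDemo is illustrative only):

object OptionNullDemo {
  // Option(x) is None when x is null, Some(x) otherwise.
  def render(body: String): String =
    Option(body) map (b => b + "\n") getOrElse "<null>"

  def main(args: Array[String]): Unit = {
    println(render("some tree")) // the body, followed by a blank line
    println(render(null))        // <null>
  }
}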
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index e6a247483d..0555f6c035 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast
@@ -224,8 +223,12 @@ abstract class TreeBrowsers {
t.symbol.owner.toString
else
"NoSymbol has no owner")
- if ((t.symbol ne null) && t.symbol.isType)
- str.append("\ntermSymbol: " + t.symbol.tpe.termSymbol + "\ntypeSymbol: " + t.symbol.tpe.typeSymbol)
+ if ((t.symbol ne null) && t.symbol.isType) {
+ str.append("\ntermSymbol: " + t.symbol.tpe.termSymbol
+ + "\ntypeSymbol: " + t.symbol.tpe.typeSymbol)
+ if (t.symbol.isTypeSkolem)
+ str.append("\nSkolem of: " + t.symbol.deSkolemize)
+ }
str.append("\nSymbol tpe: ")
if (t.symbol ne null) {
str.append(t.symbol.tpe).append("\n")
@@ -562,7 +565,8 @@ abstract class TreeBrowsers {
if ((s ne null) && (s != NoSymbol)) {
var str = flagsToString(s.flags)
if (s.isStaticMember) str = str + " isStatic ";
- str + " annotations: " + s.annotations.mkString("", " ", "")
+ (str + " annotations: " + s.annotations.mkString("", " ", "")
+ + (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
}
else ""
}
@@ -680,7 +684,7 @@ abstract class TreeBrowsers {
toDocument(thistpe) :/: ", " :/:
toDocument(supertpe) ::")"))
case _ =>
- throw new Error("Unknown case: " + t.toString() +", "+ t.getClass)
+ Predef.error("Unknown case: " + t.toString() +", "+ t.getClass)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index c5cef0dd32..a24c8c01d3 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
*
* @author Paul Phillips
*/
@@ -7,6 +7,8 @@
package scala.tools.nsc
package ast
+import PartialFunction._
+
/** A DSL for generating scala code. The goal is that the
* code generating code should look a lot like the code it
* generates.
@@ -24,27 +26,14 @@ trait TreeDSL {
def nullSafe[T](f: Tree => Tree, ifNull: Tree): Tree => Tree =
tree => IF (tree MEMBER_== NULL) THEN ifNull ELSE f(tree)
- // XXX these two are in scala.PartialFunction now, just have to
- // settle on the final names.
-
- // Create a conditional based on a partial function - for values not defined
- // on the partial, it is false.
- def cond[T](x: T)(f: PartialFunction[T, Boolean]) = (f isDefinedAt x) && f(x)
-
- // Like cond, but transforms the value T => Some(U) if the pf is defined,
- // or returns None if it is not.
- def condOpt[T,U](x: T)(f: PartialFunction[T, U]): Option[U] =
- if (f isDefinedAt x) Some(f(x)) else None
-
- // Applies a function to a value and then returns the value.
- def returning[T](f: T => Unit)(x: T): T = { f(x) ; x }
-
// strip bindings to find what lies beneath
final def unbind(x: Tree): Tree = x match {
case Bind(_, y) => unbind(y)
case y => y
}
+ def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f)
+
object LIT extends (Any => Literal) {
def apply(x: Any) = Literal(Constant(x))
def unapply(x: Any) = condOpt(x) { case Literal(Constant(value)) => value }
@@ -87,21 +76,28 @@ trait TreeDSL {
else gen.mkAnd(target, other)
/** Note - calling ANY_== in the matcher caused primitives to get boxed
- * for the comparison, whereas looking up nme.EQ does not.
+ * for the comparison, whereas looking up nme.EQ does not. See #3570 for
+ * an example of how target.tpe can be non-null, yet it claims not to have
+ * a member called nme.EQ. Not sure if that should happen, but we can be
+ * robust by dragging in Any regardless.
*/
def MEMBER_== (other: Tree) = {
- if (target.tpe == null) ANY_==(other)
- else fn(target, target.tpe member nme.EQ, other)
+ val opSym = if (target.tpe == null) NoSymbol else target.tpe member nme.EQ
+ if (opSym == NoSymbol) ANY_==(other)
+ else fn(target, opSym, other)
}
- def ANY_NE (other: Tree) = fn(target, nme.ne, toAnyRef(other))
def ANY_EQ (other: Tree) = fn(target, nme.eq, toAnyRef(other))
+ def ANY_NE (other: Tree) = fn(target, nme.ne, toAnyRef(other))
def ANY_== (other: Tree) = fn(target, Any_==, other)
- def ANY_>= (other: Tree) = fn(target, nme.GE, other)
- def ANY_<= (other: Tree) = fn(target, nme.LE, other)
- def OBJ_!= (other: Tree) = fn(target, Object_ne, other)
+ def ANY_!= (other: Tree) = fn(target, Any_!=, other)
+ def OBJ_== (other: Tree) = fn(target, Object_==, other)
+ def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
+ def OBJ_EQ (other: Tree) = fn(target, Object_eq, other)
+ def OBJ_NE (other: Tree) = fn(target, Object_ne, other)
def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
+ def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
@@ -186,7 +182,10 @@ trait TreeDSL {
if (target.tpe.typeSymbol == SomeClass) TRUE // is Some[_]
else NOT(ID(target) DOT nme.isEmpty) // is Option[_]
- def GET() = fn(ID(target), nme.get)
+ def IS_NULL() = REF(target) ANY_EQ NULL
+ def NOT_NULL() = REF(target) ANY_NE NULL
+
+ def GET() = fn(REF(target), nme.get)
// name of nth indexed argument to a method (first parameter list), defaults to 1st
def ARG(idx: Int = 0) = Ident(target.paramss.head(idx))
@@ -195,10 +194,11 @@ trait TreeDSL {
}
/** Top level accessible. */
- def THROW(sym: Symbol, msg: Tree = null) = {
- val arg: List[Tree] = if (msg == null) Nil else List(msg.TOSTRING())
- Throw(New(TypeTree(sym.tpe), List(arg)))
- }
+ def MATCHERROR(arg: Tree) = Throw(New(TypeTree(MatchErrorClass.tpe), List(List(arg))))
+ /** !!! should generalize null guard from match error here. */
+ def THROW(sym: Symbol): Throw = Throw(New(TypeTree(sym.tpe), List(Nil)))
+ def THROW(sym: Symbol, msg: Tree): Throw = Throw(New(TypeTree(sym.tpe), List(List(msg.TOSTRING()))))
+
def NEW(tpe: Tree, args: Tree*) = New(tpe, List(args.toList))
def NEW(sym: Symbol, args: Tree*) =
if (args.isEmpty) New(TypeTree(sym.tpe))
@@ -210,6 +210,10 @@ trait TreeDSL {
if (guards.isEmpty) EmptyTree
else guards reduceLeft gen.mkAnd
+ def OR(guards: Tree*) =
+ if (guards.isEmpty) EmptyTree
+ else guards reduceLeft gen.mkOr
+
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
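
The cond and condOpt helpers removed from TreeDSL above now come from scala.PartialFunction (hence the new import PartialFunction._). A small usage sketch; CondDemo and its methods are illustrative only:

import PartialFunction.{ cond, condOpt }

object CondDemo {
  // cond: false whenever the partial function is not defined at x.
  def isZeroLiteral(x: Any): Boolean =
    cond(x) { case 0 => true }

  // condOpt: None whenever the partial function is not defined at x.
  def stringLength(x: Any): Option[Int] =
    condOpt(x) { case s: String => s.length }

  def main(args: Array[String]): Unit = {
    println(isZeroLiteral(0))      // true
    println(isZeroLiteral("zero")) // false
    println(stringLength("abc"))   // Some(3)
    println(stringLength(42))      // None
  }
}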
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index bca52c1d44..a5904971b9 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast
@@ -14,8 +13,7 @@ import symtab.SymbolTable
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
* TreeDSL at the moment expects a Global. Can we get by with SymbolTable?
*/
-abstract class TreeGen
-{
+abstract class TreeGen {
val global: SymbolTable
import global._
@@ -31,10 +29,13 @@ abstract class TreeGen
private def isRootOrEmptyPackageClass(s: Symbol) = s.isRoot || s.isEmptyPackageClass
- def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree): Tree =
- AppliedTypeTree(
- scalaDot(newTypeName("Function"+argtpes.length)),
- argtpes ::: List(restpe))
+ def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
+ val cls = if (abstractFun)
+ mkAttributedRef(AbstractFunctionClass(argtpes.length))
+ else
+ mkAttributedRef(FunctionClass(argtpes.length))
+ AppliedTypeTree(cls, argtpes ::: List(restpe))
+ }
/** Builds a reference to value whose type is given stable prefix.
* The type must be suitable for this. For example, it
@@ -98,7 +99,7 @@ abstract class TreeGen
mkAttributedQualifier(firstStable.get)
case _ =>
- throw new Error("bad qualifier: " + tpe)
+ abort("bad qualifier: " + tpe)
}
/** Builds a reference to given symbol with given stable prefix. */
@@ -116,18 +117,26 @@ abstract class TreeGen
if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
else mkAttributedIdent(sym)
+ /** Builds an untyped reference to given symbol. */
+ def mkUnattributedRef(sym: Symbol): Tree =
+ if (sym.owner.isClass) Select(This(sym.owner), sym)
+ else Ident(sym)
+
/** Replaces tree type with a stable type if possible */
- def stabilize(tree: Tree): Tree = tree match {
- case Ident(_) =>
- if (tree.symbol.isStable) tree.setType(singleType(tree.symbol.owner.thisType, tree.symbol))
- else tree
- case Select(qual, _) =>
- assert((tree.symbol ne null) && (qual.tpe ne null))
- if (tree.symbol.isStable && qual.tpe.isStable)
- tree.setType(singleType(qual.tpe, tree.symbol))
- else tree
+ def stabilize(tree: Tree): Tree = {
+ for(tp <- stableTypeFor(tree)) tree.tpe = tp
+ tree
+ }
+
+ /** Computes stable type for a tree if possible */
+ def stableTypeFor(tree: Tree): Option[Type] = tree match {
+ case Ident(_) if tree.symbol.isStable =>
+ Some(singleType(tree.symbol.owner.thisType, tree.symbol))
+ case Select(qual, _) if {assert((tree.symbol ne null) && (qual.tpe ne null));
+ tree.symbol.isStable && qual.tpe.isStable} =>
+ Some(singleType(qual.tpe, tree.symbol))
case _ =>
- tree
+ None
}
/** Cast `tree' to type `pt' */
@@ -136,7 +145,7 @@ abstract class TreeGen
assert(!tree.tpe.isInstanceOf[MethodType], tree)
assert(!pt.typeSymbol.isPackageClass)
assert(!pt.typeSymbol.isPackageObjectClass)
- assert(pt eq pt.normalize) //@MAT only called during erasure, which already takes care of that
+ assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize)) //@MAT only called during erasure, which already takes care of that
atPos(tree.pos)(mkAsInstanceOf(tree, pt, false))
}
@@ -191,8 +200,21 @@ abstract class TreeGen
def mkAsInstanceOf(value: Tree, tpe: Type, any: Boolean = true): Tree =
mkTypeApply(value, tpe, (if (any) Any_asInstanceOf else Object_asInstanceOf))
+ /** Cast `tree' to 'pt', unless tpe is a subtype of pt, or pt is Unit. */
+ def maybeMkAsInstanceOf(tree: Tree, pt: Type, tpe: Type, beforeRefChecks: Boolean = false): Tree =
+ if ((pt == UnitClass.tpe) || (tpe <:< pt)) {
+ log("no need to cast from " + tpe + " to " + pt)
+ tree
+ } else
+ atPos(tree.pos) {
+ if (beforeRefChecks)
+ TypeApply(mkAttributedSelect(tree, Any_asInstanceOf), List(TypeTree(pt)))
+ else
+ mkAsInstanceOf(tree, pt)
+ }
+
def mkClassOf(tp: Type): Tree =
- Literal(Constant(tp)) setType Predef_classOfType(tp)
+ Literal(Constant(tp)) setType ConstantType(Constant(tp))// ClassType(tp)
def mkCheckInit(tree: Tree): Tree = {
val tpe =
@@ -228,8 +250,11 @@ abstract class TreeGen
def mkOr(tree1: Tree, tree2: Tree): Tree =
Apply(Select(tree1, Boolean_or), List(tree2))
+ // wrap the given expression in a SoftReference so it can be gc-ed
+ def mkSoftRef(expr: Tree): Tree = New(TypeTree(SoftReferenceClass.tpe), List(List(expr)))
+
def mkCached(cvar: Symbol, expr: Tree): Tree = {
- val cvarRef = if (cvar.owner.isClass) Select(This(cvar.owner), cvar) else Ident(cvar)
+ val cvarRef = mkUnattributedRef(cvar)
Block(
List(
If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))),
@@ -239,30 +264,25 @@ abstract class TreeGen
)
}
- // var m$: T = null; or, if class member: local var m$: T = _;
- /*!!!
- def mkModuleValDef(accessor: Symbol) = {
- val mval = accessor.owner.newValue(accessor.pos.focus, nme.moduleVarName(accessor.name))
- .setInfo(accessor.tpe.finalResultType)
- .setFlag(LAZY);
- if (mval.owner.isClass) {
- mval setFlag (PRIVATE | LOCAL | SYNTHETIC)
- mval.owner.info.decls.enter(mval)
- }
- ValDef(mval, New(TypeTree(mval.tpe), List(List())))
+ // Builds a tree of the form "{ lhs = rhs ; lhs }"
+ def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = {
+ val lhsRef = mkUnattributedRef(lhs)
+ Block(Assign(lhsRef, rhs) :: Nil, lhsRef)
}
- */
- // var m$: T = null; or, if class member: local var m$: T = _;
def mkModuleVarDef(accessor: Symbol) = {
- val mvar = accessor.owner.newVariable(accessor.pos.focus, nme.moduleVarName(accessor.name))
- .setInfo(accessor.tpe.finalResultType)
- .setFlag(MODULEVAR);
- if (mvar.owner.isClass) {
- mvar setFlag (PRIVATE | LOCAL | SYNTHETIC)
- mvar.owner.info.decls.enter(mvar)
+ val mval = (
+ accessor.owner.newVariable(accessor.pos.focus, nme.moduleVarName(accessor.name))
+ setInfo accessor.tpe.finalResultType
+ setFlag (MODULEVAR)
+ )
+
+ mval.addAnnotation(AnnotationInfo(VolatileAttr.tpe, Nil, Nil))
+ if (mval.owner.isClass) {
+ mval setFlag (PRIVATE | LOCAL | SYNTHETIC)
+ mval.owner.info.decls.enter(mval)
}
- ValDef(mvar, if (mvar.owner.isClass) EmptyTree else Literal(Constant(null)))
+ ValDef(mval)
}
// def m: T = { if (m$ eq null) m$ = new m$class(...) m$ }
@@ -275,7 +295,7 @@ abstract class TreeGen
def mkModuleAccessDef(accessor: Symbol, tpe: Type) =
DefDef(accessor, newModule(accessor, tpe))
- private def newModule(accessor: Symbol, tpe: Type) =
+ def newModule(accessor: Symbol, tpe: Type) =
New(TypeTree(tpe),
List(for (pt <- tpe.typeSymbol.primaryConstructor.info.paramTypes)
yield This(accessor.owner.enclClass)))
@@ -328,6 +348,17 @@ abstract class TreeGen
Apply(meth, List(tree))
}
+ /** Try to convert Select(qual, name) to a SelectFromTypeTree.
+ */
+ def convertToSelectFromType(qual: Tree, name: Name): Tree = {
+ def selFromType(qual1: Tree) = SelectFromTypeTree(qual1 setPos qual.pos, name)
+ qual match {
+ case Select(qual1, name) => selFromType(Select(qual1, name.toTypeName))
+ case Ident(name) => selFromType(Ident(name.toTypeName))
+ case _ => EmptyTree
+ }
+ }
+
/** Used in situations where you need to access value of an expression several times
*/
def evalOnce(expr: Tree, owner: Symbol, unit: CompilationUnit)(within: (() => Tree) => Tree): Tree = {
@@ -368,4 +399,21 @@ abstract class TreeGen
if (prefix.isEmpty) containing
else Block(prefix, containing) setPos (prefix.head.pos union containing.pos)
}
+
+ /** Return a double-checked locking idiom around the syncBody tree. It guards with 'cond' and
+ * synchronizes on 'clazz.this'. Additional statements can be included after initialization,
+ * (outside the synchronized block).
+ *
+ * The idiom works only if the condition is using a volatile field.
+ * @see http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html
+ */
+ def mkDoubleCheckedLocking(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree = {
+ If(cond,
+ Block(
+ mkSynchronized(
+ mkAttributedThis(clazz),
+ If(cond, Block(syncBody: _*), EmptyTree)) ::
+ stats: _*),
+ EmptyTree)
+ }
}
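
mkDoubleCheckedLocking above builds trees for the double-checked locking idiom its comment describes: test a volatile-backed condition, synchronize, and test again inside the lock. In plain source form the generated shape corresponds roughly to the sketch below (LazyCell is illustrative, not compiler output):

class LazyCell[T](init: () => T) {
  @volatile private var ready = false   // the volatile guard the idiom relies on
  private var value: T = _

  def get: T = {
    if (!ready) {            // first, unsynchronized check
      synchronized {
        if (!ready) {        // second check, under the lock
          value = init()
          ready = true
        }
      }
    }
    value
  }
}

object LazyCellDemo {
  def main(args: Array[String]): Unit = {
    val cell = new LazyCell(() => { println("computing"); 42 })
    println(cell.get) // computing, then 42
    println(cell.get) // 42, no recomputation
  }
}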
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index e128b4e12f..394e9709a1 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast
@@ -87,7 +86,12 @@ abstract class TreeInfo {
case TypeApply(fn, _) =>
isPureExpr(fn)
case Apply(fn, List()) =>
- isPureExpr(fn)
+ /* Note: After uncurry, field accesses are represented as Apply(getter, Nil),
+ * so an Apply can also be pure.
+ * However, before typing, applications of nullary functional values are also
+ * Apply(function, Nil) trees. To prevent them from being treated as pure,
+ * we check that the callee is a method. */
+ fn.symbol.isMethod && isPureExpr(fn)
case Typed(expr, _) =>
isPureExpr(expr)
case Block(stats, expr) =>
@@ -98,21 +102,24 @@ abstract class TreeInfo {
def mayBeVarGetter(sym: Symbol) = sym.info match {
case PolyType(List(), _) => sym.owner.isClass && !sym.isStable
- case _: ImplicitMethodType => sym.owner.isClass && !sym.isStable
+ case mt: MethodType => mt.isImplicit && sym.owner.isClass && !sym.isStable
case _ => false
}
- def isVariableOrGetter(tree: Tree) = tree match {
- case Ident(_) =>
- tree.symbol.isVariable
- case Select(qual, _) =>
- tree.symbol.isVariable ||
- (mayBeVarGetter(tree.symbol) &&
- tree.symbol.owner.info.member(nme.getterToSetter(tree.symbol.name)) != NoSymbol)
- case Apply(Select(qual, nme.apply), _) =>
- qual.tpe.member(nme.update) != NoSymbol
- case _ =>
- false
+ def isVariableOrGetter(tree: Tree) = {
+ def sym = tree.symbol
+ def isVar = sym.isVariable
+ def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name)) != NoSymbol
+
+ tree match {
+ case Ident(_) => isVar
+ case Select(_, _) => isVar || isGetter
+ case _ =>
+ methPart(tree) match {
+ case Select(qual, nme.apply) => qual.tpe.member(nme.update) != NoSymbol
+ case _ => false
+ }
+ }
}
/** Is tree a self constructor call?
@@ -149,6 +156,12 @@ abstract class TreeInfo {
case _ :: stats1 => firstConstructor(stats1)
}
+ /** The arguments to the first constructor in `stats'. */
+ def firstConstructorArgs(stats: List[Tree]): List[Tree] = firstConstructor(stats) match {
+ case DefDef(_, _, _, args :: _, _, _) => args
+ case _ => Nil
+ }
+
/** The value definitions marked PRESUPER in this statement sequence */
def preSuperFields(stats: List[Tree]): List[ValDef] =
for (vdef @ ValDef(mods, _, _, _) <- stats if mods hasFlag PRESUPER) yield vdef
@@ -193,15 +206,6 @@ abstract class TreeInfo {
reserved addEntry nme.false_
reserved addEntry nme.true_
reserved addEntry nme.null_
- reserved addEntry newTypeName("byte")
- reserved addEntry newTypeName("char")
- reserved addEntry newTypeName("short")
- reserved addEntry newTypeName("int")
- reserved addEntry newTypeName("long")
- reserved addEntry newTypeName("float")
- reserved addEntry newTypeName("double")
- reserved addEntry newTypeName("boolean")
- reserved addEntry newTypeName("unit")
/** Is name a variable name? */
def isVariableName(name: Name): Boolean = {
@@ -304,10 +308,10 @@ abstract class TreeInfo {
/** The method part of an application node
*/
def methPart(tree: Tree): Tree = tree match {
- case Apply(fn, _) => methPart(fn)
- case TypeApply(fn, _) => methPart(fn)
+ case Apply(fn, _) => methPart(fn)
+ case TypeApply(fn, _) => methPart(fn)
case AppliedTypeTree(fn, _) => methPart(fn)
- case _ => tree
+ case _ => tree
}
def firstArgument(tree: Tree): Tree = tree match {
@@ -334,18 +338,19 @@ abstract class TreeInfo {
false
}
- /** Compilation unit is the predef object
+ /** Compilation unit is class or object 'name' in package 'scala'
*/
def isUnitInScala(tree: Tree, name: Name) = tree match {
- case PackageDef(Ident(nme.scala_), defs) => isObject(defs, name)
+ case PackageDef(Ident(nme.scala_), defs) => isImplDef(defs, name)
case _ => false
}
- private def isObject(trees: List[Tree], name: Name): Boolean = trees match {
- case Import(_, _) :: xs => isObject(xs, name)
- case DocDef(_, tree1) :: Nil => isObject(List(tree1), name)
- case Annotated(_, tree1) :: Nil => isObject(List(tree1), name)
+ private def isImplDef(trees: List[Tree], name: Name): Boolean = trees match {
+ case Import(_, _) :: xs => isImplDef(xs, name)
+ case DocDef(_, tree1) :: Nil => isImplDef(List(tree1), name)
+ case Annotated(_, tree1) :: Nil => isImplDef(List(tree1), name)
case ModuleDef(_, `name`, _) :: Nil => true
+ case ClassDef(_, `name`, _, _) :: Nil => true
case _ => false
}
@@ -359,4 +364,21 @@ abstract class TreeInfo {
case TypeDef(_, _, _, _) => !isAbsTypeDef(tree)
case _ => false
}
+
+ /** Some handy extractors for spotting true and false expressions
+ * through the haze of braces.
+ */
+ abstract class SeeThroughBlocks[T] {
+ protected def unapplyImpl(x: Tree): T
+ def unapply(x: Tree): T = x match {
+ case Block(Nil, expr) => unapply(expr)
+ case _ => unapplyImpl(x)
+ }
+ }
+ object IsTrue extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x equalsStructure Literal(Constant(true))
+ }
+ object IsFalse extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x equalsStructure Literal(Constant(false))
+ }
}
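
The SeeThroughBlocks extractors added above peel off empty Block wrappers before testing for a true/false literal. The same recursive-unapply pattern on a toy AST (SeeThroughDemo and its Expr types are illustrative, not the compiler's trees):

object SeeThroughDemo {
  sealed trait Expr
  case class Lit(value: Any)                      extends Expr
  case class Block(stats: List[Expr], expr: Expr) extends Expr

  object IsTrue {
    def unapply(e: Expr): Boolean = e match {
      case Block(Nil, inner) => unapply(inner) // look through empty blocks
      case Lit(true)         => true
      case _                 => false
    }
  }

  def main(args: Array[String]): Unit = {
    val wrapped = Block(Nil, Block(Nil, Lit(true)))
    val result = wrapped match {
      case IsTrue() => "true literal"
      case _        => "other"
    }
    println(result) // true literal
  }
}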
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
index 0423044aa3..10b50db6d5 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
@@ -1,25 +1,23 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast
import compat.Platform.{EOL => LINE_SEPARATOR}
-import java.io.{OutputStream, PrintWriter, Writer}
+import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import symtab.Flags._
import symtab.SymbolTable
-abstract class TreePrinters {
+trait TreePrinters { trees: SymbolTable =>
- val trees: SymbolTable
- import trees._
+ import treeInfo.{ IsTrue, IsFalse }
final val showOuterTests = false
- class TreePrinter(out: PrintWriter) {
+ class TreePrinter(out: PrintWriter) extends trees.AbsTreePrinter(out) {
protected var indentMargin = 0
protected val indentStep = 2
protected var indentString = " " // 40
@@ -29,7 +27,10 @@ abstract class TreePrinters {
def indent = indentMargin += indentStep
def undent = indentMargin -= indentStep
- def println {
+ protected def doPrintPositions = settings.Xprintpos.value
+ def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show)
+
+ def println() {
out.println()
while (indentMargin > indentString.length())
indentString += indentString
@@ -46,8 +47,8 @@ abstract class TreePrinters {
}
def printColumn(ts: List[Tree], start: String, sep: String, end: String) {
- print(start); indent; println
- printSeq(ts){print}{print(sep); println}; undent; println; print(end)
+ print(start); indent; println()
+ printSeq(ts){print}{print(sep); println()}; undent; println(); print(end)
}
def printRow(ts: List[Tree], start: String, sep: String, end: String) {
@@ -75,11 +76,11 @@ abstract class TreePrinters {
def printParam(tree: Tree) {
tree match {
case ValDef(mods, name, tp, rhs) =>
- if (settings.Xprintpos.value) print(tree.pos.show)
+ printPosition(tree)
printAnnotations(tree)
print(symName(tree, name)); printOpt(": ", tp); printOpt(" = ", rhs)
case TypeDef(mods, name, tparams, rhs) =>
- if (settings.Xprintpos.value) print(tree.pos.show)
+ printPosition(tree)
print(symName(tree, name))
printTypeParams(tparams); print(rhs)
}
@@ -93,46 +94,47 @@ abstract class TreePrinters {
printColumn(List(tree), "{", ";", "}")
}
}
- def symName(tree: Tree, name: Name): String =
- if (tree.symbol != null && tree.symbol != NoSymbol) {
- ((if (tree.symbol.isMixinConstructor) "/*"+tree.symbol.owner.name+"*/" else "") +
- tree.symbol.nameString)
- } else name.toString();
+
+ private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
+ val nameToString: Name => String = if (decoded) _.decode else _.toString
+
+ tree.symbol match {
+ case null | NoSymbol => nameToString(name)
+ case sym =>
+ val prefix = if (sym.isMixinConstructor) "/*%s*/".format(nameToString(sym.owner.name)) else ""
+ prefix + tree.symbol.nameString
+ }
+ }
+
+ def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
+ def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
def printOpt(prefix: String, tree: Tree) {
if (!tree.isEmpty) { print(prefix); print(tree) }
}
def printModifiers(tree: Tree, mods: Modifiers) {
- if (tree.symbol == NoSymbol)
- printFlags(mods.flags, mods.privateWithin.toString)
- else if (tree.symbol.privateWithin == NoSymbol ||
- tree.symbol.privateWithin == tree.symbol.owner)
- printFlags(tree.symbol.flags, "")
- else
- printFlags(tree.symbol.flags, tree.symbol.privateWithin.name.toString)
+ def pw = tree.symbol.privateWithin
+ val args =
+ if (tree.symbol == NoSymbol) (mods.flags, mods.privateWithin)
+ else if (pw == NoSymbol) (tree.symbol.flags, "")
+ else (tree.symbol.flags, pw.name)
+
+ printFlags(args._1, args._2.toString)
}
def printFlags(flags: Long, privateWithin: String) {
var mask: Long = if (settings.debug.value) -1L else PrintableFlags
val s = flagsToString(flags & mask, privateWithin)
- if (s.length() != 0) print(s + " ")
+ if (s != "") print(s + " ")
}
def printAnnotations(tree: Tree) {
- if (!tree.symbol.rawAnnotations.isEmpty) {
- val annots = tree.symbol.annotations
- if (!annots.isEmpty) {
- annots foreach { annot => print("@"+annot+" ") }
- println
- }
- } else {
- val annots = tree.asInstanceOf[MemberDef].mods.annotations
- if (!annots.isEmpty) {
- annots foreach { annot => print("@"+annot+" ") }
- println
- }
- }
+ val annots =
+ if (tree.symbol.rawAnnotations.nonEmpty) tree.symbol.annotations
+ else tree.asInstanceOf[MemberDef].mods.annotations
+
+ annots foreach (annot => print("@"+annot+" "))
}
def print(str: String) { out.print(str) }
@@ -165,7 +167,7 @@ abstract class TreePrinters {
case ValDef(mods, name, tp, rhs) =>
printAnnotations(tree)
printModifiers(tree, mods)
- print(if (mods.hasFlag(MUTABLE)) "var " else "val ")
+ print(if (mods.isVariable) "var " else "val ")
print(symName(tree, name))
printOpt(": ", tp)
if (!mods.hasFlag(DEFERRED)) {
@@ -215,7 +217,7 @@ abstract class TreePrinters {
}
case DocDef(comment, definition) =>
- print(comment); println; print(definition)
+ print(comment.raw); println(); print(definition)
case Template(parents, self, body) =>
val currentOwner1 = currentOwner
@@ -281,10 +283,10 @@ abstract class TreePrinters {
print(lhs); print(" = "); print(rhs)
case If(cond, thenp, elsep) =>
- print("if ("); print(cond); print(")"); indent; println
+ print("if ("); print(cond); print(")"); indent; println()
print(thenp); undent
if (!elsep.isEmpty) {
- println; print("else"); indent; println; print(elsep); undent
+ println(); print("else"); indent; println(); print(elsep); undent
}
case Return(expr) =>
@@ -377,8 +379,8 @@ abstract class TreePrinters {
case SelectFromArray(qualifier, name, _) =>
print(qualifier); print(".<arr>"); print(symName(tree, name))
- case tree: StubTree =>
- print(tree.toString)
+ case TypeTreeWithDeferredRefCheck() =>
+ print("<tree with deferred refcheck>")
case tree =>
print("<unknown tree of class "+tree.getClass+">")
@@ -389,11 +391,11 @@ abstract class TreePrinters {
}
def print(tree: Tree) {
- if (settings.Xprintpos.value) print(tree.pos.show)
+ printPosition(tree)
printRaw(
if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) {
tree match {
- case ClassDef(_, _, _, impl @ Template(ps, trees.emptyValDef, body))
+ case ClassDef(_, _, _, impl @ Template(ps, emptyValDef, body))
if (tree.symbol.thisSym != tree.symbol) =>
ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body))
case ClassDef(_, _, _, impl) => ClassDef(tree.symbol, impl)
@@ -409,19 +411,256 @@ abstract class TreePrinters {
def print(unit: CompilationUnit) {
print("// Scala source: " + unit.source + LINE_SEPARATOR)
if (unit.body ne null) {
- print(unit.body); println
+ print(unit.body); println()
} else {
print("<null>")
}
- println; flush
+ println(); flush
}
}
- def create(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
- def create(stream: OutputStream): TreePrinter = create(new PrintWriter(stream))
- def create(): TreePrinter = {
- create(new PrintWriter(ConsoleWriter))
+ /** A tree printer which is stingier about vertical whitespace and unnecessary
+ * punctuation than the standard one.
+ */
+ class CompactTreePrinter(out: PrintWriter) extends TreePrinter(out) {
+ override def printRow(ts: List[Tree], start: String, sep: String, end: String) {
+ print(start)
+ printSeq(ts)(print)(print(sep))
+ print(end)
+ }
+
+ // drill down through Blocks and pull out the real statements.
+ def allStatements(t: Tree): List[Tree] = t match {
+ case Block(stmts, expr) => (stmts flatMap allStatements) ::: List(expr)
+ case _ => List(t)
+ }
+
+ def printLogicalOr(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
+ printLogicalOp(t1, t2, "||")
+
+ def printLogicalAnd(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
+ printLogicalOp(t1, t2, "&&")
+
+ def printLogicalOp(t1: (Tree, Boolean), t2: (Tree, Boolean), op: String) = {
+ def maybenot(tvalue: Boolean) = if (tvalue) "" else "!"
+
+ print("%s(" format maybenot(t1._2))
+ printRaw(t1._1)
+ print(") %s %s(".format(op, maybenot(t2._2)))
+ printRaw(t2._1)
+ print(")")
+ }
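    // A minimal sketch of what printLogicalOp emits (hypothetical condition
    // trees `a` and `b`, not taken from the patch): the Boolean in each pair
    // records whether the operand appears positively, so
    //   printLogicalOp(a -> true, b -> false, "&&")
    // prints roughly `(a) && !(b)`.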
+
+ override def printRaw(tree: Tree): Unit = {
+ // routing supercalls through this for debugging ease
+ def s() = super.printRaw(tree)
+
+ tree match {
+ // labels used for jumps - does not map to valid scala code
+ case LabelDef(name, params, rhs) =>
+ print("labeldef %s(%s) = ".format(name, params mkString ","))
+ printRaw(rhs)
+
+ case Ident(name) =>
+ print(decodedSymName(tree, name))
+
+ // target.method(arg) ==> target method arg
+ case Apply(Select(target, method), List(arg)) =>
+ if (method.decode.toString == "||")
+ printLogicalOr(target -> true, arg -> true)
+ else if (method.decode.toString == "&&")
+ printLogicalAnd(target -> true, arg -> true)
+ else (target, arg) match {
+ case (_: Ident, _: Literal | _: Ident) =>
+ printRaw(target)
+ print(" ")
+ printRaw(Ident(method))
+ print(" ")
+ printRaw(arg)
+ case _ => s()
+ }
+
+ // target.unary_! ==> !target
+ case Select(qualifier, name) if (name.decode startsWith "unary_") =>
+ print(name.decode drop 6)
+ printRaw(qualifier)
+
+ case Select(qualifier, name) =>
+ printRaw(qualifier)
+ print(".")
+ print(name.decode)
+
+ // target.toString() ==> target.toString
+ case Apply(fn, Nil) => printRaw(fn)
+
+      // if a Block only contains one actual statement, just print it.
+ case Block(stats, expr) =>
+ allStatements(tree) match {
+ case List(x) => printRaw(x)
+ case xs => s()
+ }
+
+ // We get a lot of this stuff
+ case If( IsTrue(), x, _) => printRaw(x)
+ case If(IsFalse(), _, x) => printRaw(x)
+
+ case If(cond, IsTrue(), elsep) => printLogicalOr(cond -> true, elsep -> true)
+ case If(cond, IsFalse(), elsep) => printLogicalAnd(cond -> false, elsep -> true)
+ case If(cond, thenp, IsTrue()) => printLogicalOr(cond -> false, thenp -> true)
+ case If(cond, thenp, IsFalse()) => printLogicalAnd(cond -> true, thenp -> true)
+
+ // If thenp or elsep has only one statement, it doesn't need more than one line.
+ case If(cond, thenp, elsep) =>
+ def ifIndented(x: Tree) = {
+ indent ; println() ; printRaw(x) ; undent
+ }
+
+ val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
+ print("if ("); print(cond); print(")")
+
+ thenStmts match {
+ case List(x: If) => ifIndented(x)
+ case List(x) => printRaw(x)
+ case _ => printRaw(thenp)
+ }
+
+ if (elseStmts.nonEmpty) {
+ print("else")
+ indent ; println()
+ elseStmts match {
+ case List(x) => printRaw(x)
+ case _ => printRaw(elsep)
+ }
+ undent ; println()
+ }
+ case _ => s()
+ }
+ }
}
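  // A small illustration of the compact rules above (hypothetical attributed
  // input, sketch only): a call tree for `x.$eq$eq(y)` with Ident operands is
  // rendered infix as `x == y` via the decoded names, a Block whose flattened
  // statements reduce to a single tree prints as just that tree, and
  // If(IsTrue(), thenp, _) collapses to `thenp`.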
+
+ /** This must guarantee not to force any evaluation, so we can learn
+ * a little bit about trees in the midst of compilation without altering
+ * the natural course of events.
+ */
+ class SafeTreePrinter(out: PrintWriter) extends TreePrinter(out) {
+ override def print(tree: Tree) {
+ printPosition(tree)
+ printRaw(tree)
+ }
+ private def default(t: Tree) = t.getClass.getName.reverse.takeWhile(_ != '.').reverse
+ private def params(trees: List[Tree]): String = trees map safe mkString ", "
+
+ private def safe(name: Name): String = name.decode
+ private def safe(tree: Tree): String = tree match {
+ case Apply(fn, args) => "%s(%s)".format(safe(fn), params(args))
+ case Select(qual, name) => safe(qual) + "." + safe(name)
+ case This(qual) => safe(qual) + ".this"
+ case Ident(name) => safe(name)
+ case Literal(value) => value.stringValue
+ case _ => "(?: %s)".format(default(tree))
+ }
+
+ override def printRaw(tree: Tree) { print(safe(tree)) }
+ }
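  // Sketch of the `safe` rendering (hypothetical tree, not from the patch):
  //   Apply(Select(Ident(foo), bar), List(Literal(Constant(42))))
  // prints as `foo.bar(42)`, while any node outside the handled shapes falls
  // back to "(?: " plus the tree's simple class name, so nothing is forced.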
+
+ class TreeMatchTemplate {
+ // non-trees defined in Trees
+ //
+ // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
+ // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
+ //
+ def apply(t: Tree): Unit = t match {
+ // eliminated by typer
+ case Annotated(annot, arg) =>
+ case AssignOrNamedArg(lhs, rhs) =>
+ case DocDef(comment, definition) =>
+ case Import(expr, selectors) =>
+
+ // eliminated by refchecks
+ case ModuleDef(mods, name, impl) =>
+ case TypeTreeWithDeferredRefCheck() =>
+
+ // eliminated by erasure
+ case TypeDef(mods, name, tparams, rhs) =>
+ case Typed(expr, tpt) =>
+
+ // eliminated by cleanup
+ case ApplyDynamic(qual, args) =>
+
+ // eliminated by explicitouter
+ case Alternative(trees) =>
+ case Bind(name, body) =>
+ case CaseDef(pat, guard, body) =>
+ case Star(elem) =>
+ case UnApply(fun, args) =>
+
+ // eliminated by lambdalift
+ case Function(vparams, body) =>
+
+ // eliminated by uncurry
+ case AppliedTypeTree(tpt, args) =>
+ case CompoundTypeTree(templ) =>
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ case SelectFromTypeTree(qual, selector) =>
+ case SingletonTypeTree(ref) =>
+ case TypeBoundsTree(lo, hi) =>
+
+ // survivors
+ case Apply(fun, args) =>
+ case ArrayValue(elemtpt, trees) =>
+ case Assign(lhs, rhs) =>
+ case Block(stats, expr) =>
+ case ClassDef(mods, name, tparams, impl) =>
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case EmptyTree =>
+ case Ident(name) =>
+ case If(cond, thenp, elsep) =>
+ case LabelDef(name, params, rhs) =>
+ case Literal(value) =>
+ case Match(selector, cases) =>
+ case New(tpt) =>
+ case PackageDef(pid, stats) =>
+ case Return(expr) =>
+ case Select(qualifier, selector) =>
+ case Super(qual, mix) =>
+ case Template(parents, self, body) =>
+ case This(qual) =>
+ case Throw(expr) =>
+ case Try(block, catches, finalizer) =>
+ case TypeApply(fun, args) =>
+ case TypeTree() =>
+ case ValDef(mods, name, tpt, rhs) =>
+
+ // missing from the Trees comment
+ case Parens(args) => // only used during parsing
+ case SelectFromArray(qual, name, erasure) => // only used during erasure
+ }
+ }
+
+ private def asStringInternal(t: Tree, f: PrintWriter => TreePrinter): String = {
+ val buffer = new StringWriter()
+ val printer = f(new PrintWriter(buffer))
+ printer.print(t)
+ printer.flush()
+ buffer.toString
+ }
+ def asString(t: Tree): String = asStringInternal(t, newStandardTreePrinter)
+ def asCompactString(t: Tree): String = asStringInternal(t, newCompactTreePrinter)
+
+ def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
+ def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
+ def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
+
+ def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
+ def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
+ def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
+
+ def newTreePrinter(writer: PrintWriter): TreePrinter =
+ if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
+ else newStandardTreePrinter(writer)
+ def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
+ def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
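  // Usage sketch (assumes the enclosing SymbolTable instance is in scope and
  // `tree` is any Tree; illustrative only):
  //   val p = newTreePrinter()          // CompactTreePrinter if -Ycompacttrees is set
  //   p.print(tree); p.flush()
  //   val std = asString(tree)          // always the standard layout
  //   val cmp = asCompactString(tree)   // always the compact layout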
+
/** A writer that writes to the current Console and
* is sensitive to replacement of the Console's
* output stream.
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index bff4bd51c3..0a107990dc 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -1,26 +1,17 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast
-import java.io.{PrintWriter, StringWriter}
-
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.symtab.{Flags, SymbolTable}
+import scala.tools.nsc.symtab.SymbolTable
import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.util.{FreshNameCreator, HashSet, Position, NoPosition, SourceFile}
-
-
-trait Trees {
- self: SymbolTable =>
+import scala.tools.nsc.util.{FreshNameCreator, HashSet, SourceFile}
- //statistics
-
- var nodeCount = 0
+trait Trees extends reflect.generic.Trees { self: SymbolTable =>
trait CompilationUnitTrait {
var body: Tree
@@ -32,11 +23,7 @@ trait Trees {
// sub-components --------------------------------------------------
- object treePrinters extends {
- val trees: Trees.this.type = Trees.this
- } with TreePrinters
-
- lazy val treePrinter = treePrinters.create()
+ lazy val treePrinter = newTreePrinter()
object treeInfo extends {
val trees: Trees.this.type = Trees.this
@@ -44,121 +31,39 @@ trait Trees {
val treeCopy = new LazyTreeCopier()
- // modifiers --------------------------------------------------------
-
- /** @param privateWithin the qualifier for a private (a type name)
- * or nme.EMPTY.toTypeName, if none is given.
- * @param annotations the annotations for the definition.
- * <strong>Note:</strong> the typechecker drops these annotations,
- * use the AnnotationInfo's (Symbol.annotations) in later phases.
- */
- case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) {
- def isCovariant = hasFlag(COVARIANT ) // marked with `+'
- def isContravariant = hasFlag(CONTRAVARIANT) // marked with `-'
- def isPrivate = hasFlag(PRIVATE )
- def isProtected = hasFlag(PROTECTED)
- def isVariable = hasFlag(MUTABLE )
- def isArgument = hasFlag(PARAM )
- def isAccessor = hasFlag(ACCESSOR )
- def isOverride = hasFlag(OVERRIDE )
- def isAbstract = hasFlag(ABSTRACT )
- def isDeferred = hasFlag(DEFERRED )
- def isCase = hasFlag(CASE )
- def isLazy = hasFlag(LAZY )
- def isSealed = hasFlag(SEALED )
- def isFinal = hasFlag(FINAL )
- def isTrait = hasFlag(TRAIT | notDEFERRED) // (part of DEVIRTUALIZE)
- def isImplicit = hasFlag(IMPLICIT )
- def isPublic = !isPrivate && !isProtected
- def hasFlag(flag: Long) = (flag & flags) != 0L
- def & (flag: Long): Modifiers = {
- val flags1 = flags & flag
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def &~ (flag: Long): Modifiers = {
- val flags1 = flags & (~flag)
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def | (flag: Long): Modifiers = {
- val flags1 = flags | flag
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def withAnnotations(annots: List[Tree]) =
- if (annots.isEmpty) this
- else Modifiers(flags, privateWithin, annotations ::: annots, positions)
- def withPosition(flag: Long, position: Position) =
- Modifiers(flags, privateWithin, annotations, positions + (flag -> position))
- }
-
- def Modifiers(flags: Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List(), new Map.EmptyMap)
- def Modifiers(flags: Long): Modifiers = Modifiers(flags, nme.EMPTY.toTypeName)
+ implicit def treeWrapper(tree: Tree): TreeOps = new TreeOps(tree)
- val NoMods = Modifiers(0)
+ class TreeOps(tree: Tree) {
- // @M helper method for asserts that check consistency in kinding
- //def kindingIrrelevant(tp: Type) = (tp eq null) || phase.name == "erasure" || phase.erasedTypes
-
- abstract class Tree extends Product {
- {
- import util.Statistics
- if (Statistics.enabled) nodeCount += 1
- }
-
- val id = nodeCount
-// assert(id != 1223)
- nodeCount += 1
-
- private var rawpos: Position = NoPosition
-
- def pos = rawpos
-
- private[this] var rawtpe: Type = _
-
- def tpe = rawtpe
- def tpe_=(t: Type) = rawtpe = t
-
- def setPos(pos: Position): this.type = {
- rawpos = pos
-/*
- for (c <- this.children)
- if (c.pos.isOpaqueRange && !pos.includes(c.pos)) {
- assert(false, "non-enclosing positions in "+this)
- }
-*/
- this
- }
-
- def setType(tp: Type): this.type = {
- /*assert(kindingIrrelevant(tp) || !kindStar || !tp.isHigherKinded,
- tp+" should not be higher-kinded");*/
- tpe = tp
- this
+ def isTerm: Boolean = tree match {
+ case _: TermTree => true
+ case Bind(name, _) => name.isTermName
+ case Select(_, name) => name.isTermName
+ case Ident(name) => name.isTermName
+ case Annotated(_, arg) => arg.isTerm
+ case DocDef(_, defn) => defn.isTerm
+ case _ => false
}
- def symbol: Symbol = null
- def symbol_=(sym: Symbol) {
- throw new Error("symbol_= inapplicable for " + this)
+ def isType: Boolean = tree match {
+ case _: TypTree => true
+ case Bind(name, _) => name.isTypeName
+ case Select(_, name) => name.isTypeName
+ case Ident(name) => name.isTypeName
+ case Annotated(_, arg) => arg.isType
+ case DocDef(_, defn) => defn.isType
+ case _ => false
}
- def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
- def hasSymbol = false
- def isDef = false
- def isTerm = false
- def isType = false
- def isEmpty = false
-
- def isErroneous = (tpe ne null) && tpe.isErroneous
+ def isErroneous = (tree.tpe ne null) && tree.tpe.isErroneous
/** Apply `f' to each subtree */
- def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) }
+ def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(tree) }
/** Find all subtrees matching predicate `p' */
def filter(f: Tree => Boolean): List[Tree] = {
val ft = new FilterTreeTraverser(f)
- ft.traverse(this)
+ ft.traverse(tree)
ft.hits.toList
}
@@ -167,204 +72,47 @@ trait Trees {
*/
def find(p: Tree => Boolean): Option[Tree] = {
val ft = new FindTreeTraverser(p)
- ft.traverse(this)
+ ft.traverse(tree)
ft.result
}
- /** Is there part of this tree which satisfies predicate `p'? */
- def exists(p: Tree => Boolean): Boolean = !find(p).isEmpty
-
- /** The direct child trees of this tree
- * EmptyTrees are always omitted. Lists are collapsed.
- */
- def children: List[Tree] = {
- def subtrees(x: Any): List[Tree] = x match {
- case EmptyTree => List()
- case t: Tree => List(t)
- case xs: List[_] => xs flatMap subtrees
- case _ => List()
+ def changeOwner(pairs: (Symbol, Symbol)*): Tree = {
+ pairs.foldLeft(tree) { case (t, (oldOwner, newOwner)) =>
+ new ChangeOwnerTraverser(oldOwner, newOwner) apply t
}
- productIterator.toList flatMap subtrees
- }
-
- override def toString(): String = {
- val buffer = new StringWriter()
- val printer = treePrinters.create(new PrintWriter(buffer))
- printer.print(this)
- printer.flush()
- buffer.toString
}
- override def hashCode(): Int = super.hashCode()
+ /** Is there part of this tree which satisfies predicate `p'? */
+ def exists(p: Tree => Boolean): Boolean = !find(p).isEmpty
- override def equals(that: Any): Boolean = that match {
- case t: Tree => this eq t
- case _ => false
- }
- def hashCodeStructure: Int = {
- var hc = getClass.hashCode
- def f(what : Any) : Unit = what match {
- case what : Tree => hc += what.hashCodeStructure
- case what : Iterable[_] => what.foreach(f)
- case what : Product => g(what)
- case null =>
- case what => hc += what.hashCode
- }
- def g(what: Product) {
- hc += what.productArity
- var i = 0
- while (i < what.productArity) {
- f(what.productElement(i))
- i += 1
+ def equalsStructure(that : Tree) = equalsStructure0(that)(_ eq _)
+ def equalsStructure0(that: Tree)(f: (Tree,Tree) => Boolean): Boolean =
+ (tree == that) || ((tree.getClass == that.getClass) && { // XXX defining any kind of equality in terms of getClass is a mistake
+ assert(tree.productArity == that.productArity)
+ def equals0(this0: Any, that0: Any): Boolean = (this0, that0) match {
+ case (x: Tree, y: Tree) => f(x, y) || (x equalsStructure0 y)(f)
+ case (xs: List[_], ys: List[_]) => (xs corresponds ys)(equals0)
+ case _ => this0 == that0
+ }
+ def compareOriginals() = (this, that) match {
+ case (x: TypeTree, y: TypeTree) if x.original != null && y.original != null =>
+ (x.original equalsStructure0 y.original)(f)
+ case _ =>
+ true
}
- }
- g(this)
- hc
- }
- def equalsStructure(that : Tree) = equalsStructure0(that){case (t0,t1) => false}
- def equalsStructure0(that: Tree)(f : (Tree,Tree) => Boolean): Boolean = {
- if (this == that) return true
- if (this.getClass != that.getClass) return false
- val this0 = this.asInstanceOf[Product]
- val that0 = that.asInstanceOf[Product]
- assert(this0.productArity == that0.productArity)
- def equals0(thiz: Any, that: Any): Boolean = thiz match {
- case thiz: Tree =>
- f(thiz,that.asInstanceOf[Tree]) || thiz.equalsStructure0(that.asInstanceOf[Tree])(f)
- case thiz: List[_] =>
- val that0 = that.asInstanceOf[List[Any]]
- if (thiz.length != that0.length) false
- else {
- val results0 = for (i <- 0.until(thiz.length).toList)
- yield equals0(thiz(i), that0(i))
- results0.foldLeft(true)((x,y) => x && y)
- }
- case thiz =>
- thiz == that
- }
- val results = for (i <- 0.until(this0.productArity).toList) yield
- equals0(this0.productElement(i), that0.productElement(i))
- val b = results.foldLeft(true)((x,y) => x && y)
- if (b) (this,that) match {
- case (this0 : TypeTree, that0 : TypeTree) if this0.original != null && that0.original != null =>
- this0.original.equalsStructure0(that0.original)(f)
- case _ => true
- } else false
- }
-
- /** Make a copy of this tree, keeping all attributes,
- * except that all positions are focussed (so nothing
- * in this tree will be found when searching by position).
- */
- def duplicate: this.type =
- (duplicator transform this).asInstanceOf[this.type]
-
- def shallowDuplicate: this.type =
- ((new ShallowDuplicator(this)) transform this).asInstanceOf[this.type]
-
- def copyAttrs(tree: Tree): this.type = {
- rawpos = tree.rawpos
- tpe = tree.tpe
- if (hasSymbol) symbol = tree.symbol
- this
- }
- }
-
- trait SymTree extends Tree {
- override def hasSymbol = true
- override var symbol: Symbol = NoSymbol
- }
-
- trait RefTree extends SymTree {
- def name: Name
- }
-
- abstract class DefTree extends SymTree {
- def name: Name
- override def isDef = true
- }
-
- trait TermTree extends Tree {
- override def isTerm = true
- }
-
- /** A tree for a type. Note that not all type trees implement
- * this trait; in particular, Ident's are an exception. */
- trait TypTree extends Tree {
- override def isType = true
- }
-
-// ----- auxiliary objects and methods ------------------------------
-
- private lazy val duplicator = new Transformer {
- override val treeCopy = new StrictTreeCopier
- override def transform(t: Tree) = {
- val t1 = super.transform(t)
- if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
- t1
- }
- }
-
- private class ShallowDuplicator(orig: Tree) extends Transformer {
- override val treeCopy = new StrictTreeCopier
- override def transform(tree: Tree) =
- if (tree eq orig)
- super.transform(tree)
- else
- tree
- }
-
-// def nextPhase = if (phase.id > globalPhase.id) phase else phase.next;
-
-// ----- tree node alternatives --------------------------------------
-
- /** The empty tree */
- case object EmptyTree extends TermTree {
- super.tpe_=(NoType)
- override def tpe_=(t: Type) =
- if (t != NoType) throw new Error("tpe_=("+t+") inapplicable for <empty>")
- override def isEmpty = true
- }
- abstract class MemberDef extends DefTree {
- def mods: Modifiers
- def keyword: String = this match {
- case TypeDef(_, _, _, _) => "type"
- case ClassDef(mods, _, _, _) => if (mods.isTrait) "trait" else "class"
- case DefDef(_, _, _, _, _, _) => "def"
- case ModuleDef(_, _, _) => "object"
- case PackageDef(_, _) => "package"
- case ValDef(mods, _, _, _) => if (mods.isVariable) "var" else "val"
- case _ => ""
- }
- final def hasFlag(mask: Long): Boolean = (mods.flags & mask) != 0L
- }
+ (tree.productIterator.toList corresponds that.productIterator.toList)(equals0) && compareOriginals()
+ })
- /** Package clause
- */
- case class PackageDef(pid: RefTree, stats: List[Tree])
- extends MemberDef {
- def name = pid.name
- def mods = NoMods
+ def shallowDuplicate: Tree = new ShallowDuplicator(tree) transform tree
}
-/* disabled, as this is now dangerous
- def PackageDef(sym: Symbol, stats: List[Tree]): PackageDef =
- PackageDef(Ident(sym.name), stats) setSymbol sym
-*/
-
- abstract class ImplDef extends MemberDef {
- def impl: Template
- }
+ private[scala] override def duplicateTree(tree: Tree): Tree = duplicator transform tree
- /** Class definition */
- case class ClassDef(mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template)
- extends ImplDef
+// ---- values and creators ---------------------------------------
- /**
- * @param sym the class symbol
- * @param impl ...
- * @return ...
+ /** @param sym the class symbol
+ * @return the implementation template
*/
def ClassDef(sym: Symbol, impl: Template): ClassDef =
atPos(sym.pos) {
@@ -384,7 +132,6 @@ trait Trees {
* @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
- * @return ...
*/
def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef =
ClassDef(sym,
@@ -392,45 +139,15 @@ trait Trees {
if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
constrMods, vparamss, argss, body, superPos))
- /** Singleton object definition
- *
- * @param mods
- * @param name
- * @param impl
- */
- case class ModuleDef(mods: Modifiers, name: Name, impl: Template)
- extends ImplDef
-
/**
* @param sym the class symbol
- * @param impl ...
- * @return ...
+ * @param impl the implementation template
*/
def ModuleDef(sym: Symbol, impl: Template): ModuleDef =
atPos(sym.pos) {
ModuleDef(Modifiers(sym.flags), sym.name, impl) setSymbol sym
}
- abstract class ValOrDefDef extends MemberDef {
- def tpt: Tree
- def rhs: Tree
- }
-
- /** Value definition
- *
- * @param mods
- * @param name
- * @param tpt
- * @param rhs
- */
- case class ValDef(mods: Modifiers, name: Name, tpt: Tree, rhs: Tree)
- extends ValOrDefDef {
- assert(tpt.isType, tpt)
- //assert(kindingIrrelevant(tpt.tpe) || !tpt.tpe.isHigherKinded, tpt.tpe) //@M a value definition should never be typed with a higher-kinded type (values must be classified by types with kind *)
- //tpt.kindStar=true //@M turn on consistency checking in Tree
- assert(rhs.isTerm, rhs)
- }
-
def ValDef(sym: Symbol, rhs: Tree): ValDef =
atPos(sym.pos) {
ValDef(Modifiers(sym.flags), sym.name,
@@ -440,31 +157,12 @@ trait Trees {
def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
- object emptyValDef
- extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
+ object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
override def isEmpty = true
super.setPos(NoPosition)
override def setPos(pos: Position) = { assert(false); this }
}
- /** Method definition
- *
- * @param mods
- * @param name
- * @param tparams
- * @param vparamss
- * @param tpt
- * @param rhs
- */
- case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree)
- extends ValOrDefDef {
- assert(tpt.isType, tpt)
- //assert(kindingIrrelevant(tpt.tpe) || !tpt.tpe.isHigherKinded, tpt.tpe) //@M a method definition should never be typed with a higher-kinded type (values must be classified by types with kind *)
- //tpt.kindStar=true //@M turn on consistency checking in Tree
- assert(rhs.isTerm, rhs)
- }
-
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
@@ -489,10 +187,6 @@ trait Trees {
DefDef(sym, rhs(sym.info.paramss))
}
- /** Abstract type, type parameter, or type alias */
- case class TypeDef(mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree)
- extends MemberDef
-
  /** A TypeDef node which defines given `sym' with given right hand side `rhs'. */
def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
atPos(sym.pos) {
@@ -503,91 +197,11 @@ trait Trees {
def TypeDef(sym: Symbol): TypeDef =
TypeDef(sym, TypeBoundsTree(TypeTree(sym.info.bounds.lo), TypeTree(sym.info.bounds.hi)))
- /** <p>
- * Labelled expression - the symbols in the array (must be Idents!)
- * are those the label takes as argument
- * </p>
- * <p>
- * The symbol that is given to the labeldef should have a MethodType
- * (as if it were a nested function)
- * </p>
- * <p>
- * Jumps are apply nodes attributed with label symbol, the arguments
- * will get assigned to the idents.
- * </p>
- * <p>
- * Note: on 2005-06-09 Martin, Iuli, Burak agreed to have forward
- * jumps within a Block.
- * </p>
- */
- case class LabelDef(name: Name, params: List[Ident], rhs: Tree)
- extends DefTree with TermTree {
- assert(rhs.isTerm)
- }
-
- /**
- * @param sym the class symbol
- * @param params ...
- * @param rhs ...
- * @return ...
- */
def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
atPos(sym.pos) {
LabelDef(sym.name, params map Ident, rhs) setSymbol sym
}
- /** Import selector
- *
- * Representation of an imported name its optional rename and their optional positions
- *
- * @param name the imported name
- * @param namePos its position or -1 if undefined
- * @param rename the name the import is renamed to (== name if no renaming)
- * @param renamePos the position of the rename or -1 if undefined
- */
- case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
-
- /** Import clause
- *
- * @param expr
- * @param selectors
- */
- case class Import(expr: Tree, selectors: List[ImportSelector])
- extends SymTree
- // The symbol of an Import is an import symbol @see Symbol.newImport
- // It's used primarily as a marker to check that the import has been typechecked.
-
- /** Documented definition, eliminated by analyzer */
- case class DocDef(comment: String, definition: Tree)
- extends Tree {
- override def symbol: Symbol = definition.symbol
- override def symbol_=(sym: Symbol) { definition.symbol = sym }
- // sean: seems to be important to the IDE
- override def isDef = definition.isDef
- override def isTerm = definition.isTerm
- override def isType = definition.isType
- }
-
- /** Instantiation template of a class or trait
- *
- * @param parents
- * @param body
- */
- case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
- extends SymTree {
- // the symbol of a template is a local dummy. @see Symbol.newLocalDummy
- // the owner of the local dummy is the enclosing trait or class.
- // the local dummy is itself the owner of any local blocks
- // For example:
- //
- // class C {
- // def foo // owner is C
- // {
- // def bar // owner is local dummy
- // }
- // System.err.println("TEMPLATE: " + parents)
- }
-
/** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
@@ -613,22 +227,21 @@ trait Trees {
vparamss map (vps => vps.map { vd =>
atPos(vd.pos.focus) {
ValDef(
- Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM) | PARAM) withAnnotations vd.mods.annotations,
+ Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR) withAnnotations vd.mods.annotations,
vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
}})
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val (lvdefs, gvdefs) = List.unzip {
- evdefs map {
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val fld = treeCopy.ValDef(
- vdef.duplicate, mods, name,
- atPos(vdef.pos.focus) { TypeTree() setOriginal tpt setPos tpt.pos.focus }, // atPos in case
- EmptyTree)
- val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
- (local, fld)
- }
- }
+ val (lvdefs, gvdefs) = evdefs map {
+ case vdef @ ValDef(mods, name, tpt, rhs) =>
+ val fld = treeCopy.ValDef(
+ vdef.duplicate, mods, name,
+ atPos(vdef.pos.focus) { TypeTree() setOriginal tpt setPos tpt.pos.focus }, // atPos in case
+ EmptyTree)
+ val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
+ (local, fld)
+ } unzip
+
val constrs = {
if (constrMods.isTrait) {
if (body forall treeInfo.isInterfaceMember) List()
@@ -651,126 +264,22 @@ trait Trees {
}
// println("typed template, gvdefs = "+gvdefs+", parents = "+parents+", constrs = "+constrs)
constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs))
- // remove defaults
+ // vparamss2 are used as field definitions for the class. remove defaults
val vparamss2 = vparamss map (vps => vps map { vd =>
treeCopy.ValDef(vd, vd.mods &~ DEFAULTPARAM, vd.name, vd.tpt, EmptyTree)
})
Template(parents, self, gvdefs ::: vparamss2.flatten ::: constrs ::: etdefs ::: rest)
}
- /** Block of expressions (semicolon separated expressions) */
- case class Block(stats: List[Tree], expr: Tree)
- extends TermTree
-
- /** Case clause in a pattern match, eliminated by TransMatch
- * (except for occurences in switch statements)
- */
- case class CaseDef(pat: Tree, guard: Tree, body: Tree)
- extends Tree
-
/** casedef shorthand */
def CaseDef(pat: Tree, body: Tree): CaseDef = CaseDef(pat, EmptyTree, body)
- /** Alternatives of patterns, eliminated by TransMatch, except for
- * occurences in encoded Switch stmt (=remaining Match(CaseDef(...))
- */
- case class Alternative(trees: List[Tree])
- extends TermTree
-
- /** Repetition of pattern, eliminated by TransMatch */
- case class Star(elem: Tree)
- extends TermTree
-
- /** Bind of a variable to a rhs pattern, eliminated by TransMatch
- *
- * @param name
- * @param body
- */
- case class Bind(name: Name, body: Tree)
- extends DefTree {
- override def isTerm = name.isTermName
- override def isType = name.isTypeName
- }
-
def Bind(sym: Symbol, body: Tree): Bind =
Bind(sym.name, body) setSymbol sym
- case class UnApply(fun: Tree, args: List[Tree])
- extends TermTree
- /** Array of expressions, needs to be translated in backend,
- */
- case class ArrayValue(elemtpt: Tree, elems: List[Tree])
- extends TermTree
-
- /** Anonymous function, eliminated by analyzer */
- case class Function(vparams: List[ValDef], body: Tree)
- extends TermTree with SymTree
- // The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
- // It is the owner of the function's parameters.
-
- /** Assignment */
- case class Assign(lhs: Tree, rhs: Tree)
- extends TermTree
-
- /** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply)
- */
- case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
- extends TermTree
-
- /** Conditional expression */
- case class If(cond: Tree, thenp: Tree, elsep: Tree)
- extends TermTree
-
- /** <p>
- * Pattern matching expression (before <code>TransMatch</code>)
- * Switch statements (after TransMatch)
- * </p>
- * <p>
- * After <code>TransMatch</code>, cases will satisfy the following
- * constraints:
- * </p>
- * <ul>
- * <li>all guards are EmptyTree,</li>
- * <li>all patterns will be either <code>Literal(Constant(x:Int))</code>
- * or <code>Alternative(lit|...|lit)</code></li>
- * <li>except for an "otherwise" branch, which has pattern
- * <code>Ident(nme.WILDCARD)</code></li>
- * </ul>
- */
- case class Match(selector: Tree, cases: List[CaseDef])
- extends TermTree
-
- /** Return expression */
- case class Return(expr: Tree)
- extends TermTree with SymTree
- // The symbol of a Return node is the enclosing method.
-
- case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
- extends TermTree
-
- /** Throw expression */
- case class Throw(expr: Tree)
- extends TermTree
-
- /** Object instantiation
- * One should always use factory method below to build a user level new.
- *
- * @param tpt a class type
- */
- case class New(tpt: Tree)
- extends TermTree {
- assert(tpt.isType)
- }
-
- /** Factory method for object creation <code>&lt;new tpt(args_1)...(args_n)&gt;</code>.
- * A New(t, as) is expanded to:
- * (new t).<init>(as)
- *
- * @param tpt ...
- * @param argss ...
- * @return ...
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
*/
def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
assert(!argss.isEmpty)
@@ -778,265 +287,73 @@ trait Trees {
(superRef /: argss) (Apply)
}
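  // Worked example for the factory above (hypothetical trees, sketch only):
  // following the comment, New(tpt, List(List(a), List(b))) folds the argument
  // lists into nested Apply nodes over the constructor reference, i.e. the
  // tree for `(new tpt).<init>(a)(b)`.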
- /** Type annotation, eliminated by explicit outer */
- case class Typed(expr: Tree, tpt: Tree)
- extends TermTree
-
- // Martin to Sean: Should GenericApply/TypeApply/Apply not be SymTree's? After all,
- // ApplyDynamic is a SymTree.
- abstract class GenericApply extends TermTree {
- val fun: Tree
- val args: List[Tree]
- }
-
- /** Type application */
- case class TypeApply(fun: Tree, args: List[Tree])
- extends GenericApply {
- override def symbol: Symbol = fun.symbol
- override def symbol_=(sym: Symbol) { fun.symbol = sym }
- }
-
- /** Value application */
- case class Apply(fun: Tree, args: List[Tree])
- extends GenericApply {
- override def symbol: Symbol = fun.symbol
- override def symbol_=(sym: Symbol) { fun.symbol = sym }
- }
-
- /** Dynamic value application.
- * In a dynamic application q.f(as)
- * - q is stored in qual
- * - as is stored in args
- * - f is stored as the node's symbol field.
- */
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree
- // The symbol of an ApplyDynamic is the function symbol of `qual', or NoSymbol, if there is none.
-
- /** Super reference */
- case class Super(qual: Name, mix: Name)
- extends TermTree with SymTree
- // The symbol of a Super is the class _from_ which the super reference is made.
- // For instance in C.super(...), it would be C.
-
def Super(sym: Symbol, mix: Name): Tree = Super(sym.name, mix) setSymbol sym
- /** Self reference */
- case class This(qual: Name)
- extends TermTree with SymTree
- // The symbol of a This is the class to which the this refers.
- // For instance in C.this, it would be C.
-
def This(sym: Symbol): Tree = This(sym.name) setSymbol sym
- /** Designator <qualifier> . <name> */
- case class Select(qualifier: Tree, name: Name)
- extends RefTree {
- override def isTerm = name.isTermName
- override def isType = name.isTypeName
- }
-
def Select(qualifier: Tree, sym: Symbol): Select =
Select(qualifier, sym.name) setSymbol sym
- /** Identifier <name> */
- case class Ident(name: Name)
- extends RefTree {
- override def isTerm = name.isTermName
- override def isType = name.isTypeName
- }
-
- class BackQuotedIdent(name: Name) extends Ident(name)
-
def Ident(sym: Symbol): Ident =
Ident(sym.name) setSymbol sym
- /** Literal */
- case class Literal(value: Constant)
- extends TermTree {
- assert(value ne null)
+ /** Block factory that flattens directly nested blocks.
+ */
+ def Block(stats: Tree*): Block = stats match {
+ case Seq(b @ Block(_, _)) => b
+ case Seq(stat) => Block(stats.toList, Literal(Constant(())))
+ case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last)
}
- def Literal(value: Any): Literal =
- Literal(Constant(value))
-
/** A synthetic term holding an arbitrary type. Not to be confused with
   * TypTree, the trait for trees that are only used for type trees.
* TypeTree's are inserted in several places, but most notably in
* <code>RefCheck</code>, where the arbitrary type trees are all replaced by
* TypeTree's. */
- case class TypeTree() extends TypTree {
- override def symbol = if (tpe == null) null else tpe.typeSymbol
-
- private var orig: Tree = null // should be EmptyTree?
+ case class TypeTree() extends AbsTypeTree {
+ private var orig: Tree = null
+ private[Trees] var wasEmpty: Boolean = false
def original: Tree = orig
-
def setOriginal(tree: Tree): this.type = { orig = tree; setPos(tree.pos); this }
- override def isEmpty = (tpe eq null) || tpe == NoType
+ override def defineType(tp: Type): this.type = {
+ wasEmpty = isEmpty
+ setType(tp)
+ }
}
+ object TypeTree extends TypeTreeExtractor
+
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
- // def TypeTree(tp: Type, tree : Tree): TypeTree = TypeTree(tree) setType tp
- /** A tree that has an annotation attached to it. Only used for annotated types and
- * annotation ascriptions, annotations on definitions are stored in the Modifiers.
- * Eliminated by typechecker (typedAnnotated), the annotations are then stored in
- * an AnnotatedType.
- */
- case class Annotated(annot: Tree, arg: Tree) extends Tree {
- override def isType = arg.isType
- override def isTerm = arg.isTerm
+ /** Documented definition, eliminated by analyzer */
+ case class DocDef(comment: DocComment, definition: Tree)
+ extends Tree {
+ override def symbol: Symbol = definition.symbol
+ override def symbol_=(sym: Symbol) { definition.symbol = sym }
+ // sean: seems to be important to the IDE
+ override def isDef = definition.isDef
}
- /** Singleton type, eliminated by RefCheck */
- case class SingletonTypeTree(ref: Tree)
- extends TypTree
-
- /** Type selection <qualifier> # <name>, eliminated by RefCheck */
- case class SelectFromTypeTree(qualifier: Tree, name: Name)
- extends TypTree with RefTree
-
- /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
- case class CompoundTypeTree(templ: Template)
- extends TypTree
-
- /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
- case class AppliedTypeTree(tpt: Tree, args: List[Tree])
- extends TypTree {
- override def symbol: Symbol = tpt.symbol
- override def symbol_=(sym: Symbol) { tpt.symbol = sym }
- }
+ /** Either an assignment or a named argument. Only appears in argument lists,
+ * eliminated by typecheck (doTypedApply)
+ */
+ case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
+ extends TermTree
- case class TypeBoundsTree(lo: Tree, hi: Tree)
- extends TypTree
+ case class Parens(args: List[Tree]) extends Tree // only used during parsing
- case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
- extends TypTree
+ /** emitted by typer, eliminated by refchecks **/
+ case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends AbsTypeTree
- case class Parens(args: List[Tree]) extends Tree // only used during parsing
+// ----- subconstructors --------------------------------------------
- /** Array selection <qualifier> . <name> only used during erasure */
- case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
- extends TermTree with RefTree
+ class ApplyToImplicitArgs(fun: Tree, args: List[Tree]) extends Apply(fun, args)
- trait StubTree extends Tree {
- def underlying : AnyRef
- override def equalsStructure0(that: Tree)(f : (Tree,Tree) => Boolean): Boolean = this eq that
- }
+ class ApplyImplicitView(fun: Tree, args: List[Tree]) extends Apply(fun, args)
-/* A standard pattern match
- case EmptyTree =>
- case PackageDef(pid, stats) =>
- // package pid { stats }
- case ClassDef(mods, name, tparams, impl) =>
- // mods class name [tparams] impl where impl = extends parents { defs }
- case ModuleDef(mods, name, impl) => (eliminated by refcheck)
- // mods object name impl where impl = extends parents { defs }
- case ValDef(mods, name, tpt, rhs) =>
- // mods val name: tpt = rhs
- // note missing type information is expressed by tpt = TypeTree()
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- // mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs
- // note missing type information is expressed by tpt = TypeTree()
- case TypeDef(mods, name, tparams, rhs) => (eliminated by erasure)
- // mods type name[tparams] = rhs
- // mods type name[tparams] >: lo <: hi, where lo, hi are in a TypeBoundsTree,
- and DEFERRED is set in mods
- case LabelDef(name, params, rhs) =>
- // used for tailcalls and like
- // while/do are desugared to label defs as follows:
- // while (cond) body ==> LabelDef($L, List(), if (cond) { body; L$() } else ())
- // do body while (cond) ==> LabelDef($L, List(), body; if (cond) L$() else ())
- case Import(expr, selectors) => (eliminated by typecheck)
- // import expr.{selectors}
- // Selectors are a list of pairs of names (from, to).
- // The last (and maybe only name) may be a nme.WILDCARD
- // for instance
- // import qual.{x, y => z, _} would be represented as
- // Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
- case DocDef(comment, definition) => (eliminated by typecheck)
- // /** comment */ definition
- case Template(parents, self, body) =>
- // extends parents { self => body }
- // if self is missing it is represented as emptyValDef
- case Block(stats, expr) =>
- // { stats; expr }
- case CaseDef(pat, guard, body) => (eliminated by transmatch/explicitouter)
- // case pat if guard => body
- case Alternative(trees) => (eliminated by transmatch/explicitouter)
- // pat1 | ... | patn
- case Star(elem) => (eliminated by transmatch/explicitouter)
- // pat*
- case Bind(name, body) => (eliminated by transmatch/explicitouter)
- // name @ pat
- case UnApply(fun: Tree, args) (introduced by typer, eliminated by transmatch/explicitouter)
- // used for unapply's
- case ArrayValue(elemtpt, trees) => (introduced by uncurry)
- // used to pass arguments to vararg arguments
- // for instance, printf("%s%d", foo, 42) is translated to after uncurry to:
- // Apply(
- // Ident("printf"),
- // Literal("%s%d"),
- // ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
- case Function(vparams, body) => (eliminated by lambdaLift)
- // vparams => body where vparams:List[ValDef]
- case Assign(lhs, rhs) =>
- // lhs = rhs
- case AssignOrNamedArg(lhs, rhs) => (eliminated by typecheck)
- // lhs = rhs
- case If(cond, thenp, elsep) =>
- // if (cond) thenp else elsep
- case Match(selector, cases) =>
- // selector match { cases }
- case Return(expr) =>
- // return expr
- case Try(block, catches, finalizer) =>
- // try block catch { catches } finally finalizer where catches: List[CaseDef]
- case Throw(expr) =>
- // throw expr
- case New(tpt) =>
- // new tpt always in the context: (new tpt).<init>[targs](args)
- case Typed(expr, tpt) => (eliminated by erasure)
- // expr: tpt
- case TypeApply(fun, args) =>
- // fun[args]
- case Apply(fun, args) =>
- // fun(args)
- // for instance fun[targs](args) is expressed as Apply(TypeApply(fun, targs), args)
- case ApplyDynamic(qual, args) (introduced by erasure, eliminated by cleanup)
- // fun(args)
- case Super(qual, mix) =>
- // qual.super[mix] if qual and/or mix is empty, ther are nme.EMPTY.toTypeName
- case This(qual) =>
- // qual.this
- case Select(qualifier, selector) =>
- // qualifier.selector
- case Ident(name) =>
- // name
- // note: type checker converts idents that refer to enclosing fields or methods
- // to selects; name ==> this.name
- case Literal(value) =>
- // value
- case TypeTree() => (introduced by refcheck)
- // a type that's not written out, but given in the tpe attribute
- case Annotated(annot, arg) => (eliminated by typer)
- // arg @annot for types, arg: @annot for exprs
- case SingletonTypeTree(ref) => (eliminated by uncurry)
- // ref.type
- case SelectFromTypeTree(qualifier, selector) => (eliminated by uncurry)
- // qualifier # selector, a path-dependent type p.T is expressed as p.type # T
- case CompoundTypeTree(templ: Template) => (eliminated by uncurry)
- // parent1 with ... with parentN { refinement }
- case AppliedTypeTree(tpt, args) => (eliminated by uncurry)
- // tpt[args]
- case TypeBoundsTree(lo, hi) => (eliminated by uncurry)
- // >: lo <: hi
- case ExistentialTypeTree(tpt, whereClauses) => (eliminated by uncurry)
- // tpt forSome { whereClauses }
-
-*/
+// ----- auxiliary objects and methods ------------------------------
abstract class TreeCopier {
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template): ClassDef
@@ -1047,7 +364,7 @@ trait Trees {
def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree): TypeDef
def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree): LabelDef
def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]): Import
- def DocDef(tree: Tree, comment: String, definition: Tree): DocDef
+ def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]): Template
def Block(tree: Tree, stats: List[Tree], expr: Tree): Block
def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree): CaseDef
@@ -1075,6 +392,7 @@ trait Trees {
def Ident(tree: Tree, name: Name): Ident
def Literal(tree: Tree, value: Constant): Literal
def TypeTree(tree: Tree): TypeTree
+ def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
def Annotated(tree: Tree, annot: Tree, arg: Tree): Annotated
def SingletonTypeTree(tree: Tree, ref: Tree): SingletonTypeTree
def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name): SelectFromTypeTree
@@ -1102,7 +420,7 @@ trait Trees {
new LabelDef(name, params, rhs).copyAttrs(tree)
def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]) =
new Import(expr, selectors).copyAttrs(tree)
- def DocDef(tree: Tree, comment: String, definition: Tree) =
+ def DocDef(tree: Tree, comment: DocComment, definition: Tree) =
new DocDef(comment, definition).copyAttrs(tree)
def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]) =
new Template(parents, self, body).copyAttrs(tree)
@@ -1143,7 +461,11 @@ trait Trees {
def TypeApply(tree: Tree, fun: Tree, args: List[Tree]) =
new TypeApply(fun, args).copyAttrs(tree)
def Apply(tree: Tree, fun: Tree, args: List[Tree]) =
- new Apply(fun, args).copyAttrs(tree)
+ (tree match {
+ case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
+ case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
+ case _ => new Apply(fun, args)
+ }).copyAttrs(tree)
def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
new ApplyDynamic(qual, args).copyAttrs(tree)
def Super(tree: Tree, qual: Name, mix: Name) =
@@ -1158,6 +480,9 @@ trait Trees {
new Literal(value).copyAttrs(tree)
def TypeTree(tree: Tree) =
new TypeTree().copyAttrs(tree)
+ def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
+ case dc@TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree)
+ }
def Annotated(tree: Tree, annot: Tree, arg: Tree) =
new Annotated(annot, arg).copyAttrs(tree)
def SingletonTypeTree(tree: Tree, ref: Tree) =
@@ -1219,7 +544,7 @@ trait Trees {
if (expr0 == expr) && (selectors0 == selectors) => t
case _ => treeCopy.Import(tree, expr, selectors)
}
- def DocDef(tree: Tree, comment: String, definition: Tree) = tree match {
+ def DocDef(tree: Tree, comment: DocComment, definition: Tree) = tree match {
case t @ DocDef(comment0, definition0)
if (comment0 == comment) && (definition0 == definition) => t
case _ => treeCopy.DocDef(tree, comment, definition)
@@ -1358,6 +683,10 @@ trait Trees {
case t @ TypeTree() => t
case _ => treeCopy.TypeTree(tree)
}
+ def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
+ case t @ TypeTreeWithDeferredRefCheck() => t
+ case _ => treeCopy.TypeTreeWithDeferredRefCheck(tree)
+ }
def Annotated(tree: Tree, annot: Tree, arg: Tree) = tree match {
case t @ Annotated(annot0, arg0)
if (annot0==annot) => t
@@ -1504,6 +833,8 @@ trait Trees {
treeCopy.Literal(tree, value)
case TypeTree() =>
treeCopy.TypeTree(tree)
+ case TypeTreeWithDeferredRefCheck() =>
+ treeCopy.TypeTreeWithDeferredRefCheck(tree)
case Annotated(annot, arg) =>
treeCopy.Annotated(tree, transform(annot), transform(arg))
case SingletonTypeTree(ref) =>
@@ -1520,10 +851,6 @@ trait Trees {
treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses))
case SelectFromArray(qualifier, selector, erasure) =>
treeCopy.SelectFromArray(tree, transform(qualifier), selector, erasure)
- case tree : StubTree =>
- tree.symbol = NoSymbol
- tree.tpe = null
- tree
}
def transformTrees(trees: List[Tree]): List[Tree] =
@@ -1559,142 +886,52 @@ trait Trees {
}
}
- class Traverser {
- protected var currentOwner: Symbol = definitions.RootClass
- def traverse(tree: Tree): Unit = tree match {
- case EmptyTree =>
- ;
- case PackageDef(pid, stats) =>
- traverse(pid)
- atOwner(tree.symbol.moduleClass) {
- traverseTrees(stats)
- }
- case ClassDef(mods, name, tparams, impl) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
- }
- case ModuleDef(mods, name, impl) =>
- atOwner(tree.symbol.moduleClass) {
- traverseTrees(mods.annotations); traverse(impl)
- }
- case ValDef(mods, name, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
- }
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
- }
- case TypeDef(mods, name, tparams, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
- }
- case LabelDef(name, params, rhs) =>
- traverseTrees(params); traverse(rhs)
- case Import(expr, selectors) =>
- traverse(expr)
- case Annotated(annot, arg) =>
- traverse(annot); traverse(arg)
- case DocDef(comment, definition) =>
- traverse(definition)
- case Template(parents, self, body) =>
- traverseTrees(parents)
- if (!self.isEmpty) traverse(self)
- traverseStats(body, tree.symbol)
- case Block(stats, expr) =>
- traverseTrees(stats); traverse(expr)
- case CaseDef(pat, guard, body) =>
- traverse(pat); traverse(guard); traverse(body)
- case Alternative(trees) =>
- traverseTrees(trees)
- case Star(elem) =>
- traverse(elem)
- case Bind(name, body) =>
- traverse(body)
- case UnApply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case ArrayValue(elemtpt, trees) =>
- traverse(elemtpt); traverseTrees(trees)
- case Function(vparams, body) =>
- atOwner(tree.symbol) {
- traverseTrees(vparams); traverse(body)
- }
- case Assign(lhs, rhs) =>
- traverse(lhs); traverse(rhs)
+ class Traverser extends super.Traverser {
+ /** Compiler specific tree types are handled here: the remainder are in
+ * the library's abstract tree traverser.
+ */
+ override def traverse(tree: Tree): Unit = tree match {
case AssignOrNamedArg(lhs, rhs) =>
traverse(lhs); traverse(rhs)
- case If(cond, thenp, elsep) =>
- traverse(cond); traverse(thenp); traverse(elsep)
- case Match(selector, cases) =>
- traverse(selector); traverseTrees(cases)
- case Return(expr) =>
- traverse(expr)
- case Try(block, catches, finalizer) =>
- traverse(block); traverseTrees(catches); traverse(finalizer)
- case Throw(expr) =>
- traverse(expr)
- case New(tpt) =>
- traverse(tpt)
- case Typed(expr, tpt) =>
- traverse(expr); traverse(tpt)
- case TypeApply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case Apply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case ApplyDynamic(qual, args) =>
- traverse(qual); traverseTrees(args)
- case Super(_, _) =>
- ;
- case This(_) =>
- ;
- case Select(qualifier, selector) =>
- traverse(qualifier)
- case Ident(_) =>
- ;
- case Literal(_) =>
- ;
- case TypeTree() =>
- ;
- case SingletonTypeTree(ref) =>
- traverse(ref)
- case SelectFromTypeTree(qualifier, selector) =>
- traverse(qualifier)
- case CompoundTypeTree(templ) =>
- traverse(templ)
- case AppliedTypeTree(tpt, args) =>
- traverse(tpt); traverseTrees(args)
- case TypeBoundsTree(lo, hi) =>
- traverse(lo); traverse(hi)
- case ExistentialTypeTree(tpt, whereClauses) =>
- traverse(tpt); traverseTrees(whereClauses)
- case SelectFromArray(qualifier, selector, erasure) =>
- traverse(qualifier)
+ case DocDef(comment, definition) =>
+ traverse(definition)
case Parens(ts) =>
traverseTrees(ts)
- case tree : StubTree =>
+ case TypeTreeWithDeferredRefCheck() => // TODO: should we traverse the wrapped tree?
+ // (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check)
+ case _ => super.traverse(tree)
}
- def traverseTrees(trees: List[Tree]) {
- trees foreach traverse
- }
- def traverseTreess(treess: List[List[Tree]]) {
- treess foreach traverseTrees
- }
- def traverseStats(stats: List[Tree], exprOwner: Symbol) {
+ /** The abstract traverser is not aware of Tree.isTerm, so we override this one.
+ */
+ override def traverseStats(stats: List[Tree], exprOwner: Symbol) {
stats foreach (stat =>
if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(traverse(stat))
- else traverse(stat))
+ else traverse(stat)
+ )
}
+
+ /** Leave apply available in the generic traverser to do something else.
+ */
def apply[T <: Tree](tree: T): T = { traverse(tree); tree }
+ }
- def atOwner(owner: Symbol)(traverse: => Unit) {
- val prevOwner = currentOwner
- currentOwner = owner
- traverse
- currentOwner = prevOwner
+ private lazy val duplicator = new Transformer {
+ override val treeCopy = new StrictTreeCopier
+ override def transform(t: Tree) = {
+ val t1 = super.transform(t)
+ if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
+ t1
}
}
+ private class ShallowDuplicator(orig: Tree) extends Transformer {
+ override val treeCopy = new StrictTreeCopier
+ override def transform(tree: Tree) =
+ if (tree eq orig) super.transform(tree)
+ else tree
+ }
+
class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case Ident(_) =>
@@ -1712,6 +949,11 @@ trait Trees {
val typeSubst = new SubstTypeMap(from, to)
override def traverse(tree: Tree) {
if (tree.tpe ne null) tree.tpe = typeSubst(tree.tpe)
+ if (tree.isDef) {
+ val sym = tree.symbol
+ val info1 = typeSubst(sym.info)
+ if (info1 ne sym.info) sym.setInfo(info1)
+ }
super.traverse(tree)
}
override def apply[T <: Tree](tree: T): T = super.apply(tree.duplicate)
@@ -1720,19 +962,34 @@ trait Trees {
lazy val EmptyTreeTypeSubstituter = new TreeTypeSubstituter(List(), List())
- class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Traverser {
+ /** Substitute symbols in 'from' with symbols in 'to'. Returns a new
+ * tree using the new symbols and whose Ident and Select nodes are
+ * name-consistent with the new symbols.
+ */
+ class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
val symSubst = new SubstSymMap(from, to)
- override def traverse(tree: Tree) {
+ override def transform(tree: Tree): Tree = {
def subst(from: List[Symbol], to: List[Symbol]) {
if (!from.isEmpty)
if (tree.symbol == from.head) tree setSymbol to.head
else subst(from.tail, to.tail)
}
+
if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe)
- if (tree.hasSymbol) subst(from, to)
- super.traverse(tree)
+ if (tree.hasSymbol) {
+ subst(from, to)
+ tree match {
+ case Ident(name0) if tree.symbol != NoSymbol =>
+ treeCopy.Ident(tree, tree.symbol.name)
+ case Select(qual, name0) =>
+ treeCopy.Select(tree, transform(qual), tree.symbol.name)
+ case _ =>
+ super.transform(tree)
+ }
+ } else
+ super.transform(tree)
}
- override def apply[T <: Tree](tree: T): T = super.apply(tree.duplicate)
+ def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
override def toString() = "TreeSymSubstituter("+from+","+to+")"
}
@@ -1745,13 +1002,6 @@ trait Trees {
}
}
- final class TreeList {
- private var trees = List[Tree]()
- def append(t: Tree): TreeList = { trees = t :: trees; this }
- def append(ts: List[Tree]): TreeList = { trees = ts reverse_::: trees; this }
- def toList: List[Tree] = trees.reverse
- }
-
object posAssigner extends Traverser {
var pos: Position = _
override def traverse(t: Tree) {
@@ -1817,18 +1067,23 @@ trait Trees {
protected def isLocal(sym: Symbol): Boolean = true
protected def resetDef(tree: Tree) {
tree.symbol = NoSymbol
- tree.tpe = null
- super.traverse(tree)
}
- override def traverse(tree: Tree): Unit = tree match {
- case EmptyTree | TypeTree() =>
- ;
- case _: DefTree | Function(_, _) | Template(_, _, _) =>
- resetDef(tree)
- case _ =>
- if (tree.hasSymbol && isLocal(tree.symbol)) tree.symbol = NoSymbol
- tree.tpe = null
- super.traverse(tree)
+ override def traverse(tree: Tree): Unit = {
+ tree match {
+ case _: DefTree | Function(_, _) | Template(_, _, _) =>
+ resetDef(tree)
+ case _ =>
+ if (tree.hasSymbol && isLocal(tree.symbol)) tree.symbol = NoSymbol
+ }
+ tree match {
+ case tpt: TypeTree =>
+ if (tpt.wasEmpty) tree.tpe = null
+ case EmptyTree =>
+ ;
+ case _ =>
+ tree.tpe = null
+ }
+ super.traverse(tree)
}
}
@@ -1849,14 +1104,5 @@ trait Trees {
super.traverse(tree)
}
}
-
- /* hook to memoize trees in IDE */
- trait TreeKind {
- def isType : Boolean
- def isTerm : Boolean
- def isDef : Boolean
- def hasSymbol : Boolean
- def isTop : Boolean
- }
}
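
The reworked TreeSymSubstituter in this file now returns a rewritten tree whose Ident and Select nodes are renamed to stay consistent with the substituted symbols, instead of mutating names in place. A minimal standalone sketch of that idea on a toy tree type (not the compiler's real Tree or Symbol API) looks like this:

object SymSubstSketch {
  // toy tree shapes standing in for the compiler's Ident/Select/Apply nodes
  sealed trait Tree
  case class Ident(name: String) extends Tree
  case class Select(qual: Tree, name: String) extends Tree
  case class Apply(fun: Tree, args: List[Tree]) extends Tree

  // substitute every reference to `from` with `to`, rebuilding the affected nodes
  // so that their names stay consistent with the new "symbol"
  def substSym(tree: Tree, from: String, to: String): Tree = tree match {
    case Ident(n)     => Ident(if (n == from) to else n)
    case Select(q, n) => Select(substSym(q, from, to), if (n == from) to else n)
    case Apply(f, as) => Apply(substSym(f, from, to), as map (substSym(_, from, to)))
  }

  def main(args: Array[String]): Unit = {
    val before = Apply(Select(Ident("x"), "foo"), List(Ident("x")))
    println(substSym(before, "x", "y")) // Apply(Select(Ident(y),foo),List(Ident(y)))
  }
}
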
diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala
index 11d6c997c1..e3d265a92b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala
index c77b33946a..803f35d9cd 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
@@ -10,3 +10,4 @@ package ast.parser
* @param inserted If true, brace needs to be inserted, otherwise brace needs to be deleted.
*/
case class BracePatch(off: Int, inserted: Boolean)
+extends Patch(off, if (inserted) Insertion("{") else Deletion(1))
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Change.scala b/src/compiler/scala/tools/nsc/ast/parser/Change.scala
new file mode 100644
index 0000000000..12f189ac6b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/Change.scala
@@ -0,0 +1,10 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc.ast.parser
+
+abstract class Change
+case class Insertion(text: String) extends Change
+case class Deletion(nchars: Int) extends Change
+
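
The new Change hierarchy is plain data; BracePatch above pairs an offset with either Insertion("{") or Deletion(1). How the compiler applies such patches is not part of this diff, so purely as an illustration, splicing a single change into a source string could look like:

object ApplyChangeSketch {
  sealed abstract class Change
  case class Insertion(text: String) extends Change
  case class Deletion(nchars: Int) extends Change

  // apply one change at the given offset (illustrative only, not the compiler's logic)
  def applyChange(src: String, off: Int, change: Change): String = change match {
    case Insertion(text) => src.substring(0, off) + text + src.substring(off)
    case Deletion(n)     => src.substring(0, off) + src.substring(off + n)
  }

  def main(args: Array[String]): Unit = {
    println(applyChange("def f = 1 }", 8, Insertion("{ ")))   // def f = { 1 }
    println(applyChange("def f = { 1 }}", 13, Deletion(1)))   // def f = { 1 }
  }
}
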
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 1e254f7e51..cd68d4aa32 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -1,20 +1,18 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Burak Emir
*/
-// $Id$
package scala.tools.nsc
package ast.parser
import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer, HashMap }
-import scala.util.control.ControlException
-import scala.tools.nsc.util.{Position,NoPosition,SourceFile,CharArrayReader}
+import scala.util.control.ControlThrowable
+import scala.tools.nsc.util.{SourceFile,CharArrayReader}
import scala.xml.{ Text, TextBuffer }
import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
-import SourceFile.{ SU, LF }
-import scala.annotation.switch
+import util.Chars.{ SU, LF }
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
// from another file - scala.xml.parsing.MarkupParser, it looks like.
@@ -36,25 +34,42 @@ trait MarkupParsers
{
self: Parsers =>
- case object MissingEndTagException extends RuntimeException with ControlException {
+ case object MissingEndTagControl extends ControlThrowable {
override def getMessage = "start tag was here: "
}
- case object ConfusedAboutBracesException extends RuntimeException with ControlException {
+ case object ConfusedAboutBracesControl extends ControlThrowable {
override def getMessage = " I encountered a '}' where I didn't expect one, maybe this tag isn't closed <"
}
- case object TruncatedXML extends RuntimeException with ControlException {
+ case object TruncatedXMLControl extends ControlThrowable {
override def getMessage = "input ended while parsing XML"
}
import global._
- class MarkupParser(parser: UnitParser, final val preserveWS: Boolean) {
+ class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends scala.xml.parsing.MarkupParserCommon {
import Tokens.{ EMPTY, LBRACE, RBRACE }
+ type PositionType = Position
+ type InputType = CharArrayReader
+ type ElementType = Tree
+ type AttributesType = mutable.Map[String, Tree]
+ type NamespaceType = Any // namespaces ignored
+
+ def mkAttributes(name: String, other: NamespaceType): AttributesType = xAttributes
+
+ val eof = false
+
+ def truncatedError(msg: String): Nothing = throw TruncatedXMLControl
+ def xHandleError(that: Char, msg: String) =
+ if (ch == SU) throw TruncatedXMLControl
+ else reportSyntaxError(msg)
+
var input : CharArrayReader = _
+ def lookahead(): BufferedIterator[Char] =
+ (input.buf drop input.charOffset).iterator.buffered
import parser.{ symbXMLBuilder => handle, o2p, r2p }
@@ -63,41 +78,24 @@ trait MarkupParsers
def ch = input.ch
/** this method assigns the next character to ch and advances in input */
def nextch = { val result = input.ch; input.nextChar(); result }
+ def ch_returning_nextch = nextch
- var xEmbeddedBlock = false
-
- /** Execute body with a variable saved and restored after execution */
- def saving[A,B](getter: A, setter: (A) => Unit)(body: => B): B = {
- val saved = getter
- try body
- finally setter(saved)
- }
+ def mkProcInstr(position: Position, name: String, text: String): Tree =
+ parser.symbXMLBuilder.procInstr(position, name, text)
- /** munch expected XML token, report syntax error for unexpected.
- *
- * @param that ...
- */
- def xToken(that: Char): Unit =
- if (ch == that) nextch
- else if (ch == SU) throw TruncatedXML
- else reportSyntaxError("'%s' expected instead of '%s'".format(that, ch))
+ var xEmbeddedBlock = false
private var debugLastStartElement = new mutable.Stack[(Int, String)]
private def debugLastPos = debugLastStartElement.top._1
private def debugLastElem = debugLastStartElement.top._2
- private def unreachable = Predef.error("Cannot be reached.")
private def errorBraces() = {
reportSyntaxError("in XML content, please use '}}' to express '}'")
- throw ConfusedAboutBracesException
+ throw ConfusedAboutBracesControl
}
- private def errorNoEnd(tag: String) = {
+ def errorNoEnd(tag: String) = {
reportSyntaxError("expected closing tag of " + tag)
- throw MissingEndTagException
- }
- private def errorAndResult[T](msg: String, x: T): T = {
- reportSyntaxError(msg)
- x
+ throw MissingEndTagControl
}
/** checks whether next character starts a Scala block, if yes, skip it.
@@ -126,9 +124,7 @@ trait MarkupParsers
val mid = curOffset
val value: Tree = ch match {
case '"' | '\'' =>
- nextch
- val tmp = xAttributeValue(delim)
- nextch
+ val tmp = xAttributeValue(ch_returning_nextch)
try handle.parseAttribute(r2p(start, mid, curOffset), tmp)
catch {
@@ -140,7 +136,7 @@ trait MarkupParsers
nextch
xEmbeddedExpr
case SU =>
- throw TruncatedXML
+ throw TruncatedXMLControl
case _ =>
errorAndResult("' or \" delimited attribute value or '{' scala-expr '}' expected", Literal(Constant("<syntax-error>")))
}
@@ -155,92 +151,13 @@ trait MarkupParsers
aMap
}
- /** attribute value, terminated by either ' or ". value may not contain <.
- * @param endch either ' or "
- */
- def xAttributeValue(endCh: Char): String = {
- val buf = new StringBuilder
- while (ch != endCh) {
- // well-formedness constraint
- if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
- else if (ch == SU) throw TruncatedXML
- else buf append nextch
- }
- // @todo: normalize attribute value
- buf.toString
- }
-
- /** parse a start or empty tag.
- * [40] STag ::= '<' Name { S Attribute } [S]
- * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
- */
- def xTag: (String, mutable.Map[String, Tree]) = {
- val elemName = xName
- xSpaceOpt
-
- (elemName, xAttributes)
- }
-
- /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
- */
- def xEndTag(startName: String) {
- xToken('/')
- if (xName != startName)
- errorNoEnd(startName)
-
- xSpaceOpt
- xToken('>')
- }
-
- /** Create a non-destructive lookahead reader and see if the head
- * of the input would match the given String. If yes, return true
- * and drop the entire String from input; if no, return false
- * and leave input unchanged.
- */
- private def peek(lookingFor: String): Boolean = {
- val la = input.lookaheadReader
- for (c <- lookingFor) {
- la.nextChar()
- if (la.ch != c)
- return false
- }
- // drop the chars from the real reader (all lookahead + orig)
- (0 to lookingFor.length) foreach (_ => nextch)
- true
- }
-
- /** Take characters from input stream until given String "until"
- * is seen. Once seen, the accumulated characters are passed
- * along with the current Position to the supplied handler function.
- */
- private def xTakeUntil[T](
- handler: (Position, String) => T,
- positioner: () => Position,
- until: String): T =
- {
- val sb = new StringBuilder
- val head = until charAt 0
- val rest = until drop 1
-
- while (true) {
- if (ch == head && peek(rest))
- return handler(positioner(), sb.toString)
- else if (ch == SU)
- throw TruncatedXML
-
- sb append ch
- nextch
- }
- unreachable
- }
-
/** '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
*
* see [15]
*/
def xCharData: Tree = {
val start = curOffset
- "[CDATA[" foreach xToken
+ xToken("[CDATA[")
val mid = curOffset
xTakeUntil(handle.charData, () => r2p(start, mid, curOffset), "]]>")
}
@@ -250,43 +167,13 @@ trait MarkupParsers
xTakeUntil(handle.unparsed, () => r2p(start, start, curOffset), "</xml:unparsed>")
}
- /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
- * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
- *
- * see [66]
- */
- def xCharRef: String = {
- val hex = (ch == 'x') && { nextch; true }
- val base = if (hex) 16 else 10
- var i = 0
- while (ch != ';') {
- (ch: @switch) match {
- case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
- i = i * base + ch.asDigit
- case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
- | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
- if (!hex)
- reportSyntaxError("hex char not allowed in decimal char ref\n"
- +"Did you mean to write &#x ?");
- else
- i = i * base + ch.asDigit
- case SU =>
- throw TruncatedXML
- case _ =>
- reportSyntaxError("character '"+ch+"' not allowed in char ref")
- }
- nextch
- }
- i.toChar.toString
- }
-
/** Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
*
* see [15]
*/
def xComment: Tree = {
val start = curOffset - 2 // Rewinding to include "<!"
- "--" foreach xToken
+ xToken("--")
xTakeUntil(handle.comment, () => r2p(start, start, curOffset), "-->")
}
@@ -374,9 +261,9 @@ trait MarkupParsers
*/
def element: Tree = {
val start = curOffset
- val (qname, attrMap) = xTag
+ val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
- "/>" foreach xToken
+ xToken("/>")
handle.element(r2p(start, start, curOffset), qname, attrMap, new ListBuffer[Tree])
}
else { // handle content
@@ -396,54 +283,6 @@ trait MarkupParsers
}
}
- /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
- * Name ::= (Letter | '_') (NameChar)*
- *
- * see [5] of XML 1.0 specification
- *
- * pre-condition: ch != ':' // assured by definition of XMLSTART token
- * post-condition: name does neither start, nor end in ':'
- */
- def xName: String = {
- if (ch == SU) throw TruncatedXML
- else if (!isNameStart(ch))
- return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
-
- val buf = new StringBuilder
-
- do buf append nextch
- while (isNameChar(ch))
-
- if (buf.last == ':') {
- reportSyntaxError( "name cannot end in ':'" )
- buf setLength (buf.length - 1)
- }
- buf.toString.intern
- }
-
- /** scan [S] '=' [S]*/
- def xEQ = { xSpaceOpt; xToken('='); xSpaceOpt }
-
- /** skip optional space S? */
- def xSpaceOpt = { while (isSpace(ch)) { nextch }}
-
- /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
- def xSpace =
- if (isSpace(ch)) { nextch; xSpaceOpt }
- else if (ch == SU) throw TruncatedXML
- else reportSyntaxError("whitespace expected")
-
- /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
- *
- * see [15]
- */
- // <?xml2 version="1.0" encoding="UTF-8" standalone="yes"?>
- def xProcInstr: Tree = {
- val n = xName
- xSpaceOpt
- xTakeUntil(handle.procInstr(_: Position, n, _:String), () => tmppos, "?>")
- }
-
/** parse character data.
* precondition: xEmbeddedBlock == false (we are not in a scala block)
*/
@@ -467,23 +306,21 @@ trait MarkupParsers
}
/** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */
- private def xLiteralCommon(f: () => Tree, ifTruncated: Exception => Unit): Tree =
- try f()
+ private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = {
+ try return f()
catch {
- case ex: RuntimeException =>
- ex match {
- case c @ TruncatedXML =>
- ifTruncated(c)
- case c @ (MissingEndTagException | ConfusedAboutBracesException) =>
- parser.syntaxError(debugLastPos, c.getMessage + debugLastElem + ">")
- case _: ArrayIndexOutOfBoundsException =>
- parser.syntaxError(debugLastPos, "missing end tag in XML literal for <%s>" format debugLastElem)
- case _ => throw ex
- }
- EmptyTree
+ case c @ TruncatedXMLControl =>
+ ifTruncated(c.getMessage)
+ case c @ (MissingEndTagControl | ConfusedAboutBracesControl) =>
+ parser.syntaxError(debugLastPos, c.getMessage + debugLastElem + ">")
+ case _: ArrayIndexOutOfBoundsException =>
+ parser.syntaxError(debugLastPos, "missing end tag in XML literal for <%s>" format debugLastElem)
}
finally parser.in resume Tokens.XMLSTART
+ EmptyTree
+ }
+
/** Use a lookahead parser to run speculative body, and return the first char afterward. */
private def charComingAfter(body: => Unit): Char = {
input = input.lookaheadReader
@@ -521,7 +358,7 @@ trait MarkupParsers
ts(0)
}
},
- ex => parser.incompleteInputError(ex.getMessage)
+ msg => parser.incompleteInputError(msg)
)
/** @see xmlPattern. resynchronizes after successful parse
@@ -537,7 +374,7 @@ trait MarkupParsers
tree
}
},
- ex => parser.syntaxError(curOffset, ex.getMessage)
+ msg => parser.syntaxError(curOffset, msg)
)
def escapeToScala[A](op: => A, kind: String) = {
@@ -558,8 +395,9 @@ trait MarkupParsers
*/
def xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(true), "pattern")
+ def reportSyntaxError(pos: Int, str: String) = parser.syntaxError(pos, str)
def reportSyntaxError(str: String) = {
- parser.syntaxError(curOffset, "in XML literal: " + str)
+ reportSyntaxError(curOffset, "in XML literal: " + str)
nextch
}
@@ -595,7 +433,7 @@ trait MarkupParsers
assert(!xEmbeddedBlock, "problem with embedded block")
case SU =>
- throw TruncatedXML
+ throw TruncatedXMLControl
case _ => // text
appendText(r2p(start1, start1, curOffset), ts, xText)
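
The markup parser's sentinel objects (MissingEndTagControl, ConfusedAboutBracesControl, TruncatedXMLControl) now extend ControlThrowable rather than RuntimeException with ControlException. A minimal sketch of that pattern, with illustrative names, is:

import scala.util.control.ControlThrowable

object ControlFlowSketch {
  // cheap control-flow signal: no stack trace is filled in, and scala.util.control.NonFatal
  // deliberately refuses to match it, so generic error handlers don't swallow it
  case object Truncated extends ControlThrowable {
    override def getMessage = "input ended while parsing"
  }

  // read characters up to (and excluding) `end`, signalling truncation non-locally
  def readUntil(chars: Iterator[Char], end: Char): String = {
    val sb = new StringBuilder
    def loop(): String =
      if (!chars.hasNext) throw Truncated
      else chars.next() match {
        case `end` => sb.toString
        case c     => sb.append(c); loop()
      }
    try loop()
    catch { case Truncated => "<incomplete: " + sb.toString + ">" }
  }

  def main(args: Array[String]): Unit = {
    println(readUntil("<a attr='1'>rest".iterator, '>')) // <a attr='1'
    println(readUntil("<a attr=".iterator, '>'))         // <incomplete: <a attr=>
  }
}
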
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index a4f228cffb..6d35ef4199 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
//todo: allow infix type patterns
@@ -10,7 +9,7 @@ package scala.tools.nsc
package ast.parser
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.{Position, OffsetPosition, NoPosition, BatchSourceFile}
+import util.{ SourceFile, OffsetPosition, FreshNameCreator }
import symtab.Flags
import Tokens._
@@ -60,73 +59,121 @@ self =>
case class OpInfo(operand: Tree, operator: Name, offset: Offset)
- class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends Parser {
+ class SourceFileParser(val source: SourceFile) extends Parser {
- def this(unit: global.CompilationUnit) = this(unit, List())
+ /** The parse starting point depends on whether the source file is self-contained:
+ * if not, the AST will be supplemented.
+ */
+ def parseStartRule =
+ if (source.isSelfContained) () => compilationUnit()
+ else () => scriptBody()
+
+ def newScanner = new SourceFileScanner(source)
- val in = new UnitScanner(unit, patches)
+ val in = newScanner
in.init()
- def freshName(pos: Position, prefix: String): Name =
- unit.fresh.newName(pos, prefix)
+ private val globalFresh = new FreshNameCreator.Default
+
+ override def freshName(pos: Position, prefix: String): Name = newTermName(globalFresh.newName(prefix))
+
+ def o2p(offset: Int): Position = new OffsetPosition(source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
+
+ // suppress warnings; silent abort on errors
+ def warning(offset: Int, msg: String) {}
+ def deprecationWarning(offset: Int, msg: String) {}
+
+ def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+ def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
+
+ /** the markup parser */
+ lazy val xmlp = new MarkupParser(this, true)
+
+ object symbXMLBuilder extends SymbolicXMLBuilder(this, true) { // DEBUG choices
+ val global: self.global.type = self.global
+ def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
+ }
+
+ def xmlLiteral : Tree = xmlp.xLiteral
+ def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+ }
+
+ class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
+
+ def skipBraces[T](body: T): T = {
+ accept(LBRACE)
+ var openBraces = 1
+ while (in.token != EOF && openBraces > 0) {
+ if (in.token == XMLSTART) xmlLiteral()
+ else {
+ if (in.token == LBRACE) openBraces += 1
+ else if (in.token == RBRACE) openBraces -= 1
+ in.nextToken()
+ }
+ }
+ body
+ }
+
+ override def blockExpr(): Tree = skipBraces(EmptyTree)
+
+ override def templateBody(isPre: Boolean) = skipBraces(emptyValDef, List(EmptyTree))
+ }
+
+ class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
+
+ def this(unit: global.CompilationUnit) = this(unit, List())
+
+ override def newScanner = new UnitScanner(unit, patches)
+
- def o2p(offset: Int): Position = new OffsetPosition(unit.source,offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(unit.source, start, mid, end)
- def warning(offset: Int, msg: String) { unit.warning(o2p(offset), msg) }
- def deprecationWarning(offset: Int,
- msg: String) {
+ override def warning(offset: Int, msg: String) {
+ unit.warning(o2p(offset), msg)
+ }
+
+ override def deprecationWarning(offset: Int, msg: String) {
unit.deprecationWarning(o2p(offset), msg)
}
- var smartParsing = false
+ private var smartParsing = false
+ private def withSmartParsing[T](body: => T): T = {
+ val saved = smartParsing
+ try {
+ smartParsing = true
+ body
+ }
+ finally smartParsing = saved // false
+ }
val syntaxErrors = new ListBuffer[(Int, String)]
+ def showSyntaxErrors() =
+ for ((offset, msg) <- syntaxErrors)
+ unit.error(o2p(offset), msg)
- def incompleteInputError(msg: String) {
- val offset = unit.source.asInstanceOf[BatchSourceFile].content.length - 1
+ override def syntaxError(offset: Int, msg: String) {
if (smartParsing) syntaxErrors += ((offset, msg))
- else unit.incompleteInputError(o2p(offset), msg)
+ else unit.error(o2p(offset), msg)
}
- def syntaxError(offset: Int, msg: String) {
+ override def incompleteInputError(msg: String) {
+ val offset = source.content.length - 1
if (smartParsing) syntaxErrors += ((offset, msg))
- else unit.error(o2p(offset), msg)
+ else unit.incompleteInputError(o2p(offset), msg)
}
/** parse unit. If there are unbalanced braces,
* try to correct them and reparse.
*/
- def smartParse(): Tree = try {
- smartParsing = true
+ def smartParse(): Tree = withSmartParsing {
val firstTry = parse()
if (syntaxErrors.isEmpty) firstTry
- else {
- val patches = in.healBraces()
- if (patches.isEmpty) {
- for ((offset, msg) <- syntaxErrors) unit.error(o2p(offset), msg)
- firstTry
- } else {
-// println(patches)
- new UnitParser(unit, patches).parse()
- }
+ else in.healBraces() match {
+ case Nil => showSyntaxErrors() ; firstTry
+ case patches => new UnitParser(unit, patches).parse()
}
- } finally {
- smartParsing = false
}
-
- /** the markup parser */
- lazy val xmlp = new MarkupParser(this, true)
-
- object symbXMLBuilder extends SymbolicXMLBuilder(this, true) { // DEBUG choices
- val global: self.global.type = self.global
- def freshName(prefix: String): Name = UnitParser.this.freshName(prefix)
- }
-
- def xmlLiteral : Tree = xmlp.xLiteral
-
- def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
}
final val Local = 0
@@ -170,14 +217,70 @@ self =>
*/
var classContextBounds: List[Tree] = Nil
- /** this is the general parse method
+ /** Are we inside the Scala package? Set for files that start with `package scala'.
+ */
+ private var inScalaPackage = false
+
+ def parseStartRule: () => Tree
+
+ /** This is the general parse entry point.
*/
def parse(): Tree = {
- val t = compilationUnit()
+ val t = parseStartRule()
accept(EOF)
t
}
+ /** This is the parse entry point for code which is not self-contained, e.g.
+ * a script which is a series of template statements. They will be
+ * swaddled in Trees until the AST is equivalent to the one returned
+ * by compilationUnit().
+ */
+ def scriptBody(): Tree = {
+ val stmts = templateStatSeq(false)._2
+ accept(EOF)
+
+ /** Here we are building an AST representing the following source fiction,
+ * where <moduleName> is from -Xscript (defaults to "Main") and <stmts> are
+ * the result of parsing the script file.
+ *
+ * object <moduleName> {
+ * def main(argv: Array[String]): Unit = {
+ * val args = argv
+ * new AnyRef {
+ * <stmts>
+ * }
+ * }
+ * }
+ */
+ import definitions._
+
+ def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
+ def emptyInit = DefDef(
+ NoMods,
+ nme.CONSTRUCTOR,
+ Nil,
+ List(Nil),
+ TypeTree(),
+ Block(List(Apply(Select(Super("", ""), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
+ )
+
+ // def main
+ def mainParamType = AppliedTypeTree(Ident("Array".toTypeName), List(Ident("String".toTypeName)))
+ def mainParameter = List(ValDef(Modifiers(Flags.PARAM), "argv", mainParamType, EmptyTree))
+ def mainSetArgv = List(ValDef(NoMods, "args", TypeTree(), Ident("argv")))
+ def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
+ def mainDef = DefDef(NoMods, "main", Nil, List(mainParameter), scalaDot(nme.Unit.toTypeName), Block(mainSetArgv, mainNew))
+
+ // object Main
+ def moduleName = ScriptRunner scriptMain settings
+ def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
+ def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
+
+ // package <empty> { ... }
+ makePackaging(0, emptyPkg, List(moduleDef))
+ }
+
/* --------------- PLACEHOLDERS ------------------------------------------- */
/** The implicit parameters introduced by `_' in the current expression.
@@ -350,8 +453,11 @@ self =>
/** Check that type parameter is not by name T* */
def checkNotByName(t: Tree) = t match {
- case AppliedTypeTree(Select(_, n), _) if (n == nme.BYNAME_PARAM_CLASS_NAME.toTypeName) =>
- syntaxError(t.pos, "no by-name parameter type allowed here", false)
+ case AppliedTypeTree(Select(_, n), _) =>
+ if (n == nme.BYNAME_PARAM_CLASS_NAME.toTypeName)
+ syntaxError(t.pos, "no by-name parameter type allowed here", false)
+ else if (n == nme.REPEATED_PARAM_CLASS_NAME.toTypeName)
+ syntaxError(t.pos, "no * parameter type allowed here", false)
case _ =>
}
@@ -420,15 +526,20 @@ self =>
*/
def joinComment(trees: => List[Tree]): List[Tree] = {
val doc = in.flushDoc
- if ((doc ne null) && doc._1.length > 0) {
- val ts = trees
- val main = ts.find(_.pos.isOpaqueRange)
- ts map {
+ if ((doc ne null) && doc.raw.length > 0) {
+ val joined = trees map {
t =>
- val dd = DocDef(doc._1, t)
- val pos = doc._2.withEnd(t.pos.endOrPoint)
- dd setPos (if (t eq main) pos else pos.makeTransparent)
+ val dd = DocDef(doc, t)
+ val defnPos = t.pos
+ val pos = doc.pos.withEnd(defnPos.endOrPoint)
+ dd setPos (if (defnPos.isOpaqueRange) pos else pos.makeTransparent)
}
+ joined.find(_.pos.isOpaqueRange) foreach {
+ main =>
+ val mains = List(main)
+ joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
+ }
+ joined
}
else trees
}
@@ -957,7 +1068,7 @@ self =>
*/
def statement(location: Int): Tree = expr(location) // !!! still needed?
- /** Expr ::= (Bindings | Id | `_') `=>' Expr
+ /** Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
* | Expr1
* ResultExpr ::= (Bindings | Id `:' CompoundType) `=>' Block
* | Expr1
@@ -1057,6 +1168,8 @@ self =>
atPos(in.skipToken()) {
Throw(expr())
}
+ case IMPLICIT =>
+ implicitClosure(in.skipToken(), location)
case _ =>
var t = postfixExpr()
if (in.token == EQUALS) {
@@ -1097,7 +1210,11 @@ self =>
}
} else if (in.token == MATCH) {
t = atPos(t.pos.startOrPoint, in.skipToken()) {
- Match(stripParens(t), surround(LBRACE, RBRACE)(caseClauses(), Nil))
+ /** For debugging pattern matcher transition issues */
+ if (settings.Ypmatnaive.value)
+ makeSequencedMatch(stripParens(t), surround(LBRACE, RBRACE)(caseClauses(), Nil))
+ else
+ Match(stripParens(t), surround(LBRACE, RBRACE)(caseClauses(), Nil))
}
}
// in order to allow anonymous functions as statements (as opposed to expressions) inside
@@ -1116,6 +1233,17 @@ self =>
stripParens(t)
}
+ /** Expr ::= implicit Id => Expr
+ */
+ def implicitClosure(start: Int, location: Int): Tree = {
+ val param0 = convertToParam(atPos(in.offset)(Ident(ident())))
+ val param = treeCopy.ValDef(param0, param0.mods | Flags.IMPLICIT, param0.name, param0.tpt, param0.rhs)
+ atPos(start, in.offset) {
+ accept(ARROW)
+ Function(List(param), if (location != InBlock) expr() else block())
+ }
+ }
+
/** PostfixExpr ::= InfixExpr [Id [nl]]
* InfixExpr ::= PrefixExpr
* | InfixExpr Id [nl] InfixExpr
@@ -1159,7 +1287,8 @@ self =>
atPos(in.offset) {
val name = unaryOp()
in.token match {
- case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => literal(true)
+ // Don't include double and float here else we lose -0.0
+ case INTLIT | LONGLIT => literal(true)
case _ => Select(stripParens(simpleExpr()), name)
}
}
@@ -1218,7 +1347,7 @@ self =>
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
val (parents, argss, self, stats) = template(false)
- val cpos = r2p(tstart, tstart, in.lastOffset)
+ val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
makeNew(parents, self, stats, argss, npos, cpos)
case _ =>
syntaxErrorOrIncomplete("illegal start of simple expression", true)
@@ -1228,6 +1357,9 @@ self =>
}
def simpleExprRest(t: Tree, canApply: Boolean): Tree = {
+ // Various errors in XML literals can cause xmlLiteral to propagate
+ // EmptyTrees. Watch out for them here (see also postfixExpr).
+ if (EmptyTree == t) return EmptyTree // #3604 (mics)
if (canApply) newLineOptWhenFollowedBy(LBRACE)
in.token match {
case DOT =>
@@ -1279,25 +1411,10 @@ self =>
}
}
- // if arg has the form "x$1 => a = x$1" it's treated as "a = x$1" with x$1
- // in placeholderParams. This allows e.g. "val f: Int => Int = foo(a = 1, b = _)"
- def convertArg(arg: Tree): Tree = arg match {
- case Function(
- List(vd @ ValDef(mods, pname1, ptype1, EmptyTree)),
- Assign(Ident(aname), rhs)) if (mods hasFlag Flags.SYNTHETIC) =>
- rhs match {
- case Ident(`pname1`) | Typed(Ident(`pname1`), _) =>
- placeholderParams = vd :: placeholderParams
- atPos(arg.pos) { AssignOrNamedArg(Ident(aname), Ident(pname1)) }
- case _ => arg
- }
- case _ => arg
- }
-
if (in.token == LBRACE)
List(blockExpr())
else
- surround(LPAREN, RPAREN)(if (in.token == RPAREN) List() else (args() map convertArg), List())
+ surround(LPAREN, RPAREN)(if (in.token == RPAREN) List() else args(), List())
}
/** BlockExpr ::= `{' (CaseClauses | Block) `}'
@@ -1380,8 +1497,10 @@ self =>
if (tok == EQUALS && eqOK) in.nextToken()
else accept(LARROW)
val rhs = expr()
- enums += makeGenerator(r2p(start, point, in.lastOffset), pat, tok == EQUALS, rhs)
- if (in.token == IF) enums += makeFilter(in.offset, guard())
+ enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, tok == EQUALS, rhs)
+ // why max above? IDE stress tests have shown that lastOffset could be less than start,
+ // I guess this happens if instead of a for-expression we sit on a closing paren.
+ while (in.token == IF) enums += makeFilter(in.offset, guard())
}
def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.endOrPoint), tree)
@@ -1461,6 +1580,9 @@ self =>
def pattern3(seqOK: Boolean): Tree = {
val base = opstack
var top = simplePattern(seqOK)
+ // See ticket #3189 for the motivation for the null check.
+ // TODO: dredge out the remnants of regexp patterns.
+ // ... and now this is back the way it was because it caused #3480.
if (seqOK && isIdent && in.name == STAR)
return atPos(top.pos.startOrPoint, in.skipToken())(Star(stripParens(top)))
@@ -1550,14 +1672,14 @@ self =>
/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
- /** Drop `private' modifier when follwed by a qualifier.
+ /** Drop `private' modifier when followed by a qualifier.
* Contract `abstract' and `override' to ABSOVERRIDE
*/
private def normalize(mods: Modifiers): Modifiers =
if ((mods hasFlag Flags.PRIVATE) && mods.privateWithin != nme.EMPTY.toTypeName)
- mods &~ Flags.PRIVATE
+ normalize(mods &~ Flags.PRIVATE)
else if ((mods hasFlag Flags.ABSTRACT) && (mods hasFlag Flags.OVERRIDE))
- mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE
+ normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
else
mods
@@ -1695,12 +1817,17 @@ self =>
mods = modifiers() | Flags.PARAMACCESSOR
if (mods.hasFlag(Flags.LAZY)) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
if (in.token == VAL) {
+ mods = mods withPosition (in.token, tokenRange(in))
in.nextToken()
} else if (in.token == VAR) {
+ mods = mods withPosition (in.token, tokenRange(in))
mods |= Flags.MUTABLE
in.nextToken()
- } else if (!caseParam) {
- mods |= Flags.PRIVATE | Flags.LOCAL
+ } else {
+ if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
+ if (!caseParam) {
+ mods |= Flags.PRIVATE | Flags.LOCAL
+ }
}
if (caseParam) {
mods |= Flags.CASEACCESSOR
@@ -1710,7 +1837,7 @@ self =>
val name = ident()
var bynamemod = 0
val tpt =
- if (settings.Xexperimental.value && !owner.isTypeName && in.token != COLON) {
+ if (settings.YmethodInfer.value && !owner.isTypeName && in.token != COLON) {
TypeTree()
} else { // XX-METHOD-INFER
accept(COLON)
@@ -1720,6 +1847,10 @@ self =>
in.offset,
(if (mods.hasFlag(Flags.MUTABLE)) "`var'" else "`val'") +
" parameters may not be call-by-name", false)
+ else if (implicitmod != 0)
+ syntaxError(
+ in.offset,
+ "implicit parameters may not be call-by-name", false)
else bynamemod = Flags.BYNAMEPARAM
}
paramType()
@@ -1739,7 +1870,7 @@ self =>
if (in.token != RPAREN) {
if (in.token == IMPLICIT) {
if (!contextBounds.isEmpty)
- syntaxError("cannot have both implicit parameters and context bounds `: ...' on type parameters", false)
+ syntaxError("cannot have both implicit parameters and context bounds `: ...' or view bounds `<% ...' on type parameters", false)
in.nextToken()
implicitmod = Flags.IMPLICIT
}
@@ -1817,7 +1948,7 @@ self =>
}
val nameOffset = in.offset
val pname =
- (if (in.token == USCORE) { // @M! also allow underscore
+ (if (in.token == USCORE) { // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
in.nextToken()
nme.WILDCARD
} else ident()).toTypeName
@@ -1873,8 +2004,14 @@ self =>
/** Import ::= import ImportExpr {`,' ImportExpr}
*/
def importClause(): List[Tree] = {
- accept(IMPORT)
- commaSeparated(importExpr())
+ val offset = accept(IMPORT)
+ commaSeparated(importExpr()) match {
+ case Nil => Nil
+ case t :: rest =>
+ // The first import should start at the position of the keyword.
+ t.setPos(t.pos.withStart(offset))
+ t :: rest
+ }
}
/** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
@@ -1907,7 +2044,7 @@ self =>
if (in.token == USCORE) {
val uscoreOffset = in.offset
in.nextToken()
- Import(t, List(ImportSelector(nme.WILDCARD, uscoreOffset, null, -1)))
+ Import(t, List(ImportSelector(nme.WILDCARD, uscoreOffset, nme.WILDCARD, -1)))
} else if (in.token == LBRACE) {
Import(t, importSelectors())
} else {
@@ -2107,7 +2244,7 @@ self =>
var newmods = mods
val nameOffset = in.offset
val name = ident()
- atPos(start, if (name == nme.ERROR) start else nameOffset) {
+ val result = atPos(start, if (name == nme.ERROR) start else nameOffset) {
// contextBoundBuf is for context bounded type parameters of the form
// [T : B] or [T : => B]; it contains the equivalent implicit parameter type,
// i.e. (B[T] or T => B)
@@ -2135,6 +2272,8 @@ self =>
}
DefDef(newmods, name, tparams, vparamss, restype, rhs)
}
+ signalParseProgress(result.pos)
+ result
}
}
@@ -2151,6 +2290,7 @@ self =>
atPos(accept(THIS)) {
newLineOptWhenFollowedBy(LBRACE)
var t = Apply(Ident(nme.CONSTRUCTOR), argumentExprs())
+ newLineOptWhenFollowedBy(LBRACE)
while (in.token == LPAREN || in.token == LBRACE) {
t = Apply(t, argumentExprs())
newLineOptWhenFollowedBy(LBRACE)
@@ -2240,7 +2380,7 @@ self =>
classContextBounds = contextBoundBuf.toList
val tstart = (in.offset::classContextBounds.map(_.pos.startOrPoint)).min
if (!classContextBounds.isEmpty && mods.hasFlag(Flags.TRAIT)) {
- syntaxError("traits cannot have type parameters with context bounds `: ...'", false)
+ syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
classContextBounds = List()
}
val constrAnnots = annotations(false, true)
@@ -2249,7 +2389,7 @@ self =>
else (accessModifierOpt(), paramClauses(name, classContextBounds, mods.hasFlag(Flags.CASE)))
var mods1 = mods
if (mods hasFlag Flags.TRAIT) {
- if (settings.Xexperimental.value && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
+ if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
} else if (in.token == SUBTYPE) {
syntaxError("classes are not allowed to be virtual", false)
}
@@ -2340,7 +2480,7 @@ self =>
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers,
vparamss: List[List[ValDef]], tstart: Int): Template = {
val (parents0, argss, self, body) =
- if (in.token == EXTENDS || settings.Xexperimental.value && (mods hasFlag Flags.TRAIT) && in.token == SUBTYPE) {
+ if (in.token == EXTENDS || settings.YvirtClasses && (mods hasFlag Flags.TRAIT) && in.token == SUBTYPE) {
in.nextToken()
template(mods hasFlag Flags.TRAIT)
} else if ((in.token == SUBTYPE) && (mods hasFlag Flags.TRAIT)) {
@@ -2352,7 +2492,7 @@ self =>
(List(), List(List()), self, body)
}
var parents = parents0
- if (name != nme.ScalaObject.toTypeName && !isInterface(mods, body))
+ if (!isInterface(mods, body) && !(inScalaPackage && name == nme.Array.toTypeName))
parents = parents ::: List(scalaScalaObjectConstr)
if (parents.isEmpty)
parents = List(scalaAnyRefConstr)
@@ -2446,13 +2586,18 @@ self =>
while (in.token != RBRACE && in.token != EOF) {
if (in.token == PACKAGE) {
val start = in.skipToken()
- stats += {
- if (in.token == OBJECT) makePackageObject(start, objectDef(in.offset, NoMods))
- else packaging(start)
+ stats ++= {
+ if (in.token == OBJECT) {
+ joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
+ }
+ else {
+ in.flushDoc
+ List(packaging(start))
+ }
}
} else if (in.token == IMPORT) {
+ in.flushDoc
stats ++= importClause()
- // XXX: IDE hook this all.
} else if (in.token == CLASS ||
in.token == CASECLASS ||
in.token == TRAIT ||
@@ -2483,6 +2628,7 @@ self =>
var self: ValDef = emptyValDef
val stats = new ListBuffer[Tree]
if (isExprIntro) {
+ in.flushDoc
val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed.
if (in.token == ARROW) {
first match {
@@ -2503,8 +2649,10 @@ self =>
}
while (in.token != RBRACE && in.token != EOF) {
if (in.token == IMPORT) {
+ in.flushDoc
stats ++= importClause()
} else if (isExprIntro) {
+ in.flushDoc
stats += statement(InTemplate)
} else if (isDefIntro || isModifier || in.token == LBRACKET /*todo: remove */ || in.token == AT) {
stats ++= joinComment(nonLocalDefOrDcl)
@@ -2551,12 +2699,15 @@ self =>
}
*/
- def localDef : List[Tree] = {
+ def localDef(implicitMod: Int): List[Tree] = {
val annots = annotations(true, false)
val pos = in.offset
- val mods = localModifiers() withAnnotations annots
- if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods)
- else List(tmplDef(pos, mods))
+ val mods = (localModifiers() | implicitMod) withAnnotations annots
+ val defs =
+ if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods)
+ else List(tmplDef(pos, mods))
+ if (in.token != RBRACE && in.token != CASE) defs
+ else defs ::: List(Literal(()).setPos(o2p(in.offset)))
}
/** BlockStatSeq ::= { BlockStat semi } [ResultExpr]
@@ -2575,15 +2726,19 @@ self =>
stats += statement(InBlock)
if (in.token != RBRACE && in.token != CASE) acceptStatSep()
} else if (isDefIntro || isLocalModifier || in.token == AT) {
- stats ++= localDef
- if (in.token == RBRACE || in.token == CASE) {
- //syntaxError("block must end in result expression, not in definition", false)
- stats += Literal(()).setPos(o2p(in.offset))
- } else acceptStatSep()
+ if (in.token == IMPLICIT) {
+ val start = in.skipToken()
+ if (isIdent) stats += implicitClosure(start, InBlock)
+ else stats ++= localDef(Flags.IMPLICIT)
+ } else {
+ stats ++= localDef(0)
+ }
+ if (in.token != RBRACE && in.token != CASE) acceptStatSep()
} else if (isStatSep) {
in.nextToken()
} else {
- syntaxErrorOrIncomplete("illegal start of statement", true)
+ val addendum = if (isModifier) " (no modifiers allowed here)" else ""
+ syntaxErrorOrIncomplete("illegal start of statement" + addendum, true)
}
}
stats.toList
@@ -2599,12 +2754,15 @@ self =>
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
- ts += makePackageObject(start, objectDef(in.offset, NoMods))
+ ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
}
} else {
+ in.flushDoc
+ if (in.token == IDENTIFIER && in.name.encode == nme.scala_)
+ inScalaPackage = true
val pkg = qualId()
newLineOptWhenFollowedBy(LBRACE)
if (in.token == EOF) {
@@ -2624,10 +2782,14 @@ self =>
}
ts.toList
}
- val start = caseAwareTokenOffset max 0
topstats() match {
case List(stat @ PackageDef(_, _)) => stat
- case stats => makePackaging(start, atPos(o2p(start)) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
+ case stats =>
+ val start =
+ if (stats forall (_ == EmptyTree)) 0
+ else wrappingPos(stats).startOrPoint
+
+ makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
}
}
}
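
withSmartParsing above is an instance of the save-and-restore idiom that replaces the hand-rolled try/finally in the old smartParse: set a flag for the duration of a block and restore the previous value even if the block throws. A generic, standalone restatement with illustrative names:

object SaveRestoreSketch {
  private var collecting = false
  private val errors = collection.mutable.ListBuffer[String]()

  // run `body` with the flag set, restoring the previous value afterwards
  private def withCollecting[T](body: => T): T = {
    val saved = collecting
    collecting = true
    try body
    finally collecting = saved
  }

  def report(msg: String): Unit =
    if (collecting) errors += msg else sys.error(msg)

  def main(args: Array[String]): Unit = {
    withCollecting {
      report("first problem")   // buffered instead of thrown
      report("second problem")
    }
    println(errors.toList)      // List(first problem, second problem)
  }
}
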
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Patch.scala b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala
new file mode 100644
index 0000000000..a55f84151c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala
@@ -0,0 +1,8 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc.ast.parser
+
+class Patch(off: Int, change: Change)
+
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 267d5bd17f..4605beb5cf 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast.parser
import scala.tools.nsc.util._
-import SourceFile.{LF, FF, CR, SU}
+import Chars._
import Tokens._
import scala.annotation.switch
import scala.collection.mutable.{ListBuffer, ArrayBuffer}
@@ -59,7 +58,9 @@ trait Scanners {
def resume(lastCode: Int) = {
token = lastCode
- assert(next.token == EMPTY)
+ if (next.token != EMPTY && !reporter.hasErrors)
+ syntaxError("unexpected end of input: possible missing '}' in XML block")
+
nextToken()
}
@@ -103,11 +104,11 @@ trait Scanners {
/** buffer for the documentation comment
*/
var docBuffer: StringBuilder = null
- var docOffset: Position = null
+ var docPos: Position = null
/** Return current docBuffer and set docBuffer to null */
- def flushDoc = {
- val ret = if (docBuffer != null) (docBuffer.toString, docOffset) else null
+ def flushDoc: DocComment = {
+ val ret = if (docBuffer != null) DocComment(docBuffer.toString, docPos) else null
docBuffer = null
ret
}
@@ -165,10 +166,18 @@ trait Scanners {
sepRegions = sepRegions.tail
case _ =>
}
+ (lastToken: @switch) match {
+ case RBRACE | RBRACKET | RPAREN =>
+ docBuffer = null
+ case _ =>
+ }
// Read a token or copy it from `next` tokenData
if (next.token == EMPTY) {
lastOffset = charOffset - 1
+ if(lastOffset > 0 && buf(lastOffset) == '\n' && buf(lastOffset - 1) == '\r') {
+ lastOffset -= 1
+ }
fetchToken()
} else {
this copyFrom next
@@ -235,7 +244,8 @@ trait Scanners {
// println("blank line found at "+lastOffset+":"+(lastOffset to idx).map(buf(_)).toList)
return true
}
- } while (idx < end && ch <= ' ')
+ if (idx == end) return false
+ } while (ch <= ' ')
}
idx += 1; ch = buf(idx)
}
@@ -312,7 +322,7 @@ trait Scanners {
if (ch == '\"') {
nextChar()
if (ch == '\"') {
- nextChar()
+ nextRawChar()
val saved = lineStartOffset
getMultiLineStringLit()
if (lineStartOffset != saved) // ignore linestarts within a multi-line string
@@ -331,7 +341,7 @@ trait Scanners {
nextChar()
if (isIdentifierStart(ch))
charLitOr(getIdentRest)
- else if (isSpecial(ch))
+ else if (isOperatorPart(ch) && (ch != '\\'))
charLitOr(getOperatorRest)
else {
getLitChar()
@@ -383,6 +393,7 @@ trait Scanners {
getIdentRest()
} else if (isSpecial(ch)) {
putChar(ch)
+ nextChar()
getOperatorRest()
} else {
syntaxError("illegal character")
@@ -554,9 +565,9 @@ trait Scanners {
private def getMultiLineStringLit() {
if (ch == '\"') {
- nextChar()
+ nextRawChar()
if (ch == '\"') {
- nextChar()
+ nextRawChar()
if (ch == '\"') {
nextChar()
while (ch == '\"') {
@@ -578,7 +589,7 @@ trait Scanners {
incompleteInputError("unclosed multi-line string literal")
} else {
putChar(ch)
- nextChar()
+ nextRawChar()
getMultiLineStringLit()
}
}
@@ -680,7 +691,7 @@ trait Scanners {
var value: Long = 0
val divider = if (base == 10) 1 else 2
val limit: Long =
- if (token == LONGLIT) Math.MAX_LONG else Math.MAX_INT
+ if (token == LONGLIT) Long.MaxValue else Int.MaxValue
var i = 0
val len = strVal.length
while (i < len) {
@@ -709,7 +720,7 @@ trait Scanners {
*/
def floatVal(negated: Boolean): Double = {
val limit: Double =
- if (token == DOUBLELIT) Math.MAX_DOUBLE else Math.MAX_FLOAT
+ if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
if (value > limit)
@@ -775,7 +786,7 @@ trait Scanners {
/** Backquoted idents like 22.`foo`. */
case '`' =>
- return setStrVal() /** Note the early return **/
+ return setStrVal() /** Note the early return */
/** These letters may be part of a literal, or a method invocation on an Int */
case 'd' | 'D' | 'f' | 'F' =>
@@ -796,7 +807,7 @@ trait Scanners {
}
/** Parse character literal if current character is followed by \',
- * or follow with given op and return a symol literal token
+ * or follow with given op and return a symbol literal token
*/
def charLitOr(op: () => Unit) {
putChar(ch)
@@ -882,32 +893,6 @@ trait Scanners {
}
} // end Scanner
- // ------------- character classification --------------------------------
-
- def isIdentifierStart(c: Char): Boolean =
- ('A' <= c && c <= 'Z') ||
- ('a' <= c && c <= 'a') ||
- (c == '_') || (c == '$') ||
- Character.isUnicodeIdentifierStart(c)
-
- def isIdentifierPart(c: Char) =
- isIdentifierStart(c) ||
- ('0' <= c && c <= '9') ||
- Character.isUnicodeIdentifierPart(c)
-
- def isSpecial(c: Char) = {
- val chtp = Character.getType(c)
- chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
- }
-
- def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
- case '~' | '!' | '@' | '#' | '%' |
- '^' | '*' | '+' | '-' | '<' |
- '>' | '?' | ':' | '=' | '&' |
- '|' | '/' | '\\' => true
- case c => isSpecial(c)
- }
-
// ------------- keyword configuration -----------------------------------
/** Keyword array; maps from name indices to tokens */
@@ -1029,17 +1014,31 @@ trait Scanners {
else "'<" + token + ">'"
}
+ class MalformedInput(val offset: Int, val msg: String) extends Exception
+
+ /** A scanner for a given source file not necessarily attached to a compilation unit.
+ * Useful for looking inside source files that are not currently compiled to see what's there
+ */
+ class SourceFileScanner(val source: SourceFile) extends Scanner {
+ val buf = source.content
+ override val decodeUni: Boolean = !settings.nouescape.value
+
+ // suppress warnings, throw exception on errors
+ def warning(off: Offset, msg: String): Unit = {}
+ def deprecationWarning(off: Offset, msg: String) = {}
+ def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
+ def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
+ }
+
/** A scanner over a given compilation unit
*/
- class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends Scanner {
+ class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- val buf = unit.source.asInstanceOf[BatchSourceFile].content
- val decodeUnit = !settings.nouescape.value
- def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
- def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
- def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
- def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
+ override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
+ override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
+ override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
+ override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
private var bracePatches: List[BracePatch] = patches
@@ -1090,8 +1089,8 @@ trait Scanners {
}
override def foundDocComment(value: String, start: Int, end: Int) {
- docOffset = new RangePosition(unit.source, start, start, end)
- unit.comment(docOffset, value)
+ docPos = new RangePosition(unit.source, start, start, end)
+ unit.comment(docPos, value)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 03f3053edc..86c79eb733 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Burak Emir
*/
-// $Id$
package scala.tools.nsc
package ast.parser
@@ -10,8 +9,8 @@ package ast.parser
import collection.mutable.Map
import xml.{ EntityRef, Text }
import xml.XML.{ xmlns }
-import util.Position
import symtab.Flags.MUTABLE
+import scala.tools.util.StringOps.splitWhere
/** This class builds instances of <code>Tree</code> that represent XML.
*
@@ -57,11 +56,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean)
// convenience methods
private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
- private def const(x: Any) = x match {
- case s: runtime.RichString => Literal(Constant(s.toString)) // not our finest hour
- case s: collection.immutable.StringLike[_] => Literal(Constant(s.toString)) // not our finest hour
- case _ => Literal(Constant(x))
- }
+ private def const(x: Any) = Literal(Constant(x))
private def wild = Ident(nme.WILDCARD)
private def wildStar = Ident(nme.WILDCARD_STAR.toTypeName)
private def _scala(name: Name) = Select(Select(Ident(nme.ROOTPKG), nme.scala_), name)
@@ -165,9 +160,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean)
}
/** Returns (Some(prefix) | None, rest) based on position of ':' */
- def splitPrefix(name: String): (Option[String], String) = (name indexOf ':') match {
- case -1 => (None, name)
- case i => (Some(name take i), name drop (i + 1))
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ case Some((pre, rest)) => (Some(pre), rest)
+ case _ => (None, name)
}
/** Various node constructions. */
@@ -195,7 +190,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean)
/** Extract all the namespaces from the attribute map. */
val namespaces: List[Tree] =
- for (z <- attrMap.keysIterator.toList ; if z startsWith xmlns) yield {
+ for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
val ns = splitPrefix(z) match {
case (Some(_), rest) => rest
case _ => null
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index ab848b6b48..b3c3c7dfe9 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast.parser
@@ -19,6 +18,8 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
class ParserPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
override val checkable = false
+ override val keepsTypeParams = false
+
def apply(unit: global.CompilationUnit) {
global.informProgress("parsing " + unit)
unit.body =
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index 29da4008aa..00347c3a9e 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -1,13 +1,19 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast.parser
-object Tokens {
+import annotation.switch
+
+/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
+ * as one might like because JavaTokens for no clear reason chose new numbers for
+ * identical token sets.
+ */
+abstract class Tokens {
+ import util.Chars._
/** special tokens */
final val EMPTY = -3
@@ -22,6 +28,23 @@ object Tokens {
final val FLOATLIT = 4
final val DOUBLELIT = 5
final val STRINGLIT = 6
+
+ def LPAREN: Int
+ def RBRACE: Int
+
+ def isIdentifier(code: Int): Boolean
+ def isLiteral(code: Int): Boolean
+ def isKeyword(code: Int): Boolean
+ def isSymbol(code: Int): Boolean
+
+ final def isSpace(at: Char) = at == ' ' || at == '\t'
+ final def isNewLine(at: Char) = at == CR || at == LF || at == FF
+ final def isBrace(code : Int) = code >= LPAREN && code <= RBRACE
+ final def isOpenBrace(code : Int) = isBrace(code) && (code % 2 == 0)
+ final def isCloseBrace(code : Int) = isBrace(code) && (code % 2 == 1)
+}
+
+object Tokens extends Tokens {
final val SYMBOLLIT = 7
def isLiteral(code : Int) =
code >= CHARLIT && code <= SYMBOLLIT
@@ -32,16 +55,14 @@ object Tokens {
def isIdentifier(code : Int) =
code >= IDENTIFIER && code <= BACKQUOTED_IDENT
- def canBeginExpression(code : Int) = code match {
- case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
- case LBRACE|LPAREN|LBRACKET|COMMENT|STRINGLIT => true
- case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
- case NULL|THIS|TRUE|FALSE => true
- case code if isLiteral(code) => true
- case _ => false
+ @switch def canBeginExpression(code : Int) = code match {
+ case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
+ case LBRACE|LPAREN|LBRACKET|COMMENT|STRINGLIT => true
+ case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
+ case NULL|THIS|TRUE|FALSE => true
+ case code => isLiteral(code)
}
-
/** keywords */
final val IF = 20
final val FOR = 21
@@ -90,15 +111,14 @@ object Tokens {
def isKeyword(code : Int) =
code >= IF && code <= LAZY
- def isDefinition(code : Int) = code match {
- case CLASS|TRAIT|OBJECT => true
- case CASECLASS|CASEOBJECT => true
- case DEF|VAL|VAR => true
- case TYPE => true
- case _ => false
+ @switch def isDefinition(code : Int) = code match {
+ case CLASS|TRAIT|OBJECT => true
+ case CASECLASS|CASEOBJECT => true
+ case DEF|VAL|VAR => true
+ case TYPE => true
+ case _ => false
}
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -127,11 +147,6 @@ object Tokens {
final val LBRACE = 94
final val RBRACE = 95
- def isBrace(code : Int) =
- code >= LPAREN && code <= RBRACE
- def isOpenBrace(code : Int) = isBrace(code) && (code % 2 == 0)
- def isCloseBrace(code : Int) = isBrace(code) && (code % 2 == 1)
-
/** XML mode */
final val XMLSTART = 96
@@ -141,15 +156,4 @@ object Tokens {
final val WHITESPACE = 105
final val IGNORE = 106
final val ESCAPE = 109
-
- def isSpace(at : Char) = at match {
- case ' ' | '\t' => true
- case _ => false
- }
- import scala.tools.nsc.util.SourceFile._
-
- def isNewLine(at : Char) = at match {
- case CR | LF | FF => true
- case _ => false
- }
}
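
The isBrace/isOpenBrace/isCloseBrace helpers hoisted into the abstract Tokens class rely on the delimiter token codes coming in opener/closer pairs, so parity alone distinguishes them. The diff only shows LBRACE = 94 and RBRACE = 95; the paren and bracket codes in this sketch are assumed for illustration:

object BraceParitySketch {
  // assumed codes for the illustration; only LBRACE/RBRACE appear in the patch above
  final val LPAREN = 90;   final val RPAREN   = 91
  final val LBRACKET = 92; final val RBRACKET = 93
  final val LBRACE = 94;   final val RBRACE   = 95

  def isBrace(code: Int)      = code >= LPAREN && code <= RBRACE
  def isOpenBrace(code: Int)  = isBrace(code) && code % 2 == 0
  def isCloseBrace(code: Int) = isBrace(code) && code % 2 == 1

  def main(args: Array[String]): Unit = {
    assert(List(LPAREN, LBRACKET, LBRACE) forall isOpenBrace)
    assert(List(RPAREN, RBRACKET, RBRACE) forall isCloseBrace)
    println("every opener has an even code, every closer an odd one")
  }
}
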
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 29a9599744..9f20a70de5 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -1,15 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.Position
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
@@ -65,46 +63,54 @@ abstract class TreeBuilder {
* The variables keep their positions; whereas the pattern is converted to be synthetic
* for all nodes that contain a variable position.
*/
- private object getvarTraverser extends Traverser {
+ class GetVarTraverser extends Traverser {
val buf = new ListBuffer[(Name, Tree, Position)]
- def init: Traverser = { buf.clear; this }
+
def namePos(tree: Tree, name: Name): Position =
- if (!tree.pos.isRange || name.toString.contains('$')) tree.pos.focus
+ if (!tree.pos.isRange || name.containsName(nme.DOLLARraw)) tree.pos.focus
else {
val start = tree.pos.start
val end = start + name.decode.length
r2p(start, start, end)
}
+
override def traverse(tree: Tree): Unit = {
+ def seenName(name: Name) = buf exists (_._1 == name)
+ def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name)))
val bl = buf.length
+
tree match {
- case Bind(name, Typed(tree1, tpt)) =>
- if ((name != nme.WILDCARD) && (buf.iterator forall (name !=))) {
- buf += ((name, if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate, namePos(tree, name)))
- }
+ case Bind(nme.WILDCARD, _) =>
+ super.traverse(tree)
+
+ case Bind(name, Typed(tree1, tpt)) =>
+ val newTree = if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate
+ add(name, newTree)
traverse(tree1)
- case Bind(name, tree1) =>
- if ((name != nme.WILDCARD) && (buf.iterator forall (name !=))) {
- // can assume only name range as position, as otherwise might overlap
- // with binds embedded in pattern tree1
- buf += ((name, TypeTree(), namePos(tree, name)))
- //println("found var "+name+" at "+namePos.show) //DEBUG
- }
+
+ case Bind(name, tree1) =>
+ // can assume only name range as position, as otherwise might overlap
+ // with binds embedded in pattern tree1
+ add(name, TypeTree())
traverse(tree1)
+
case _ =>
super.traverse(tree)
}
- if (buf.length > bl) tree setPos tree.pos.makeTransparent
+ if (buf.length > bl)
+ tree setPos tree.pos.makeTransparent
+ }
+ def apply(tree: Tree) = {
+ traverse(tree)
+ buf.toList
}
}
/** Returns list of all pattern variables, possibly with their types,
* without duplicates
*/
- private def getVariables(tree: Tree): List[(Name, Tree, Position)] = {
- getvarTraverser.init.traverse(tree)
- getvarTraverser.buf.toList
- }
+ private def getVariables(tree: Tree): List[(Name, Tree, Position)] =
+ new GetVarTraverser apply tree
private def makeTuple(trees: List[Tree], isType: Boolean): Tree = {
val tupString = "Tuple" + trees.length
@@ -145,8 +151,14 @@ abstract class TreeBuilder {
/** Create tree representing (unencoded) binary operation expression or pattern. */
def makeBinop(isExpr: Boolean, left: Tree, op: Name, right: Tree, opPos: Position): Tree = {
+ def mkNamed(args: List[Tree]) =
+ if (isExpr) args map {
+ case a @ Assign(id @ Ident(name), rhs) =>
+ atPos(a.pos) { AssignOrNamedArg(id, rhs) }
+ case e => e
+ } else args
val arguments = right match {
- case Parens(args) => args
+ case Parens(args) => mkNamed(args)
case _ => List(right)
}
if (isExpr) {
@@ -192,7 +204,7 @@ abstract class TreeBuilder {
}
}
- /** Create a tree represeting an assignment &lt;lhs = rhs&gt; */
+ /** Create a tree representing an assignment &lt;lhs = rhs&gt; */
def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
case Apply(fn, args) =>
Apply(atPos(fn.pos) { Select(fn, nme.update) }, args ::: List(rhs))
@@ -357,7 +369,13 @@ abstract class TreeBuilder {
/** The position of the closure that starts with generator at position `genpos`.
*/
- def closurePos(genpos: Position) = r2p(genpos.startOrPoint, genpos.point, body.pos.endOrPoint)
+ def closurePos(genpos: Position) = {
+ val end = body.pos match {
+ case NoPosition => genpos.point
+ case bodypos => bodypos.endOrPoint
+ }
+ r2p(genpos.startOrPoint, genpos.point, end)
+ }
// val result =
enums match {
@@ -378,7 +396,7 @@ abstract class TreeBuilder {
val rhss = valeqs map { case ValEq(_, _, rhs) => rhs }
val defpat1 = makeBind(pat)
val defpats = pats map makeBind
- val pdefs = (List.map2(defpats, rhss)(makePatDef)).flatten
+ val pdefs = (defpats, rhss).zipped flatMap makePatDef
val ids = (defpat1 :: defpats) map makeValue
val rhs1 = makeForYield(
List(ValFrom(pos, defpat1, rhs)),
@@ -446,6 +464,37 @@ abstract class TreeBuilder {
def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
makePatDef(Modifiers(0), pat, rhs)
+ /** For debugging only. Desugar a match statement like so:
+ * val x = scrutinee
+ * x match {
+ * case case1 => ...
+ * case _ => x match {
+ * case case2 => ...
+ * case _ => x match ...
+ * }
+ * }
+ *
+ * This way there are never transitions between nontrivial casedefs.
+ * Of course many things break: exhaustiveness and unreachable checking
+ * do not work, no switches will be generated, etc.
+ */
+ def makeSequencedMatch(selector: Tree, cases: List[CaseDef]): Tree = {
+ require(cases.nonEmpty)
+
+ val selectorName = freshName()
+ val valdef = atPos(selector.pos)(ValDef(Modifiers(PRIVATE | LOCAL | SYNTHETIC), selectorName, TypeTree(), selector))
+ val nselector = Ident(selectorName)
+
+ def loop(cds: List[CaseDef]): Match = {
+ def mkNext = CaseDef(Ident(nme.WILDCARD), EmptyTree, loop(cds.tail))
+
+ if (cds.size == 1) Match(nselector, cds)
+ else Match(selector, List(cds.head, mkNext))
+ }
+
+ Block(List(valdef), loop(cases))
+ }
+
/** Create tree for pattern definition <mods val pat0 = rhs> */
def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree): List[Tree] = matchVarPattern(pat) match {
case Some((name, tpt)) =>
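
The GetVarTraverser change above replaces a shared, re-initialized singleton traverser with a per-call instance exposed through apply. A self-contained sketch of that pattern, using simplified stand-in types rather than the compiler's Tree/Traverser:

sealed trait Node
case class Leaf(name: String) extends Node
case class Branch(children: List[Node]) extends Node

class CollectNames {
  private val buf = scala.collection.mutable.ListBuffer[String]()
  private def traverse(n: Node): Unit = n match {
    case Leaf(name)       => if (!buf.contains(name)) buf += name  // skip duplicates, like seenName above
    case Branch(children) => children foreach traverse
  }
  def apply(n: Node): List[String] = { traverse(n); buf.toList }
}

// Usage: a fresh instance per call, so no stale state survives between calls.
val collector = new CollectNames
val names = collector(Branch(List(Leaf("a"), Leaf("b"), Leaf("a"))))
// names == List("a", "b")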
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
new file mode 100644
index 0000000000..c4365a82ac
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -0,0 +1,41 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package backend
+
+import io.AbstractFile
+import util.JavaClassPath
+import util.ClassPath.{ JavaContext, DefaultJavaContext }
+import scala.tools.util.PathResolver
+
+trait JavaPlatform extends Platform[AbstractFile] {
+ import global._
+ import definitions.{ BoxesRunTimeClass, getMember }
+
+ lazy val classPath = new PathResolver(settings).result
+ def rootLoader = new loaders.JavaPackageLoader(classPath)
+
+ private def depAnalysisPhase = if (settings.make.value != "all") List(dependencyAnalysis) else Nil
+ def platformPhases = List(
+ flatten, // get rid of inner classes
+ liftcode, // generate reified trees
+ genJVM // generate .class files
+ ) ::: depAnalysisPhase
+
+ lazy val externalEquals = getMember(BoxesRunTimeClass, nme.equals_)
+ def externalEqualsNumNum = getMember(BoxesRunTimeClass, "equalsNumNum")
+ def externalEqualsNumChar = getMember(BoxesRunTimeClass, "equalsNumChar")
+ def externalEqualsNumObject = getMember(BoxesRunTimeClass, "equalsNumObject")
+
+ def isMaybeBoxed(sym: Symbol): Boolean = {
+ import definitions._
+ (sym == ObjectClass) ||
+ (sym == SerializableClass) ||
+ (sym == ComparableClass) ||
+ (sym isNonBottomSubClass BoxedNumberClass) ||
+ (sym isNonBottomSubClass BoxedCharacterClass)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
new file mode 100644
index 0000000000..6df158c411
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
@@ -0,0 +1,36 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package backend
+
+import ch.epfl.lamp.compiler.msil.{ Type => MSILType }
+import util.MsilClassPath
+import msil.GenMSIL
+
+trait MSILPlatform extends Platform[MSILType] {
+ import global._
+ import definitions.{ ComparatorClass, BoxedNumberClass, getMember, getClass }
+
+ if (settings.verbose.value)
+ inform("[AssemRefs = " + settings.assemrefs.value + "]")
+
+ // phaseName = "msil"
+ object genMSIL extends {
+ val global: MSILPlatform.this.global.type = MSILPlatform.this.global
+ val runsAfter = List[String]("dce")
+ val runsRightAfter = None
+ } with GenMSIL
+
+ lazy val classPath = MsilClassPath.fromSettings(settings)
+ def rootLoader = new loaders.NamespaceLoader(classPath)
+
+ def platformPhases = List(
+ genMSIL // generate .msil files
+ )
+
+ lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_)
+ def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass
+}
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
new file mode 100644
index 0000000000..90075687c6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -0,0 +1,31 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package backend
+
+import util.ClassPath
+
+/** The platform dependent pieces of Global.
+ */
+trait Platform[T] {
+ val global: Global
+ import global._
+
+ /** The compiler classpath. */
+ def classPath: ClassPath[T]
+
+ /** The root symbol loader. */
+ def rootLoader: LazyType
+
+ /** Any platform-specific phases. */
+ def platformPhases: List[SubComponent]
+
+ /** Symbol for a method which compares two objects. */
+ def externalEquals: Symbol
+
+ /** The various ways a boxed primitive might materialize at runtime. */
+ def isMaybeBoxed(sym: Symbol): Boolean
+}
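
The new Platform[T] trait factors the backend-specific pieces (classpath entry type, root loader, extra phases) out of Global so that JavaPlatform and MSILPlatform can be swapped in. A simplified, self-contained sketch of the same shape; the names below are illustrative, not the compiler's own types:

trait PhaseLike { def name: String }

trait PlatformLike[T] {
  def classPathEntries: List[T]          // platform-specific classpath entry type
  def platformPhases: List[PhaseLike]    // phases only this backend needs
}

object JvmLike extends PlatformLike[java.io.File] {
  def classPathEntries = Nil
  def platformPhases   = List(new PhaseLike { def name = "genJVM" })
}

// The shared pipeline appends platform phases without knowing which backend it has.
def allPhases(common: List[PhaseLike], platform: PlatformLike[_]): List[PhaseLike] =
  common ::: platform.platformPhases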
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 290d90b3e9..b1be70a54c 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -94,6 +93,7 @@ abstract class ScalaPrimitives {
final val AS = 81 // x.as[y]
final val ISERASED = 85 // x.is$erased[y]
final val ASERASED = 86 // x.as$erased[y]
+ final val HASH = 87 // x.##
// AnyRef operations
final val SYNCHRONIZED = 90 // x.synchronized(y)
@@ -215,6 +215,7 @@ abstract class ScalaPrimitives {
addPrimitive(Any_!=, NE)
addPrimitive(Any_isInstanceOf, IS)
addPrimitive(Any_asInstanceOf, AS)
+ addPrimitive(Any_##, HASH)
// java.lang.Object
addPrimitive(Object_eq, ID)
@@ -469,6 +470,18 @@ abstract class ScalaPrimitives {
def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
+ final val typeOfArrayOp: Map[Int, TypeKind] = Map(
+ (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
+ (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
+ (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
+ (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
+ (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++
+ (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
+ (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
+ (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
+ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> REFERENCE(AnyRefClass))) : _*
+ )
+
/** Check whether the given operation code is an array operation. */
def isArrayOp(code: Int): Boolean =
isArrayNew(code) | isArrayLength(code) | isArrayGet(code) | isArraySet(code)
@@ -548,9 +561,9 @@ abstract class ScalaPrimitives {
def isPrimitive(sym: Symbol): Boolean = primitives contains sym
- /** Return the code for the givem symbol. */
+ /** Return the code for the given symbol. */
def getPrimitive(sym: Symbol): Int = {
- assert(isPrimitive(sym), "Unkown primitive " + sym)
+ assert(isPrimitive(sym), "Unknown primitive " + sym)
primitives(sym)
}
@@ -567,17 +580,23 @@ abstract class ScalaPrimitives {
import definitions._
val code = getPrimitive(fun)
- var elem: Type = null
- tpe match {
- case TypeRef(_, sym, _elem :: Nil)
- if (sym == ArrayClass) => elem = _elem
- case _ => ()
+ def elementType = atPhase(currentRun.typerPhase) {
+ val arrayParent = tpe :: tpe.parents find {
+ case TypeRef(_, sym, _elem :: Nil)
+ if (sym == ArrayClass) => true
+ case _ => false
+ }
+ if (arrayParent.isEmpty) {
+ println(fun.fullName + " : " + tpe :: tpe.baseTypeSeq.toList)
+ }
+ val TypeRef(_, _, elem :: Nil) = arrayParent.get
+ elem
}
code match {
case APPLY =>
- toTypeKind(elem) match {
+ toTypeKind(elementType) match {
case BOOL => ZARRAY_GET
case BYTE => BARRAY_GET
case SHORT => SARRAY_GET
@@ -588,11 +607,11 @@ abstract class ScalaPrimitives {
case DOUBLE => DARRAY_GET
case REFERENCE(_) | ARRAY(_) => OARRAY_GET
case _ =>
- abort("Unexpected array element type: " + elem)
+ abort("Unexpected array element type: " + elementType)
}
case UPDATE =>
- toTypeKind(elem) match {
+ toTypeKind(elementType) match {
case BOOL => ZARRAY_SET
case BYTE => BARRAY_SET
case SHORT => SARRAY_SET
@@ -603,12 +622,11 @@ abstract class ScalaPrimitives {
case DOUBLE => DARRAY_SET
case REFERENCE(_) | ARRAY(_) => OARRAY_SET
case _ =>
- abort("Unexpected array element type: " + elem)
+ abort("Unexpected array element type: " + elementType)
}
case LENGTH =>
- assert(elem != null)
- toTypeKind(elem) match {
+ toTypeKind(elementType) match {
case BOOL => ZARRAY_LENGTH
case BYTE => BARRAY_LENGTH
case SHORT => SARRAY_LENGTH
@@ -619,7 +637,7 @@ abstract class ScalaPrimitives {
case DOUBLE => DARRAY_LENGTH
case REFERENCE(_) | ARRAY(_) => OARRAY_LENGTH
case _ =>
- abort("Unexpected array element type: " + elem)
+ abort("Unexpected array element type: " + elementType)
}
case _ =>
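
The typeOfArrayOp map added above is built by pairing each group of array opcodes with its element kind and splatting the concatenated pairs into Map(... : _*). A tiny standalone version of the idiom, with placeholder codes and kinds instead of the compiler's constants:

val ZARRAY_LENGTH = 1; val ZARRAY_GET = 2; val ZARRAY_SET = 3   // placeholder codes
val IARRAY_LENGTH = 4; val IARRAY_GET = 5; val IARRAY_SET = 6   // placeholder codes

val typeOfArrayOp: Map[Int, String] = Map(
  (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> "BOOL")) ++
  (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> "INT")) : _*
)

// typeOfArrayOp(IARRAY_GET) == "INT"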
diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
index a3634556cd..27f5b27303 100644
--- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
+++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index a774473167..bab3652f69 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -40,8 +39,8 @@ trait BasicBlocks {
def hasFlag(flag: Int): Boolean = (flags & flag) != 0
/** Set the given flag. */
- def setFlag(flag: Int): Unit = flags |= flag
- def resetFlag(flag: Int) {
+ private def setFlag(flag: Int): Unit = flags |= flag
+ private def resetFlag(flag: Int) {
flags &= ~flag
}
@@ -63,9 +62,16 @@ trait BasicBlocks {
def exceptionHandlerStart_=(b: Boolean) =
if (b) setFlag(EX_HEADER) else resetFlag(EX_HEADER)
- /** Has this basic block been modified since the last call to 'toList'? */
- private def touched = hasFlag(TOUCHED)
- private def touched_=(b: Boolean) = if (b) setFlag(TOUCHED) else resetFlag(TOUCHED)
+ /** Has this basic block been modified since the last call to 'successors'? */
+ def touched = hasFlag(DIRTYSUCCS)
+ def touched_=(b: Boolean) = if (b) {
+ setFlag(DIRTYSUCCS | DIRTYPREDS)
+ } else {
+ resetFlag(DIRTYSUCCS | DIRTYPREDS)
+ }
+
+ // basic blocks start in a dirty state
+ setFlag(DIRTYSUCCS | DIRTYPREDS)
/** Cached predecessors. */
var preds: List[BasicBlock] = null
@@ -85,9 +91,9 @@ trait BasicBlocks {
private var instrs: Array[Instruction] = _
override def toList: List[Instruction] = {
- if (closed && touched)
- instructionList = instrs.toList
- instructionList
+ if (closed)
+ instrs.toList
+ else instructionList
}
/** Return an iterator over the instructions in this basic block. */
@@ -101,12 +107,11 @@ trait BasicBlocks {
}
def fromList(is: List[Instruction]) {
+ code.touched = true
instrs = toInstructionArray(is)
closed = true
}
- // public:
-
/** Return the index of inst. Uses reference equality.
* Returns -1 if not found.
*/
@@ -166,9 +171,9 @@ trait BasicBlocks {
*/
def replaceInstruction(pos: Int, instr: Instruction): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
-
instr.setPos(instrs(pos).pos)
instrs(pos) = instr
+ code.touched = true
true
}
@@ -183,10 +188,11 @@ trait BasicBlocks {
var i = 0
var changed = false
while (i < instrs.length && !changed) {
- if (instrs(i) == oldInstr) {
+ if (instrs(i) eq oldInstr) {
newInstr.setPos(oldInstr.pos)
instrs(i) = newInstr
changed = true
+ code.touched = true
}
i += 1
}
@@ -194,7 +200,7 @@ trait BasicBlocks {
}
/** Replaces <code>iold</code> with <code>is</code>. It does not update
- * the position field in the newly inserted instrucitons, so it behaves
+ * the position field in the newly inserted instructions, so it behaves
* differently than the one-instruction versions of this function.
*
* @param iold ..
@@ -213,6 +219,8 @@ trait BasicBlocks {
if (i < instrs.length) {
val newInstrs = new Array[Instruction](instrs.length + is.length - 1);
changed = true
+ code.touched = true
+
Array.copy(instrs, 0, newInstrs, 0, i)
var j = i
for (x <- is) {
@@ -244,6 +252,7 @@ trait BasicBlocks {
Array.copy(instrs, i + 1, newInstrs, j, instrs.length - i)
instrs = newInstrs;
}
+ code.touched = true
}
/** Removes instructions found at the given positions.
@@ -264,6 +273,7 @@ trait BasicBlocks {
i += 1
}
instrs = newInstrs
+ code.touched = true
}
/** Remove the last instruction of this basic block. It is
@@ -274,7 +284,7 @@ trait BasicBlocks {
removeInstructionsAt(size)
else {
instructionList = instructionList.tail
- touched = true
+ code.touched = true
}
}
@@ -287,7 +297,9 @@ trait BasicBlocks {
var i = 0
while (i < instrs.length) {
map get instrs(i) match {
- case Some(instr) => touched = replaceInstruction(i, instr)
+ case Some(instr) =>
+ val changed = replaceInstruction(i, instr)
+ code.touched |= changed
case None => ()
}
i += 1
@@ -321,6 +333,10 @@ trait BasicBlocks {
emit(instr, NoPosition)
}
+ /** Emitting does not set touched to true. During code generation this is a hotspot and
+ * setting the flag for each emit is a waste. Caching should happen only after a block
+ * is closed, which sets the DIRTYSUCCS flag.
+ */
def emit(instr: Instruction, pos: Position) {
if (closed) {
print()
@@ -329,7 +345,6 @@ trait BasicBlocks {
assert(!closed || ignore, "BasicBlock closed")
if (!ignore) {
- touched = true
instr.setPos(pos)
instructionList = instr :: instructionList
_lastInstruction = instr
@@ -357,6 +372,7 @@ trait BasicBlocks {
def close {
assert(instructionList.length > 0, "Empty block.")
closed = true
+ setFlag(DIRTYSUCCS)
instructionList = instructionList.reverse
instrs = toInstructionArray(instructionList)
}
@@ -365,6 +381,7 @@ trait BasicBlocks {
assert(closed)
closed = false
ignore = false
+ touched = true
instructionList = instructionList.reverse // prepare for appending to the head
}
@@ -409,12 +426,45 @@ trait BasicBlocks {
array
}
- def successors : List[BasicBlock] = if (isEmpty) Nil else {
- var res = lastInstruction match {
- case JUMP (whereto) => List(whereto)
+ /** Cached value of successors. Must be recomputed whenever a block in the current method is changed. */
+ private var succs: List[BasicBlock] = Nil
+
+ def successors : List[BasicBlock] = {
+ if (touched) {
+ resetFlag(DIRTYSUCCS)
+ succs = if (isEmpty) Nil else {
+ var res = lastInstruction match {
+ case JUMP(whereto) => List(whereto)
+ case CJUMP(success, failure, _, _) => failure :: success :: Nil
+ case CZJUMP(success, failure, _, _) => failure :: success :: Nil
+ case SWITCH(_, labels) => labels
+ case RETURN(_) => Nil
+ case THROW() => Nil
+ case _ =>
+ if (closed) {
+ dump
+ global.abort("The last instruction is not a control flow instruction: " + lastInstruction)
+ }
+ else Nil
+ }
+ method.exh.foreach {
+ e: ExceptionHandler =>
+ if (e.covers(this)) res = e.startBlock :: res
+ }
+ val res1 = res ++ exceptionalSucc(this, res)
+ res1
+ }
+ }
+// println("reusing cached successors for " + this + " in method " + method)
+ succs
+ }
+
+ def directSuccessors: List[BasicBlock] = {
+ if (isEmpty) Nil else lastInstruction match {
+ case JUMP(whereto) => List(whereto)
case CJUMP(success, failure, _, _) => failure :: success :: Nil
case CZJUMP(success, failure, _, _) => failure :: success :: Nil
- case SWITCH(_,labels) => labels
+ case SWITCH(_, labels) => labels
case RETURN(_) => Nil
case THROW() => Nil
case _ =>
@@ -424,10 +474,6 @@ trait BasicBlocks {
}
else Nil
}
- method.exh.foreach { e: ExceptionHandler =>
- if (e.covers(this)) res = e.startBlock :: res
- }
- res ++ exceptionalSucc(this, res)
}
/** Return a list of successors for 'b' that come from exception handlers
@@ -443,15 +489,15 @@ trait BasicBlocks {
ss ++ (ss flatMap findSucc)
}
- succs.flatMap(findSucc).removeDuplicates
+ succs.flatMap(findSucc).distinct
}
- /** Returns the precessors of this block, in the current 'code' chunk.
- * This is signifficant only if there are exception handlers, which live
- * in different code 'chunks' than the rest of the method.
- */
+ /** Returns the predecessors of this block. */
def predecessors: List[BasicBlock] = {
- preds = code.blocks.iterator.filter (_.successors.contains(this)).toList
+ if (hasFlag(DIRTYPREDS)) {
+ resetFlag(DIRTYPREDS)
+ preds = code.blocks.iterator.filter (_.successors.contains(this)).toList
+ }
preds
}
@@ -467,7 +513,7 @@ trait BasicBlocks {
def print(out: java.io.PrintStream) {
out.println("block #"+label+" :")
- toList.foreach(i => out.println(" " + i))
+ foreach(i => out.println(" " + i))
out.print("Successors: ")
successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString()))
out.println()
@@ -482,6 +528,17 @@ trait BasicBlocks {
}
override def toString(): String = "" + label
+
+ def flagsString: String =
+ ("block " + label + (
+ if (hasFlag(LOOP_HEADER)) " <loopheader> "
+ else if (hasFlag(IGNORING)) " <ignore> "
+ else if (hasFlag(EX_HEADER)) " <exheader> "
+ else if (hasFlag(CLOSED)) " <closed> "
+ else if (hasFlag(DIRTYSUCCS)) " <dirtysuccs> "
+ else if (hasFlag(DIRTYPREDS)) " <dirtypreds> "
+ else ""
+ ))
}
}
@@ -499,6 +556,9 @@ object BBFlags {
/** This block is closed. No new instructions can be added. */
final val CLOSED = 0x00000008
- /** This block has been changed, cached results are recomputed. */
- final val TOUCHED = 0x00000010
+ /** Code has been changed, recompute successors. */
+ final val DIRTYSUCCS = 0x00000010
+
+ /** Code has been changed, recompute predecessors. */
+ final val DIRTYPREDS = 0x00000020
}
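
The single TOUCHED flag is split into DIRTYSUCCS/DIRTYPREDS above so that successors and predecessors are cached and only recomputed after a mutation. A minimal sketch of that dirty-flag caching scheme; the flag value and the recompute body are illustrative only:

class CachedSuccessors {
  private final val DIRTYSUCCS = 0x10
  private var flags = DIRTYSUCCS              // blocks start in a dirty state
  private var succs: List[Int] = Nil

  def touch(): Unit = flags |= DIRTYSUCCS     // any mutation marks the cache stale

  def successors(recompute: => List[Int]): List[Int] = {
    if ((flags & DIRTYSUCCS) != 0) {          // recompute lazily, only when dirty
      flags &= ~DIRTYSUCCS
      succs = recompute
    }
    succs
  }
}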
diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerError.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
index 52f5f86326..a2ba0554da 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/CheckerError.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
package icode
-class CheckerError(s: String) extends Exception(s)
+class CheckerException(s: String) extends Exception(s)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala b/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
index ab32e69944..6067dc6e42 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -74,7 +73,7 @@ abstract class Checkers {
def checkICodes: Unit = {
if (settings.verbose.value)
println("[[consistency check at the beginning of phase " + globalPhase.name + "]]")
- classes.valuesIterator foreach check
+ classes.values foreach check
}
def check(cls: IClass) {
@@ -84,17 +83,17 @@ abstract class Checkers {
for (f1 <- cls.fields; f2 <- cls.fields if f1 ne f2)
if (f1.symbol.name == f2.symbol.name)
Checkers.this.global.error("Repetitive field name: " +
- f1.symbol.fullNameString);
+ f1.symbol.fullName);
for (m1 <- cls.methods; m2 <- cls.methods if m1 ne m2)
if (m1.symbol.name == m2.symbol.name &&
m1.symbol.tpe =:= m2.symbol.tpe)
Checkers.this.global.error("Repetitive method: " +
- m1.symbol.fullNameString);
+ m1.symbol.fullName);
clasz.methods.foreach(check)
}
- /** Apply the give funtion to each pair of the cartesian product of
+ /** Apply the given function to each pair of the cartesian product of
* l1 x l2.
*/
def pairwise[a](l1: List[a], l2: List[a])(f: (a, a) => Unit) =
@@ -151,8 +150,8 @@ abstract class Checkers {
else if (s2 eq emptyStack) s1
else {
if (s1.length != s2.length)
- throw new CheckerError("Incompatible stacks: " + s1 + " and " + s2 + " in " + method + " at entry to block: " + bl);
- new TypeStack(List.map2(s1.types, s2.types) (lub))
+ throw new CheckerException("Incompatible stacks: " + s1 + " and " + s2 + " in " + method + " at entry to block: " + bl);
+ new TypeStack((s1.types, s2.types).zipped map lub)
}
}
@@ -241,15 +240,15 @@ abstract class Checkers {
receiver match {
case REFERENCE(sym) =>
checkBool(sym.info.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + sym.fullNameString);
+ "Method " + method + " does not exist in " + sym.fullName);
if (method hasFlag Flags.PRIVATE)
checkBool(method.owner == clasz.symbol,
- "Cannot call private method of " + method.owner.fullNameString
- + " from " + clasz.symbol.fullNameString);
+ "Cannot call private method of " + method.owner.fullName
+ + " from " + clasz.symbol.fullName);
else if (method hasFlag Flags.PROTECTED)
checkBool(clasz.symbol isSubClass method.owner,
- "Cannot call protected method of " + method.owner.fullNameString
- + " from " + clasz.symbol.fullNameString);
+ "Cannot call protected method of " + method.owner.fullName
+ + " from " + clasz.symbol.fullName);
case ARRAY(_) =>
checkBool(receiver.toType.member(method.name) != NoSymbol,
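
Both here and in TreeBuilder the deprecated List.map2(xs, ys)(f) is replaced by (xs, ys).zipped map f (or flatMap). A quick equivalence check on plain values:

val xs = List(1, 2, 3)
val ys = List(10, 20, 30)

val viaZipped = (xs, ys).zipped map (_ + _)   // List(11, 22, 33)
// List.map2(xs, ys)(_ + _) produced the same result before its removal.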
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 8168cc2c6e..e5b94076e8 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -3,7 +3,6 @@
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index e1ddd260cb..9136bffcda 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1,17 +1,17 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
package icode
-import scala.collection.mutable.{Map, HashMap, ListBuffer, Buffer, HashSet}
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.{ HashMap, ListBuffer, Buffer, HashSet }
import scala.tools.nsc.symtab._
-import scala.tools.nsc.util.Position
+import scala.annotation.switch
import PartialFunction._
/** This class ...
@@ -26,41 +26,40 @@ abstract class GenICode extends SubComponent {
import icodes._
import icodes.opcodes._
import definitions.{
- ArrayClass, ObjectClass, ThrowableClass,
- Object_equals
+ ArrayClass, ObjectClass, ThrowableClass, StringClass, NothingClass, NullClass,
+ Object_equals, Object_isInstanceOf, Object_asInstanceOf, ScalaRunTimeModule,
+ BoxedNumberClass, BoxedCharacterClass,
+ getMember
}
import scalaPrimitives.{
isArrayOp, isComparisonOp, isLogicalOp,
isUniversalEqualityOp, isReferenceEqualityOp
}
+ import platform.isMaybeBoxed
val phaseName = "icode"
override def newPhase(prev: Phase) = new ICodePhase(prev)
+ private def debugLog(msg: => String): Unit =
+ if (settings.debug.value) log(msg)
+
class ICodePhase(prev: Phase) extends StdPhase(prev) {
override def description = "Generate ICode from the AST"
var unit: CompilationUnit = _
- // We assume definitions are alread initialized
- val STRING = REFERENCE(definitions.StringClass)
+ // We assume definitions are already initialized
+ val STRING = REFERENCE(StringClass)
// this depends on the backend! should be changed.
val ANY_REF_CLASS = REFERENCE(ObjectClass)
- val SCALA_ALL = REFERENCE(definitions.NothingClass)
- val SCALA_ALLREF = REFERENCE(definitions.NullClass)
+ val SCALA_ALL = REFERENCE(NothingClass)
+ val SCALA_ALLREF = REFERENCE(NullClass)
val THROWABLE = REFERENCE(ThrowableClass)
- lazy val BoxesRunTime_equals =
- if (!forMSIL)
- definitions.getMember(definitions.BoxesRunTimeClass, nme.equals_)
- else
- definitions.getMember(definitions.getClass("scala.runtime.Comparator").linkedModuleOfClass, nme.equals_)
-
-
override def run {
scalaPrimitives.init
classes.clear
@@ -92,13 +91,15 @@ abstract class GenICode extends SubComponent {
gen(stats, ctx setPackage pid.name)
case ClassDef(mods, name, _, impl) =>
- log("Generating class: " + tree.symbol.fullNameString)
+ log("Generating class: " + tree.symbol.fullName)
val outerClass = ctx.clazz
ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
addClassFields(ctx, tree.symbol);
classes += (tree.symbol -> ctx.clazz)
unit.icode += ctx.clazz
gen(impl, ctx)
+ ctx.clazz.methods = ctx.clazz.methods.reverse // preserve textual order
+ ctx.clazz.fields = ctx.clazz.fields.reverse // preserve textual order
ctx setClass outerClass
// !! modules should be eliminated by refcheck... or not?
@@ -150,14 +151,8 @@ abstract class GenICode extends SubComponent {
abort("Illegal tree in gen: " + tree)
}
- private def genStat(trees: List[Tree], ctx: Context): Context = {
- var currentCtx = ctx
-
- for (t <- trees)
- currentCtx = genStat(t, currentCtx)
-
- currentCtx
- }
+ private def genStat(trees: List[Tree], ctx: Context): Context =
+ trees.foldLeft(ctx)((currentCtx, t) => genStat(t, currentCtx))
/**
* Generate code for the given tree. The trees should contain statements
@@ -169,30 +164,296 @@ abstract class GenICode extends SubComponent {
* @return a new context. This is necessary for control flow instructions
* which may change the current basic block.
*/
- private def genStat(tree: Tree, ctx: Context): Context = {
+ private def genStat(tree: Tree, ctx: Context): Context = tree match {
+ case Assign(lhs @ Select(_, _), rhs) =>
+ val isStatic = lhs.symbol.isStaticMember
+ var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx)
- tree match {
- case Assign(lhs @ Select(_, _), rhs) =>
- if (lhs.symbol.isStaticMember) {
- val ctx1 = genLoad(rhs, ctx, toTypeKind(lhs.symbol.info))
- ctx1.bb.emit(STORE_FIELD(lhs.symbol, true), tree.pos)
- ctx1
- } else {
- var ctx1 = genLoadQualifier(lhs, ctx)
- ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
- ctx1.bb.emit(STORE_FIELD(lhs.symbol, false), tree.pos)
- ctx1
+ ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
+ ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos)
+ ctx1
+
+ case Assign(lhs, rhs) =>
+ val ctx1 = genLoad(rhs, ctx, toTypeKind(lhs.symbol.info))
+ val Some(l) = ctx.method.lookupLocal(lhs.symbol)
+ ctx1.bb.emit(STORE_LOCAL(l), tree.pos)
+ ctx1
+
+ case _ =>
+ genLoad(tree, ctx, UNIT)
+ }
+ /**
+ * Generate code for primitive arithmetic operations.
+ * Returns (Context, Generated Type)
+ */
+ private def genArithmeticOp(tree: Tree, ctx: Context, code: Int): (Context, TypeKind) = {
+ val Apply(fun @ Select(larg, _), args) = tree
+ var ctx1 = ctx
+ var resKind = toTypeKind(larg.tpe)
+
+ if (settings.debug.value) {
+ assert(args.length <= 1,
+ "Too many arguments for primitive function: " + fun.symbol)
+ assert(resKind.isNumericType | resKind == BOOL,
+ resKind.toString() + " is not a numeric or boolean type " +
+ "[operation: " + fun.symbol + "]")
+ }
+
+ args match {
+ // unary operation
+ case Nil =>
+ ctx1 = genLoad(larg, ctx1, resKind)
+ code match {
+ case scalaPrimitives.POS =>
+ () // nothing
+ case scalaPrimitives.NEG =>
+ ctx1.bb.emit(CALL_PRIMITIVE(Negation(resKind)), larg.pos)
+ case scalaPrimitives.NOT =>
+ ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(NOT, resKind)), larg.pos)
+ case _ =>
+ abort("Unknown unary operation: " + fun.symbol.fullName +
+ " code: " + code)
}
- case Assign(lhs, rhs) =>
- val ctx1 = genLoad(rhs, ctx, toTypeKind(lhs.symbol.info))
- val Some(l) = ctx.method.lookupLocal(lhs.symbol)
- ctx1.bb.emit(STORE_LOCAL(l), tree.pos)
- ctx1
+ // binary operation
+ case rarg :: Nil =>
+ resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
+ if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
+ assert(resKind.isIntType | resKind == BOOL,
+ resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
+
+ ctx1 = genLoad(larg, ctx1, resKind)
+ ctx1 = genLoad(rarg,
+ ctx1, // check .NET size of shift arguments!
+ if (scalaPrimitives.isShiftOp(code)) INT else resKind)
+
+ val primitiveOp = code match {
+ case scalaPrimitives.ADD => Arithmetic(ADD, resKind)
+ case scalaPrimitives.SUB => Arithmetic(SUB, resKind)
+ case scalaPrimitives.MUL => Arithmetic(MUL, resKind)
+ case scalaPrimitives.DIV => Arithmetic(DIV, resKind)
+ case scalaPrimitives.MOD => Arithmetic(REM, resKind)
+ case scalaPrimitives.OR => Logical(OR, resKind)
+ case scalaPrimitives.XOR => Logical(XOR, resKind)
+ case scalaPrimitives.AND => Logical(AND, resKind)
+ case scalaPrimitives.LSL => Shift(LSL, resKind)
+ case scalaPrimitives.LSR => Shift(LSR, resKind)
+ case scalaPrimitives.ASR => Shift(ASR, resKind)
+ case _ => abort("Unknown primitive: " + fun.symbol + "[" + code + "]")
+ }
+ ctx1.bb.emit(CALL_PRIMITIVE(primitiveOp), tree.pos)
case _ =>
- genLoad(tree, ctx, UNIT)
+ abort("Too many arguments for primitive function: " + tree)
+ }
+ (ctx1, resKind)
+ }
+
+ /** Generate primitive array operations.
+ *
+ * @param tree ...
+ * @param ctx ...
+ * @param code ...
+ * @return ...
+ */
+ private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = {
+ import scalaPrimitives._
+ val Apply(Select(arrayObj, _), args) = tree
+ val k = toTypeKind(arrayObj.tpe)
+ val ARRAY(elem) = k
+ var ctx1 = genLoad(arrayObj, ctx, k)
+ val elementType = typeOfArrayOp.getOrElse(code, abort("Unknown operation on arrays: " + tree + " code: " + code))
+
+ var generatedType = expectedType
+
+ if (scalaPrimitives.isArrayGet(code)) {
+ // load argument on stack
+ if (settings.debug.value)
+ assert(args.length == 1,
+ "Too many arguments for array get operation: " + tree);
+ ctx1 = genLoad(args.head, ctx1, INT)
+ generatedType = elem
+ ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
+ }
+ else if (scalaPrimitives.isArraySet(code)) {
+ if (settings.debug.value)
+ assert(args.length == 2,
+ "Too many arguments for array set operation: " + tree);
+ ctx1 = genLoad(args.head, ctx1, INT)
+ ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
+ // the following line should really be here, but because of bugs in erasure
+ // we pretend we generate whatever type is expected from us.
+ //generatedType = UNIT
+
+ ctx1.bb.emit(STORE_ARRAY_ITEM(elementType), tree.pos)
+ }
+ else {
+ generatedType = INT
+ ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(elementType)), tree.pos)
+ }
+
+ (ctx1, generatedType)
+ }
+ private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
+ val Apply(fun, args) = tree
+ val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
+ var monitorResult: Local = null
+
+ // if the synchronized block returns a result, store it in a local variable. just leaving
+ // it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks)
+ val argTpe = args.head.tpe
+ val hasResult = expectedType != UNIT
+ if (hasResult)
+ monitorResult = ctx.makeLocal(tree.pos, argTpe, "monitorResult")
+
+ var ctx1 = genLoadQualifier(fun, ctx)
+ ctx1.bb.emit(Seq(
+ DUP(ANY_REF_CLASS),
+ STORE_LOCAL(monitor),
+ MONITOR_ENTER() setPos tree.pos
+ ))
+ ctx1.enterSynchronized(monitor)
+ debugLog("synchronized block start")
+
+ ctx1 = ctx1.Try(
+ bodyCtx => {
+ val ctx2 = genLoad(args.head, bodyCtx, expectedType /* toTypeKind(tree.tpe.resultType) */)
+ if (hasResult)
+ ctx2.bb.emit(STORE_LOCAL(monitorResult))
+ ctx2.bb.emit(Seq(
+ LOAD_LOCAL(monitor),
+ MONITOR_EXIT() setPos tree.pos
+ ))
+ ctx2
+ }, List(
+ // tree.tpe / fun.tpe is object, which is no longer true after this transformation
+ (ThrowableClass, expectedType, exhCtx => {
+ exhCtx.bb.emit(Seq(
+ LOAD_LOCAL(monitor),
+ MONITOR_EXIT() setPos tree.pos,
+ THROW()
+ ))
+ exhCtx.bb.enterIgnoreMode
+ exhCtx
+ })), EmptyTree, tree)
+
+ debugLog("synchronized block end with block %s closed=%s".format(ctx1.bb, ctx1.bb.closed))
+ ctx1.exitSynchronized(monitor)
+ if (hasResult)
+ ctx1.bb.emit(LOAD_LOCAL(monitorResult))
+ (ctx1, expectedType)
+ }
+
+ private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
+ val If(cond, thenp, elsep) = tree
+
+ var thenCtx = ctx.newBlock
+ var elseCtx = ctx.newBlock
+ val contCtx = ctx.newBlock
+
+ genCond(cond, ctx, thenCtx, elseCtx)
+
+ val ifKind = toTypeKind(tree.tpe)
+ val thenKind = toTypeKind(thenp.tpe)
+ val elseKind = if (elsep == EmptyTree) UNIT else toTypeKind(elsep.tpe)
+
+ // we need to drop unneeded results, if one branch gives
+ // unit and the other gives something on the stack, because
+ // the type of 'if' is scala.Any, and its erasure would be Object.
+ // But unboxed units are not Objects...
+ def hasUnitBranch = thenKind == UNIT || elseKind == UNIT
+ val resKind = if (hasUnitBranch) UNIT else ifKind
+
+ if (hasUnitBranch)
+ debugLog("Will drop result from an if branch")
+
+ thenCtx = genLoad(thenp, thenCtx, resKind)
+ elseCtx = genLoad(elsep, elseCtx, resKind)
+
+ assert(!settings.debug.value || !(hasUnitBranch && expectedType != UNIT),
+ "I produce UNIT in a context where " + expectedType + " is expected!")
+
+ thenCtx.bb.emitOnly(JUMP(contCtx.bb))
+ elseCtx.bb.emitOnly(
+ if (elsep == EmptyTree) JUMP(contCtx.bb)
+ else JUMP(contCtx.bb) setPos tree.pos
+ )
+
+ (contCtx, resKind)
+ }
+ private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = {
+ val Try(block, catches, finalizer) = tree
+ val kind = toTypeKind(tree.tpe)
+
+ val caseHandlers =
+ for (CaseDef(pat, _, body) <- catches.reverse) yield {
+ def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
+ (sym, kind, ctx => {
+ ctx.bb.emit(DROP(REFERENCE(sym)))
+ genLoad(body, ctx, kind)
+ })
+
+ pat match {
+ case Typed(Ident(nme.WILDCARD), tpt) => genWildcardHandler(tpt.tpe.typeSymbol)
+ case Ident(nme.WILDCARD) => genWildcardHandler(ThrowableClass)
+ case Bind(name, _) =>
+ val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false)
+
+ (pat.symbol.tpe.typeSymbol, kind, {
+ ctx: Context =>
+ ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
+ genLoad(body, ctx, kind);
+ })
+ }
+ }
+
+ ctx.Try(
+ bodyCtx => {
+ setGeneratedType(kind)
+ genLoad(block, bodyCtx, kind)
+ },
+ caseHandlers,
+ finalizer,
+ tree)
+ }
+
+ private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
+ val sym = tree.symbol
+ val Apply(fun @ Select(receiver, _), args) = tree
+ val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
+
+ if (scalaPrimitives.isArithmeticOp(code))
+ genArithmeticOp(tree, ctx, code)
+ else if (code == scalaPrimitives.CONCAT)
+ (genStringConcat(tree, ctx), STRING)
+ else if (code == scalaPrimitives.HASH)
+ (genScalaHash(receiver, ctx), INT)
+ else if (isArrayOp(code))
+ genArrayOp(tree, ctx, code, expectedType)
+ else if (isLogicalOp(code) || isComparisonOp(code)) {
+ val trueCtx = ctx.newBlock
+ val falseCtx = ctx.newBlock
+ val afterCtx = ctx.newBlock
+ genCond(tree, ctx, trueCtx, falseCtx)
+ trueCtx.bb.emitOnly(
+ CONSTANT(Constant(true)) setPos tree.pos,
+ JUMP(afterCtx.bb)
+ )
+ falseCtx.bb.emitOnly(
+ CONSTANT(Constant(false)) setPos tree.pos,
+ JUMP(afterCtx.bb)
+ )
+ (afterCtx, BOOL)
+ }
+ else if (code == scalaPrimitives.SYNCHRONIZED)
+ genSynchronized(tree, ctx, expectedType)
+ else if (scalaPrimitives.isCoercion(code)) {
+ val ctx1 = genLoad(receiver, ctx, toTypeKind(receiver.tpe))
+ genCoercion(tree, ctx1, code)
+ (ctx1, scalaPrimitives.generatedKind(code))
}
+ else abort("Primitive operation not handled yet: " + sym.fullName + "(" +
+ fun.symbol.simpleName + ") " + " at: " + (tree.pos))
}
/**
@@ -210,186 +471,6 @@ abstract class GenICode extends SubComponent {
if (settings.debug.value)
log("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
- /**
- * Generate code for primitive arithmetic operations.
- */
- def genArithmeticOp(tree: Tree, ctx: Context, code: Int): Context = {
- val Apply(fun @ Select(larg, _), args) = tree
- var ctx1 = ctx
- var resKind = toTypeKind(larg.tpe)
-
- if (settings.debug.value) {
- assert(args.length <= 1,
- "Too many arguments for primitive function: " + fun.symbol)
- assert(resKind.isNumericType | resKind == BOOL,
- resKind.toString() + " is not a numeric or boolean type " +
- "[operation: " + fun.symbol + "]")
- }
-
- args match {
- // unary operation
- case Nil =>
- ctx1 = genLoad(larg, ctx1, resKind)
- code match {
- case scalaPrimitives.POS =>
- () // nothing
- case scalaPrimitives.NEG =>
- ctx1.bb.emit(CALL_PRIMITIVE(Negation(resKind)), larg.pos)
- case scalaPrimitives.NOT =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(NOT, resKind)), larg.pos)
- case _ =>
- abort("Unknown unary operation: " + fun.symbol.fullNameString +
- " code: " + code)
- }
- generatedType = resKind
-
- // binary operation
- case rarg :: Nil =>
- resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
- if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
- assert(resKind.isIntType | resKind == BOOL,
- resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
-
- ctx1 = genLoad(larg, ctx1, resKind);
- ctx1 = genLoad(rarg,
- ctx1, // check .NET size of shift arguments!
- if (scalaPrimitives.isShiftOp(code)) INT else resKind)
-
- generatedType = resKind
- code match {
- case scalaPrimitives.ADD =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(ADD, resKind)), tree.pos)
- case scalaPrimitives.SUB =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(SUB, resKind)), tree.pos)
- case scalaPrimitives.MUL =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(MUL, resKind)), tree.pos)
- case scalaPrimitives.DIV =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(DIV, resKind)), tree.pos)
- case scalaPrimitives.MOD =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(REM, resKind)), tree.pos)
- case scalaPrimitives.OR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Logical(OR, resKind)), tree.pos)
- case scalaPrimitives.XOR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Logical(XOR, resKind)), tree.pos)
- case scalaPrimitives.AND =>
- ctx1.bb.emit(CALL_PRIMITIVE(Logical(AND, resKind)), tree.pos)
- case scalaPrimitives.LSL =>
- ctx1.bb.emit(CALL_PRIMITIVE(Shift(LSL, resKind)), tree.pos)
- generatedType = resKind
- case scalaPrimitives.LSR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Shift(LSR, resKind)), tree.pos)
- generatedType = resKind
- case scalaPrimitives.ASR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Shift(ASR, resKind)), tree.pos)
- generatedType = resKind
- case _ =>
- abort("Unknown primitive: " + fun.symbol + "[" + code + "]")
- }
-
- case _ =>
- abort("Too many arguments for primitive function: " + tree)
- }
- ctx1
- }
-
- /** Generate primitive array operations.
- *
- * @param tree ...
- * @param ctx ...
- * @param code ...
- * @return ...
- */
- def genArrayOp(tree: Tree, ctx: Context, code: Int): Context = {
- import scalaPrimitives._
- val Apply(Select(arrayObj, _), args) = tree
- val k = toTypeKind(arrayObj.tpe)
- val ARRAY(elem) = k
- var ctx1 = genLoad(arrayObj, ctx, k)
-
- if (scalaPrimitives.isArrayGet(code)) {
- // load argument on stack
- if (settings.debug.value)
- assert(args.length == 1,
- "Too many arguments for array get operation: " + tree);
- ctx1 = genLoad(args.head, ctx1, INT)
- generatedType = elem
- } else if (scalaPrimitives.isArraySet(code)) {
- if (settings.debug.value)
- assert(args.length == 2,
- "Too many arguments for array set operation: " + tree);
- ctx1 = genLoad(args.head, ctx1, INT)
- ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
- // the following line should really be here, but because of bugs in erasure
- // we pretend we generate whatever type is expected from us.
- //generatedType = UNIT
- } else
- generatedType = INT
-
- code match {
- case ZARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(BOOL)), tree.pos)
- case BARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(BYTE)), tree.pos)
- case SARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(SHORT)), tree.pos)
- case CARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(CHAR)), tree.pos)
- case IARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(INT)), tree.pos)
- case LARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(LONG)), tree.pos)
- case FARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(FLOAT)), tree.pos)
- case DARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(DOUBLE)), tree.pos)
- case OARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(ANY_REF_CLASS)), tree.pos)
-
- case ZARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(BOOL), tree.pos)
- case BARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(BYTE), tree.pos)
- case SARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(SHORT), tree.pos)
- case CARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(CHAR), tree.pos)
- case IARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(INT), tree.pos)
- case LARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(LONG), tree.pos)
- case FARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(FLOAT), tree.pos)
- case DARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(DOUBLE), tree.pos)
- case OARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(ANY_REF_CLASS), tree.pos)
-
- case ZARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(BOOL), tree.pos)
- case BARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(BYTE), tree.pos)
- case SARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(SHORT), tree.pos)
- case CARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(CHAR), tree.pos)
- case IARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(INT), tree.pos)
- case LARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(LONG), tree.pos)
- case FARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(FLOAT), tree.pos)
- case DARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(DOUBLE), tree.pos)
- case OARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(ANY_REF_CLASS), tree.pos)
-
- case _ =>
- abort("Unknown operation on arrays: " + tree + " code: " + code)
- }
- ctx1
- }
-
- // genLoad
val resCtx: Context = tree match {
case LabelDef(name, params, rhs) =>
val ctx1 = ctx.newBlock
@@ -436,46 +517,10 @@ abstract class GenICode extends SubComponent {
generatedType = UNIT
ctx1
- case If(cond, thenp, elsep) =>
- var thenCtx = ctx.newBlock
- var elseCtx = ctx.newBlock
- val contCtx = ctx.newBlock
- genCond(cond, ctx, thenCtx, elseCtx)
- val ifKind = toTypeKind(tree.tpe)
-
- val thenKind = toTypeKind(thenp.tpe)
- val elseKind = if (elsep == EmptyTree) UNIT else toTypeKind(elsep.tpe)
-
- generatedType = ifKind
-
- // we need to drop unneeded results, if one branch gives
- // unit and the other gives something on the stack, because
- // the type of 'if' is scala.Any, and its erasure would be Object.
- // But unboxed units are not Objects...
- if (thenKind == UNIT || elseKind == UNIT) {
- if (settings.debug.value)
- log("Will drop result from an if branch");
- thenCtx = genLoad(thenp, thenCtx, UNIT)
- elseCtx = genLoad(elsep, elseCtx, UNIT)
- if (settings.debug.value)
- assert(expectedType == UNIT,
- "I produce UNIT in a context where " +
- expectedType + " is expected!")
- generatedType = UNIT
- } else {
- thenCtx = genLoad(thenp, thenCtx, ifKind)
- elseCtx = genLoad(elsep, elseCtx, ifKind)
- }
-
- thenCtx.bb.emit(JUMP(contCtx.bb))
- thenCtx.bb.close
- if (elsep == EmptyTree)
- elseCtx.bb.emit(JUMP(contCtx.bb), tree.pos)
- else
- elseCtx.bb.emit(JUMP(contCtx.bb))
- elseCtx.bb.close
-
- contCtx
+ case t @ If(cond, thenp, elsep) =>
+ val (newCtx, resKind) = genLoadIf(t, ctx, expectedType)
+ generatedType = resKind
+ newCtx
case Return(expr) =>
val returnedKind = toTypeKind(expr.tpe)
@@ -510,41 +555,7 @@ abstract class GenICode extends SubComponent {
generatedType = expectedType
ctx1
- case Try(block, catches, finalizer) =>
- val kind = toTypeKind(tree.tpe)
-
- var handlers = for (CaseDef(pat, _, body) <- catches.reverse)
- yield pat match {
- case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe.typeSymbol, kind, {
- ctx: Context =>
- ctx.bb.emit(DROP(REFERENCE(tpt.tpe.typeSymbol)));
- genLoad(body, ctx, kind);
- })
-
- case Ident(nme.WILDCARD) => (ThrowableClass, kind, {
- ctx: Context =>
- ctx.bb.emit(DROP(REFERENCE(ThrowableClass)))
- genLoad(body, ctx, kind)
- })
-
- case Bind(name, _) =>
- val exception = ctx.method.addLocal(new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false))
-
- (pat.symbol.tpe.typeSymbol, kind, {
- ctx: Context =>
- ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
- genLoad(body, ctx, kind);
- })
- }
-
- ctx.Try(
- bodyCtx => {
- generatedType = kind; //toTypeKind(block.tpe);
- genLoad(block, bodyCtx, generatedType);
- },
- handlers,
- finalizer,
- tree)
+ case t @ Try(_, _, _) => genLoadTry(t, ctx, (x: TypeKind) => generatedType = x)
case Throw(expr) =>
val ctx1 = genLoad(expr, ctx, THROWABLE)
@@ -558,30 +569,27 @@ abstract class GenICode extends SubComponent {
case Apply(TypeApply(fun, targs), _) =>
val sym = fun.symbol
- var ctx1 = ctx
- var cast = false
-
- if (sym == definitions.Object_isInstanceOf)
- cast = false
- else if (sym == definitions.Object_asInstanceOf)
- cast = true
- else
- abort("Unexpected type application " + fun + "[sym: " + sym.fullNameString + "]" + " in: " + tree)
+ val cast = sym match {
+ case Object_isInstanceOf => false
+ case Object_asInstanceOf => true
+ case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
+ }
val Select(obj, _) = fun
val l = toTypeKind(obj.tpe)
val r = toTypeKind(targs.head.tpe)
-
- ctx1 = genLoadQualifier(fun, ctx)
+ val ctx1 = genLoadQualifier(fun, ctx)
if (l.isValueType && r.isValueType)
genConversion(l, r, ctx1, cast)
else if (l.isValueType) {
ctx1.bb.emit(DROP(l), fun.pos)
if (cast) {
- ctx1.bb.emit(NEW(REFERENCE(definitions.getClass("ClassCastException"))))
- ctx1.bb.emit(DUP(ANY_REF_CLASS))
- ctx1.bb.emit(THROW())
+ ctx1.bb.emit(Seq(
+ NEW(REFERENCE(definitions.getClass("ClassCastException"))),
+ DUP(ANY_REF_CLASS),
+ THROW()
+ ))
} else
ctx1.bb.emit(CONSTANT(Constant(false)))
}
@@ -632,11 +640,9 @@ abstract class GenICode extends SubComponent {
assert(generatedType.isReferenceType || generatedType.isArrayType,
"Non reference type cannot be instantiated: " + generatedType)
- var ctx1 = ctx
-
generatedType match {
case arr @ ARRAY(elem) =>
- ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
val dims = arr.dimensions
var elemKind = arr.elementKind
if (args.length > dims)
@@ -645,28 +651,29 @@ abstract class GenICode extends SubComponent {
if (args.length != dims)
for (i <- args.length until dims) elemKind = ARRAY(elemKind)
ctx1.bb.emit(CREATE_ARRAY(elemKind, args.length), tree.pos)
+ ctx1
case rt @ REFERENCE(cls) =>
if (settings.debug.value)
assert(ctor.owner == cls,
- "Symbol " + ctor.owner.fullNameString + " is different than " + tpt)
+ "Symbol " + ctor.owner.fullName + " is different than " + tpt)
val nw = NEW(rt)
- ctx1.bb.emit(nw, tree.pos)
- ctx1.bb.emit(DUP(generatedType))
- ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ ctx.bb.emit(nw, tree.pos)
+ ctx.bb.emit(DUP(generatedType))
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
val init = CALL_METHOD(ctor, Static(true))
nw.init = init
ctx1.bb.emit(init, tree.pos)
+ ctx1
case _ =>
abort("Cannot instantiate " + tpt + "of kind: " + generatedType)
}
- ctx1
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
if (settings.debug.value)
- log("BOX : " + fun.symbol.fullNameString);
+ log("BOX : " + fun.symbol.fullName);
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val nativeKind = toTypeKind(expr.tpe)
if (settings.Xdce.value) {
@@ -683,14 +690,14 @@ abstract class GenICode extends SubComponent {
case Apply(fun @ _, List(expr)) if (definitions.isUnbox(fun.symbol)) =>
if (settings.debug.value)
- log("UNBOX : " + fun.symbol.fullNameString)
+ log("UNBOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val boxType = toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
generatedType = boxType
ctx1.bb.emit(UNBOX(boxType), expr.pos)
ctx1
- case Apply(fun, args) =>
+ case app @ Apply(fun, args) =>
val sym = fun.symbol
if (sym.isLabel) { // jump to a label
@@ -711,78 +718,12 @@ abstract class GenICode extends SubComponent {
}
}
val ctx1 = genLoadLabelArguments(args, label, ctx)
- if (label.anchored)
- ctx1.bb.emit(JUMP(label.block), tree.pos)
- else
- ctx1.bb.emit(PJUMP(label), tree.pos)
-
- ctx1.bb.close
+ ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
ctx1.newBlock
} else if (isPrimitive(sym)) { // primitive method call
- val Select(receiver, _) = fun
-
- val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
- var ctx1 = ctx
-
- if (scalaPrimitives.isArithmeticOp(code)) {
- ctx1 = genArithmeticOp(tree, ctx1, code)
- } else if (code == scalaPrimitives.CONCAT) {
- ctx1 = genStringConcat(tree, ctx1)
- generatedType = STRING
- } else if (isArrayOp(code)) {
- ctx1 = genArrayOp(tree, ctx1, code)
- } else if (isLogicalOp(code) || isComparisonOp(code)) {
-
- val trueCtx = ctx1.newBlock
- val falseCtx = ctx1.newBlock
- val afterCtx = ctx1.newBlock
- genCond(tree, ctx1, trueCtx, falseCtx)
- trueCtx.bb.emit(CONSTANT(Constant(true)), tree.pos)
- trueCtx.bb.emit(JUMP(afterCtx.bb))
- trueCtx.bb.close
- falseCtx.bb.emit(CONSTANT(Constant(false)), tree.pos)
- falseCtx.bb.emit(JUMP(afterCtx.bb))
- falseCtx.bb.close
- generatedType = BOOL
- ctx1 = afterCtx
- } else if (code == scalaPrimitives.SYNCHRONIZED) {
- val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
- ctx1 = genLoadQualifier(fun, ctx1)
- ctx1.bb.emit(DUP(ANY_REF_CLASS))
- ctx1.bb.emit(STORE_LOCAL(monitor))
- ctx1.bb.emit(MONITOR_ENTER(), tree.pos)
- ctx1.enterSynchronized(monitor)
-
- if (settings.debug.value)
- log("synchronized block start");
-
- ctx1 = ctx1.Try(
- bodyCtx => {
- val ctx1 = genLoad(args.head, bodyCtx, expectedType /* toTypeKind(tree.tpe.resultType) */)
- ctx1.bb.emit(LOAD_LOCAL(monitor))
- ctx1.bb.emit(MONITOR_EXIT(), tree.pos)
- ctx1
- }, List(
- // tree.tpe / fun.tpe is object, which is no longer true after this transformation
- (NoSymbol, expectedType, exhCtx => {
- exhCtx.bb.emit(LOAD_LOCAL(monitor))
- exhCtx.bb.emit(MONITOR_EXIT(), tree.pos)
- exhCtx.bb.emit(THROW())
- exhCtx.bb.enterIgnoreMode
- exhCtx
- })), EmptyTree, tree);
- if (settings.debug.value)
- log("synchronized block end with block " + ctx1.bb +
- " closed=" + ctx1.bb.closed);
- ctx1.exitSynchronized(monitor)
- } else if (scalaPrimitives.isCoercion(code)) {
- ctx1 = genLoad(receiver, ctx1, toTypeKind(receiver.tpe))
- genCoercion(tree, ctx1, code)
- generatedType = scalaPrimitives.generatedKind(code)
- } else
- abort("Primitive operation not handled yet: " + sym.fullNameString + "(" +
- fun.symbol.simpleName + ") " + " at: " + (tree.pos));
- ctx1
+ val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
+ generatedType = resKind
+ newCtx
} else { // normal method call
if (settings.debug.value)
log("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
@@ -799,16 +740,25 @@ abstract class GenICode extends SubComponent {
else ctx
ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
-
- val hostClass = fun match {
- case Select(qualifier, _)
- if (qualifier.tpe.typeSymbol != ArrayClass) =>
- qualifier.tpe.typeSymbol
- case _ => sym.owner
+ val cm = CALL_METHOD(sym, invokeStyle)
+
+ /** In a couple cases, squirrel away a little extra information in the
+ * CALL_METHOD for use by GenJVM.
+ */
+ fun match {
+ case Select(qual, _) =>
+ val qualSym = qual.tpe.typeSymbol
+ if (qualSym == ArrayClass) cm setTargetTypeKind toTypeKind(qual.tpe)
+ else cm setHostClass qualSym
+
+ if (settings.debug.value) log(
+ if (qualSym == ArrayClass) "Stored target type kind " + toTypeKind(qual.tpe) + " for " + sym.fullName
+ else "Set more precise host class for " + sym.fullName + " host: " + qualSym
+ )
+ case _ =>
}
- if (settings.debug.value && hostClass != sym.owner)
- log("Set more precise host class for " + sym.fullNameString + " host: " + hostClass);
- ctx1.bb.emit(CALL_METHOD(sym, invokeStyle) setHostClass hostClass, tree.pos)
+ ctx1.bb.emit(cm, tree.pos)
+
if (sym == ctx1.method.symbol) {
ctx1.method.recursive = true
}
@@ -833,14 +783,13 @@ abstract class GenICode extends SubComponent {
if (settings.debug.value)
log("LOAD_MODULE from 'This': " + tree.symbol);
assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- ctx.bb.emit(LOAD_MODULE(tree.symbol), tree.pos)
+ genLoadModule(ctx, tree.symbol, tree.pos)
generatedType = REFERENCE(tree.symbol)
} else {
ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- if (tree.symbol == ArrayClass)
- generatedType = REFERENCE(ObjectClass)
- else
- generatedType = REFERENCE(ctx.clazz.symbol)
+ generatedType = REFERENCE(
+ if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
+ )
}
ctx
@@ -853,7 +802,7 @@ abstract class GenICode extends SubComponent {
log("LOAD_MODULE from Select(<emptypackage>): " + tree.symbol);
}
assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- ctx.bb.emit(LOAD_MODULE(tree.symbol), tree.pos)
+ genLoadModule(ctx, tree.symbol, tree.pos)
ctx
case Select(qualifier, selector) =>
@@ -862,9 +811,9 @@ abstract class GenICode extends SubComponent {
if (sym.isModule) {
if (settings.debug.value)
- log("LOAD_MODULE from Select(qualifier, selector): " + sym);
+ log("LOAD_MODULE from Select(qualifier, selector): " + sym)
assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- ctx.bb.emit(LOAD_MODULE(sym), tree.pos);
+ genLoadModule(ctx, sym, tree.pos)
ctx
} else if (sym.isStaticMember) {
ctx.bb.emit(LOAD_FIELD(sym, true), tree.pos)
@@ -876,22 +825,22 @@ abstract class GenICode extends SubComponent {
}
case Ident(name) =>
- if (!tree.symbol.isPackage) {
- if (tree.symbol.isModule) {
+ val sym = tree.symbol
+ if (!sym.isPackage) {
+ if (sym.isModule) {
if (settings.debug.value)
- log("LOAD_MODULE from Ident(name): " + tree.symbol);
- assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- ctx.bb.emit(LOAD_MODULE(tree.symbol), tree.pos)
- generatedType = toTypeKind(tree.symbol.info)
+ log("LOAD_MODULE from Ident(name): " + sym)
+ assert(!sym.isPackageClass, "Cannot use package as value: " + tree)
+ genLoadModule(ctx, sym, tree.pos)
+ generatedType = toTypeKind(sym.info)
} else {
try {
- val Some(l) = ctx.method.lookupLocal(tree.symbol)
+ val Some(l) = ctx.method.lookupLocal(sym)
ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
generatedType = l.kind
} catch {
case ex: MatchError =>
- throw new Error("symbol " + tree.symbol +
- " does not exist in " + ctx.method)
+ abort("symbol " + sym + " does not exist in " + ctx.method)
}
}
}
@@ -961,31 +910,26 @@ abstract class GenICode extends SubComponent {
var tags: List[Int] = Nil
var default: BasicBlock = afterCtx.bb
- for (caze <- cases) caze match {
- case CaseDef(Literal(value), EmptyTree, body) =>
- tags = value.intValue :: tags
- val tmpCtx = ctx1.newBlock
- targets = tmpCtx.bb :: targets
-
- caseCtx = genLoad(body, tmpCtx , generatedType)
- caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
- caseCtx.bb.close
-
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, body) =>
- val tmpCtx = ctx1.newBlock
- default = tmpCtx.bb
-
- caseCtx = genLoad(body, tmpCtx , generatedType)
- caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
- caseCtx.bb.close
+ for (caze @ CaseDef(pat, guard, body) <- cases) {
+ assert(guard == EmptyTree)
+ val tmpCtx = ctx1.newBlock
+ pat match {
+ case Literal(value) =>
+ tags = value.intValue :: tags
+ targets = tmpCtx.bb :: targets
+ case Ident(nme.WILDCARD) =>
+ default = tmpCtx.bb
+ case _ =>
+ abort("Invalid case statement in switch-like pattern match: " +
+ tree + " at: " + (tree.pos))
+ }
- case _ =>
- abort("Invalid case statement in switch-like pattern match: " +
- tree + " at: " + (tree.pos))
+ caseCtx = genLoad(body, tmpCtx, generatedType)
+ caseCtx.bb.emitOnly(JUMP(afterCtx.bb) setPos caze.pos)
}
- ctx1.bb.emit(SWITCH(tags.reverse map (x => List(x)),
- (default :: targets).reverse), tree.pos)
- ctx1.bb.close
+ ctx1.bb.emitOnly(
+ SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
+ )
afterCtx
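          // Illustrative only (not part of this change): the kind of source-level match that
          // reaches this switch-like path -- integer literal cases plus a wildcard default,
          // which become the `tags`, `targets` and `default` blocks of the emitted SWITCH:
          //
          //   def describe(x: Int): String = x match {
          //     case 1 => "one"    // tag 1 -> its own target block
          //     case 2 => "two"    // tag 2 -> its own target block
          //     case _ => "many"   // wildcard -> the default block
          //   }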
case EmptyTree =>
@@ -994,13 +938,12 @@ abstract class GenICode extends SubComponent {
ctx
case _ =>
- abort("Unexpected tree in genLoad: " + tree + " at: " +
- (tree.pos))
+ abort("Unexpected tree in genLoad: " + tree + " at: " + tree.pos)
}
// emit conversion
if (generatedType != expectedType)
- adapt(generatedType, expectedType, resCtx, tree.pos);
+ adapt(generatedType, expectedType, resCtx, tree.pos)
resCtx
}
@@ -1091,6 +1034,13 @@ abstract class GenICode extends SubComponent {
ctx1
}
+ private def genLoadModule(ctx: Context, sym: Symbol, pos: Position) {
+ if (definitions.primitiveCompanions(sym))
+ ctx.bb.emit(LOAD_MODULE(definitions.getModule("scala.runtime." + sym.name)), pos)
+ else
+ ctx.bb.emit(LOAD_MODULE(sym), pos)
+ }
+
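    // Illustrative only (not part of this change): for a companion object of a primitive
    // value class, genLoadModule loads the matching module under scala.runtime instead of
    // the companion itself. Assuming `sym` is the Int companion, the effect is roughly:
    //
    //   ctx.bb.emit(LOAD_MODULE(definitions.getModule("scala.runtime.Int")), pos)
    //   // rather than ctx.bb.emit(LOAD_MODULE(sym), pos)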
def genConversion(from: TypeKind, to: TypeKind, ctx: Context, cast: Boolean) = {
if (cast)
ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)))
@@ -1143,7 +1093,7 @@ abstract class GenICode extends SubComponent {
*/
def genCoercion(tree: Tree, ctx: Context, code: Int) = {
import scalaPrimitives._
- code match {
+ (code: @switch) match {
case B2B => ()
case B2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, CHAR)), tree.pos)
case B2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, SHORT)), tree.pos)
@@ -1229,6 +1179,19 @@ abstract class GenICode extends SubComponent {
ctx1
}
+    /** Generate code for the scala `##` method.
+ */
+ def genScalaHash(tree: Tree, ctx: Context): Context = {
+ val hashMethod = {
+ ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
+ getMember(ScalaRunTimeModule, "hash")
+ }
+
+ val ctx1 = genLoad(tree, ctx, ANY_REF_CLASS)
+ ctx1.bb.emit(CALL_METHOD(hashMethod, Static(false)))
+ ctx1
+ }
+
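    // Illustrative only (not part of this change): a source-level reading of what
    // genScalaHash emits for `x.##` -- load the ScalaRunTime module, load the argument,
    // then call its `hash` member:
    //
    //   def scalaHashSketch(x: Any): Int =            // hypothetical helper name
    //     scala.runtime.ScalaRunTime.hash(x)          // the call emitted above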
/**
* Returns a list of trees that each should be concatenated, from
* left to right. It turns a chained call like "a".+("b").+("c") into
@@ -1410,24 +1373,33 @@ abstract class GenICode extends SubComponent {
     * When it is statically known that both sides are equal and subtypes of Number or Character,
* not using the rich equality is possible (their own equals method will do ok.)*/
def mustUseAnyComparator: Boolean = {
- def isBoxed(sym: Symbol): Boolean =
- ((sym isNonBottomSubClass definitions.BoxedNumberClass) ||
- (!forMSIL && (sym isNonBottomSubClass definitions.BoxedCharacterClass)))
-
- val lsym = l.tpe.typeSymbol
- val rsym = r.tpe.typeSymbol
- (lsym == ObjectClass) ||
- (rsym == ObjectClass) ||
- (lsym != rsym) && (isBoxed(lsym) || isBoxed(rsym))
+ def areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
+ !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
}
if (mustUseAnyComparator) {
- var equalsMethod = BoxesRunTime_equals
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
- if (settings.XO.value) {
- equalsMethod = definitions.getMember(definitions.ScalaRunTimeModule, nme.inlinedEquals)
- ctx.bb.emit(LOAD_MODULE(definitions.ScalaRunTimeModule))
- }
+ val equalsMethod =
+ if (!settings.XO.value) {
+ def default = platform.externalEquals
+ platform match {
+ case x: JavaPlatform =>
+ import x._
+ if (l.tpe <:< BoxedNumberClass.tpe) {
+ if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum
+ else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar
+ else externalEqualsNumObject
+ }
+ else default
+
+ case _ => default
+ }
+ }
+ else {
+ ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
+ getMember(ScalaRunTimeModule, nme.inlinedEquals)
+ }
+
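      // Illustrative only (not part of this change): how the helper chosen above lines up
      // with the static types of the operands when -optimise is off, on a JavaPlatform:
      //
      //   l boxed Number, r boxed Number    -> externalEqualsNumNum
      //   l boxed Number, r boxed Character -> externalEqualsNumChar
      //   l boxed Number, r anything else   -> externalEqualsNumObject
      //   l not a boxed Number              -> platform.externalEquals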
val ctx1 = genLoad(l, ctx, ANY_REF_CLASS)
val ctx2 = genLoad(r, ctx1, ANY_REF_CLASS)
ctx2.bb.emit(CALL_METHOD(equalsMethod, if (settings.XO.value) Dynamic else Static(false)))
@@ -1438,48 +1410,35 @@ abstract class GenICode extends SubComponent {
if (isNull(l))
// null == expr -> expr eq null
genLoad(r, ctx, ANY_REF_CLASS).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
- else {
+ else if (isNull(r)) {
+ // expr == null -> expr eq null
+ genLoad(l, ctx, ANY_REF_CLASS).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
+ } else {
val eqEqTempLocal = getTempLocal
var ctx1 = genLoad(l, ctx, ANY_REF_CLASS)
// dicey refactor section
lazy val nonNullCtx = ctx1.newBlock
- if (isNull(r)) {
- // expr == null -> if (l eq null) true else l.equals(null)
- ctx1.bb.emitOnly(
- DUP(ANY_REF_CLASS),
- STORE_LOCAL(eqEqTempLocal) setPos l.pos,
- CZJUMP(thenCtx.bb, nonNullCtx.bb, EQ, ANY_REF_CLASS)
- )
- nonNullCtx.bb.emitOnly(
- LOAD_LOCAL(eqEqTempLocal) setPos l.pos,
- CONSTANT(Constant(null)) setPos r.pos,
- CALL_METHOD(Object_equals, Dynamic),
- CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
- )
- }
- else {
- // l == r -> if (l eq null) r eq null else l.equals(r)
- ctx1 = genLoad(r, ctx1, ANY_REF_CLASS)
- val nullCtx = ctx1.newBlock
-
- ctx1.bb.emitOnly(
- STORE_LOCAL(eqEqTempLocal) setPos l.pos,
- DUP(ANY_REF_CLASS),
- CZJUMP(nullCtx.bb, nonNullCtx.bb, EQ, ANY_REF_CLASS)
- )
- nullCtx.bb.emitOnly(
- DROP(ANY_REF_CLASS) setPos l.pos, // type of AnyRef
- LOAD_LOCAL(eqEqTempLocal),
- CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
- )
- nonNullCtx.bb.emitOnly(
- LOAD_LOCAL(eqEqTempLocal) setPos l.pos,
- CALL_METHOD(Object_equals, Dynamic),
- CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
- )
- }
+ // l == r -> if (l eq null) r eq null else l.equals(r)
+ ctx1 = genLoad(r, ctx1, ANY_REF_CLASS)
+ val nullCtx = ctx1.newBlock
+
+ ctx1.bb.emitOnly(
+ STORE_LOCAL(eqEqTempLocal) setPos l.pos,
+ DUP(ANY_REF_CLASS),
+ CZJUMP(nullCtx.bb, nonNullCtx.bb, EQ, ANY_REF_CLASS)
+ )
+ nullCtx.bb.emitOnly(
+ DROP(ANY_REF_CLASS) setPos l.pos, // type of AnyRef
+ LOAD_LOCAL(eqEqTempLocal),
+ CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
+ )
+ nonNullCtx.bb.emitOnly(
+ LOAD_LOCAL(eqEqTempLocal) setPos l.pos,
+ CALL_METHOD(Object_equals, Dynamic),
+ CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
+ )
}
}
}
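      // Illustrative only (not part of this change): a source-level reading of the reference
      // `==` expansion emitted above (eqEqTempLocal holds the right-hand side):
      //
      //   def refEqualsSketch(l: AnyRef, r: AnyRef): Boolean =   // hypothetical helper name
      //     if (l eq null) r eq null   // the nullCtx branch
      //     else l.equals(r)           // the nonNullCtx branch, via Object_equals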
@@ -1493,9 +1452,15 @@ abstract class GenICode extends SubComponent {
assert(ctx.clazz.symbol eq cls,
"Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
- for (f <- cls.info.decls.iterator)
- if (!f.isMethod && f.isTerm)
- ctx.clazz.addField(new IField(f));
+ /** Non-method term members are fields, except for module members. Module
+ * members can only happen on .NET (no flatten) for inner traits. There,
+ * a module symbol is generated (transformInfo in mixin) which is used
+ * as owner for the members of the implementation class (so that the
+ * backend emits them as static).
+ * No code is needed for this module symbol.
+ */
+ for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
+ ctx.clazz addField new IField(f)
}
/**
@@ -1545,19 +1510,19 @@ abstract class GenICode extends SubComponent {
def prune0(block: BasicBlock): Unit = {
val optCont = block.lastInstruction match {
- case JUMP(b) if (b != block) => Some(b);
+ case JUMP(b) if (b != block) => Some(b)
case _ => None
}
- if (block.size == 1 && optCont != None) {
+ if (block.size == 1 && optCont.isDefined) {
val Some(cont) = optCont;
val pred = block.predecessors;
log("Preds: " + pred + " of " + block + " (" + optCont + ")");
pred foreach { p =>
+ changed = true
p.lastInstruction match {
- case CJUMP(succ, fail, cond, kind) =>
+ case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
if (settings.debug.value)
log("Pruning empty if branch.");
- changed = true
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1569,10 +1534,9 @@ abstract class GenICode extends SubComponent {
else
abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
- case CZJUMP(succ, fail, cond, kind) =>
+ case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
if (settings.debug.value)
log("Pruning empty ifz branch.");
- changed = true
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1584,20 +1548,22 @@ abstract class GenICode extends SubComponent {
else
abort("Could not find block in preds"))
- case JUMP(b) =>
+ case JUMP(b) if (b == block) =>
if (settings.debug.value)
log("Pruning empty JMP branch.");
- changed = true
val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
if (settings.debug.value)
assert(replaced, "Didn't find p.lastInstruction")
- case SWITCH(tags, labels) =>
+ case SWITCH(tags, labels) if (labels contains block) =>
if (settings.debug.value)
log("Pruning empty SWITCH branch.");
- changed = true
p.replaceInstruction(p.lastInstruction,
SWITCH(tags, labels map (l => if (l == block) cont else l)))
+
+ // the last instr of the predecessor `p` is not a jump to the block `block`.
+ // this happens when `block` is part of an exception handler covering `b`.
+ case _ => ()
}
}
if (changed) {
@@ -1616,7 +1582,7 @@ abstract class GenICode extends SubComponent {
do {
changed = false
n += 1
- method.code traverse prune0
+ method.code.blocks foreach prune0
} while (changed)
if (settings.debug.value)
@@ -1650,7 +1616,7 @@ abstract class GenICode extends SubComponent {
* to delay it any more: they will be used at some point.
*/
class DuplicateLabels(boundLabels: collection.Set[Symbol]) extends Transformer {
- val labels: Map[Symbol, Symbol] = new HashMap
+ val labels: mutable.Map[Symbol, Symbol] = new HashMap
var method: Symbol = _
var ctx: Context = _
@@ -1698,14 +1664,17 @@ abstract class GenICode extends SubComponent {
abstract class Cleanup;
case class MonitorRelease(m: Local) extends Cleanup {
+ override def hashCode = m.hashCode
override def equals(other: Any) = m == other;
}
case class Finalizer(f: Tree) extends Cleanup {
+ override def hashCode = f.hashCode
override def equals(other: Any) = f == other;
}
- def duplicateFinalizer(ctx: Context, finalizer: Tree) =
- (new DuplicateLabels(ctx.labels.keySet))(ctx, finalizer)
+ def duplicateFinalizer(boundLabels: collection.Set[Symbol], targetCtx: Context, finalizer: Tree) = {
+ (new DuplicateLabels(boundLabels))(targetCtx, finalizer)
+ }
/**
* The Context class keeps information relative to the current state
@@ -1737,8 +1706,8 @@ abstract class GenICode extends SubComponent {
/** The current monitors or finalizers, to be cleaned up upon `return'. */
var cleanups: List[Cleanup] = Nil
- /** The current exception handler, when we generate code for one. */
- var currentExceptionHandler: Option[ExceptionHandler] = None
+ /** The exception handlers we are currently generating code for */
+ var currentExceptionHandlers: List[ExceptionHandler] = Nil
/** The current local variable scope. */
var scope: Scope = EmptyScope
@@ -1769,7 +1738,7 @@ abstract class GenICode extends SubComponent {
this.handlers = other.handlers
this.handlerCount = other.handlerCount
this.cleanups = other.cleanups
- this.currentExceptionHandler = other.currentExceptionHandler
+ this.currentExceptionHandlers = other.currentExceptionHandlers
this.scope = other.scope
}
@@ -1838,10 +1807,7 @@ abstract class GenICode extends SubComponent {
def newBlock: Context = {
val block = method.code.newBlock
handlers foreach (h => h addCoveredBlock block)
- currentExceptionHandler match {
- case Some(e) => e.addBlock(block)
- case None => ()
- }
+ currentExceptionHandlers foreach (h => h.addBlock(block))
block.varsInScope = new HashSet()
block.varsInScope ++= scope.varsInScope
new Context(this) setBasicBlock block
@@ -1888,12 +1854,16 @@ abstract class GenICode extends SubComponent {
* exception handler.
*/
def enterHandler(exh: ExceptionHandler): Context = {
- currentExceptionHandler = Some(exh)
+ currentExceptionHandlers = exh :: currentExceptionHandlers
val ctx = newBlock
exh.setStartBlock(ctx.bb)
ctx
}
+ def endHandler() {
+ currentExceptionHandlers = currentExceptionHandlers.tail
+ }
+
/** Remove the given handler from the list of active exception handlers. */
def removeHandler(exh: ExceptionHandler): Unit = {
assert(handlerCount > 0 && handlers.head == exh,
@@ -1937,7 +1907,7 @@ abstract class GenICode extends SubComponent {
def Try(body: Context => Context,
handlers: List[(Symbol, TypeKind, (Context => Context))],
finalizer: Tree,
- tree: Tree) = {
+ tree: Tree) = if (forMSIL) TryMsil(body, handlers, finalizer, tree) else {
val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
val finalizerCtx = this.dup // context for generating finalizer handler
@@ -1945,6 +1915,10 @@ abstract class GenICode extends SubComponent {
var tmp: Local = null
val kind = toTypeKind(tree.tpe)
val guardResult = kind != UNIT && mayCleanStack(finalizer)
+ // we need to save bound labels before any code generation is performed on
+ // the current context (otherwise, any new labels in the finalizer that need to
+ // be duplicated would be incorrectly considered bound -- see #2850).
+ val boundLabels: collection.Set[Symbol] = Set.empty ++ labels.keySet
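      // Illustrative only (hypothetical shape, not part of this change): the situation behind
      // #2850 is a finalizer whose body introduces its own labels, e.g. the while loop below;
      // those labels must be freshly duplicated for each copy of the finalizer rather than
      // being mistaken for labels already bound in the enclosing context:
      //
      //   try body()
      //   finally {
      //     var i = 0
      //     while (i < steps) { cleanup(i); i += 1 }   // the loop compiles to a labelled jump
      //   }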
if (guardResult) {
tmp = this.makeLocal(tree.pos, tree.tpe, "tmp")
@@ -1957,11 +1931,11 @@ abstract class GenICode extends SubComponent {
if (guardResult) {
ctx1.bb.emit(STORE_LOCAL(tmp))
- val ctx2 = genLoad(duplicateFinalizer(ctx1, finalizer), ctx1, UNIT)
+ val ctx2 = genLoad(duplicateFinalizer(boundLabels, ctx1, finalizer), ctx1, UNIT)
ctx2.bb.emit(LOAD_LOCAL(tmp))
ctx2
} else
- genLoad(duplicateFinalizer(ctx1, finalizer), ctx1, UNIT)
+ genLoad(duplicateFinalizer(boundLabels, ctx1, finalizer), ctx1, UNIT)
} else ctx
@@ -1977,6 +1951,7 @@ abstract class GenICode extends SubComponent {
ctx1.bb.emit(THROW());
ctx1.bb.enterIgnoreMode;
ctx1.bb.close
+ finalizerCtx.endHandler()
exh
}) else None
@@ -1989,6 +1964,7 @@ abstract class GenICode extends SubComponent {
val ctx2 = emitFinalizer(ctx1)
ctx2.bb.emit(JUMP(afterCtx.bb))
ctx2.bb.close
+ outerCtx.endHandler()
exh
}
val bodyCtx = this.newBlock
@@ -2006,6 +1982,71 @@ abstract class GenICode extends SubComponent {
afterCtx
}
+
+
+ /** try-catch-finally blocks are actually simpler to emit in MSIL, because there
+ * is support for `finally` in bytecode.
+ *
+ * A
+ * try { .. } catch { .. } finally { .. }
+ * block is de-sugared into
+ * try { try { ..} catch { .. } } finally { .. }
+ *
+     * In ICode a `finally` block is represented exactly the same way as an exception handler,
+ * but with `NoSymbol` as the exception class. The covered blocks are all blocks of
+ * the `try { .. } catch { .. }`.
+ *
+ * Also, TryMsil does not enter any Finalizers into the `cleanups', because the
+ * CLI takes care of running the finalizer when seeing a `leave' statement inside
+ * a try / catch.
+ */
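     // Illustrative only (hypothetical example, not part of this change): the de-sugaring
     // described above, written out at the source level:
     //
     //   try { risky() }
     //   catch { case e: Exception => recover(e) }
     //   finally { release() }
     //
     //   // is emitted as if it had been written
     //   try {
     //     try { risky() }
     //     catch { case e: Exception => recover(e) }
     //   } finally { release() }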
+ def TryMsil(body: Context => Context,
+ handlers: List[(Symbol, TypeKind, (Context => Context))],
+ finalizer: Tree,
+ tree: Tree) = {
+
+ val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
+ val finalizerCtx = this.dup // context for generating finalizer handler
+ val afterCtx = outerCtx.newBlock
+
+ if (finalizer != EmptyTree) {
+      // the finalizer covers the try and all the catch blocks, i.e.
+ // try { try { .. } catch { ..} } finally { .. }
+ val exh = outerCtx.newHandler(NoSymbol, UNIT)
+ this.addActiveHandler(exh)
+ val ctx = finalizerCtx.enterHandler(exh)
+ if (settings.Xdce.value) ctx.bb.emit(LOAD_EXCEPTION())
+ val ctx1 = genLoad(finalizer, ctx, UNIT)
+        // need a jump for the ICode to be valid; the MSIL backend will emit `Endfinally` instead.
+ ctx1.bb.emit(JUMP(afterCtx.bb))
+ ctx1.bb.close
+ finalizerCtx.endHandler()
+ }
+
+ for (handler <- handlers) {
+ val exh = this.newHandler(handler._1, handler._2)
+ var ctx1 = outerCtx.enterHandler(exh)
+ if (settings.Xdce.value) ctx1.bb.emit(LOAD_EXCEPTION())
+ ctx1 = handler._3(ctx1)
+ // msil backend will emit `Leave` to jump out of a handler
+ ctx1.bb.emit(JUMP(afterCtx.bb))
+ ctx1.bb.close
+ outerCtx.endHandler()
+ }
+
+ val bodyCtx = this.newBlock
+
+ val finalCtx = body(bodyCtx)
+
+ outerCtx.bb.emit(JUMP(bodyCtx.bb))
+ outerCtx.bb.close
+
+ // msil backend will emit `Leave` to jump out of a try-block
+ finalCtx.bb.emit(JUMP(afterCtx.bb))
+ finalCtx.bb.close
+
+ afterCtx
+ }
}
}
@@ -2045,7 +2086,7 @@ abstract class GenICode extends SubComponent {
* jumps to the given basic block.
*/
def patch(code: Code) {
- def substMap: Map[Instruction, Instruction] = {
+ def substMap: mutable.Map[Instruction, Instruction] = {
val map = new HashMap[Instruction, Instruction]()
toPatch foreach (i => map += (i -> patch(i)))
@@ -2053,7 +2094,7 @@ abstract class GenICode extends SubComponent {
}
val map = substMap
- code traverse (_.subst(map))
+ code.blocks foreach (_.subst(map))
}
/**
@@ -2095,7 +2136,7 @@ abstract class GenICode extends SubComponent {
///////////////// Fake instructions //////////////////////////
/**
- * Pseudo jump: it takes a Label instead of a basick block.
+ * Pseudo jump: it takes a Label instead of a basic block.
* It is used temporarily during code generation. It is replaced
* by a real JUMP instruction when all labels are resolved.
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 8cb3162e94..dc90a438b9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -1,13 +1,12 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -54,12 +53,22 @@ abstract class ICodes extends AnyRef
else
global.abort("Unknown linearizer: " + global.settings.Xlinearizer.value)
+  /** Have to be careful because dump makes calls all around, possibly
+   *  re-entering the methods which initiated the dump (like foreach
+   *  in BasicBlocks), which leads to the icode output olympics.
+ */
+ private var alreadyDumping = false
+
/** Print all classes and basic blocks. Used for debugging. */
+
def dump {
+ if (alreadyDumping) return
+ else alreadyDumping = true
+
val printer = new TextPrinter(new PrintWriter(Console.out, true),
new DumpLinearizer)
- classes.valuesIterator foreach printer.printClass
+ classes.values foreach printer.printClass
}
object liveness extends Liveness {
@@ -72,13 +81,6 @@ abstract class ICodes extends AnyRef
lazy val AnyRefReference: TypeKind = REFERENCE(global.definitions.ObjectClass)
- import global.settings
- if (settings.XO.value) {
- settings.inline.value = true
- settings.Xcloselim.value = true
- settings.Xdce.value = true
- }
-
object icodeReader extends ICodeReader {
lazy val global: ICodes.this.global.type = ICodes.this.global
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 7d9b2fd537..701e5d3815 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -3,14 +3,13 @@
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
package icode;
-import scala.tools.nsc.ast._;
-import scala.collection.mutable.{Stack, HashSet, BitSet};
+import scala.tools.nsc.ast._
+import scala.collection.mutable.{Stack, HashSet, BitSet, ListBuffer}
trait Linearizers { self: ICodes =>
import opcodes._;
@@ -201,4 +200,142 @@ trait Linearizers { self: ICodes =>
}
}
+ /** The MSIL linearizer is used only for methods with at least one exception handler.
+ * It makes sure that all the blocks belonging to a `try`, `catch` or `finally` block
+ * are emitted in an order that allows the lexical nesting of try-catch-finally, just
+ * like in the source code.
+ */
+ class MSILLinearizer extends Linearizer {
+    /** The MSIL linearizer first calls a NormalLinearizer. This is because the ILGenerator checks
+ * the stack size before emitting instructions. For instance, to emit a `store`, there needs
+ * to be some value on the stack. This can blow up in situations like this:
+ * ...
+ * jump 3
+ * 4: store_local 0
+ * jump 5
+ * 3: load_value
+ * jump 4
+ * 5: ...
+ * here, 3 must be scheduled first.
+ *
+ * The NormalLinearizer also removes dead blocks (blocks without predecessor). This is important
+ * in the following example:
+ * try { throw new Exception }
+ * catch { case e => throw e }
+ * which adds a dead block containing just a "throw" (which, again, would blow up code generation
+ * because of the stack size; there's no value on the stack when emitting that `throw`)
+ */
+ val normalLinearizer = new NormalLinearizer()
+
+ def linearize(m: IMethod): List[BasicBlock] = {
+
+ val handlersByCovered = m.exh.groupBy(_.covered)
+
+ // number of basic blocks covered by the entire try-catch expression
+ def size(covered: collection.immutable.Set[BasicBlock]) = {
+ val hs = handlersByCovered(covered)
+ covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
+ }
+
+ val tryBlocks = handlersByCovered.keys.toList sortBy size
+
+ var result = normalLinearizer.linearize(m)
+
+ val frozen = HashSet[BasicBlock](result.head)
+ for (tryBlock <- tryBlocks) {
+ result = groupBlocks(m, result, handlersByCovered(tryBlock), frozen)
+ }
+ result
+ }
+
+ /** @param handlers a list of handlers covering the same blocks (same try, multiple catches)
+     *  @param frozen   blocks that can't be moved (the first block of a method, blocks directly following a try-catch)
+ */
+ def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: HashSet[BasicBlock]) = {
+ assert(blocks.head == method.code.startBlock, method)
+
+ // blocks before the try, and blocks for the try
+ val beforeAndTry = new ListBuffer[BasicBlock]()
+ // blocks for the handlers
+ val catches = handlers map (_ => new ListBuffer[BasicBlock]())
+ // blocks to be put at the end
+ val after = new ListBuffer[BasicBlock]()
+
+ var beforeTry = true
+ val head = handlers.head
+
+ for (b <- blocks) {
+ if (head covers b) {
+ beforeTry = false
+ beforeAndTry += b
+ } else {
+ val handlerIndex = handlers.indexWhere(_.blocks.contains(b))
+ if (handlerIndex >= 0) {
+ catches(handlerIndex) += b
+ } else if (beforeTry) {
+ beforeAndTry += b
+ } else {
+ after += b
+ }
+ }
+ }
+
+ // reorder the blocks in "catches" so that the "firstBlock" is actually first
+ for ((lb, handler) <- catches.zip(handlers)) {
+ lb -= handler.startBlock
+ handler.startBlock +=: lb
+ }
+
+      // The first block emitted after a try-catch must be the one that the try / catch
+      // blocks jump to (because in MSIL, these jumps cannot be emitted manually)
+ var firstAfter: Option[BasicBlock] = None
+
+ // Find the (hopefully) unique successor, look at the try and all catch blocks
+ var blks = head.covered.toList :: handlers.map(_.blocks)
+ while (firstAfter.isEmpty && !blks.isEmpty) {
+ val b = blks.head
+ blks = blks.tail
+
+ val leaving = leavingBlocks(b)
+ // no leaving blocks when the try or catch ends with THROW or RET
+ if (!leaving.isEmpty) {
+ assert(leaving.size <= 1, leaving)
+ firstAfter = Some(leaving.head)
+ }
+ }
+ if (firstAfter.isDefined) {
+ val b = firstAfter.get
+ if (frozen contains b) {
+ assert(after contains b, b +", "+ method)
+ } else {
+ frozen += b
+ if (beforeAndTry contains b) {
+ beforeAndTry -= b
+ } else {
+ assert(after contains b, after)
+ after -= b
+ }
+ b +=: after
+ }
+ }
+
+ for (lb <- catches) { beforeAndTry ++= lb }
+ beforeAndTry ++= after
+ beforeAndTry.toList
+ }
+
+    /** Returns all direct successors of `blocks` which are not part of
+     *  that list, i.e. successors outside the `blocks` list.
+ */
+ private def leavingBlocks(blocks: List[BasicBlock]) = {
+ val res = new HashSet[BasicBlock]()
+ for (b <- blocks; s <- b.directSuccessors; if (!blocks.contains(s)))
+ res += s
+ res
+ }
+
+ def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
+ error("not implemented")
+ }
+ }
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 19f78626e9..95cd15711b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -1,9 +1,8 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -36,6 +35,14 @@ trait Members { self: ICodes =>
var producedStack: TypeStack = null
private var currentLabel: Int = 0
+ private var _touched = false
+
+ def touched = _touched
+ def touched_=(b: Boolean): Unit = if (b) {
+ blocks foreach (_.touched = true)
+ _touched = true
+ } else
+ _touched = false
// Constructor code
startBlock = newBlock
@@ -52,52 +59,11 @@ trait Members { self: ICodes =>
if (b == startBlock)
startBlock = b.successors.head;
blocks -= b
- }
-
- /**
- * Apply a function to all basic blocks, for side-effects. It starts at
- * the given startBlock and checks that are no predecessors of the given node.
- * Only blocks that are reachable via a path from startBlock are ever visited.
- */
- def traverseFrom(startBlock: BasicBlock, f: BasicBlock => Unit) = {
- val visited: Set[BasicBlock] = new HashSet();
-
- def traverse0(toVisit: List[BasicBlock]): Unit = toVisit match {
- case Nil => ();
- case b :: bs => if (!visited.contains(b)) {
- f(b);
- visited += b;
- traverse0(bs ::: b.successors);
- } else
- traverse0(bs);
- }
- assert(startBlock.predecessors == Nil,
- "Starting traverse from a block with predecessors: " + this);
- traverse0(startBlock :: Nil)
- }
+ assert(!blocks.contains(b))
+ for (handler <- method.exh if handler.covers(b))
+ handler.covered -= b
- def traverse(f: BasicBlock => Unit) = blocks.toList foreach f;
-
- /* This method applies the given function to each basic block. */
- def traverseFeedBack(f: (BasicBlock, HashMap[BasicBlock, Boolean]) => Unit) = {
- val visited : HashMap[BasicBlock, Boolean] = new HashMap;
- visited ++= blocks.iterator.map(x => (x, false));
-
- var blockToVisit: List[BasicBlock] = List(startBlock)
-
- while (!blockToVisit.isEmpty) {
- blockToVisit match {
- case b::xs =>
- if (!visited(b)) {
- f(b, visited);
- blockToVisit = b.successors ::: xs;
- visited += (b -> true)
- } else
- blockToVisit = xs
- case _ =>
- error("impossible match")
- }
- }
+ touched = true
}
/** This methods returns a string representation of the ICode */
@@ -112,6 +78,7 @@ trait Members { self: ICodes =>
/* Create a new block and append it to the list
*/
def newBlock: BasicBlock = {
+ touched = true
val block = new BasicBlock(nextLabel, method);
blocks += block;
block
@@ -140,11 +107,16 @@ trait Members { self: ICodes =>
this
}
- override def toString() = symbol.fullNameString
+ override def toString() = symbol.fullName
def lookupField(s: Symbol) = fields find (_.symbol == s)
def lookupMethod(s: Symbol) = methods find (_.symbol == s)
def lookupMethod(s: Name) = methods find (_.symbol.name == s)
+
+ /* determines whether or not this class contains a static ctor. */
+ def containsStaticCtor: Boolean = methods.exists(_.isStaticCtor)
+    /* returns this class's static ctor if it has one. */
+ def lookupStaticCtor: Option[IMethod] = methods.find(_.isStaticCtor)
}
/** Represent a field in ICode */
@@ -225,12 +197,15 @@ trait Members { self: ICodes =>
def isStatic: Boolean = symbol.isStaticMember
- override def toString() = symbol.fullNameString
+    /* determines whether or not this method is the class's static constructor. */
+ def isStaticCtor: Boolean = isStatic && symbol.rawname == nme.CONSTRUCTOR
+
+ override def toString() = symbol.fullName
import opcodes._
def checkLocals: Unit = if (code ne null) {
Console.println("[checking locals of " + this + "]")
- for (bb <- code.blocks; i <- bb.toList) i match {
+ for (bb <- code.blocks; i <- bb) i match {
case LOAD_LOCAL(l) =>
if (!this.locals.contains(l))
Console.println("Local " + l + " is not declared in " + this)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index ca2a0591ed..ed76e66aa0 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -170,7 +169,7 @@ trait Opcodes { self: ICodes =>
case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "LOAD_FIELD " + (if (isStatic) field.fullNameString else field.toString());
+ "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString());
override def consumed = if (isStatic) 0 else 1
override def produced = 1
@@ -320,22 +319,25 @@ trait Opcodes { self: ICodes =>
case class CALL_METHOD(method: Symbol, style: InvokeStyle) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "CALL_METHOD " + hostClass.fullNameString + method.fullNameString +" ("+style.toString()+")";
+ "CALL_METHOD " + hostClass.fullName + method.fullName +" ("+style.toString()+")";
var hostClass: Symbol = method.owner;
def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
- override def consumed = {
- var result = method.tpe.paramTypes.length;
- result = result + (style match {
+ /** This is specifically for preserving the target native Array type long
+ * enough that clone() can generate the right call.
+ */
+ var targetTypeKind: TypeKind = UNIT // the default should never be used, so UNIT should fail fast.
+ def setTargetTypeKind(tk: TypeKind) = targetTypeKind = tk
+
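    // Illustrative only (not part of this change): the target type kind matters for calls
    // whose qualifier is a native array, so that GenJVM can emit clone() against the array
    // type rather than against Object:
    //
    //   val xs: Array[Int] = Array(1, 2, 3)
    //   val ys = xs.clone()   // CALL_METHOD clone with targetTypeKind ARRAY(INT)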
+ override def consumed = method.tpe.paramTypes.length + (
+ style match {
case Dynamic | InvokeDynamic => 1
case Static(true) => 1
case Static(false) => 0
case SuperCall(_) => 1
- });
-
- result;
- }
+ }
+ )
override def consumedTypes = {
val args = method.tpe.paramTypes map toTypeKind
@@ -352,7 +354,7 @@ trait Opcodes { self: ICodes =>
0
else 1
- /** object idenity is equality for CALL_METHODs. Needed for
+ /** object identity is equality for CALL_METHODs. Needed for
* being able to store such instructions into maps, when more
* than one CALL_METHOD to the same method might exist.
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index b9564a3cb1..8624bb5aff 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -1,9 +1,8 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index f03b84a50e..9aaeeb2e8b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -1,9 +1,8 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -119,9 +118,9 @@ trait Printers { self: ICodes =>
print(bb.label)
if (bb.loopHeader) print("[loop header]")
print(": ");
- if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors)
+ if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
indent; println
- bb.toList foreach printInstruction
+ bb foreach printInstruction
undent; println
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index d7e8f0955b..78a47e129c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index a3cc5310ab..65a5291f36 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -108,6 +107,8 @@ trait TypeKinds { self: ICodes =>
def dimensions: Int = 0
}
+ var lubs0 = 0
+
/**
* The least upper bound of two typekinds. They have to be either
* REFERENCE or ARRAY kinds.
@@ -116,16 +117,11 @@ trait TypeKinds { self: ICodes =>
*/
def lub(a: TypeKind, b: TypeKind): TypeKind = {
def lub0(t1: Type, t2: Type): Type = {
- val lubTpe = global.lub(t1 :: t2 :: Nil)
- assert(lubTpe.typeSymbol.isClass,
- "Least upper bound of " + t1 + " and " + t2 + " is not a class: " + lubTpe)
- lubTpe
+ //lubs0 += 1
+ global.lub(t1 :: t2 :: Nil)
}
- if ((a.isReferenceType || a.isArrayType) &&
- (b.isReferenceType || b.isArrayType))
- toTypeKind(lub0(a.toType, b.toType))
- else if (a == b) a
+ if (a == b) a
else if (a == REFERENCE(NothingClass)) b
else if (b == REFERENCE(NothingClass)) a
else (a, b) match {
@@ -136,7 +132,12 @@ trait TypeKinds { self: ICodes =>
case (SHORT, INT) | (INT, SHORT) => INT
case (CHAR, INT) | (INT, CHAR) => INT
case (BOOL, INT) | (INT, BOOL) => INT
- case _ => throw new CheckerError("Incompatible types: " + a + " with " + b)
+ case _ =>
+ if ((a.isReferenceType || a.isArrayType) &&
+ (b.isReferenceType || b.isArrayType))
+ toTypeKind(lub0(a.toType, b.toType))
+ else
+ throw new CheckerException("Incompatible types: " + a + " with " + b)
}
}
@@ -162,28 +163,36 @@ trait TypeKinds { self: ICodes =>
case object BYTE extends TypeKind {
override def maxType(other: TypeKind): TypeKind =
other match {
- case BYTE | SHORT | CHAR | INT | LONG | FLOAT | DOUBLE => other
+ case CHAR => INT
+ case BYTE | SHORT | INT | LONG | FLOAT | DOUBLE => other
case REFERENCE(NothingClass) => BYTE
case _ => abort("Uncomparable type kinds: BYTE with " + other)
}
}
+ /** Note that the max of Char/Byte and Char/Short is Int, because
+ * neither strictly encloses the other due to unsignedness.
+ * See ticket #2087 for a consequence.
+ */
+
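  // Illustrative only (not part of this change): a consequence of the unsignedness rule
  // above (see #2087) -- merging a Byte and a Char value widens both to Int:
  //
  //   def pick(flag: Boolean, b: Byte, c: Char) =
  //     if (flag) b else c   // the merged kind here is INT, not BYTE or CHAR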
/** A 2-byte signed integer */
case object SHORT extends TypeKind {
override def maxType(other: TypeKind): TypeKind =
other match {
- case BYTE | SHORT | CHAR => SHORT
+ case CHAR => INT
+ case BYTE | SHORT => SHORT
case REFERENCE(NothingClass) => SHORT
case INT | LONG | FLOAT | DOUBLE => other
case _ => abort("Uncomparable type kinds: SHORT with " + other)
}
}
- /** A 2-byte signed integer */
+ /** A 2-byte UNSIGNED integer */
case object CHAR extends TypeKind {
override def maxType(other: TypeKind): TypeKind =
other match {
- case BYTE | SHORT | CHAR => CHAR
+ case CHAR => CHAR
+ case BYTE | SHORT => INT
case REFERENCE(NothingClass) => CHAR
case INT | LONG | FLOAT | DOUBLE => other
case _ => abort("Uncomparable type kinds: CHAR with " + other)
@@ -237,7 +246,7 @@ trait TypeKinds { self: ICodes =>
// override def maxType(other: TypeKind): TypeKind = other match {
// case STRING => STRING;
// case _ =>
- // abort("Uncomparbale type kinds: STRING with " + other);
+ // abort("Uncomparable type kinds: STRING with " + other);
// }
// }
@@ -251,7 +260,7 @@ trait TypeKinds { self: ICodes =>
"REFERENCE to NoSymbol not allowed!")
override def toString(): String =
- "REFERENCE(" + cls.fullNameString + ")"
+ "REFERENCE(" + cls.fullName + ")"
/**
* Approximate `lub'. The common type of two references is
@@ -263,7 +272,7 @@ trait TypeKinds { self: ICodes =>
case REFERENCE(_) | ARRAY(_) =>
REFERENCE(AnyRefClass)
case _ =>
- abort("Uncomparbale type kinds: REFERENCE with " + other)
+ abort("Uncomparable type kinds: REFERENCE with " + other)
}
/** Checks subtyping relationship. */
@@ -296,7 +305,7 @@ trait TypeKinds { self: ICodes =>
// abort(toString() + " maxType " + other.toString());
// override def toString(): String =
-// "VALUE(" + cls.fullNameString + ")";
+// "VALUE(" + cls.fullName + ")";
// }
def ArrayN(elem: TypeKind, dims: Int): ARRAY = {
@@ -334,7 +343,7 @@ trait TypeKinds { self: ICodes =>
if (elem == elem2) ARRAY(elem)
else REFERENCE(AnyRefClass)
case _ =>
- abort("Uncomparbale type kinds: ARRAY with " + other)
+ abort("Uncomparable type kinds: ARRAY with " + other)
}
/** Array subtyping is covariant, as in Java. Necessary for checking
@@ -370,7 +379,7 @@ trait TypeKinds { self: ICodes =>
case REFERENCE(_) | ARRAY(_) | BOXED(_) =>
REFERENCE(AnyRefClass)
case _ =>
- abort("Uncomparbale type kinds: ARRAY with " + other)
+ abort("Uncomparable type kinds: ARRAY with " + other)
}
/** Checks subtyping relationship. */
@@ -409,7 +418,7 @@ trait TypeKinds { self: ICodes =>
case REFERENCE(_) =>
REFERENCE(AnyRefClass)
case _ =>
- abort("Uncomparbale type kinds: ConcatClass with " + other)
+ abort("Uncomparable type kinds: ConcatClass with " + other)
}
/** Checks subtyping relationship. */
@@ -421,8 +430,12 @@ trait TypeKinds { self: ICodes =>
////////////////// Conversions //////////////////////////////
- /** Return the TypeKind of the given type */
- def toTypeKind(t: Type): TypeKind = t match {
+ /** Return the TypeKind of the given type
+ *
+ * Call to .normalize fixes #3003 (follow type aliases). Otherwise,
+ * arrayOrClassType below would return AnyRefReference.
+ */
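  // Illustrative only (hypothetical example, not part of this change): the alias case
  // fixed by #3003 -- without `.normalize`, the alias below is not recognised as an
  // array type and toTypeKind would fall back to AnyRefReference:
  //
  //   type Bytes = Array[Byte]
  //   def len(b: Bytes): Int = b.length   // parameter kind should be ARRAY(BYTE)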
+ def toTypeKind(t: Type): TypeKind = t.normalize match {
case ThisType(sym) =>
if (sym == ArrayClass)
AnyRefReference
@@ -430,13 +443,13 @@ trait TypeKinds { self: ICodes =>
REFERENCE(sym)
case SingleType(pre, sym) =>
- (primitiveTypeMap get sym) getOrElse REFERENCE(sym)
+ primitiveTypeMap.getOrElse(sym, REFERENCE(sym))
case ConstantType(value) =>
toTypeKind(t.underlying)
case TypeRef(_, sym, args) =>
- (primitiveTypeMap get sym) getOrElse arrayOrClassType(sym, args)
+ primitiveTypeMap.getOrElse(sym, arrayOrClassType(sym, args))
case ClassInfoType(_, _, sym) =>
primitiveTypeMap get sym match {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 6e7a81ac1e..d334af525f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend
@@ -18,7 +17,7 @@ trait TypeStacks { self: ICodes =>
import opcodes._
import global.{Symbol, Type, definitions}
- /* This class simulates the type of the opperand
+ /* This class simulates the type of the operand
* stack of the ICode.
*/
type Rep = List[TypeKind]
@@ -71,20 +70,21 @@ trait TypeStacks { self: ICodes =>
def apply(n: Int): TypeKind = types(n)
/**
- * A TypeStack aggress with another one if they have the same
+ * A TypeStack agrees with another one if they have the same
* length and each type kind agrees position-wise. Two
* types agree if one is a subtype of the other.
*/
def agreesWith(other: TypeStack): Boolean =
- (types.length == other.types.length) &&
- List.forall2(types, other.types) ((t1, t2) => t1 <:< t2 || t2 <:< t1)
+ (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1)
/* This method returns a String representation of the stack */
override def toString() = types.mkString("\n", "\n", "\n")
- override def equals(other: Any): Boolean =
- other.isInstanceOf[TypeStack] &&
- List.forall2(other.asInstanceOf[TypeStack].types, types)((a, b) => a == b)
+ override def hashCode() = types.hashCode()
+ override def equals(other: Any): Boolean = other match {
+ case x: TypeStack => x.types sameElements types
+ case _ => false
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala
index df4f9d70b1..4efefe89f9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
@@ -40,7 +39,7 @@ trait CompleteLattice {
def lub(xs: List[Elem], exceptional: Boolean): Elem = try {
if (xs == Nil) bottom else xs reduceLeft lub2(exceptional)
} catch {
- case e: LubError =>
+ case e: LubException =>
Console.println("Lub on blocks: " + xs)
throw e
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 9ee628ef19..289fae6fe3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
@@ -58,18 +57,13 @@ abstract class CopyPropagation {
class State(val bindings: Bindings, var stack: List[Value]) {
override def equals(that: Any): Boolean =
- (this eq that.asInstanceOf[AnyRef]) ||
- that.isInstanceOf[State] && {
- val other = that.asInstanceOf[State]
-
+ (this eq that.asInstanceOf[AnyRef]) || (that match {
/* comparison with bottom is reference equality! */
- if ((other eq bottom) || (this eq bottom))
- (this eq other)
- else {
- this.bindings == other.bindings &&
- List.forall2(this.stack, other.stack) { (a, b) => a == b }
- }
- }
+ case other: State if (this ne bottom) && (other ne bottom) =>
+ (this.bindings == other.bindings) &&
+ (this.stack corresponds other.stack)(_ == _) // @PP: corresponds
+ case _ => false
+ })
/* Return an alias for the given local. It returns the last
* local in the chain of aliased locals. Cycles are not allowed
@@ -146,10 +140,12 @@ abstract class CopyPropagation {
target match {
case Deref(LocalVar(l)) =>
val alias = getAlias(l)
+ val derefAlias = Deref(LocalVar(alias))
getBinding(alias) match {
- case Record(_, _) => Some(Deref(LocalVar(alias)))
+ case Record(_, _) => Some(derefAlias)
case Deref(Field(r1, f1)) =>
- getFieldNonRecordValue(r1, f1) orElse Some(Deref(LocalVar(alias)))
+ getFieldNonRecordValue(r1, f1) orElse Some(derefAlias)
+ case Boxed(_) => Some(derefAlias)
case v => Some(v)
}
case Deref(Field(r1, f1)) =>
@@ -187,14 +183,14 @@ abstract class CopyPropagation {
if (exceptional) exceptionHandlerStack
else {
// if (a.stack.length != b.stack.length)
-// throw new LubError(a, b, "Invalid stacks in states: ");
- List.map2(a.stack, b.stack) { (v1, v2) =>
+// throw new LubException(a, b, "Invalid stacks in states: ");
+ (a.stack, b.stack).zipped map { (v1, v2) =>
if (v1 == v2) v1 else Unknown
}
}
/* if (a.stack.length != b.stack.length)
- throw new LubError(a, b, "Invalid stacks in states: ");
+ throw new LubException(a, b, "Invalid stacks in states: ");
val resStack = List.map2(a.stack, b.stack) { (v1, v2) =>
if (v1 == v2) v1 else Unknown
}
@@ -245,10 +241,15 @@ abstract class CopyPropagation {
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem =
- b.toList.foldLeft(in)(interpret)
+ b.foldLeft(in)(interpret)
import opcodes._
+ private def retain[A, B](map: Map[A, B])(p: (A, B) => Boolean) = {
+ for ((k, v) <- map ; if !p(k, v)) map -= k
+ map
+ }
+
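  // Illustrative only (made-up data, not part of this change): the retain helper above
  // removes the entries that fail the predicate, mutating the map in place:
  //
  //   val m = new HashMap[String, Int]
  //   m ++= List("a" -> 1, "b" -> 2)
  //   retain(m) { (k, v) => v > 1 }   // drops "a" -> 1, keeps "b" -> 2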
/** Abstract interpretation for one instruction. */
def interpret(in: copyLattice.Elem, i: Instruction): copyLattice.Elem = {
var out = in.dup
@@ -352,7 +353,7 @@ abstract class CopyPropagation {
if (onInstance) {
val obj = out.stack.drop(method.info.paramTypes.length).head
// if (method.isPrimaryConstructor) {
- if (method.isPrimaryConstructor/* && isClosureClass(method.owner)*/) {
+ if (method.isPrimaryConstructor) {
obj match {
case Record(_, bindings) =>
for (v <- out.stack.take(method.info.paramTypes.length + 1)
@@ -458,7 +459,7 @@ abstract class CopyPropagation {
*/
final def cleanReferencesTo(s: copyLattice.State, target: Location) {
def cleanRecord(r: Record): Record = {
- r.bindings retain { (loc, value) =>
+ retain(r.bindings) { (loc, value) =>
(value match {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
@@ -478,7 +479,7 @@ abstract class CopyPropagation {
case _ => v
}}
- s.bindings retain { (loc, value) =>
+ retain(s.bindings) { (loc, value) =>
(value match {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
@@ -526,17 +527,17 @@ abstract class CopyPropagation {
final def invalidateRecords(state: copyLattice.State) {
def shouldRetain(sym: Symbol): Boolean = {
if (sym.hasFlag(symtab.Flags.MUTABLE))
- log("dropping binding for " + sym.fullNameString)
+ log("dropping binding for " + sym.fullName)
!sym.hasFlag(symtab.Flags.MUTABLE)
}
state.stack = state.stack map { v => v match {
case Record(cls, bindings) =>
- bindings.retain { (sym: Symbol, v: Value) => shouldRetain(sym) }
+ retain(bindings) { (sym, _) => shouldRetain(sym) }
Record(cls, bindings)
case _ => v
}}
- state.bindings retain {(loc, value) =>
+ retain(state.bindings) { (loc, value) =>
value match {
case Deref(Field(rec, sym)) => shouldRetain(sym)
case Boxed(Field(rec, sym)) => shouldRetain(sym)
@@ -573,7 +574,7 @@ abstract class CopyPropagation {
// this relies on having the same order in paramAccessors and
// the arguments on the stack. It should be the same!
for ((p, i) <- paramAccessors.zipWithIndex) {
-// assert(p.tpe == paramTypes(i), "In: " + ctor.fullNameString
+// assert(p.tpe == paramTypes(i), "In: " + ctor.fullName
// + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
// + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
if (p.tpe == paramTypes(i))
@@ -585,18 +586,6 @@ abstract class CopyPropagation {
bindings
}
- /** Is <code>cls</code> a closure class?
- *
- * @param cls ...
- * @return ...
- */
- final def isClosureClass(cls: Symbol): Boolean =
- cls.isFinal &&
- cls.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t;
- definitions.FunctionClass exists sym.==
- }
-
/** Is symbol <code>m</code> a pure method?
*
* @param m ...
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 09a39f4280..39405cd84e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
@@ -70,6 +69,8 @@ trait DataFlowAnalysis[L <: CompleteLattice] {
succs foreach { p =>
if (!worklist.contains(p))
worklist += p;
+ if (!in.isDefinedAt(p))
+ assert(false, "Invalid successor for: " + point + " successor " + p + " does not exist")
// if (!p.exceptionHandlerHeader) {
// println("lubbing " + p.predecessors + " outs: " + p.predecessors.map(out.apply).mkString("\n", "\n", ""))
in(p) = lattice.lub(/*in(p) :: */(p.predecessors map out.apply), p.exceptionHandlerStart)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index 46e24c18ec..101bb81503 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
@@ -71,7 +70,7 @@ abstract class Liveness {
def genAndKill(b: BasicBlock): (Set[Local], Set[Local]) = {
var genSet = new ListSet[Local]
var killSet = new ListSet[Local]
- for (i <- b.toList) i match {
+ for (i <- b) i match {
case LOAD_LOCAL(local) if (!killSet(local)) => genSet = genSet + local
case STORE_LOCAL(local) if (!genSet(local)) => killSet = killSet + local
case _ => ()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubError.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
index 4a1c75f199..6cdee7ef4b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubError.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
-class LubError(a: Any, b: Any, msg: String) extends Exception {
+class LubException(a: Any, b: Any, msg: String) extends Exception {
override def toString() = "Lub error: " + msg + a + b
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
index 1fa1e36799..a9a09a71a0 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 65065fe0d1..f3dd6dd93b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
@@ -43,19 +42,19 @@ abstract class ReachingDefinitions {
else if (bottom == b) a
else {
val locals = a.vars ++ b.vars
- val stack = if (a.stack == Nil)
- b.stack
- else if (b.stack == Nil) a.stack
- else List.map2(a.stack, b.stack) (_ ++ _)
-
- val res = IState(locals, stack)
-
-// Console.println("\tlub2: " + a + ", " + b)
-// Console.println("\tis: " + res)
-
-// if (res._1 eq bottom._1) (new ListSet[Definition], Nil)
-// else res
- res
+ val stack =
+ if (a.stack == Nil) b.stack
+ else if (b.stack == Nil) a.stack
+ else (a.stack, b.stack).zipped map (_ ++ _)
+
+ IState(locals, stack)
+
+ // val res = IState(locals, stack)
+ // Console.println("\tlub2: " + a + ", " + b)
+ // Console.println("\tis: " + res)
+ // if (res._1 eq bottom._1) (new ListSet[Definition], Nil)
+ // else res
+ // res
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 32a6037d41..9b145c9fbc 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package backend.icode.analysis
@@ -52,8 +51,8 @@ abstract class TypeFlowAnalysis {
else if ((s1 eq exceptionHandlerStack) || (s2 eq exceptionHandlerStack)) Predef.error("merging with exhan stack")
else {
// if (s1.length != s2.length)
-// throw new CheckerError("Incompatible stacks: " + s1 + " and " + s2);
- new TypeStack(List.map2(s1.types, s2.types) (icodes.lub))
+// throw new CheckerException("Incompatible stacks: " + s1 + " and " + s2);
+ new TypeStack((s1.types, s2.types).zipped map icodes.lub)
}
}
}
@@ -81,10 +80,14 @@ abstract class TypeFlowAnalysis {
override val top = new Elem(new VarBinding, typeStackLattice.top)
override val bottom = new Elem(new VarBinding, typeStackLattice.bottom)
+// var lubs = 0
+
def lub2(exceptional: Boolean)(a: Elem, b: Elem) = {
val IState(env1, s1) = a
val IState(env2, s2) = b
+// lubs += 1
+
val resultingLocals = new VarBinding
for (binding1 <- env1.iterator) {
@@ -118,7 +121,7 @@ abstract class TypeFlowAnalysis {
/** Initialize the in/out maps for the analysis of the given method. */
def init(m: icodes.IMethod) {
this.method = m
-
+ //typeFlowLattice.lubs = 0
init {
worklist += m.code.startBlock
worklist ++= (m.exh map (_.startBlock))
@@ -168,14 +171,17 @@ abstract class TypeFlowAnalysis {
def run = {
timer.start
+// icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
- timer.stop
+ val t = timer.stop
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
- //println("iterations: " + iterations + " for " + method.code.blocks.size)
+// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
+// + "\n\t" + iterations + " iterations: " + t + " ms."
+// + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 67f3f7f8b2..fabdc79fd8 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -1,24 +1,23 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Iulian Dragos
*/
-// $Id$
package scala.tools.nsc
package backend.jvm
-import java.io.{DataOutputStream, File, OutputStream}
import java.nio.ByteBuffer
import scala.collection.immutable.{Set, ListSet}
import scala.collection.mutable.{Map, HashMap, HashSet}
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.symtab._
-import scala.tools.nsc.util.{Position, NoPosition}
import scala.tools.nsc.symtab.classfile.ClassfileConstants._
import ch.epfl.lamp.fjbg._
+import java.io.{ByteArrayOutputStream, DataOutputStream, File, OutputStream}
+import reflect.generic.{PickleFormat, PickleBuffer}
/** This class ...
*
@@ -26,7 +25,7 @@ import ch.epfl.lamp.fjbg._
* @version 1.0
*
*/
-abstract class GenJVM extends SubComponent {
+abstract class GenJVM extends SubComponent with GenJVMUtil {
import global._
import icodes._
import icodes.opcodes._
@@ -34,7 +33,7 @@ abstract class GenJVM extends SubComponent {
val phaseName = "jvm"
/** Create a new phase */
- override def newPhase(p: Phase) = new JvmPhase(p)
+ override def newPhase(p: Phase): Phase = new JvmPhase(p)
/** JVM code generation phase
*/
@@ -47,9 +46,10 @@ abstract class GenJVM extends SubComponent {
override def run {
if (settings.debug.value) inform("[running phase " + name + " on icode]")
if (settings.Xdce.value)
- icodes.classes.retain { (sym: Symbol, cls: IClass) => !inliner.isClosureClass(sym) || deadCode.liveClosures(sym) }
+ for ((sym, cls) <- icodes.classes ; if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ icodes.classes -= sym
- classes.valuesIterator foreach apply
+ classes.values foreach apply
}
override def apply(cls: IClass) {
@@ -70,17 +70,20 @@ abstract class GenJVM extends SubComponent {
* Java bytecode generator.
*
*/
- class BytecodeGenerator {
+ class BytecodeGenerator extends BytecodeUtil {
import JAccessFlags._
+ def debugLevel = settings.debuginfo.indexOfChoice
+
val MIN_SWITCH_DENSITY = 0.7
val INNER_CLASSES_FLAGS =
(ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_FINAL | ACC_INTERFACE | ACC_ABSTRACT)
- val StringBuilderClass = definitions.getClass2("scala.StringBuilder", "scala.collection.mutable.StringBuilder").fullNameString
+ val StringBuilderClass = definitions.getClass2("scala.StringBuilder", "scala.collection.mutable.StringBuilder").fullName
val BoxesRunTime = "scala.runtime.BoxesRunTime"
val StringBuilderType = new JObjectType(StringBuilderClass)
val toStringType = new JMethodType(JObjectType.JAVA_LANG_STRING, JType.EMPTY_ARRAY)
+ val arrayCloneType = new JMethodType(JObjectType.JAVA_LANG_OBJECT, JType.EMPTY_ARRAY)
val MethodTypeType = new JObjectType("java.dyn.MethodType")
val JavaLangClassType = new JObjectType("java.lang.Class")
val MethodHandleType = new JObjectType("java.dyn.MethodHandle")
@@ -92,7 +95,6 @@ abstract class GenJVM extends SubComponent {
val TransientAtt = definitions.getClass("scala.transient")
val VolatileAttr = definitions.getClass("scala.volatile")
val RemoteAttr = definitions.getClass("scala.remote")
- val ThrowsAttr = definitions.getClass("scala.throws")
val BeanInfoAttr = definitions.getClass("scala.reflect.BeanInfo")
val BeanInfoSkipAttr = definitions.getClass("scala.reflect.BeanInfoSkip")
val BeanDisplayNameAttr = definitions.getClass("scala.reflect.BeanDisplayName")
@@ -102,6 +104,16 @@ abstract class GenJVM extends SubComponent {
lazy val RemoteInterface = definitions.getClass("java.rmi.Remote")
lazy val RemoteException = definitions.getClass("java.rmi.RemoteException").tpe
+
+ val versionPickle = {
+ val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
+ assert(vp.writeIndex == 0)
+ vp.writeNat(PickleFormat.MajorVersion)
+ vp.writeNat(PickleFormat.MinorVersion)
+ vp.writeNat(0)
+ vp
+ }
+
var clasz: IClass = _
var method: IMethod = _
var jclass: JClass = _
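// Illustrative sketch (assumption, not part of the patch): versionPickle above
// carries only the pickle format version plus a zero entry count -- with this
// change the full signature bytes move into a ScalaSignature annotation, so the
// classfile attribute keeps just this small header. Each Nat below 128 is
// written as a single byte, so the buffer should end at writeIndex 3. This
// assumes the reflect.generic pickling API (as imported by the patch) is on
// the classpath.
import scala.reflect.generic.{ PickleBuffer, PickleFormat }

object VersionPickleSketch {
  def main(args: Array[String]) {
    val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
    vp.writeNat(PickleFormat.MajorVersion)
    vp.writeNat(PickleFormat.MinorVersion)
    vp.writeNat(0)
    println(vp.writeIndex) // expected: 3
  }
}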
@@ -112,9 +124,16 @@ abstract class GenJVM extends SubComponent {
val fjbgContext = new FJBGContext(49, 0)
- val emitSource = settings.debuginfo.level >= 1
- val emitLines = settings.debuginfo.level >= 2
- val emitVars = settings.debuginfo.level >= 3
+ val emitSource = debugLevel >= 1
+ val emitLines = debugLevel >= 2
+ val emitVars = debugLevel >= 3
+
+ override def javaName(sym: Symbol): String = {
+ if (sym.isClass && !sym.rawowner.isPackageClass && !sym.isModuleClass)
+ innerClasses = innerClasses + sym;
+
+ super.javaName(sym)
+ }
/** Write a class to disk, adding the Scala signature (pickled type information) and
* inner classes.
@@ -123,36 +142,47 @@ abstract class GenJVM extends SubComponent {
* @param sym The corresponding symbol, used for looking up pickled information
*/
def emitClass(jclass: JClass, sym: Symbol) {
- def addScalaAttr(sym: Symbol): Unit = currentRun.symData.get(sym) match {
- case Some(pickle) =>
- val scalaAttr = fjbgContext.JOtherAttribute(jclass,
- jclass,
- nme.ScalaSignatureATTR.toString,
- pickle.bytes,
- pickle.writeIndex)
- pickledBytes = pickledBytes + pickle.writeIndex
- jclass.addAttribute(scalaAttr)
- currentRun.symData -= sym
- currentRun.symData -= sym.linkedSym
- //System.out.println("Generated ScalaSig Attr for " + sym)//debug
- case _ =>
- val markerAttr = getMarkerAttr(jclass)
- jclass.addAttribute(markerAttr)
- log("Could not find pickle information for " + sym)
- }
- if (!(jclass.getName().endsWith("$") && sym.isModuleClass))
- addScalaAttr(if (isTopLevelModule(sym)) sym.sourceModule else sym);
addInnerClasses(jclass)
-
val outfile = getFile(sym, jclass, ".class")
- val outstream = new DataOutputStream(outfile.output)
+ val outstream = new DataOutputStream(outfile.bufferedOutput)
jclass.writeTo(outstream)
outstream.close()
informProgress("wrote " + outfile)
}
- private def getMarkerAttr(jclass: JClass): JOtherAttribute =
- fjbgContext.JOtherAttribute(jclass, jclass, nme.ScalaATTR.toString, new Array[Byte](0), 0)
+ /** Returns the ScalaSignature annotation if it must be added to this class, none otherwise; furthermore, it adds to
+ * jclass the ScalaSig marker attribute (marking that a scala signature annotation is present) or the Scala marker
+ * attribute (marking that the signature for this class is in another file). The annotation that is returned by
+ * this method must be added to the class' annotations list when generating them.
+ * @param jclass The class file that is being readied.
+ * @param sym The symbol for which the signature has been entered in the symData map. This is different from the
+ * symbol that is being generated in the case of a mirror class.
+ * @return An option that is:
+ * - defined and contains an annotation info of the ScalaSignature type, instantiated with the
+ * pickle signature for sym (a ScalaSig marker attribute has been written);
+ * - undefined if the jclass/sym couple must not contain a signature (a Scala marker attribute has
+ * been written). */
+ def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
+ currentRun.symData.get(sym) match {
+ case Some(pickle) if !jclass.getName().endsWith("$") =>
+ val scalaAttr =
+ fjbgContext.JOtherAttribute(jclass, jclass, nme.ScalaSignatureATTR.toString,
+ versionPickle.bytes, versionPickle.writeIndex)
+ jclass.addAttribute(scalaAttr)
+ val scalaAnnot = {
+ val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
+ AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
+ }
+ pickledBytes = pickledBytes + pickle.writeIndex
+ currentRun.symData -= sym
+ currentRun.symData -= sym.companionSymbol
+ Some(scalaAnnot)
+ case _ =>
+ val markerAttr =
+ fjbgContext.JOtherAttribute(jclass, jclass, nme.ScalaATTR.toString, new Array[Byte](0), 0)
+ jclass.addAttribute(markerAttr)
+ None
+ }
var serialVUID: Option[Long] = None
var remoteClass: Boolean = false
@@ -183,7 +213,7 @@ abstract class GenJVM extends SubComponent {
case _ => ()
}
- parents = parents.removeDuplicates
+ parents = parents.distinct
if (parents.length > 1) {
ifaces = new Array[String](parents.length - 1)
@@ -196,30 +226,30 @@ abstract class GenJVM extends SubComponent {
javaName(parents(0).typeSymbol),
ifaces,
c.cunit.source.toString)
- if (jclass.getName.endsWith("$"))
- jclass.addAttribute(getMarkerAttr(jclass))
if (isStaticModule(c.symbol) || serialVUID != None || clasz.bootstrapClass.isDefined) {
if (isStaticModule(c.symbol))
addModuleInstanceField;
- addStaticInit(jclass)
+ addStaticInit(jclass, c.lookupStaticCtor)
if (isTopLevelModule(c.symbol)) {
- if (c.symbol.linkedClassOfModule == NoSymbol)
+ if (c.symbol.companionClass == NoSymbol)
dumpMirrorClass(c.symbol, c.cunit.source.toString);
else
log("No mirror class for module with linked class: " +
- c.symbol.fullNameString)
+ c.symbol.fullName)
}
}
else {
+ if (c.containsStaticCtor) addStaticInit(jclass, c.lookupStaticCtor)
+
// it must be a top level class (name contains no $s)
def isCandidateForForwarders(sym: Symbol): Boolean =
atPhase (currentRun.picklerPhase.next) {
!(sym.name.toString contains '$') && (sym hasFlag Flags.MODULE) && !sym.isImplClass && !sym.isNestedClass
}
- val lmoc = c.symbol.linkedModuleOfClass
+ val lmoc = c.symbol.companionModule
// add static forwarders if there are no name conflicts; see bugs #363 and #1735
if (lmoc != NoSymbol && !c.symbol.hasFlag(Flags.INTERFACE)) {
if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) {
@@ -233,8 +263,9 @@ abstract class GenJVM extends SubComponent {
clasz.fields foreach genField
clasz.methods foreach genMethod
+ val ssa = scalaSignatureAddingMarker(jclass, c.symbol)
addGenericSignature(jclass, c.symbol, c.symbol.owner)
- addAnnotations(jclass, c.symbol.annotations)
+ addAnnotations(jclass, c.symbol.annotations ++ ssa)
emitClass(jclass, c.symbol)
if (c.symbol hasAnnotation BeanInfoAttr)
@@ -271,7 +302,7 @@ abstract class GenJVM extends SubComponent {
!m.symbol.isGetter &&
!m.symbol.isSetter) yield javaName(m.symbol)
- val constructor = beanInfoClass.addNewMethod(JAccessFlags.ACC_PUBLIC, "<init>", JType.VOID, javaTypes(Nil), javaNames(Nil))
+ val constructor = beanInfoClass.addNewMethod(JAccessFlags.ACC_PUBLIC, "<init>", JType.VOID, new Array[JType](0), new Array[String](0))
val jcode = constructor.getCode().asInstanceOf[JExtendedCode]
val strKind = new JObjectType(javaName(definitions.StringClass))
val stringArrayKind = new JArrayType(strKind)
@@ -312,7 +343,7 @@ abstract class GenJVM extends SubComponent {
// write the bean information class file.
val outfile = getFile(c.symbol, beanInfoClass, ".class")
- val outstream = new DataOutputStream(outfile.output)
+ val outstream = new DataOutputStream(outfile.bufferedOutput)
beanInfoClass.writeTo(outstream)
outstream.close()
informProgress("wrote BeanInfo " + outfile)
@@ -327,10 +358,10 @@ abstract class GenJVM extends SubComponent {
val buf: ByteBuffer = ByteBuffer.allocate(512)
var nattr = 0
- // put some radom value; the actual number is determined at the end
+ // put some random value; the actual number is determined at the end
buf.putShort(0xbaba.toShort)
- for (AnnotationInfo(tp, List(exc), _) <- excs.removeDuplicates if tp.typeSymbol == ThrowsAttr) {
+ for (AnnotationInfo(tp, List(exc), _) <- excs.distinct if tp.typeSymbol == definitions.ThrowsClass) {
val Literal(const) = exc
buf.putShort(
cpool.addClass(
@@ -344,7 +375,7 @@ abstract class GenJVM extends SubComponent {
}
/** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annnot' is read from pickle, atp might be un-initialized
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
*/
private def shouldEmitAnnotation(annot: AnnotationInfo) =
(annot.atp.typeSymbol.initialize.hasFlag(Flags.JAVA) &&
@@ -391,6 +422,21 @@ abstract class GenJVM extends SubComponent {
buf.putShort(cpool.addUtf8(const.symbolValue.name.toString).toShort)
}
+ case sb@ScalaSigBytes(bytes) if (!sb.isLong) =>
+ buf.put('s'.toByte)
+ buf.putShort(cpool.addUtf8(sb.encodedBytes).toShort)
+
+ case sb@ScalaSigBytes(bytes) if (sb.isLong) =>
+ buf.put('['.toByte)
+ val stringCount = (sb.encodedBytes.length / 65534) + 1
+ buf.putShort(stringCount.toShort)
+ for (i <- 0 until stringCount) {
+ buf.put('s'.toByte)
+ val j = i * 65535
+ val string = sb.encodedBytes.slice(j, j + 65535)
+ buf.putShort(cpool.addUtf8(string).toShort)
+ }
+
case ArrayAnnotArg(args) =>
buf.put('['.toByte)
buf.putShort(args.length.toShort)
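// Illustrative sketch (assumption, not in the patch): a classfile CONSTANT_Utf8
// entry is limited to 65535 bytes, which is why the long ScalaSigBytes case
// above splits the encoded signature across several strings inside an array
// value. A plain chunker showing the shape of that split:
object SigChunkSketch {
  def chunk(encoded: String, max: Int = 65535): List[String] =
    encoded.grouped(max).toList

  def main(args: Array[String]) {
    val fake = "x" * 70000
    println(chunk(fake) map (_.length)) // List(65535, 4465)
  }
}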
@@ -429,35 +475,62 @@ abstract class GenJVM extends SubComponent {
nannots
}
+ // @M don't generate java generics sigs for (members of) implementation
+ // classes, as they are monomorphic (TODO: ok?)
+ private def needsGenericSignature(sym: Symbol) = !(
+ // PP: This condition used to include sym.hasExpandedName, but this leads
+ // to the total loss of generic information if a private member is
+ // accessed from a closure: both the field and the accessor were generated
+ // without it. This is particularly bad because the availability of
+ // generic information could disappear as a consequence of a seemingly
+ // unrelated change.
+ sym.isSynthetic
+ || (sym.hasFlag(Flags.LIFTED) && sym.isMethod)
+ || sym.isBridge
+ || (sym.ownerChain exists (_.isImplClass))
+ )
def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
- if (!sym.hasFlag(Flags.EXPANDEDNAME | Flags.SYNTHETIC)
- && !(sym.isMethod && sym.hasFlag(Flags.LIFTED))) {
+ if (needsGenericSignature(sym)) {
val memberTpe = atPhase(currentRun.erasurePhase)(owner.thisType.memberInfo(sym))
-// println("sym: " + sym.fullNameString + " : " + memberTpe + " sym.info: " + sym.info)
- erasure.javaSig(sym, memberTpe) match {
- case Some(sig) =>
- val index = jmember.getConstantPool().addUtf8(sig).toShort
- if (settings.debug.value && settings.verbose.value)
- atPhase(currentRun.erasurePhase) {
- println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index)
- }
- val buf = ByteBuffer.allocate(2)
- buf.putShort(index)
- addAttribute(jmember, nme.SignatureATTR, buf)
- case None =>
+ // println("addGenericSignature sym: " + sym.fullName + " : " + memberTpe + " sym.info: " + sym.info)
+ // println("addGenericSignature: "+ (sym.ownerChain map (x => (x.name, x.isImplClass))))
+ erasure.javaSig(sym, memberTpe) foreach { sig =>
+ if ((settings.check.value contains "genjvm")) {
+ val normalizedTpe = atPhase(currentRun.erasurePhase)(erasure.prepareSigMap(memberTpe))
+ val bytecodeTpe = owner.thisType.memberInfo(sym)
+ if (!sym.isType && !sym.isConstructor && !(erasure.erasure(normalizedTpe) =:= bytecodeTpe)) {
+ clasz.cunit.warning(sym.pos,
+ """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
+ |signature: %s
+ |original type: %s
+ |normalized type: %s
+ |erasure type: %s
+ |if this is reproducible, please report bug at http://lampsvn.epfl.ch/trac/scala
+ """.trim.stripMargin.format(sym, sym.ownerSkipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
+ return
+ }
+ }
+ val index = jmember.getConstantPool.addUtf8(sig).toShort
+ val buf = ByteBuffer.allocate(2)
+ buf putShort index
+ addAttribute(jmember, nme.SignatureATTR, buf)
}
}
}
def addAnnotations(jmember: JMember, annotations: List[AnnotationInfo]) {
- val toEmit = annotations.filter(shouldEmitAnnotation(_))
+ if (annotations.exists(_.atp.typeSymbol == definitions.DeprecatedAttr)) {
+ val attr = jmember.getContext().JOtherAttribute(
+ jmember.getJClass(), jmember, nme.DeprecatedATTR.toString,
+ new Array[Byte](0), 0)
+ jmember.addAttribute(attr)
+ }
+ val toEmit = annotations.filter(shouldEmitAnnotation(_))
if (toEmit.isEmpty) return
val buf: ByteBuffer = ByteBuffer.allocate(2048)
-
emitJavaAnnotations(jmember.getConstantPool, buf, toEmit)
-
addAttribute(jmember, nme.RuntimeAnnotationATTR, buf)
}
@@ -495,8 +568,9 @@ abstract class GenJVM extends SubComponent {
for (sym <- cls.info.decls.iterator if sym.isClass)
innerClasses = innerClasses + sym;
}
+
// add inner classes which might not have been referenced yet
- atPhase(currentRun.erasurePhase) {
+ atPhase(currentRun.erasurePhase.next) {
addOwnInnerClasses(clasz.symbol)
addOwnInnerClasses(clasz.symbol.linkedClassOfClass)
}
@@ -505,7 +579,7 @@ abstract class GenJVM extends SubComponent {
val innerClassesAttr = jclass.getInnerClasses()
// sort them so inner classes succeed their enclosing class
// to satisfy the Eclipse Java compiler
- for (innerSym <- innerClasses.toList.sort(_.name.length < _.name.length)) {
+ for (innerSym <- innerClasses.toList sortBy (_.name.length)) {
var outerName = javaName(innerSym.rawowner)
// remove the trailing '$'
if (outerName.endsWith("$") && isTopLevelModule(innerSym.rawowner))
@@ -522,18 +596,9 @@ abstract class GenJVM extends SubComponent {
}
}
- def isTopLevelModule(sym: Symbol): Boolean =
- atPhase (currentRun.picklerPhase.next) {
- sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
- }
-
- def isStaticModule(sym: Symbol): Boolean = {
- sym.isModuleClass && !sym.isImplClass && !sym.hasFlag(Flags.LIFTED)
- }
-
def genField(f: IField) {
if (settings.debug.value)
- log("Adding field: " + f.symbol.fullNameString);
+ log("Adding field: " + f.symbol.fullName);
var attributes = 0
f.symbol.annotations foreach { a => a match {
@@ -556,7 +621,9 @@ abstract class GenJVM extends SubComponent {
}
def genMethod(m: IMethod) {
- log("Generating method " + m.symbol.fullNameString)
+ if (m.isStaticCtor) return
+
+ log("Generating method " + m.symbol.fullName)
method = m
endPC.clear
computeLocalVarsIndex(m)
@@ -576,8 +643,8 @@ abstract class GenJVM extends SubComponent {
jmethod = jclass.addNewMethod(flags,
javaName(m.symbol),
resTpe,
- javaTypes(m.params map (_.kind)),
- javaNames(m.params map (_.sym)));
+ m.params map (p => javaType(p.kind)) toArray,
+ m.params map (p => javaName(p.sym)) toArray);
addRemoteException(jmethod, m.symbol)
@@ -614,7 +681,7 @@ abstract class GenJVM extends SubComponent {
}
addGenericSignature(jmethod, m.symbol, clasz.symbol)
- val (excs, others) = splitAnnotations(m.symbol.annotations, ThrowsAttr)
+ val (excs, others) = splitAnnotations(m.symbol.annotations, definitions.ThrowsClass)
addExceptionsAttribute(jmethod, excs)
addAnnotations(jmethod, others)
addParamAnnotations(jmethod, m.params.map(_.sym.annotations))
@@ -622,7 +689,7 @@ abstract class GenJVM extends SubComponent {
private def addRemoteException(jmethod: JMethod, meth: Symbol) {
def isRemoteThrows(ainfo: AnnotationInfo) = ainfo match {
- case AnnotationInfo(tp, List(arg), _) if tp.typeSymbol == ThrowsAttr =>
+ case AnnotationInfo(tp, List(arg), _) if tp.typeSymbol == definitions.ThrowsClass =>
arg match {
case Literal(Constant(tpe: Type)) if tpe.typeSymbol == RemoteException.typeSymbol => true
case _ => false
@@ -633,7 +700,7 @@ abstract class GenJVM extends SubComponent {
if (remoteClass ||
(meth.hasAnnotation(RemoteAttr) && jmethod.isPublic())) {
val c = Constant(RemoteException)
- val ainfo = AnnotationInfo(ThrowsAttr.tpe, List(Literal(c).setType(c.tpe)), List())
+ val ainfo = AnnotationInfo(definitions.ThrowsClass.tpe, List(Literal(c).setType(c.tpe)), List())
if (!meth.annotations.exists(isRemoteThrows)) {
meth.addAnnotation(ainfo)
}
@@ -667,7 +734,7 @@ abstract class GenJVM extends SubComponent {
jclass.getType())
}
- def addStaticInit(cls: JClass) {
+ def addStaticInit(cls: JClass, mopt: Option[IMethod]) {
import JAccessFlags._
val clinitMethod = cls.addNewMethod(ACC_PUBLIC | ACC_STATIC,
"<clinit>",
@@ -675,6 +742,53 @@ abstract class GenJVM extends SubComponent {
JType.EMPTY_ARRAY,
new Array[String](0))
val clinit = clinitMethod.getCode().asInstanceOf[JExtendedCode]
+
+ mopt match {
+ case Some(m) =>
+ if (clasz.bootstrapClass.isDefined) legacyEmitBootstrapMethodInstall(clinit)
+
+ val oldLastBlock = m.code.blocks.last
+ val lastBlock = m.code.newBlock
+ oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
+
+ if (isStaticModule(clasz.symbol)) {
+ // call object's private ctor from static ctor
+ lastBlock.emit(NEW(REFERENCE(m.symbol.enclClass)))
+ lastBlock.emit(CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true)))
+ }
+
+ // add serialVUID code
+ serialVUID match {
+ case Some(value) =>
+ import Flags._
+ import definitions._
+ val fieldName = "serialVersionUID"
+ val fieldSymbol = clasz.symbol.newValue(NoPosition, newTermName(fieldName))
+ .setFlag(STATIC | FINAL)
+ .setInfo(longType)
+ clasz.addField(new IField(fieldSymbol))
+ lastBlock.emit(CONSTANT(Constant(value)))
+ lastBlock.emit(STORE_FIELD(fieldSymbol, true))
+ case None => ()
+ }
+
+ if (clasz.bootstrapClass.isDefined) {
+ // emit bootstrap method install
+ //emitBootstrapMethodInstall(block)
+ }
+
+ lastBlock.emit(RETURN(UNIT))
+ lastBlock.close
+
+ method = m
+ jmethod = clinitMethod
+ genCode(m)
+ case None =>
+ legacyStaticInitializer(cls, clinit)
+ }
+ }
+
+ private def legacyStaticInitializer(cls: JClass, clinit: JExtendedCode) {
if (isStaticModule(clasz.symbol)) {
clinit.emitNEW(cls.getName())
clinit.emitINVOKESPECIAL(cls.getName(),
@@ -693,7 +807,7 @@ abstract class GenJVM extends SubComponent {
case None => ()
}
- if (clasz.bootstrapClass.isDefined) emitBootstrapMethodInstall(clinit)
+ if (clasz.bootstrapClass.isDefined) legacyEmitBootstrapMethodInstall(clinit)
clinit.emitRETURN()
}
@@ -701,7 +815,7 @@ abstract class GenJVM extends SubComponent {
 /** Emit code that installs a bootstrap method for invoke dynamic. It installs the default
* method, found in scala.runtime.DynamicDispatch.
*/
- def emitBootstrapMethodInstall(jcode: JExtendedCode) {
+ def legacyEmitBootstrapMethodInstall(jcode: JExtendedCode) {
jcode.emitPUSH(jclass.getType.asInstanceOf[JReferenceType])
jcode.emitPUSH(new JObjectType("scala.runtime.DynamicDispatch"))
jcode.emitPUSH("bootstrapInvokeDynamic")
@@ -719,7 +833,10 @@ abstract class GenJVM extends SubComponent {
import JAccessFlags._
val moduleName = javaName(module) // + "$"
val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val paramJavaTypes = m.info.paramTypes map toTypeKind
+
+ val methodInfo = module.thisType.memberInfo(m)
+
+ val paramJavaTypes = methodInfo.paramTypes map toTypeKind
val paramNames: Array[String] = new Array[String](paramJavaTypes.length);
for (i <- 0 until paramJavaTypes.length)
@@ -727,8 +844,8 @@ abstract class GenJVM extends SubComponent {
val mirrorMethod = jclass.addNewMethod(ACC_PUBLIC | ACC_FINAL | ACC_STATIC,
javaName(m),
- javaType(m.info.resultType),
- javaTypes(paramJavaTypes),
+ javaType(methodInfo.resultType),
+ methodInfo.paramTypes map javaType toArray,
paramNames);
val mirrorCode = mirrorMethod.getCode().asInstanceOf[JExtendedCode];
mirrorCode.emitGETSTATIC(moduleName,
@@ -743,7 +860,7 @@ abstract class GenJVM extends SubComponent {
i += 1
}
- mirrorCode.emitINVOKEVIRTUAL(moduleName, mirrorMethod.getName(), mirrorMethod.getType().asInstanceOf[JMethodType])
+ mirrorCode.emitINVOKEVIRTUAL(moduleName, mirrorMethod.getName(), javaType(m).asInstanceOf[JMethodType])
mirrorCode.emitRETURN(mirrorMethod.getReturnType())
addRemoteException(mirrorMethod, m)
@@ -751,7 +868,7 @@ abstract class GenJVM extends SubComponent {
if (!m.hasFlag(Flags.DEFERRED))
addGenericSignature(mirrorMethod, m, module)
- val (throws, others) = splitAnnotations(m.annotations, ThrowsAttr)
+ val (throws, others) = splitAnnotations(m.annotations, definitions.ThrowsClass)
addExceptionsAttribute(mirrorMethod, throws)
addAnnotations(mirrorMethod, others)
addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
@@ -766,32 +883,37 @@ abstract class GenJVM extends SubComponent {
def addForwarders(jclass: JClass, module: Symbol) { addForwarders(jclass, module, _ => true) }
def addForwarders(jclass: JClass, module: Symbol, cond: (Symbol) => Boolean) {
def conflictsIn(cls: Symbol, name: Name) =
- cls.info.nonPrivateMembers.exists(_.name == name)
+ cls.info.members exists (_.name == name)
/** List of parents shared by both class and module, so we don't add forwarders
* for methods defined there - bug #1804 */
lazy val commonParents = {
val cps = module.info.baseClasses
- val mps = module.linkedClassOfModule.info.baseClasses
+ val mps = module.companionClass.info.baseClasses
cps.filter(mps contains)
}
- /* the setter doesn't show up in members so we inspect the name */
+ /* The setter doesn't show up in members so we inspect the name
+ * ... and clearly it helps to know how the name is encoded, see ticket #3004.
+ * This logic is grossly inadequate! Name mangling needs a devotee.
+ */
def conflictsInCommonParent(name: Name) =
- commonParents exists { cp => name startsWith (cp.name + "$") }
+ commonParents exists { cp =>
+ (name startsWith (cp.name + "$")) || (name containsName ("$" + cp.name + "$"))
+ }
/** Should method `m' get a forwarder in the mirror class? */
def shouldForward(m: Symbol): Boolean =
atPhase(currentRun.picklerPhase) (
m.owner != definitions.ObjectClass
&& m.isMethod
- && !m.hasFlag(Flags.CASE | Flags.PROTECTED)
+ && !m.hasFlag(Flags.CASE | Flags.PRIVATE | Flags.PROTECTED | Flags.DEFERRED | Flags.SPECIALIZED)
&& !m.isConstructor
&& !m.isStaticMember
&& !(m.owner == definitions.AnyClass)
- && !module.isSubClass(module.linkedClassOfModule)
+ && !module.isSubClass(module.companionClass)
&& !conflictsIn(definitions.ObjectClass, m.name)
&& !conflictsInCommonParent(m.name)
- && !conflictsIn(module.linkedClassOfModule, m.name)
+ && !conflictsIn(module.companionClass, m.name)
)
assert(module.isModuleClass)
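// Illustrative sketch (hypothetical names, not the compiler's Name API): the
// stricter conflictsInCommonParent above also catches setter-style mangled
// names such as "Parent$x_$eq" and names containing "$Parent$" (ticket #3004).
// A plain-String approximation of that check:
object ForwarderNameSketch {
  def conflictsInCommonParent(commonParents: List[String], name: String) =
    commonParents exists { cp =>
      (name startsWith (cp + "$")) || (name contains ("$" + cp + "$"))
    }

  def main(args: Array[String]) {
    val parents = List("Parent")
    println(conflictsInCommonParent(parents, "Parent$x_$eq"))   // true
    println(conflictsInCommonParent(parents, "foo$Parent$bar")) // true
    println(conflictsInCommonParent(parents, "plainMethod"))    // false
  }
}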
@@ -818,11 +940,12 @@ abstract class GenJVM extends SubComponent {
JClass.NO_INTERFACES,
sourceFile)
addForwarders(mirrorClass, clasz)
+ val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
+ addAnnotations(mirrorClass, clasz.annotations ++ ssa)
emitClass(mirrorClass, clasz)
}
var linearization: List[BasicBlock] = Nil
-
var isModuleInitialized = false
/**
@@ -835,7 +958,7 @@ abstract class GenJVM extends SubComponent {
if (settings.debug.value)
log("Making labels for: " + method)
- HashMap(bs map (b => b -> jcode.newLabel) : _*)
+ HashMap(bs map (_ -> jcode.newLabel) : _*)
}
isModuleInitialized = false
@@ -847,12 +970,11 @@ abstract class GenJVM extends SubComponent {
var nextBlock: BasicBlock = linearization.head
- def genBlocks(l: List[BasicBlock]): Unit = l match {
- case Nil => ()
- case x :: Nil => nextBlock = null; genBlock(x)
- case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
- }
-
+ def genBlocks(l: List[BasicBlock]): Unit = l match {
+ case Nil => ()
+ case x :: Nil => nextBlock = null; genBlock(x)
+ case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
+ }
/** Generate exception handlers for the current method. */
def genExceptionHandlers {
@@ -867,30 +989,28 @@ abstract class GenJVM extends SubComponent {
var start = -1
var end = -1
- linearization foreach ((b) => {
+ linearization foreach { b =>
if (! (covered contains b) ) {
if (start >= 0) { // we're inside a handler range
end = labels(b).getAnchor()
- ranges = (start, end) :: ranges
+ ranges ::= (start, end)
start = -1
}
} else {
- if (start >= 0) { // we're inside a handler range
- end = endPC(b)
- } else {
+ if (start < 0) // we're not inside a handler range
start = labels(b).getAnchor()
- end = endPC(b)
- }
- covered = covered - b
+
+ end = endPC(b)
+ covered -= b
}
- });
+ }
/* Add the last interval. Note that since the intervals are
* open-ended to the right, we have to give a number past the actual
* code!
*/
if (start >= 0) {
- ranges = (start, jcode.getPC()) :: ranges;
+ ranges ::= (start, jcode.getPC())
}
if (!covered.isEmpty)
@@ -900,19 +1020,18 @@ abstract class GenJVM extends SubComponent {
ranges
}
- this.method.exh foreach { e =>
- ranges(e).sort({ (p1, p2) => p1._1 < p2._1 })
- .foreach { p =>
- if (p._1 < p._2) {
- if (settings.debug.value)
- log("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
- jcode.addExceptionHandler(p._1, p._2,
- labels(e.startBlock).getAnchor(),
- if (e.cls == NoSymbol) null else javaName(e.cls))
- } else
- log("Empty exception range: " + p)
- }
+ for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
+ if (p._1 < p._2) {
+ if (settings.debug.value)
+ log("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
+ " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
+ val cls = if (e.cls == NoSymbol || e.cls == definitions.ThrowableClass) null
+ else javaName(e.cls)
+ jcode.addExceptionHandler(p._1, p._2,
+ labels(e.startBlock).getAnchor(),
+ cls)
+ } else
+ log("Empty exception range: " + p)
}
}
@@ -927,7 +1046,7 @@ abstract class GenJVM extends SubComponent {
varsInBlock.clear
for (instr <- b) {
- class CompilationError(msg: String) extends Error {
+ class CompilationException(msg: String) extends Exception(msg) {
override def toString: String = {
msg +
"\nCurrent method: " + method +
@@ -937,38 +1056,14 @@ abstract class GenJVM extends SubComponent {
method.dump
}
}
- def assert(cond: Boolean, msg: String) = if (!cond) throw new CompilationError(msg);
+ def assert(cond: Boolean, msg: String) = if (!cond) throw new CompilationException(msg)
instr match {
case THIS(clasz) =>
jcode.emitALOAD_0()
case CONSTANT(const) =>
- const.tag match {
- case UnitTag => ();
- case BooleanTag => jcode.emitPUSH(const.booleanValue)
- case ByteTag => jcode.emitPUSH(const.byteValue)
- case ShortTag => jcode.emitPUSH(const.shortValue)
- case CharTag => jcode.emitPUSH(const.charValue)
- case IntTag => jcode.emitPUSH(const.intValue)
- case LongTag => jcode.emitPUSH(const.longValue)
- case FloatTag => jcode.emitPUSH(const.floatValue)
- case DoubleTag => jcode.emitPUSH(const.doubleValue)
- case StringTag => jcode.emitPUSH(const.stringValue)
- case NullTag => jcode.emitACONST_NULL()
- case ClassTag =>
- val kind = toTypeKind(const.typeValue);
- if (kind.isValueType)
- jcode.emitPUSH(classLiteral(kind));
- else
- jcode.emitPUSH(javaType(kind).asInstanceOf[JReferenceType]);
- case EnumTag =>
- val sym = const.symbolValue
- jcode.emitGETSTATIC(javaName(sym.owner),
- javaName(sym),
- javaType(sym.tpe.underlying))
- case _ => abort("Unknown constant value: " + const);
- }
+ genConstant(jcode, const)
case LOAD_ARRAY_ITEM(kind) =>
jcode.emitALOAD(javaType(kind))
@@ -994,7 +1089,7 @@ abstract class GenJVM extends SubComponent {
case LOAD_MODULE(module) =>
// assert(module.isModule, "Expected module: " + module)
if (settings.debug.value)
- log("genearting LOAD_MODULE for: " + module + " flags: " +
+ log("generating LOAD_MODULE for: " + module + " flags: " +
Flags.flagsToString(module.flags));
if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
jcode.emitALOAD_0()
@@ -1028,47 +1123,42 @@ abstract class GenJVM extends SubComponent {
case CALL_PRIMITIVE(primitive) =>
genPrimitive(primitive, instr.pos)
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getSignature()
+ jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
+
case call @ CALL_METHOD(method, style) =>
- val owner: String = javaName(method.owner);
- //reference the type of the receiver instead of the method owner (if not an interface!)
+ val owner: String = javaName(method.owner)
+ // reference the type of the receiver instead of the method owner (if not an interface!)
val dynamicOwner =
if (needsInterfaceCall(call.hostClass)) owner
else javaName(call.hostClass)
+ val jname = javaName(method)
+ val jtype = javaType(method).asInstanceOf[JMethodType]
style match {
case InvokeDynamic =>
- jcode.emitINVOKEINTERFACE("java.dyn.Dynamic",
- javaName(method),
- javaType(method).asInstanceOf[JMethodType])
+ jcode.emitINVOKEINTERFACE("java.dyn.Dynamic", jname, jtype)
case Dynamic =>
if (needsInterfaceCall(method.owner))
- jcode.emitINVOKEINTERFACE(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType])
+ jcode.emitINVOKEINTERFACE(owner, jname, jtype)
else
- jcode.emitINVOKEVIRTUAL(dynamicOwner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
+ jcode.emitINVOKEVIRTUAL(dynamicOwner, jname, jtype)
case Static(instance) =>
- if (instance) {
- jcode.emitINVOKESPECIAL(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
- } else
- jcode.emitINVOKESTATIC(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
+ if (instance)
+ jcode.emitINVOKESPECIAL(owner, jname, jtype)
+ else
+ jcode.emitINVOKESTATIC(owner, jname, jtype)
case SuperCall(_) =>
- jcode.emitINVOKESPECIAL(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
+ jcode.emitINVOKESPECIAL(owner, jname, jtype)
// we initialize the MODULE$ field immediately after the super ctor
if (isStaticModule(clasz.symbol) && !isModuleInitialized &&
jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
- javaName(method) == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
+ jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
isModuleInitialized = true;
jcode.emitALOAD_0();
jcode.emitPUTSTATIC(jclass.getName(),
@@ -1082,11 +1172,11 @@ abstract class GenJVM extends SubComponent {
case BOX(kind) =>
val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
- jcode.emitINVOKESTATIC(BoxesRunTime, "boxTo" + boxedType.cleanNameString, mtype)
+ jcode.emitINVOKESTATIC(BoxesRunTime, "boxTo" + boxedType.decodedName, mtype)
case UNBOX(kind) =>
val mtype = new JMethodType(javaType(kind), Array(JObjectType.JAVA_LANG_OBJECT))
- jcode.emitINVOKESTATIC(BoxesRunTime, "unboxTo" + kind.toType.typeSymbol.cleanNameString, mtype)
+ jcode.emitINVOKESTATIC(BoxesRunTime, "unboxTo" + kind.toType.typeSymbol.decodedName, mtype)
case NEW(REFERENCE(cls)) =>
val className = javaName(cls)
@@ -1552,37 +1642,6 @@ abstract class GenJVM extends SubComponent {
/** For each basic block, the first PC address following it. */
val endPC: HashMap[BasicBlock, Int] = new HashMap()
- val conds: HashMap[TestOp, Int] = new HashMap()
-
- conds += (EQ -> JExtendedCode.COND_EQ)
- conds += (NE -> JExtendedCode.COND_NE)
- conds += (LT -> JExtendedCode.COND_LT)
- conds += (GT -> JExtendedCode.COND_GT)
- conds += (LE -> JExtendedCode.COND_LE)
- conds += (GE -> JExtendedCode.COND_GE)
-
- val negate: HashMap[TestOp, TestOp] = new HashMap()
-
- negate += (EQ -> NE)
- negate += (NE -> EQ)
- negate += (LT -> GE)
- negate += (GT -> LE)
- negate += (LE -> GT)
- negate += (GE -> LT)
-
- /** Map from type kinds to the Java reference types. It is used for
- * loading class constants. @see Predef.classOf. */
- val classLiteral: Map[TypeKind, JObjectType] = new HashMap()
-
- classLiteral += (UNIT -> new JObjectType("java.lang.Void"))
- classLiteral += (BOOL -> new JObjectType("java.lang.Boolean"))
- classLiteral += (BYTE -> new JObjectType("java.lang.Byte"))
- classLiteral += (SHORT -> new JObjectType("java.lang.Short"))
- classLiteral += (CHAR -> new JObjectType("java.lang.Character"))
- classLiteral += (INT -> new JObjectType("java.lang.Integer"))
- classLiteral += (LONG -> new JObjectType("java.lang.Long"))
- classLiteral += (FLOAT -> new JObjectType("java.lang.Float"))
- classLiteral += (DOUBLE -> new JObjectType("java.lang.Double"))
////////////////////// local vars ///////////////////////
@@ -1602,7 +1661,7 @@ abstract class GenJVM extends SubComponent {
def indexOf(local: Local): Int = {
assert(local.index >= 0,
- "Invalid index for: " + local + "{" + local.hashCode + "}: ")
+ "Invalid index for: " + local + "{" + local.## + "}: ")
local.index
}
@@ -1617,7 +1676,7 @@ abstract class GenJVM extends SubComponent {
for (l <- m.locals) {
if (settings.debug.value)
- log("Index value for " + l + "{" + l.hashCode + "}: " + idx)
+ log("Index value for " + l + "{" + l.## + "}: " + idx)
l.index = idx
idx += sizeOf(l.kind)
}
@@ -1625,97 +1684,9 @@ abstract class GenJVM extends SubComponent {
////////////////////// Utilities ////////////////////////
- /**
- * <p>
- * Return the a name of this symbol that can be used on the Java
- * platform. It removes spaces from names.
- * </p>
- * <p>
- * Special handling: scala.Nothing and <code>scala.Null</code> are
- * <em>erased</em> to <code>scala.runtime.Nothing$</code> and
- * </code>scala.runtime.Null$</code>. This is needed because they are
- * not real classes, and they mean 'abrupt termination upon evaluation
- * of that expression' or <code>null</code> respectively. This handling is
- * done already in <a href="../icode/GenIcode.html" target="contentFrame">
- * <code>GenICode</code></a>, but here we need to remove references
- * from method signatures to these types, because such classes can
- * not exist in the classpath: the type checker will be very confused.
- * </p>
- */
- def javaName(sym: Symbol): String = {
- val suffix = moduleSuffix(sym)
-
- if (sym == definitions.NothingClass)
- return javaName(definitions.RuntimeNothingClass)
- else if (sym == definitions.NullClass)
- return javaName(definitions.RuntimeNullClass)
-
- if (sym.isClass && !sym.rawowner.isPackageClass && !sym.isModuleClass) {
- innerClasses = innerClasses + sym;
- }
-
- (if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.fullNameString('/')
- else
- sym.simpleName.toString.trim()) + suffix
- }
-
- def javaNames(syms: List[Symbol]): Array[String] = {
- val res = new Array[String](syms.length)
- var i = 0
- syms foreach (s => { res(i) = javaName(s); i += 1 })
- res
- }
-
- /**
- * Return the Java modifiers for the given symbol.
- * Java modifiers for classes:
- * - public, abstract, final, strictfp (not used)
- * for interfaces:
- * - the same as for classes, without 'final'
- * for fields:
- * - public, private (*)
- * - static, final
- * for methods:
- * - the same as for fields, plus:
- * - abstract, synchronized (not used), strictfp (not used), native (not used)
- *
- * (*) protected cannot be used, since inner classes 'see' protected members,
- * and they would fail verification after lifted.
- */
- def javaFlags(sym: Symbol): Int = {
- import JAccessFlags._
-
- var jf: Int = 0
- val f = sym.flags
- jf = jf | (if (sym hasFlag Flags.SYNTHETIC) ACC_SYNTHETIC else 0)
-/* jf = jf | (if (sym hasFlag Flags.PRIVATE) ACC_PRIVATE else
- if (sym hasFlag Flags.PROTECTED) ACC_PROTECTED else ACC_PUBLIC)
-*/
- jf = jf | (if (sym hasFlag Flags.PRIVATE) ACC_PRIVATE else ACC_PUBLIC)
- jf = jf | (if ((sym hasFlag Flags.ABSTRACT) ||
- (sym hasFlag Flags.DEFERRED)) ACC_ABSTRACT else 0)
- jf = jf | (if (sym hasFlag Flags.INTERFACE) ACC_INTERFACE else 0)
- jf = jf | (if ((sym hasFlag Flags.FINAL)
- && !sym.enclClass.hasFlag(Flags.INTERFACE)
- && !sym.isClassConstructor) ACC_FINAL else 0)
- jf = jf | (if (sym.isStaticMember) ACC_STATIC else 0)
- jf = jf | (if (sym hasFlag Flags.BRIDGE) ACC_BRIDGE | ACC_SYNTHETIC else 0)
-
- if (sym.isClass && !sym.hasFlag(Flags.INTERFACE))
- jf = jf | ACC_SUPER
-
- // constructors of module classes should be private
- if (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) {
- jf |= ACC_PRIVATE
- jf &= ~ACC_PUBLIC
- }
- jf
- }
-
/** Calls to methods in 'sym' need invokeinterface? */
def needsInterfaceCall(sym: Symbol): Boolean = {
- log("checking for interface call: " + sym.fullNameString)
+ log("checking for interface call: " + sym.fullName)
// the following call to 'info' may cause certain symbols to fail loading because we're
// too late in the compilation chain (aliases to overloaded symbols will not be properly
// resolved, see scala.Range, method super$++ that fails in UnPickler at LazyTypeRefAndAlias.complete
@@ -1727,37 +1698,6 @@ abstract class GenJVM extends SubComponent {
}
- def javaType(t: TypeKind): JType = (t: @unchecked) match {
- case UNIT => JType.VOID
- case BOOL => JType.BOOLEAN
- case BYTE => JType.BYTE
- case SHORT => JType.SHORT
- case CHAR => JType.CHAR
- case INT => JType.INT
- case LONG => JType.LONG
- case FLOAT => JType.FLOAT
- case DOUBLE => JType.DOUBLE
- case REFERENCE(cls) => new JObjectType(javaName(cls))
- case ARRAY(elem) => new JArrayType(javaType(elem))
- }
-
- def javaType(t: Type): JType = javaType(toTypeKind(t))
-
- def javaType(s: Symbol): JType =
- if (s.isMethod)
- new JMethodType(
- if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType),
- s.tpe.paramTypes.map(javaType).toArray)
- else
- javaType(s.tpe)
-
- def javaTypes(ts: List[TypeKind]): Array[JType] = {
- val res = new Array[JType](ts.length)
- var i = 0
- ts foreach ( t => { res(i) = javaType(t); i += 1 } );
- res
- }
-
/** Return an abstract file for the given class symbol, with the desired suffix.
* Create all necessary subdirectories on the way.
*/
@@ -1771,8 +1711,6 @@ abstract class GenJVM extends SubComponent {
dir.fileNamed(pathParts.last + suffix)
}
-
-
/** Merge adjacent ranges. */
private def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
(ranges.foldLeft(Nil: List[(Int, Int)]) { (collapsed: List[(Int, Int)], p: (Int, Int)) => (collapsed, p) match {
@@ -1783,9 +1721,74 @@ abstract class GenJVM extends SubComponent {
def assert(cond: Boolean, msg: => String) = if (!cond) {
method.dump
- throw new Error(msg + "\nMethod: " + method)
+ abort(msg + "\nMethod: " + method)
}
def assert(cond: Boolean) { assert(cond, "Assertion failed.") }
}
+
+ /**
+ * Return the Java modifiers for the given symbol.
+ * Java modifiers for classes:
+ * - public, abstract, final, strictfp (not used)
+ * for interfaces:
+ * - the same as for classes, without 'final'
+ * for fields:
+ * - public, private (*)
+ * - static, final
+ * for methods:
+ * - the same as for fields, plus:
+ * - abstract, synchronized (not used), strictfp (not used), native (not used)
+ *
+ * (*) protected cannot be used, since inner classes 'see' protected members,
+ * and they would fail verification after lifted.
+ */
+ def javaFlags(sym: Symbol): Int = {
+ import JAccessFlags._
+ def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+ //
+ // var jf: Int = 0
+ // val f = sym.flags
+ // jf = jf | (if (sym hasFlag Flags.SYNTHETIC) ACC_SYNTHETIC else 0)
+ // /* jf = jf | (if (sym hasFlag Flags.PRIVATE) ACC_PRIVATE else
+ // if (sym hasFlag Flags.PROTECTED) ACC_PROTECTED else ACC_PUBLIC)
+ // */
+ // jf = jf | (if (sym hasFlag Flags.PRIVATE) ACC_PRIVATE else ACC_PUBLIC)
+ // jf = jf | (if ((sym hasFlag Flags.ABSTRACT) ||
+ // (sym hasFlag Flags.DEFERRED)) ACC_ABSTRACT else 0)
+ // jf = jf | (if (sym hasFlag Flags.INTERFACE) ACC_INTERFACE else 0)
+ // jf = jf | (if ((sym hasFlag Flags.FINAL)
+ // && !sym.enclClass.hasFlag(Flags.INTERFACE)
+ // && !sym.isClassConstructor) ACC_FINAL else 0)
+ // jf = jf | (if (sym.isStaticMember) ACC_STATIC else 0)
+ // jf = jf | (if (sym hasFlag Flags.BRIDGE) ACC_BRIDGE | ACC_SYNTHETIC else 0)
+ //
+ // if (sym.isClass && !sym.hasFlag(Flags.INTERFACE))
+ // jf = jf | ACC_SUPER
+ //
+ // constructors of module classes should be private
+ // PP: why are they only being marked private at this stage and not earlier?
+ val isConsideredPrivate =
+ sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
+
+ mkFlags(
+ if (isConsideredPrivate) ACC_PRIVATE else ACC_PUBLIC,
+ if (sym.isDeferred || sym.hasFlag(Flags.ABSTRACT)) ACC_ABSTRACT else 0,
+ if (sym.isInterface) ACC_INTERFACE else 0,
+ if (sym.isFinal && !sym.enclClass.isInterface && !sym.isClassConstructor) ACC_FINAL else 0,
+ if (sym.isStaticMember) ACC_STATIC else 0,
+ if (sym.isBridge || sym.hasFlag(Flags.MIXEDIN) && sym.isMethod) ACC_BRIDGE else 0,
+ if (sym.isClass && !sym.isInterface) ACC_SUPER else 0
+ )
+ }
+
+ def isTopLevelModule(sym: Symbol): Boolean =
+ atPhase (currentRun.picklerPhase.next) {
+ sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
+ }
+
+ def isStaticModule(sym: Symbol): Boolean = {
+ sym.isModuleClass && !sym.isImplClass && !sym.hasFlag(Flags.LIFTED)
+ }
+
}
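// Illustrative sketch (not part of the patch): mkFlags in the rewritten
// javaFlags just ORs its arguments together, so each conditional access bit can
// be written inline instead of the chained jf = jf | ... updates kept above as
// commented-out history. Standard JVM access bits used for the demo:
object MkFlagsSketch {
  def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)

  def main(args: Array[String]) {
    val ACC_PUBLIC = 0x0001
    val ACC_FINAL  = 0x0010
    val ACC_SUPER  = 0x0020
    val isFinal    = true
    val flags = mkFlags(ACC_PUBLIC, if (isFinal) ACC_FINAL else 0, ACC_SUPER)
    println(flags == (ACC_PUBLIC | ACC_FINAL | ACC_SUPER)) // true
  }
}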
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
new file mode 100644
index 0000000000..6ff5d42e55
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
@@ -0,0 +1,148 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Iulian Dragos
+ */
+
+
+package scala.tools.nsc
+package backend.jvm
+
+import scala.collection.{ mutable, immutable }
+
+import ch.epfl.lamp.fjbg._
+
+trait GenJVMUtil {
+ self: GenJVM =>
+
+ import global._
+ import icodes._
+ import icodes.opcodes._
+ import definitions._
+
+ /** Map from type kinds to the Java reference types. It is used for
+ * loading class constants. @see Predef.classOf.
+ */
+ val classLiteral = immutable.Map[TypeKind, JObjectType](
+ UNIT -> new JObjectType("java.lang.Void"),
+ BOOL -> new JObjectType("java.lang.Boolean"),
+ BYTE -> new JObjectType("java.lang.Byte"),
+ SHORT -> new JObjectType("java.lang.Short"),
+ CHAR -> new JObjectType("java.lang.Character"),
+ INT -> new JObjectType("java.lang.Integer"),
+ LONG -> new JObjectType("java.lang.Long"),
+ FLOAT -> new JObjectType("java.lang.Float"),
+ DOUBLE -> new JObjectType("java.lang.Double")
+ )
+
+ private val javaNameCache = {
+ val map = new mutable.WeakHashMap[Symbol, String]()
+ map ++= List(
+ NothingClass -> RuntimeNothingClass.fullName('/'),
+ RuntimeNothingClass -> RuntimeNothingClass.fullName('/'),
+ NullClass -> RuntimeNullClass.fullName('/'),
+ RuntimeNullClass -> RuntimeNullClass.fullName('/')
+ )
+ map
+ }
+
+ /** This trait may be used by tools that need access to
+ * utility methods like javaName and javaType. (for instance,
+ * the Eclipse plugin uses it).
+ */
+ trait BytecodeUtil {
+
+ val conds = immutable.Map[TestOp, Int](
+ EQ -> JExtendedCode.COND_EQ,
+ NE -> JExtendedCode.COND_NE,
+ LT -> JExtendedCode.COND_LT,
+ GT -> JExtendedCode.COND_GT,
+ LE -> JExtendedCode.COND_LE,
+ GE -> JExtendedCode.COND_GE
+ )
+ val negate = immutable.Map[TestOp, TestOp](
+ EQ -> NE,
+ NE -> EQ,
+ LT -> GE,
+ GT -> LE,
+ LE -> GT,
+ GE -> LT
+ )
+
+ /** Return a name of this symbol that can be used on the Java
+ * platform. It removes spaces from names.
+ *
+ * Special handling:
+ * scala.Nothing erases to scala.runtime.Nothing$
+ * scala.Null erases to scala.runtime.Null$
+ *
+ * This is needed because they are not real classes, and they mean
+ * 'abrupt termination upon evaluation of that expression' or null respectively.
+ * This handling is done already in GenICode, but here we need to remove
+ * references from method signatures to these types, because such classes can
+ * not exist in the classpath: the type checker will be very confused.
+ */
+ def javaName(sym: Symbol): String =
+ javaNameCache.getOrElseUpdate(sym, {
+ if (sym.isClass || (sym.isModule && !sym.isMethod))
+ sym.fullName('/') + moduleSuffix(sym)
+ else
+ sym.simpleName.toString.trim() + moduleSuffix(sym)
+ })
+
+ def javaType(t: TypeKind): JType = (t: @unchecked) match {
+ case UNIT => JType.VOID
+ case BOOL => JType.BOOLEAN
+ case BYTE => JType.BYTE
+ case SHORT => JType.SHORT
+ case CHAR => JType.CHAR
+ case INT => JType.INT
+ case LONG => JType.LONG
+ case FLOAT => JType.FLOAT
+ case DOUBLE => JType.DOUBLE
+ case REFERENCE(cls) => new JObjectType(javaName(cls))
+ case ARRAY(elem) => new JArrayType(javaType(elem))
+ }
+
+ def javaType(t: Type): JType = javaType(toTypeKind(t))
+
+ def javaType(s: Symbol): JType =
+ if (s.isMethod)
+ new JMethodType(
+ if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType),
+ s.tpe.paramTypes map javaType toArray
+ )
+ else
+ javaType(s.tpe)
+
+ protected def genConstant(jcode: JExtendedCode, const: Constant) {
+ const.tag match {
+ case UnitTag => ()
+ case BooleanTag => jcode emitPUSH const.booleanValue
+ case ByteTag => jcode emitPUSH const.byteValue
+ case ShortTag => jcode emitPUSH const.shortValue
+ case CharTag => jcode emitPUSH const.charValue
+ case IntTag => jcode emitPUSH const.intValue
+ case LongTag => jcode emitPUSH const.longValue
+ case FloatTag => jcode emitPUSH const.floatValue
+ case DoubleTag => jcode emitPUSH const.doubleValue
+ case StringTag => jcode emitPUSH const.stringValue
+ case NullTag => jcode.emitACONST_NULL()
+ case ClassTag =>
+ val kind = toTypeKind(const.typeValue)
+ val toPush =
+ if (kind.isValueType) classLiteral(kind)
+ else javaType(kind).asInstanceOf[JReferenceType]
+
+ jcode emitPUSH toPush
+
+ case EnumTag =>
+ val sym = const.symbolValue
+ jcode.emitGETSTATIC(javaName(sym.owner),
+ javaName(sym),
+ javaType(sym.tpe.underlying))
+ case _ =>
+ abort("Unknown constant value: " + const)
+ }
+ }
+ }
+}
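// Illustrative sketch (hypothetical, not the patch's code): javaName in the new
// GenJVMUtil memoizes results in a WeakHashMap via getOrElseUpdate, pre-seeded
// so scala.Nothing / scala.Null resolve to their runtime stand-ins. The same
// caching pattern in miniature over plain strings:
import scala.collection.mutable

object NameCacheSketch {
  private val cache = new mutable.WeakHashMap[String, String]()
  cache ++= List(
    "scala.Nothing" -> "scala/runtime/Nothing$",
    "scala.Null"    -> "scala/runtime/Null$"
  )

  def javaName(fullName: String): String =
    cache.getOrElseUpdate(fullName, fullName.replace('.', '/'))

  def main(args: Array[String]) {
    println(javaName("scala.Nothing"))        // scala/runtime/Nothing$
    println(javaName("scala.collection.Seq")) // scala/collection/Seq
  }
}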
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index bbc11037ce..174a1b778e 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -1,9 +1,8 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Nikolay Mihaylov
*/
-// $Id$
package scala.tools.nsc
package backend.msil
@@ -11,9 +10,8 @@ package backend.msil
import java.io.{File, IOException}
import java.nio.{ByteBuffer, ByteOrder}
-import scala.collection.mutable.{Map, HashMap, HashSet, Stack}
+import scala.collection.mutable.{Map, HashMap, HashSet, Stack, ListBuffer}
import scala.tools.nsc.symtab._
-import scala.tools.nsc.util.Position
import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
import ch.epfl.lamp.compiler.msil.emit._
@@ -43,15 +41,15 @@ abstract class GenMSIL extends SubComponent {
val codeGenerator = new BytecodeGenerator
//classes is ICodes.classes, a HashMap[Symbol, IClass]
- classes.valuesIterator foreach codeGenerator.findEntryPoint
+ classes.values foreach codeGenerator.findEntryPoint
codeGenerator.initAssembly
- classes.valuesIterator foreach codeGenerator.createTypeBuilder
- classes.valuesIterator foreach codeGenerator.createClassMembers
+ classes.values foreach codeGenerator.createTypeBuilder
+ classes.values foreach codeGenerator.createClassMembers
try {
- classes.valuesIterator foreach codeGenerator.genClass
+ classes.values foreach codeGenerator.genClass
} finally {
codeGenerator.writeAssembly
}
@@ -206,7 +204,6 @@ abstract class GenMSIL extends SubComponent {
var clasz: IClass = _
var method: IMethod = _
- var code: Code = _
var massembly: AssemblyBuilder = _
var mmodule: ModuleBuilder = _
@@ -249,7 +246,7 @@ abstract class GenMSIL extends SubComponent {
assemblyName.Name = assemName
massembly = AssemblyBuilderFactory.DefineDynamicAssembly(assemblyName)
- moduleName = assemName + (if (entryPoint == null) ".dll" else ".exe")
+ moduleName = assemName // + (if (entryPoint == null) ".dll" else ".exe")
// filename here: .dll or .exe (in both parameters), second: give absolute-path
mmodule = massembly.DefineDynamicModule(moduleName,
new File(outDir, moduleName).getAbsolutePath())
@@ -294,7 +291,7 @@ abstract class GenMSIL extends SubComponent {
tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_CONSTRUCTOR, symtab)
currentRun.symData -= sym
- currentRun.symData -= sym.linkedSym
+ currentRun.symData -= sym.companionSymbol
case _ =>
addMarker()
@@ -327,7 +324,7 @@ abstract class GenMSIL extends SubComponent {
annType.CreateType() // else, GetConstructors can't be used
val constr: ConstructorInfo = annType.GetConstructors()(0)
// prevent a second call of CreateType, only needed because there's no
- // otehr way than GetConstructors()(0) to get the constructor, if there's
+ // other way than GetConstructors()(0) to get the constructor, if there's
// no constructor symbol available.
val args: Array[Byte] =
@@ -471,7 +468,7 @@ abstract class GenMSIL extends SubComponent {
}
private def createTypes() {
- for (sym <- classes.keysIterator) {
+ for (sym <- classes.keys) {
val iclass = classes(sym)
val tBuilder = types(sym).asInstanceOf[TypeBuilder]
@@ -515,11 +512,11 @@ abstract class GenMSIL extends SubComponent {
tBuilder.setPosition(line, iclass.cunit.source.file.name)
if (isTopLevelModule(sym)) {
- if (sym.linkedClassOfModule == NoSymbol)
+ if (sym.companionClass == NoSymbol)
dumpMirrorClass(sym)
else
log("No mirror class for module with linked class: " +
- sym.fullNameString)
+ sym.fullName)
}
addSymtabAttribute(sym, tBuilder)
@@ -561,7 +558,7 @@ abstract class GenMSIL extends SubComponent {
}
if (mcode != null) {
- for (local <- m.locals -- m.params) {
+ for (local <- m.locals ; if !(m.params contains local)) {
if (settings.debug.value)
log("add local var: " + local + ", of kind " + local.kind)
val t: MsilType = msilType(local.kind)
@@ -574,637 +571,304 @@ abstract class GenMSIL extends SubComponent {
}
- var linearization: List[BasicBlock] = Nil
- // a "ret" instruction is needed (which is not present in
- // icode) if there's no code after a try-catch block
- var needAdditionalRet: Boolean = false
-
- def genCode(m: IMethod) {
- code = m.code
+ /** Special linearizer for methods with at least one exception handler. This
+ * linearizer brings all basic blocks in the right order so that nested
+ * try-catch and try-finally blocks can be emitted.
+ */
+ val msilLinearizer = new MSILLinearizer()
- labels.clear
- linearization = linearizer.linearize(m)
- val orderedBlocks = (if (m.exh != Nil) orderBlocksForExh(linearization, m.exh)
- else linearization)
+ val labels: HashMap[BasicBlock, Label] = new HashMap()
- makeLabels(orderedBlocks) // orderBlocksForExh may create new Blocks -> new Labels
- genBlocks(orderedBlocks)
- if (needAdditionalRet) {
- mcode.Emit(OpCodes.Ret)
- needAdditionalRet = false
- }
- }
+ def genCode(m: IMethod) {
- abstract class ExHInstruction(handler: ExceptionHandler) { }
- case class BeginExceptionBlock(handler: ExceptionHandler) extends ExHInstruction(handler)
- case class BeginCatchBlock(handler: ExceptionHandler, exceptionType: MsilType) extends ExHInstruction(handler)
- case class BeginFinallyBlock(handler: ExceptionHandler) extends ExHInstruction(handler)
- case class EndExceptionBlock(handler: ExceptionHandler) extends ExHInstruction(handler)
-
-
- abstract class Block {
- var closed: Boolean = false
- def parentBlockList: Option[BlockList0]
- def firstBasicBlock: BasicBlock
- def lastBasicBlock: BasicBlock
-// def getExceptionBlock(exh: ExceptionHandler): Option[ExceptionBlock]
- def close(): Unit
-/* protected def findExceptionBlock(list: List[Block], exh: ExceptionHandler): Option[ExceptionBlock] = {
- var res: Option[ExceptionBlock] = None
- var i: Int = 0
- while (i < list.length && res == None) {
- val b = list(i)
- val exB = b.getExceptionBlock(exh)
- exB match {
- case some: Some[ExceptionBlock] => res = some
- case None => ()
- }
- i = i + 1
- }
- res
- } */
- }
- case class CodeBlock(parent: BlockList0) extends Block {
- var basicBlocks: List[BasicBlock] = Nil
- def isEmpty = basicBlocks.isEmpty
- override def firstBasicBlock: BasicBlock = {
- if(isEmpty) null
- else {
- if (closed) basicBlocks.head
- else basicBlocks.last
- }
- }
- override def lastBasicBlock: BasicBlock = {
- if(isEmpty) null
- else {
- if (closed) basicBlocks.last
- else basicBlocks.head
- }
- }
- override def parentBlockList = Some(parent)
-// override def getExceptionBlock(exh: ExceptionHandler): Option[ExceptionBlock] = None
- override def close() {
- basicBlocks = basicBlocks.reverse
- closed = true
- }
- override def toString() = {
- var res = ""
- res = res + TopBlock.indent + "CodeBlock(" + basicBlocks + ")\n"
- res
- }
- }
- abstract class BlockList0 extends Block {
- var blocks: List[Block] = Nil
- override def firstBasicBlock: BasicBlock = {
- if(blocks.isEmpty) null
- else {
- if (closed) blocks.head.firstBasicBlock
- else blocks.last.firstBasicBlock
- }
- }
- override def lastBasicBlock: BasicBlock = {
- if(blocks.isEmpty) null
- else {
- if (closed) blocks.last.lastBasicBlock
- else blocks.head.lastBasicBlock
- }
- }
-/* override def getExceptionBlock(exh: ExceptionHandler): Option[ExceptionBlock] = {
- findExceptionBlock(blocks, exh)
- } */
- def addExceptionBlock(exh: ExceptionHandler) = {
- if (settings.debug.value)
- log("new exc block with " + exh + " to " + this)
- val e = new ExceptionBlock(this, exh)
- blocks = e :: blocks
- e
- }
- def addBasicBlock(bb: BasicBlock) = {
- if (settings.debug.value)
- log("adding bb " + bb + " to " + this)
- var cb: CodeBlock = if (!blocks.isEmpty) {
- blocks.head match {
- case blk: CodeBlock => blk
- case _ => null
- }
- } else null
- if (cb == null) {
- cb = new CodeBlock(this)
- blocks = cb :: blocks
- }
- cb.basicBlocks = bb :: cb.basicBlocks
- }
- override def close() {
- blocks.foreach(_.close)
- blocks = blocks.reverse
- closed = true
- }
- override def toString() = {
- var res = ""
- res = res + TopBlock.indent + "BlockList0:\n"
- TopBlock.indent = TopBlock.indent + " "
- for (b <- blocks)
- res = res + b + "\n"
- TopBlock.indent = TopBlock.indent.substring(0,TopBlock.indent.length-2)
- res
- }
- }
- case class BlockList(parent: Block) extends BlockList0 {
- override def parentBlockList: Option[BlockList0] = {
- if (parent == TopBlock)
- Some(TopBlock)
- else parent match {
- case bl: BlockList => Some(bl)
- case cb: CatchBlock => Some(cb)
- case _ => parent.parentBlockList
- }
- }
- override def toString() = {
- var res = ""
- res = res + TopBlock.indent + "BlockList:\n"
- res = res + super.toString()
- res
- }
- }
- case class ExceptionBlock(parent: Block, handler: ExceptionHandler) extends Block {
- var tryBlock: BlockList = new BlockList(this)
- var catchBlocks: List[CatchBlock] = Nil
- var finallyBlock: BlockList = new BlockList(this)
- override def firstBasicBlock = {
- tryBlock.firstBasicBlock
- }
- override def lastBasicBlock = {
- if (!finallyBlock.blocks.isEmpty)
- finallyBlock.lastBasicBlock
- else if(!catchBlocks.isEmpty) {
- if (closed) catchBlocks.last.lastBasicBlock
- else catchBlocks.head.lastBasicBlock
- } else {
- tryBlock.lastBasicBlock
- }
- }
- override def parentBlockList: Option[BlockList0] = {
- if (parent == TopBlock)
- Some(TopBlock)
- else parent match {
- case bl: BlockList => Some(bl)
- case cb: CatchBlock => Some(cb)
- case _ => parent.parentBlockList
- }
- }
-/* override def getExceptionBlock(exh: ExceptionHandler): Option[ExceptionBlock] = {
- if (exh == handler) Some(this)
- else {
- val t = if (tryBlock == null) Nil else List(tryBlock)
- val f = if (finallyBlock == null) Nil else List(finallyBlock)
- findExceptionBlock(t ::: catchBlocks ::: f, exh)
- }
- }
-*/
- def addCatchBlock(exSym: Symbol): CatchBlock = {
+ def makeLabels(blocks: List[BasicBlock]) = {
if (settings.debug.value)
- log("new catch block with " + exSym + " to " + this)
- val c = new CatchBlock(this, exSym)
- catchBlocks = c :: catchBlocks
- c
- }
- override def close() {
- tryBlock.close
- catchBlocks.foreach(_.close)
- catchBlocks = catchBlocks.reverse
- finallyBlock.close
- closed = true
+ log("Making labels for: " + method)
+ for (bb <- blocks) labels(bb) = mcode.DefineLabel()
}
- override def toString() = {
- var res = ""
- res = res + TopBlock.indent + "ExceptionBlock, handler: " + handler + "\n"
- res = res + TopBlock.indent + " " + "try:\n"
- TopBlock.indent = TopBlock.indent + " "
- res = res + tryBlock + "\n"
- TopBlock.indent = TopBlock.indent.substring(0,TopBlock.indent.length-4)
- res = res + TopBlock.indent + " " + "catch:\n"
- TopBlock.indent = TopBlock.indent + " "
- for (b <- catchBlocks)
- res = res + b + "\n"
- TopBlock.indent = TopBlock.indent.substring(0,TopBlock.indent.length-4)
- res = res + TopBlock.indent + " " + "finally:\n"
- TopBlock.indent = TopBlock.indent + " "
- res = res + finallyBlock + "\n"
- TopBlock.indent = TopBlock.indent.substring(0,TopBlock.indent.length-4)
- res
- }
- }
- case class CatchBlock(parent: ExceptionBlock, exSym: Symbol) extends BlockList0 {
- override def parentBlockList: Option[BlockList0] = {
- parent.parentBlockList
- }
- override def toString() = {
- var res = ""
- res = res + TopBlock.indent + "CatchBlock:\n"
- res = res + super.toString()
- res
- }
- }
- case object TopBlock extends BlockList0 {
- var indent = ""
- override def parentBlockList = None
- override def toString() = {
- var res = ""
- res = res + TopBlock.indent + "TopBlock:\n"
- res = res + super.toString()
- res
- }
- }
-
- // for every basic block, a list of ExHInstructions to be executed:
- // - Begin_ are executed before the block
- // - EndExceptionBlock is executed after the block
- val bb2exHInstructions: HashMap[BasicBlock, List[ExHInstruction]] = new HashMap()
- // at the end of a try, catch or finally block, the jumps must not be emitted,
- // the automatically generated leave (or endfinally) will do the job.
- val omitJumpBlocks: HashSet[BasicBlock] = new HashSet()
-
- // suposes that finalizers are the same for different handlers
- // covering the same blocks
- def orderBlocksForExh(blocks: List[BasicBlock], exH: List[ExceptionHandler]): List[BasicBlock] = {
-
- var blocksToPut: List[BasicBlock] = blocks
- var nextBlock: BasicBlock = null
- var untreatedHandlers: List[ExceptionHandler] = exH
- TopBlock.blocks = Nil
- var currentBlock: BlockList0 = TopBlock
- def addBlocks(b: List[BasicBlock]):Unit = b match {
- case Nil => if (settings.debug.value) log("adding " + b)
-
- case x :: xs =>
- if (settings.debug.value) log("adding " + b)
- // problem: block may already be added, and and needs to be moved.
- // if nextblock NOT in b: check if nextblock in blocksToPut, if NOT, check if movable, else don't put
- if (nextBlock != null && b.contains(nextBlock)) {
- val blocksToAdd = nextBlock :: (b - nextBlock)
- nextBlock = null
- addBlocks(blocksToAdd)
- }
- else if (untreatedHandlers.forall(h => !(h.covers(x)))) {
-
- if (settings.debug.value) log(" no new handler for " + x)
- if (untreatedHandlers.forall(h => !(h.blocks.contains(x) ||
- (h.finalizer != null &&
- h.finalizer.covers(x)))))
- {
- // the block is not part of some catch or finally code
- currentBlock.addBasicBlock(x)
- blocksToPut = blocksToPut - x
- if (settings.debug.value) log(" -> addBlocks(" + xs + ")")
- addBlocks(xs)
- } else {
- if (settings.debug.value) log("x is part of catch or finally block")
-
- // check if the covered code of the handler x belongs to is empty
- // this check is not needed for finalizers: empty try with finalizer
- // is optimized by compiler (no try left)
- if(untreatedHandlers.forall(h =>
- (!h.blocks.contains(x) || h.covered.isEmpty))) {
- blocksToPut = blocksToPut - x
- addBlocks(xs)
- } else
- addBlocks(xs ::: List(x))
- }
- } else { // there are new handlers for this block
-
- var firstBlockAfter: HashMap[ExceptionHandler,BasicBlock] = new HashMap()
- val savedCurrentBlock = currentBlock
- /**
- * the output blocks of this method are changed so that:
- * - only one block has a successor outside the set of blocks
- * - this block is the last of the reusulting list
- *
- * side-effect: it stores the successor in the hashMap
- * firstBlockAfter, which has to be emitted first after try/catch/finally,
- * because the target of the Leave-instruction will always be the first
- * instruction after EndExceptionBlock
- *
- * returns: the output blocks plus an Option containing the possibly created
- * new block
- **/
- def adaptBlocks(blocks: List[BasicBlock], exh: ExceptionHandler): (List[BasicBlock], Option[BasicBlock]) = {
- def outsideTargets(block: BasicBlock, blocks: List[BasicBlock]) = {
- /* The catch block of the ExceptionHandler is always a successor of any block inside the try
- * (see successors method in BasicBlocks.scala)
- * Thus, this successor does not correspond to a jump outside the exception handler
- * and has to be ignored when computing the list of blocks leaving the exception handler. */
- val res = block.successors.filter(scc => !blocks.contains(scc) && scc != exh.startBlock)
- if (settings.debug.value) log("outside of " + block + " = " + res + " succ " + block.successors)
- res
- }
- // get leaving blocks and their outside targets
- def leavingBlocks(blocks: List[BasicBlock]): List[(BasicBlock, List[BasicBlock])] = {
- for {b <- blocks
- val t = outsideTargets(b, blocks)
- if t.length != 0 } yield (b, t)
- }
-
- def replaceOutJumps(blocks: List[BasicBlock], leaving: List[(BasicBlock, List[BasicBlock])], exh: ExceptionHandler): (List[BasicBlock], Option[BasicBlock]) = {
- def replaceJump(block: BasicBlock, from: BasicBlock, to: BasicBlock) = block.lastInstruction match {
- case JUMP(whereto) =>
- //assert(from == whereto)
- block.replaceInstruction(block.lastInstruction, JUMP(to))
- case CJUMP(success, failure, cond, kind) =>
- if (from == success)
- block.replaceInstruction(block.lastInstruction, CJUMP(to, failure, cond, kind))
- else
- //assert(from == failure)
- if (from == failure)
- block.replaceInstruction(block.lastInstruction, CJUMP(success, to, cond, kind))
- case CZJUMP(success, failure, cond, kind) =>
- if (from == success)
- block.replaceInstruction(block.lastInstruction, CZJUMP(to, failure, cond, kind))
- else
- //assert(from == failure)
- if (from == failure)
- block.replaceInstruction(block.lastInstruction, CZJUMP(success, to, cond, kind))
- case SWITCH(tags, labels) => // labels: List[BasicBlock]
- val newLabels = labels.map(b => if (b == from) to else b)
- assert(newLabels.contains(to))
- block.replaceInstruction(block.lastInstruction, SWITCH(tags, newLabels))
- /*
- case RETURN(kind) =>
- if (kind != UNIT) {
- returnVal
- }
- block.replaceInstruction(block.lastInstructionm JUMP(to))
- */
- case _ => () //abort("expected branch at the end of block " + block)
- }
- val jumpOutBlock = blocks.last.code.newBlock
- jumpOutBlock.emit(JUMP(firstBlockAfter(exh)))
- jumpOutBlock.close
- leaving.foreach(p => {
- val lBlock = p._1
- val target = p._2(0) // the elemets of p._2 are all the same, checked before
- replaceJump(lBlock, target, jumpOutBlock)
- if (settings.debug.value) log("replacing " + lBlock + " target " + target + " jump out " + jumpOutBlock)
- })
- (blocks ::: List(jumpOutBlock), Some(jumpOutBlock))
- }
+ labels.clear
- val leaving = leavingBlocks(blocks)
- if (settings.debug.value) log("leaving " + leaving)
- if (leaving.length == 0)
- (blocks, None)
- else if (leaving.length == 1) {
- val outside = leaving(0)._2
- //assert(outside.forall(b => b == outside(0)), "exception-block leaving to multiple targets")
- if (!firstBlockAfter.isDefinedAt(exh))
- firstBlockAfter(exh) = outside(0)
- //else ()
- //assert(firstBlockAfter(exh) == outside(0), "try/catch leaving to multiple targets: " + firstBlockAfter(exh) + ", new: " + outside(0))
- val last = leaving(0)._1
- ((blocks - last) ::: List(last), None)
- } else {
- val outside = leaving.flatMap(p => p._2)
- //assert(outside.forall(b => b == outside(0)), "exception-block leaving to multiple targets")
- if (!firstBlockAfter.isDefinedAt(exh))
- firstBlockAfter(exh) = outside(0)
- //else
- //assert(firstBlockAfter(exh) == outside(0), "try/catch leaving to multiple targets")
- replaceOutJumps(blocks, leaving, exh)
- }
- }
+ var linearization = if(m.exh != Nil) msilLinearizer.linearize(m)
+ else linearizer.linearize(m)
- var affectedHandlers: List[ExceptionHandler] = Nil
- untreatedHandlers.foreach( (h) => {
- if (h.covers(x)) {
- affectedHandlers = h :: affectedHandlers
- }
- })
-
- // shorter try-catch-finally last (the ones contained in another)
- affectedHandlers = affectedHandlers.sort({(h1, h2) => h1.covered.size > h2.covered.size})
- affectedHandlers = affectedHandlers.filter(h => {h.covered.size == affectedHandlers(0).covered.size})
- untreatedHandlers = untreatedHandlers -- affectedHandlers
-
- // more than one catch produces more than one exh, but we only need one
- var singleAffectedHandler: ExceptionHandler = affectedHandlers(0) // List[ExceptionHandler] = Nil
- var exceptionBlock: Option[ExceptionBlock] = None
- if (settings.debug.value) log("affected handlers " + affectedHandlers)
- affectedHandlers.foreach(h1 => {
- val (adaptedBlocks, newBlock) = adaptBlocks(blocksToPut.intersect(h1.blocks), singleAffectedHandler)
- newBlock match {
- case Some(block) =>
- blocksToPut = blocksToPut ::: List(block)
- h1.addBlock(block)
- case None => ()
- }
- val orderedCatchBlocks = h1.startBlock :: (adaptedBlocks - h1.startBlock)
-
- exceptionBlock match {
- case Some(excBlock) =>
- val catchBlock = excBlock.addCatchBlock(h1.cls)
- currentBlock = catchBlock
- addBlocks(orderedCatchBlocks)
- case None =>
- val excBlock = currentBlock.addExceptionBlock(singleAffectedHandler)
- exceptionBlock = Some(excBlock)
-
- val (tryBlocks, newBlock) = adaptBlocks(blocksToPut.intersect(singleAffectedHandler.covered.toList), singleAffectedHandler)
-
- newBlock match {
- case Some(block) =>
- blocksToPut = blocksToPut ::: List(block)
- singleAffectedHandler.addCoveredBlock(block)
- case None => ()
- }
- currentBlock = excBlock.tryBlock
- if (settings.debug.value) log("adding try blocks " + tryBlocks)
- addBlocks(tryBlocks)
-
- if (singleAffectedHandler.finalizer != null && singleAffectedHandler.finalizer != NoFinalizer) {
- val (blocks0, newBlock) = adaptBlocks(blocksToPut.intersect(singleAffectedHandler.finalizer.blocks), singleAffectedHandler)
- newBlock match {
- case Some(block) =>
- blocksToPut = blocksToPut ::: List(block)
- singleAffectedHandler.finalizer.addBlock(block)
- case None => ()
- }
- val blocks = singleAffectedHandler.finalizer.startBlock :: (blocks0 - singleAffectedHandler.finalizer.startBlock)
- currentBlock = excBlock.finallyBlock
- addBlocks(blocks)
- }
+ if (m.exh != Nil)
+ linearization = computeExceptionMaps(linearization, m)
- val catchBlock = excBlock.addCatchBlock(singleAffectedHandler.cls)
- currentBlock = catchBlock
- addBlocks(orderedCatchBlocks)
- }
- if (firstBlockAfter.isDefinedAt(singleAffectedHandler))
- nextBlock = firstBlockAfter(singleAffectedHandler)
- else
- nextBlock = null
- })
+ makeLabels(linearization)
- currentBlock = savedCurrentBlock
+ genBlocks(linearization)
- if (settings.debug.value)
- log(" -> addBlocks(" + xs.intersect(blocksToPut) + ")")
- addBlocks(xs.intersect(blocksToPut))
- }
+      // A RETURN inside an exception block is replaced by a Leave. The target of the
+      // leave is a `Ret` outside any exception block (generated here).
+ if (handlerReturnMethod == m) {
+ mcode.MarkLabel(handlerReturnLabel)
+ if (handlerReturnKind != UNIT)
+ mcode.Emit(OpCodes.Ldloc, handlerReturnLocal)
+ mcode.Emit(OpCodes.Ret)
}
- // begin method orderBlocksForExh
-
- if (settings.debug.value)
- log("before: " + blocks)
- // some blocks may have been removed by linearization
- untreatedHandlers.foreach(h => {
- h.blocks = h.blocks.intersect(blocksToPut)
- h.covered = h.covered.intersect(collection.immutable.HashSet.empty ++ blocksToPut)
- if (h.finalizer != null && h.finalizer != NoFinalizer)
- h.finalizer.blocks = h.finalizer.blocks.intersect(blocksToPut)
- })
- addBlocks(blocks)
+ beginExBlock.clear()
+ beginCatchBlock.clear()
+ endExBlock.clear()
+ endFinallyLabels.clear()
+ }
- TopBlock.close()
+ def genBlocks(blocks: List[BasicBlock], previous: BasicBlock = null) {
+ blocks match {
+ case Nil => ()
+ case x :: Nil => genBlock(x, prev = previous, next = null)
+ case x :: y :: ys => genBlock(x, prev = previous, next = y); genBlocks(y :: ys, previous = x)
+ }
+ }
- if (settings.debug.value) log("TopBlock tree is: ")
- if (settings.debug.value) log(TopBlock)
+ // the try blocks starting at a certain BasicBlock
+ val beginExBlock = new HashMap[BasicBlock, List[ExceptionHandler]]()
- bb2exHInstructions.clear
- def addExHInstruction(b: BasicBlock, ehi: ExHInstruction) = {
- if (settings.debug.value)
- log("adding exhinstr: " + b + " -> " + ehi)
+    // the catch blocks starting / ending at a certain BasicBlock
+ val beginCatchBlock = new HashMap[BasicBlock, ExceptionHandler]()
+ val endExBlock = new HashMap[BasicBlock, List[ExceptionHandler]]()
- if (bb2exHInstructions.contains(b)) {
- bb2exHInstructions(b) = ehi :: bb2exHInstructions(b)
- } else {
- bb2exHInstructions(b) = List(ehi)
+    /** When emitting the code (genBlock), the stack of currently active try / catch
+     *  blocks (innermost handler on top). When seeing a `RETURN' inside a try / catch, we need to
+ * - store the result in a local (if it's not UNIT)
+ * - emit `Leave handlerReturnLabel` instead of the Return
+ * - emit code at the end: load the local and return its value
+ */
+ var currentHandlers = new Stack[ExceptionHandler]
+ // The IMethod the Local/Label/Kind below belong to
+ var handlerReturnMethod: IMethod = _
+ // Stores the result when returning inside an exception block
+ var handlerReturnLocal: LocalBuilder = _
+ // Label for a return instruction outside any exception block
+ var handlerReturnLabel: Label = _
+ // The result kind.
+ var handlerReturnKind: TypeKind = _
+ def returnFromHandler(kind: TypeKind): (LocalBuilder, Label) = {
+ if (handlerReturnMethod != method) {
+ handlerReturnMethod = method
+ if (kind != UNIT) {
+ handlerReturnLocal = mcode.DeclareLocal(msilType(kind))
+ handlerReturnLocal.SetLocalSymInfo("$handlerReturn")
}
+ handlerReturnLabel = mcode.DefineLabel()
+ handlerReturnKind = kind
}
- omitJumpBlocks.clear
- def omitJump(blk: BasicBlock) = {
- omitJumpBlocks += blk
- }
- var orderedBlocks: List[BasicBlock] = Nil
- def flatten(block: Block) {
- if (block == TopBlock) {
- for (b <- TopBlock.blocks) flatten(b)
- } else block match {
- case cb: CodeBlock =>
- orderedBlocks = orderedBlocks ::: cb.basicBlocks
- case bl: BlockList =>
- for (b <- bl.blocks) flatten(b)
- case cb: CatchBlock =>
- for (b <- cb.blocks) flatten(b)
- case eb: ExceptionBlock =>
- val handler = eb.handler
- if (settings.debug.value) {
- log("new exception block " + eb)
- log("try: " + eb.tryBlock)
- }
- addExHInstruction(eb.tryBlock.firstBasicBlock, new BeginExceptionBlock(handler))
- omitJump(eb.tryBlock.lastBasicBlock)
- flatten(eb.tryBlock)
- for (c <- eb.catchBlocks) {
- val t: MsilType = (if (c.exSym == NoSymbol) EXCEPTION
- else getType(c.exSym))
- addExHInstruction(c.firstBasicBlock, new BeginCatchBlock(handler, t))
- omitJump(c.lastBasicBlock)
- flatten(c)
- }
- if (handler.finalizer != null && handler.finalizer != NoFinalizer) {
- addExHInstruction(eb.finallyBlock.firstBasicBlock, new BeginFinallyBlock(handler))
- flatten(eb.finallyBlock)
- addExHInstruction(eb.finallyBlock.lastBasicBlock, new EndExceptionBlock(handler))
- omitJump(eb.finallyBlock.lastBasicBlock)
- } else {
- addExHInstruction(eb.catchBlocks.last.lastBasicBlock, new EndExceptionBlock(handler))
+ (handlerReturnLocal, handlerReturnLabel)
+ }
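A hedged, source-level sketch (hypothetical example, not taken from this patch) of the case returnFromHandler supports: a Scala method that returns from inside a try/finally.

    // Illustrative only: the `return` sits inside the protected region, so the
    // backend stores the value in $handlerReturn, emits `Leave handlerReturnLabel`,
    // and a single `Ret` is emitted after the last handler (see genCode above).
    def firstPositive(xs: Array[Int]): Int = {
      try {
        var i = 0
        while (i < xs.length) {
          if (xs(i) > 0) return xs(i)
          i += 1
        }
        -1
      } finally {
        println("done scanning")
      }
    }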
+
+    /** For a try/catch nested inside a finally, we can't use `Leave OutsideFinally`: the
+     *  Leave target has to be inside the finally (and it has to be the `endfinally` instruction).
+     *  So for every finalizer we keep a label that marks the place of its `endfinally`;
+     *  nested try/catch blocks will leave there.
+ */
+ val endFinallyLabels = new HashMap[ExceptionHandler, Label]()
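A minimal sketch of the source pattern this map exists for (hypothetical example, not from the patch): a try/catch nested inside a finally. A branch out of the inner try cannot `Leave` past the enclosing finally; it targets the label recorded here for that finalizer's `endfinally`.

    // Illustrative only: the inner try/catch lives inside the finalizer, so any
    // jump leaving it is emitted as `Leave endFinallyLabels(finalizer)`.
    def readFirst(r: java.io.Reader): Int = {
      try {
        r.read()
      } finally {
        try r.close()
        catch { case _: java.io.IOException => () }
      }
    }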
+
+ /** Computes which blocks are the beginning / end of a try or catch block */
+ private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = {
+ val visitedBlocks = new HashSet[BasicBlock]()
+
+ // handlers which have not been introduced so far
+ var openHandlers = m.exh
+
+
+ /** Example
+ * try {
+ * try {
+ * // *1*
+ * } catch {
+ * case h1 =>
+ * }
+ * } catch {
+ * case h2 =>
+ * case h3 =>
+ * try {
+ *
+ * } catch {
+ * case h4 => // *2*
+ * case h5 =>
+ * }
+ * }
+ */
+
+      // Stack of nested try blocks. Each block has a List of ExceptionHandler (multiple
+ // catch statements). Example *1*: Stack(List(h2, h3), List(h1))
+ val currentTryHandlers = new Stack[List[ExceptionHandler]]()
+
+ // Stack of nested catch blocks. The head of the list is the current catch block. The
+ // tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5))
+ val currentCatchHandlers = new Stack[List[ExceptionHandler]]()
+
+ for (b <- blocks) {
+
+ // are we past the current catch blocks?
+ def endHandlers(): List[ExceptionHandler] = {
+ var res: List[ExceptionHandler] = Nil
+ if (!currentCatchHandlers.isEmpty) {
+ val handler = currentCatchHandlers.top.head
+ if (!handler.blocks.contains(b)) {
+ // all blocks of the handler are either visited, or not part of the linearization (i.e. dead)
+ assert(handler.blocks.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
+ "Bad linearization of basic blocks inside catch. Found block not part of the handler\n"+
+ b.fullString +"\nwhile in catch-part of\n"+ handler)
+
+ val rest = currentCatchHandlers.pop.tail
+ if (rest.isEmpty) {
+ // all catch blocks of that exception handler are covered
+ res = handler :: endHandlers()
+ } else {
+              // there are more catch blocks for that try (handlers covering the same blocks)
+ currentCatchHandlers.push(rest)
+ beginCatchBlock(b) = rest.head
+ }
}
+ }
+ res
+ }
+ val end = endHandlers()
+ if (!end.isEmpty) endExBlock(b) = end
+
+ // are we past the current try block?
+ if (!currentTryHandlers.isEmpty) {
+ val handler = currentTryHandlers.top.head
+ if (!handler.covers(b)) {
+ // all of the covered blocks are visited, or not part of the linearization
+ assert(handler.covered.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
+ "Bad linearization of basic blocks inside try. Found non-covered block\n"+
+ b.fullString +"\nwhile in try-part of\n"+ handler)
+
+ assert(handler.startBlock == b,
+ "Bad linearization of basic blocks. The entry block of a catch does not directly follow the try\n"+
+ b.fullString +"\n"+ handler)
+
+ val handlers = currentTryHandlers.pop
+ currentCatchHandlers.push(handlers)
+ beginCatchBlock(b) = handler
+ }
}
- }
-
- flatten(TopBlock)
-
- assert(untreatedHandlers.forall((h) => h.covered.isEmpty),
- "untreated exception handlers left: " + untreatedHandlers)
- // remove catch blocks from empty handlers (finally-blocks remain)
- untreatedHandlers.foreach((h) => {
- orderedBlocks = orderedBlocks -- h.blocks
- })
-
- // take care of order in which exHInstructions are executed (BeginExceptionBlock as last)
- bb2exHInstructions.keysIterator.foreach((b) => {
- bb2exHInstructions(b).sort((i1, i2) => (!i1.isInstanceOf[BeginExceptionBlock]))
- })
-
- if (settings.debug.value) {
- log("after: " + orderedBlocks)
- log(" exhInstr: " + bb2exHInstructions)
+ // are there try blocks starting at b?
+ val (newHandlers, stillOpen) = openHandlers.partition(_.covers(b))
+ openHandlers = stillOpen
+
+ val newHandlersBySize = newHandlers.groupBy(_.covered.size)
+ // big handlers first, smaller ones are nested inside the try of the big one
+ // (checked by the assertions below)
+ val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
+
+ val beginHandlers = new ListBuffer[ExceptionHandler]
+ for (s <- sizes) {
+ val sHandlers = newHandlersBySize(s)
+ for (h <- sHandlers) {
+ assert(h.covered == sHandlers.head.covered,
+ "bad nesting of exception handlers. same size, but not covering same blocks\n"+
+ h +"\n"+ sHandlers.head)
+ assert(h.resultKind == sHandlers.head.resultKind,
+                 "bad nesting of exception handlers. same size, but not the same resultKind\n"+
+ h +"\n"+ sHandlers.head)
+ }
+ for (bigger <- beginHandlers; h <- sHandlers) {
+ assert(h.covered.subsetOf(bigger.covered),
+ "bad nesting of exception handlers. try blocks of smaller handler are not nested in bigger one.\n"+
+ h +"\n"+ bigger)
+ assert(h.blocks.toSet.subsetOf(bigger.covered),
+ "bad nesting of exception handlers. catch blocks of smaller handler are not nested in bigger one.\n"+
+ h +"\n"+ bigger)
+ }
+ beginHandlers += sHandlers.head
+ currentTryHandlers.push(sHandlers)
+ }
+ beginExBlock(b) = beginHandlers.toList
+ visitedBlocks += b
+ }
+
+      // if there are handlers left (i.e. handlers covering nothing, or a
+ // non-existent (dead) block), remove their catch-blocks.
+ val liveBlocks = if (openHandlers.isEmpty) blocks else {
+ blocks.filter(b => openHandlers.forall(h => !h.blocks.contains(b)))
+ }
+
+      /** There might be open handlers, but no more blocks. This happens when a try/catch ends
+ * with `throw` or `return`
+ * def foo { try { .. throw } catch { _ => .. throw } }
+ *
+ * In this case we need some code after the catch block for the auto-generated
+ * `leave` instruction. So we're adding a (dead) `throw new Exception`.
+ */
+ val rest = currentCatchHandlers.map(handlers => {
+ assert(handlers.length == 1, handlers)
+ handlers.head
+ }).toList
+
+ if (rest.isEmpty) {
+ liveBlocks
+ } else {
+ val b = m.code.newBlock
+ b.emit(Seq(
+ NEW(REFERENCE(definitions.ThrowableClass)),
+ DUP(REFERENCE(definitions.ObjectClass)),
+ CALL_METHOD(definitions.ThrowableClass.primaryConstructor, Static(true)),
+ THROW()
+ ))
+ b.close
+ endExBlock(b) = rest
+ liveBlocks ::: List(b)
}
-
- orderedBlocks
}
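For illustration (a hypothetical example, not part of the change), this is the shape of code that leaves handlers open with no block after them; the dead `throw new Exception` block appended above gives the auto-generated `leave` a target:

    // Illustrative only: both the try and the catch end in a throw, so no live
    // block follows the handler and a dead block must be appended after it.
    def rethrow(): Int =
      try throw new IllegalStateException("boom")
      catch { case e: Exception => throw new RuntimeException(e) }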
- var currentBlock: BasicBlock = _
- var lastBlock: BasicBlock = _
- var nextBlock: BasicBlock = _
-
- def genBlocks(l: List[BasicBlock]) {
- l match {
- case Nil => ()
- case x :: Nil => currentBlock = x; nextBlock = null; genBlock(x)
- case x :: y :: ys => currentBlock = x; nextBlock = y; genBlock(x); genBlocks(y :: ys)
- }
- }
+ /**
+ * @param block the BasicBlock to emit code for
+ * @param next the following BasicBlock, `null` if `block` is the last one
+ */
+ def genBlock(block: BasicBlock, prev: BasicBlock, next: BasicBlock) {
+ /** Creating objects works differently on .NET. On the JVM
+ * - NEW(type) => reference on Stack
+ * - DUP, load arguments, CALL_METHOD(constructor)
+ *
+ * On .NET, the NEW and DUP are ignored, but we emit a special method call
+ * - load arguments
+ * - NewObj(constructor) => reference on stack
+ *
+       * This variable tells whether the previous instruction was a NEW;
+       * if so, we expect a DUP, which is not emitted. */
+ var previousWasNEW = false
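A hedged illustration of the comment above (the source line and instruction names are illustrative, not compiler output):

    // Illustrative allocation and its two translations:
    val p = new java.awt.Point(1, 2)
    // ICode:  NEW Point; DUP; CONSTANT 1; CONSTANT 2; CALL_METHOD <init>
    // .NET:   the NEW only sets previousWasNEW, the DUP is swallowed, then
    //         Ldc_I4 1; Ldc_I4 2; Newobj Point::.ctor  (reference pushed)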
- var ignoreNextDup: Boolean = false
- val excResultLocals: Stack[LocalBuilder] = new Stack()
+ var lastLineNr: Int = 0
- def genBlock(b: BasicBlock) {
- // at begin of the first block, there's nothing to save =>
- // lastBlock != null is secure
- def saveResult(resType: MsilType) = if (resType != MVOID && lastBlock != null) {
- lastBlock.lastInstruction match {
- case THROW() => ()
- case _ =>
- val lb: LocalBuilder = excResultLocals.top
- mcode.Emit(OpCodes.Stloc, lb)
- }
- }
- if (bb2exHInstructions.contains(b)) {
- bb2exHInstructions(b).foreach((i) => i match {
- case BeginExceptionBlock(handler) =>
- if (settings.debug.value) log("begin ex blk: " + handler)
- mcode.BeginExceptionBlock()
- val resType = msilType(handler.resultKind)
- if (resType != MVOID) {
- val l = mcode.DeclareLocal(resType)
- l.SetLocalSymInfo("$exhResult")
- excResultLocals.push(l)
- }
- case BeginCatchBlock(handler, exType) =>
- if (settings.debug.value) log("begin catch blk: " + handler + ", tpe: " + exType)
- saveResult(msilType(handler.resultKind))
- mcode.BeginCatchBlock(exType)
- case BeginFinallyBlock(handler) =>
- saveResult(msilType(handler.resultKind))
- mcode.BeginFinallyBlock()
- case EndExceptionBlock(handler) => ()
- case _ => abort("unknown case: " + i)
- })
+ // EndExceptionBlock must happen before MarkLabel because it adds the
+ // Leave instruction. Otherwise, labels(block) points to the Leave
+ // (inside the catch) instead of the instruction afterwards.
+ for (handlers <- endExBlock.get(block); exh <- handlers) {
+ currentHandlers.pop()
+ for (l <- endFinallyLabels.get(exh))
+ mcode.MarkLabel(l)
+ mcode.EndExceptionBlock()
}
- mcode.MarkLabel(labels(b))
+ mcode.MarkLabel(labels(block))
if (settings.debug.value)
- log("Generating code for block: " + b)
-
- var lastLineNr: Int = 0
-
- for (instr <- b) {
+ log("Generating code for block: " + block)
- needAdditionalRet = false
+ for (handler <- beginCatchBlock.get(block)) {
+ if (!currentHandlers.isEmpty && currentHandlers.top.covered == handler.covered) {
+ currentHandlers.pop()
+ currentHandlers.push(handler)
+ }
+ if (handler.cls == NoSymbol) {
+ // `finally` blocks are represented the same as `catch`, but with no catch-type
+ mcode.BeginFinallyBlock()
+ } else {
+ val t = getType(handler.cls)
+ mcode.BeginCatchBlock(t)
+ }
+ }
+ for (handlers <- beginExBlock.get(block); exh <- handlers) {
+ currentHandlers.push(exh)
+ mcode.BeginExceptionBlock()
+ }
+ for (instr <- block) {
val currentLineNr = try {
instr.pos.line
} catch {
@@ -1218,6 +882,9 @@ abstract class GenMSIL extends SubComponent {
lastLineNr = currentLineNr
}
+ if (previousWasNEW)
+ assert(instr.isInstanceOf[DUP], block)
+
instr match {
case THIS(clasz) =>
mcode.Emit(OpCodes.Ldarg_0)
@@ -1253,9 +920,7 @@ abstract class GenMSIL extends SubComponent {
case FLOAT => mcode.Emit(OpCodes.Ldelem_R4)
case DOUBLE => mcode.Emit(OpCodes.Ldelem_R8)
case REFERENCE(cls) => mcode.Emit(OpCodes.Ldelem_Ref)
-
- // case ARRAY(elem) is not possible, for Array[Array[Int]], the
- // load will be case REFERENCE(java.lang.Object)
+ case ARRAY(elem) => mcode.Emit(OpCodes.Ldelem_Ref)
// case UNIT is not possible: an Array[Unit] will be an
// Array[scala.runtime.BoxedUnit] (-> case REFERENCE)
@@ -1264,21 +929,18 @@ abstract class GenMSIL extends SubComponent {
case LOAD_LOCAL(local) =>
if (settings.debug.value)
log("load_local for " + local)
- val isArg: Boolean = local.arg
+ val isArg = local.arg
val i = local.index
- if (isArg) {
+ if (isArg)
loadArg(mcode)(i)
- }
- else {
+ else
loadLocal(i, local, mcode)
- }
case LOAD_FIELD(field, isStatic) =>
if (settings.debug.value)
log("LOAD_FIELD with owner: " + field.owner +
" flags: " + Flags.flagsToString(field.owner.flags))
-
- var fieldInfo: FieldInfo = fields.get(field) match {
+ var fieldInfo = fields.get(field) match {
case Some(fInfo) => fInfo
case None =>
val fInfo = getType(field.owner).GetField(msilName(field))
@@ -1287,7 +949,6 @@ abstract class GenMSIL extends SubComponent {
}
mcode.Emit(if (isStatic) OpCodes.Ldsfld else OpCodes.Ldfld, fieldInfo)
-
case LOAD_MODULE(module) =>
if (settings.debug.value)
log("Generating LOAD_MODULE for: " + showsym(module))
@@ -1304,12 +965,13 @@ abstract class GenMSIL extends SubComponent {
case FLOAT => mcode.Emit(OpCodes.Stelem_R4)
case DOUBLE => mcode.Emit(OpCodes.Stelem_R8)
case REFERENCE(cls) => mcode.Emit(OpCodes.Stelem_Ref)
+            case ARRAY(elem) => mcode.Emit(OpCodes.Stelem_Ref) // @TODO: test this! (occurs when calling an Array[Object]* vararg param method)
- // case UNIT / ARRRAY are not possible (see comment at LOAD_ARRAY_ITEM)
+ // case UNIT not possible (see comment at LOAD_ARRAY_ITEM)
}
case STORE_LOCAL(local) =>
- val isArg: Boolean = local.arg
+ val isArg = local.arg
val i = local.index
if (settings.debug.value)
log("store_local for " + local + ", index " + i)
@@ -1341,7 +1003,7 @@ abstract class GenMSIL extends SubComponent {
mcode.Emit(OpCodes.Starg_S, 0)
case STORE_FIELD(field, isStatic) =>
- val fieldInfo: FieldInfo = fields.get(field) match {
+ val fieldInfo = fields.get(field) match {
case Some(fInfo) => fInfo
case None =>
val fInfo = getType(field.owner).GetField(msilName(field))
@@ -1350,11 +1012,9 @@ abstract class GenMSIL extends SubComponent {
}
mcode.Emit(if (isStatic) OpCodes.Stsfld else OpCodes.Stfld, fieldInfo)
-
case CALL_PRIMITIVE(primitive) =>
genPrimitive(primitive, instr.pos)
-
case CALL_METHOD(msym, style) =>
if (msym.isClassConstructor) {
val constructorInfo: ConstructorInfo = getConstructor(msym)
@@ -1362,6 +1022,12 @@ abstract class GenMSIL extends SubComponent {
// normal constructor calls are Static..
case Static(_) =>
if (method.symbol.isClassConstructor && method.symbol.owner == msym.owner)
+ // we're generating a constructor (method: IMethod is a constructor), and we're
+ // calling another constructor of the same class.
+
+ // @LUC TODO: this can probably break, namely when having: class A { def this { new A() } }
+                  // instead, CALL_METHOD should carry additional information so we know whether it's
+ // an instance creation constructor call or not.
mcode.Emit(OpCodes.Call, constructorInfo)
else
mcode.Emit(OpCodes.Newobj, constructorInfo)
@@ -1389,8 +1055,8 @@ abstract class GenMSIL extends SubComponent {
mcode.Emit(OpCodes.Add) // compute length (-start + end)
}
- var doEmit: Boolean = true
- types.get(msym.owner) match {
+ var doEmit = true
+ getTypeOpt(msym.owner) match {
case Some(typ) if (typ.IsEnum) => {
def negBool = {
mcode.Emit(OpCodes.Ldc_I4_0)
@@ -1472,12 +1138,13 @@ abstract class GenMSIL extends SubComponent {
}
}
- case BOX(boxType) => emitBox(mcode, boxType) //mcode.Emit(OpCodes.Box, msilType(boxType))
+ case BOX(boxType) => emitBox(mcode, boxType)
case UNBOX(boxType) => emitUnbox(mcode, boxType)
case NEW(REFERENCE(cls)) =>
- ignoreNextDup = true
+ // the next instruction must be a DUP, see comment on `var previousWasNEW`
+ previousWasNEW = true
// works also for arrays and reference-types
case CREATE_ARRAY(elem, dims) =>
@@ -1493,7 +1160,6 @@ abstract class GenMSIL extends SubComponent {
mcode.Emit(OpCodes.Ldc_I4_0)
mcode.Emit(OpCodes.Ceq)
-
// works for arrays and reference-types
// part from the scala reference: "S <: T does not imply
// Array[S] <: Array[T] in Scala. However, it is possible
@@ -1501,7 +1167,6 @@ abstract class GenMSIL extends SubComponent {
// is permitted in the host environment."
case CHECK_CAST(tpe) => mcode.Emit(OpCodes.Castclass, msilType(tpe))
-
// no SWITCH is generated when there's
// - a default case ("case _ => ...") in the matching expr
// - OR is used ("case 1 | 2 => ...")
@@ -1510,7 +1175,7 @@ abstract class GenMSIL extends SubComponent {
// if the int on stack is 4, and 4 is in the second list => jump
// to second label
// branches is List[BasicBlock]
- // the labels to jump to (the last one ist the default one)
+ // the labels to jump to (the last one is the default one)
val switchLocal = mcode.DeclareLocal(MINT)
// several switch variables will appear with the same name in the
@@ -1518,7 +1183,7 @@ abstract class GenMSIL extends SubComponent {
switchLocal.SetLocalSymInfo("$switch_var")
mcode.Emit(OpCodes.Stloc, switchLocal)
- var i: Int = 0
+ var i = 0
for (l <- tags) {
var targetLabel = labels(branches(i))
for (i <- l) {
@@ -1529,44 +1194,40 @@ abstract class GenMSIL extends SubComponent {
i += 1
}
val defaultTarget = labels(branches(i))
- if (nextBlock != defaultTarget && !omitJumpBlocks.contains(currentBlock))
+ if (next != defaultTarget)
mcode.Emit(OpCodes.Br, defaultTarget)
-
case JUMP(whereto) =>
- if (nextBlock != whereto && !omitJumpBlocks.contains(currentBlock))
+ val (leaveHandler, leaveFinally, lfTarget) = leavesHandler(block, whereto)
+ if (leaveHandler) {
+ if (leaveFinally) {
+ if (lfTarget.isDefined) mcode.Emit(OpCodes.Leave, lfTarget.get)
+ else mcode.Emit(OpCodes.Endfinally)
+ } else
+ mcode.Emit(OpCodes.Leave, labels(whereto))
+ } else if (next != whereto)
mcode.Emit(OpCodes.Br, labels(whereto))
-
case CJUMP(success, failure, cond, kind) =>
// cond is TestOp (see Primitives.scala), and can take
// values EQ, NE, LT, GE LE, GT
// kind is TypeKind
val isFloat = kind == FLOAT || kind == DOUBLE
- if (nextBlock == success || omitJumpBlocks.contains(currentBlock)) {
- emitBr(cond.negate, labels(failure), isFloat)
- } else {
- emitBr(cond, labels(success), isFloat)
- if (nextBlock != failure && !omitJumpBlocks.contains(currentBlock)) {
- mcode.Emit(OpCodes.Br, labels(failure))
- }
- }
+ val emit = (c: TestOp, l: Label) => emitBr(c, l, isFloat)
+ emitCondBr(block, cond, success, failure, next, emit)
case CZJUMP(success, failure, cond, kind) =>
- (kind: @unchecked) match {
- case BOOL | REFERENCE(_) =>
- if (nextBlock == success || omitJumpBlocks.contains(currentBlock)) {
- emitBrBool(cond.negate, labels(failure))
- } else {
- emitBrBool(cond, labels(success))
- if (nextBlock != failure && !omitJumpBlocks.contains(currentBlock)) {
- mcode.Emit(OpCodes.Br, labels(failure))
- }
- }
- }
+ emitCondBr(block, cond, success, failure, next, emitBrBool(_, _))
case RETURN(kind) =>
- mcode.Emit(OpCodes.Ret)
+ if (currentHandlers.isEmpty)
+ mcode.Emit(OpCodes.Ret)
+ else {
+ val (local, label) = returnFromHandler(kind)
+ if (kind != UNIT)
+ mcode.Emit(OpCodes.Stloc, local)
+ mcode.Emit(OpCodes.Leave, label)
+ }
case THROW() =>
mcode.Emit(OpCodes.Throw)
@@ -1575,12 +1236,11 @@ abstract class GenMSIL extends SubComponent {
mcode.Emit(OpCodes.Pop)
case DUP(kind) =>
- // needed to create new instances
- if (!ignoreNextDup) {
+ // see comment on `var previousWasNEW`
+ if (!previousWasNEW)
mcode.Emit(OpCodes.Dup)
- } else {
- ignoreNextDup = false
- }
+ else
+ previousWasNEW = false
case MONITOR_ENTER() =>
mcode.Emit(OpCodes.Call, MMONITOR_ENTER)
@@ -1588,35 +1248,12 @@ abstract class GenMSIL extends SubComponent {
case MONITOR_EXIT() =>
mcode.Emit(OpCodes.Call, MMONITOR_EXIT)
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
+ case SCOPE_ENTER(_) | SCOPE_EXIT(_) | LOAD_EXCEPTION() =>
()
}
} // end for (instr <- b) { .. }
-
- lastBlock = b // this way, saveResult knows lastBlock
-
- if (bb2exHInstructions.contains(b)) {
- bb2exHInstructions(b).foreach((i) => i match {
- case BeginExceptionBlock(handler) => ()
- case BeginCatchBlock(handler, exType) => ()
- case BeginFinallyBlock(handler) => ()
- case EndExceptionBlock(handler) =>
- if (settings.debug.value) log("end ex blk: " + handler)
- val resType = msilType(handler.resultKind)
- if (handler.finalizer == null || handler.finalizer == NoFinalizer)
- saveResult(resType)
- mcode.EndExceptionBlock()
- if (resType != MVOID) {
- val lb: LocalBuilder = excResultLocals.pop
- mcode.Emit(OpCodes.Ldloc, lb)
- } else
- needAdditionalRet = true
- case _ => abort("unknown case: " + i)
- })
- }
-
- } // end genBlock
+ }
def genPrimitive(primitive: Primitive, pos: Position) {
primitive match {
@@ -1739,10 +1376,85 @@ abstract class GenMSIL extends SubComponent {
code.Emit(OpCodes.Ldloc, localBuilders(local))
}
- ////////////////////// labels ///////////////////////
+ ////////////////////// branches ///////////////////////
+
+    /** Returns a triple (Boolean, Boolean, Option[Label])
+     *   - whether the jump leaves some exception block (try / catch / finally)
+     *   - whether it leaves a finally handler (the finally block, but not its try / catch)
+ * - a label where to jump for leaving the finally handler
+ * . None to leave directly using `endfinally`
+ * . Some(label) to emit `leave label` (for try / catch inside a finally handler)
+ */
+ def leavesHandler(from: BasicBlock, to: BasicBlock): (Boolean, Boolean, Option[Label]) =
+ if (currentHandlers.isEmpty) (false, false, None)
+ else {
+ val h = currentHandlers.head
+ val leaveHead = { h.covers(from) != h.covers(to) ||
+ h.blocks.contains(from) != h.blocks.contains(to) }
+ if (leaveHead) {
+ // we leave the innermost exception block.
+          // find out if we also leave some `finally` handler
+ currentHandlers.find(e => {
+ e.cls == NoSymbol && e.blocks.contains(from) != e.blocks.contains(to)
+ }) match {
+ case Some(finallyHandler) =>
+ if (h == finallyHandler) {
+ // the finally handler is the innermost, so we can emit `endfinally` directly
+ (true, true, None)
+ } else {
+ // we need to `Leave` to the `endfinally` of the next outer finally handler
+ val l = endFinallyLabels.getOrElseUpdate(finallyHandler, mcode.DefineLabel())
+ (true, true, Some(l))
+ }
+ case None =>
+ (true, false, None)
+ }
+ } else (false, false, None)
+ }
+ def emitCondBr(block: BasicBlock, cond: TestOp, success: BasicBlock, failure: BasicBlock,
+ next: BasicBlock, emitBrFun: (TestOp, Label) => Unit) {
+ val (sLeaveHandler, sLeaveFinally, slfTarget) = leavesHandler(block, success)
+ val (fLeaveHandler, fLeaveFinally, flfTarget) = leavesHandler(block, failure)
- val labels: HashMap[BasicBlock, Label] = new HashMap() // labels for branches
+ if (sLeaveHandler || fLeaveHandler) {
+ val sLabelOpt = if (sLeaveHandler) {
+ val leaveSLabel = mcode.DefineLabel()
+ emitBrFun(cond, leaveSLabel)
+ Some(leaveSLabel)
+ } else {
+ emitBrFun(cond, labels(success))
+ None
+ }
+
+ if (fLeaveHandler) {
+ if (fLeaveFinally) {
+ if (flfTarget.isDefined) mcode.Emit(OpCodes.Leave, flfTarget.get)
+ else mcode.Emit(OpCodes.Endfinally)
+ } else
+ mcode.Emit(OpCodes.Leave, labels(failure))
+ } else
+ mcode.Emit(OpCodes.Br, labels(failure))
+
+ sLabelOpt.map(l => {
+ mcode.MarkLabel(l)
+ if (sLeaveFinally) {
+ if (slfTarget.isDefined) mcode.Emit(OpCodes.Leave, slfTarget.get)
+ else mcode.Emit(OpCodes.Endfinally)
+ } else
+ mcode.Emit(OpCodes.Leave, labels(success))
+ })
+ } else {
+ if (next == success) {
+ emitBrFun(cond.negate, labels(failure))
+ } else {
+ emitBrFun(cond, labels(success))
+ if (next != failure) {
+ mcode.Emit(OpCodes.Br, labels(failure))
+ }
+ }
+ }
+ }
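As a rough sketch of the emission order implemented above (pseudo-CIL in comments, illustrative only), for a conditional branch whose success target lies outside the current handler:

    // illustrative shape, assuming only the success edge leaves the handler:
    //   brtrue   L_leaveSuccess        // emitBrFun(cond, leaveSLabel)
    //   br       labels(failure)       // or leave / endfinally if failure also leaves
    // L_leaveSuccess:
    //   leave    labels(success)       // or leave to the enclosing finally's endfinally label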
def emitBr(condition: TestOp, dest: Label, isFloat: Boolean) {
condition match {
@@ -1758,7 +1470,7 @@ abstract class GenMSIL extends SubComponent {
def emitBrBool(cond: TestOp, dest: Label) {
cond match {
// EQ -> Brfalse, NE -> Brtrue; this is because we come from
- // a CZJUMP. If the value on the stack is 0 (e.g. a boolen
+ // a CZJUMP. If the value on the stack is 0 (e.g. a boolean
// method returned false), and we are in the case EQ, then
// we need to emit Brfalse (EQ Zero means false). vice versa
case EQ => mcode.Emit(OpCodes.Brfalse, dest)
@@ -1766,12 +1478,6 @@ abstract class GenMSIL extends SubComponent {
}
}
- def makeLabels(bs: List[BasicBlock]) {
- if (settings.debug.value)
- log("Making labels for: " + method)
- for (bb <- bs) labels(bb) = mcode.DefineLabel()
- }
-
////////////////////// local vars ///////////////////////
/**
@@ -1791,7 +1497,7 @@ abstract class GenMSIL extends SubComponent {
idx += 1 // sizeOf(l.kind)
}
- val locvars = m.locals -- params
+ val locvars = m.locals filterNot (params contains)
idx = 0
for (l <- locvars) {
@@ -1827,9 +1533,10 @@ abstract class GenMSIL extends SubComponent {
else if (sym == definitions.NullClass)
return "scala.runtime.Null$"
- (if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.fullNameString
- else
+ (if (sym.isClass || (sym.isModule && !sym.isMethod)) {
+ if (sym.isNestedClass) sym.simpleName
+ else sym.fullName
+ } else
sym.simpleName.toString().trim()) + suffix
}
@@ -1873,9 +1580,9 @@ abstract class GenMSIL extends SubComponent {
mf = mf | FieldAttributes.Static
else {
mf = mf | MethodAttributes.Virtual
- if (sym.isFinal && !types(sym.owner).IsInterface)
+ if (sym.isFinal && !getType(sym.owner).IsInterface)
mf = mf | MethodAttributes.Final
- if (sym.hasFlag(Flags.DEFERRED) || types(sym.owner).IsInterface)
+ if (sym.hasFlag(Flags.DEFERRED) || getType(sym.owner).IsInterface)
mf = mf | MethodAttributes.Abstract
}
}
@@ -1895,7 +1602,7 @@ abstract class GenMSIL extends SubComponent {
if (sym.isStaticMember)
mf = mf | FieldAttributes.Static
- // TRANSIENT: "not nerialized", VOLATILE: doesn't exist on .net
+ // TRANSIENT: "not serialized", VOLATILE: doesn't exist on .net
// TODO: add this annotation also if the class has the custom attribute
// System.NotSerializedAttribute
sym.annotations.foreach( a => a match {
@@ -1959,7 +1666,14 @@ abstract class GenMSIL extends SubComponent {
case FLOAT => MFLOAT
case DOUBLE => MDOUBLE
case REFERENCE(cls) => getType(cls)
- case ARRAY(elem) => clrTypes.mkArrayType(msilType(elem))
+ case ARRAY(elem) =>
+ msilType(elem) match {
+ // For type builders, cannot call "clrTypes.mkArrayType" because this looks up
+ // the type "tp" in the assembly (not in the HashMap "types" of the backend).
+        // This can fail for nested types because the builders are not complete yet.
+ case tb: TypeBuilder => tb.MakeArrayType()
+ case tp: MsilType => clrTypes.mkArrayType(tp)
+ }
}
private def msilType(tpe: Type): MsilType = msilType(toTypeKind(tpe))
@@ -1968,21 +1682,27 @@ abstract class GenMSIL extends SubComponent {
sym.tpe.paramTypes.map(msilType).toArray
}
- def getType(sym: Symbol): MsilType = types.get(sym) match {
- case Some(typ) => typ
+ def getType(sym: Symbol) = getTypeOpt(sym).getOrElse(abort(showsym(sym)))
+
+ /**
+     * Get an MSIL type from a symbol. First look in the clrTypes.types map, then
+     * look up the name using clrTypes.getType
+ */
+ def getTypeOpt(sym: Symbol): Option[MsilType] = types.get(sym) match {
+ case typ @ Some(_) => typ
case None =>
def typeString(sym: Symbol): String = {
val s = if (sym.isNestedClass) typeString(sym.owner) +"+"+ sym.simpleName
- else sym.fullNameString
+ else sym.fullName
if (sym.isModuleClass && !sym.isTrait) s + "$" else s
}
val name = typeString(sym)
val typ = clrTypes.getType(name)
if (typ == null)
- throw new Error(showsym(sym) + " with name " + name)
+ None
else {
- clrTypes.types(sym) = typ
- typ
+ types(sym) = typ
+ Some(typ)
}
}
@@ -1992,10 +1712,20 @@ abstract class GenMSIL extends SubComponent {
}
def createTypeBuilder(iclass: IClass) {
+ /**
+ * First look in the clrTypes.types map, then see if it's a class we're
+ * currently compiling by looking at the icodes.classes map, then finally
+       * look up the name using clrTypes.getType (by calling getType).
+ */
def msilTypeFromSym(sym: Symbol): MsilType = {
- types.get(sym) match {
- case Some(mtype) => mtype
- case None => createTypeBuilder(classes(sym)); types(sym)
+ types.get(sym).getOrElse {
+ classes.get(sym) match {
+ case Some(iclass) =>
+ createTypeBuilder(iclass)
+ types (sym)
+ case None =>
+ getType(sym)
+ }
}
}
@@ -2005,7 +1735,7 @@ abstract class GenMSIL extends SubComponent {
def isInterface(s: Symbol) = s.isTrait && !s.isImplClass
val parents: List[Type] =
if (sym.info.parents.isEmpty) List(definitions.ObjectClass.tpe)
- else sym.info.parents.removeDuplicates
+ else sym.info.parents.distinct
val superType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol)
if (settings.debug.value)
@@ -2051,7 +1781,7 @@ abstract class GenMSIL extends SubComponent {
for (ifield <- iclass.fields) {
val sym = ifield.symbol
if (settings.debug.value)
- log("Adding field: " + sym.fullNameString)
+ log("Adding field: " + sym.fullName)
var attributes = msilFieldFlags(sym)
val fBuilder = mtype.DefineField(msilName(sym), msilType(sym.tpe), attributes)
@@ -2064,7 +1794,7 @@ abstract class GenMSIL extends SubComponent {
val sym = m.symbol
if (settings.debug.value)
log("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
- sym.owner.fullNameString + "::" + sym.name)
+ sym.owner.fullName + "::" + sym.name)
val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder]
assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType)
@@ -2151,7 +1881,7 @@ abstract class GenMSIL extends SubComponent {
case Some(sym) => sym
case None =>
//val mclass = types(moduleClassSym)
- val mClass = clrTypes.getType(moduleClassSym.fullNameString + "$")
+ val mClass = clrTypes.getType(moduleClassSym.fullName + "$")
val mfield = mClass.GetField("MODULE$")
assert(mfield ne null, "module not found " + showsym(moduleClassSym))
fields(moduleClassSym) = mfield
@@ -2163,7 +1893,7 @@ abstract class GenMSIL extends SubComponent {
/** Adds a static initializer which creates an instance of the module
* class (calls the primary constructor). A special primary constructor
- * will be generated (notInitializedModules) which stores the new intance
+ * will be generated (notInitializedModules) which stores the new instance
* in the MODULE$ field right after the super call.
*/
private def addStaticInit(sym: Symbol) {
@@ -2335,7 +2065,7 @@ abstract class GenMSIL extends SubComponent {
if (constr eq null) {
System.out.println("Cannot find constructor " + sym.owner + "::" + sym.name)
System.out.println("scope = " + sym.owner.tpe.decls)
- throw new Error(sym.fullNameString)
+ abort(sym.fullName)
}
else {
mapConstructor(sym, constr)
@@ -2369,7 +2099,7 @@ abstract class GenMSIL extends SubComponent {
if (method eq null) {
System.out.println("Cannot find method " + sym.owner + "::" + msilName(sym))
System.out.println("scope = " + sym.owner.tpe.decls)
- throw new Error(sym.fullNameString)
+ abort(sym.fullName)
}
else {
mapMethod(sym, method)
@@ -2420,7 +2150,7 @@ abstract class GenMSIL extends SubComponent {
}
/*
- * add maping for member with name and paramTypes to member
+ * add mapping for member with name and paramTypes to member
* newName of newClass (same parameters)
*/
private def mapMethod(
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 248b24bc43..28aadda14d 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -1,15 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Iulian Dragos
*/
-// $Id$
package scala.tools.nsc
package backend.opt;
import scala.collection.mutable.{Map, HashMap};
-import scala.tools.nsc.backend.icode.analysis.LubError;
+import scala.tools.nsc.backend.icode.analysis.LubException;
import scala.tools.nsc.symtab._;
/**
@@ -37,7 +36,7 @@ abstract class ClosureElimination extends SubComponent {
}
/**
- * Remove references to the environemnt through fields of a closure object.
+ * Remove references to the environment through fields of a closure object.
* This has to be run after an 'apply' method has been inlined, but it still
* references the closure object.
*
@@ -105,16 +104,12 @@ abstract class ClosureElimination extends SubComponent {
var info = cpp.in(bb)
log("Cpp info at entry to block " + bb + ": " + info)
- for (i <- bb.toList) {
+ for (i <- bb) {
i match {
case LOAD_LOCAL(l) if (info.bindings.isDefinedAt(LocalVar(l))) =>
val t = info.getBinding(l)
t match {
- case Deref(LocalVar(v)) =>
- bb.replaceInstruction(i, valueToInstruction(t));
- log("replaced " + i + " with " + t)
-
- case Deref(This) =>
+ case Deref(LocalVar(_)) | Deref(This) | Const(_) =>
bb.replaceInstruction(i, valueToInstruction(t));
log("replaced " + i + " with " + t)
@@ -181,7 +176,7 @@ abstract class ClosureElimination extends SubComponent {
}
}
}} catch {
- case e: LubError =>
+ case e: LubException =>
Console.println("In method: " + m)
Console.println(e)
e.printStackTrace
@@ -201,10 +196,7 @@ abstract class ClosureElimination extends SubComponent {
/** is field 'f' accessible from method 'm'? */
def accessible(f: Symbol, m: Symbol): Boolean =
- f.isPublic || (f.hasFlag(Flags.PROTECTED) && (enclPackage(f) == enclPackage(m)))
-
- private def enclPackage(sym: Symbol): Symbol =
- if ((sym == NoSymbol) || sym.isPackageClass) sym else enclPackage(sym.owner)
+ f.isPublic || (f.hasFlag(Flags.PROTECTED) && (f.enclosingPackageClass == m.enclosingPackageClass))
} /* class ClosureElim */
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 9f5701ff45..e365b3bc08 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -1,16 +1,14 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Iulian Dragos
*/
-// $Id$
package scala.tools.nsc
package backend.opt
import scala.collection._
import scala.collection.immutable.{Map, HashMap, Set, HashSet}
-import scala.tools.nsc.backend.icode.analysis.LubError
import scala.tools.nsc.symtab._
/**
@@ -82,7 +80,7 @@ abstract class DeadCodeElimination extends SubComponent {
collectRDef(m)
mark
sweep(m)
- accessedLocals = accessedLocals.removeDuplicates
+ accessedLocals = accessedLocals.distinct
if (m.locals diff accessedLocals nonEmpty) {
log("Removed dead locals: " + (m.locals diff accessedLocals))
m.locals = accessedLocals.reverse
@@ -131,7 +129,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
/** Mark useful instructions. Instructions in the worklist are each inspected and their
- * dependecies are marked useful too, and added to the worklist.
+ * dependencies are marked useful too, and added to the worklist.
*/
def mark {
// log("Starting with worklist: " + worklist)
@@ -218,7 +216,7 @@ abstract class DeadCodeElimination extends SubComponent {
private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
- for (bb <- m.code.blocks.toList) {
+ for (bb <- m.code.blocks) {
assert(bb.closed, "Open block in computeCompensations")
for ((i, idx) <- bb.toList.zipWithIndex) {
if (!useful(bb)(idx)) {
@@ -226,8 +224,20 @@ abstract class DeadCodeElimination extends SubComponent {
log("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
- if (!compensations.isDefinedAt(d))
- compensations(d) = List(DROP(consumedType))
+ val (bb, idx) = d
+ bb(idx) match {
+ case DUP(_) if idx > 0 =>
+ bb(idx - 1) match {
+ case nw @ NEW(_) =>
+ val init = findInstruction(bb, nw.init)
+ log("Moving DROP to after <init> call: " + nw.init)
+ compensations(init) = List(DROP(consumedType))
+ case _ =>
+ compensations(d) = List(DROP(consumedType))
+ }
+ case _ =>
+ compensations(d) = List(DROP(consumedType))
+ }
}
}
}
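A hedged source-level illustration of the new case (hypothetical example): an allocation whose result is dead. The compensating DROP cannot go where the eliminated DUP was; it is moved to just after the constructor call.

    // Illustrative only: the reference produced by the allocation is unused,
    // so the DROP compensation is emitted right after the <init> call.
    def touch() { new java.io.File("/tmp/marker") }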
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 321b27b030..5e9898a7e6 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Iulian Dragos
*/
-// $Id$
package scala.tools.nsc
package backend.opt
@@ -23,6 +22,18 @@ abstract class Inliners extends SubComponent {
val phaseName = "inliner"
+ /** Debug - for timing the inliner. */
+ private def timed[T](s: String, body: => T): T = {
+ val t1 = System.currentTimeMillis()
+ val res = body
+ val t2 = System.currentTimeMillis()
+ val ms = (t2 - t1).toInt
+ if (ms >= 2000)
+ println("%s: %d milliseconds".format(s, ms))
+
+ res
+ }
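A hypothetical call site for this helper (identifiers illustrative, not part of the patch):

    // Prints a timing line only when the wrapped body takes 2000 ms or longer.
    timed("analyzing " + cls, analyzeClass(cls))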
+
/** The maximum size in basic blocks of methods considered for inlining. */
final val MAX_INLINE_SIZE = 16
@@ -92,7 +103,7 @@ abstract class Inliners extends SubComponent {
}
val instrAfter = block.toList.drop(instrBefore.length + 1);
- assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instrcution in block!");
+ assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instruction in block!");
// store the '$this' into the special local
val inlinedThis = new Local(caller.symbol.newVariable(instr.pos, freshName("$inlThis")), REFERENCE(definitions.ObjectClass), false);
@@ -266,12 +277,15 @@ abstract class Inliners extends SubComponent {
assert(pending.isEmpty, "Pending NEW elements: " + pending)
}
+ /** The current iclass */
+ private var currentIClazz: IClass = _
+
def analyzeClass(cls: IClass): Unit = if (settings.inline.value) {
if (settings.debug.value)
log("Analyzing " + cls);
- cls.methods.foreach { m => if (!m.symbol.isConstructor) analyzeMethod(m)
- }}
-
+ this.currentIClazz = cls
+ cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
+ }
val tfa = new analysis.MethodTFA();
tfa.stat = settings.Ystatistics.value
@@ -281,7 +295,7 @@ abstract class Inliners extends SubComponent {
override def default(k: Symbol) = 0
}
- def analyzeMethod(m: IMethod): Unit = {//try {
+ def analyzeMethod(m: IMethod): Unit = {
var retry = false
var count = 0
fresh.clear
@@ -289,17 +303,25 @@ abstract class Inliners extends SubComponent {
do {
retry = false;
- if (m.code ne null) {
- if (settings.debug.value)
- log("Analyzing " + m + " count " + count + " with " + m.code.blocks.length + " blocks");
+ if (m.symbol.hasAnnotation(ScalaInlineAttr)) {
+ log("Not inlining into " + m.symbol.originalName.decode + " because it is marked @inline.")
+ }
+ else if (m.code ne null) {
+ log("Analyzing " + m + " count " + count + " with " + m.code.blocks.length + " blocks");
tfa.init(m)
tfa.run
for (bb <- linearizer.linearize(m)) {
var info = tfa.in(bb);
- for (i <- bb.toList) {
+ for (i <- bb) {
if (!retry) {
i match {
case CALL_METHOD(msym, Dynamic) =>
+ def warnNoInline(reason: String) = {
+ if (msym.hasAnnotation(ScalaInlineAttr) && !m.symbol.hasFlag(Flags.BRIDGE))
+ currentIClazz.cunit.warning(i.pos,
+ "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
+ }
+
val receiver = info.stack.types.drop(msym.info.paramTypes.length).head match {
case REFERENCE(s) => s;
case _ => NoSymbol;
@@ -308,11 +330,11 @@ abstract class Inliners extends SubComponent {
if (receiver != msym.owner && receiver != NoSymbol) {
if (settings.debug.value)
log("" + i + " has actual receiver: " + receiver);
- }
- if (!concreteMethod.isFinal && receiver.isFinal) {
- concreteMethod = lookupImpl(concreteMethod, receiver)
- if (settings.debug.value)
- log("\tlooked up method: " + concreteMethod.fullNameString)
+ if (!concreteMethod.isEffectivelyFinal && receiver.isFinal) {
+ concreteMethod = lookupImpl(concreteMethod, receiver)
+ if (settings.debug.value)
+ log("\tlooked up method: " + concreteMethod.fullName)
+ }
}
if (shouldLoad(receiver, concreteMethod)) {
@@ -322,11 +344,11 @@ abstract class Inliners extends SubComponent {
log("Treating " + i
+ "\n\treceiver: " + receiver
+ "\n\ticodes.available: " + icodes.available(receiver)
- + "\n\tconcreteMethod.isFinal: " + concreteMethod.isFinal);
+                   + "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal);
if ( icodes.available(receiver)
&& (isClosureClass(receiver)
- || concreteMethod.isFinal
+ || concreteMethod.isEffectivelyFinal
|| receiver.isFinal)) {
icodes.icode(receiver).get.lookupMethod(concreteMethod) match {
case Some(inc) =>
@@ -350,29 +372,27 @@ abstract class Inliners extends SubComponent {
+ "\n\tinc.code ne null: " + (inc.code ne null) + (if (inc.code ne null)
"\n\tisSafeToInline(m, inc, info.stack): " + isSafeToInline(m, inc, info.stack)
+ "\n\tshouldInline heuristics: " + shouldInline(m, inc) else ""));
+ warnNoInline(
+ if (inc.code eq null) "bytecode was unavailable"
+ else if (!isSafeToInline(m, inc, info.stack)) "it is unsafe (target may reference private fields)"
+ else "a bug (run with -Ylog:inline -Ydebug for more information)")
}
case None =>
+ warnNoInline("bytecode was not available")
if (settings.debug.value)
log("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
}
- }
+ } else
+ warnNoInline(if (icodes.available(receiver)) "it is not final" else "bytecode was not available")
case _ => ();
}
info = tfa.interpret(info, i)
}}}
- if (tfa.stat) log(m.symbol.fullNameString + " iterations: " + tfa.iterations + " (size: " + m.code.blocks.length + ")")
+ if (tfa.stat) log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + m.code.blocks.length + ")")
}} while (retry && count < 15)
m.normalize
-// } catch {
-// case e =>
-// Console.println("############# Caught exception: " + e + " #################");
-// Console.println("\nMethod: " + m +
-// "\nMethod owner: " + m.symbol.owner);
-// e.printStackTrace();
-// m.dump
-// throw e
- }
+ }
def isMonadMethod(method: Symbol): Boolean =
@@ -384,7 +404,7 @@ abstract class Inliners extends SubComponent {
/** Should the given method be loaded from disk? */
def shouldLoad(receiver: Symbol, method: Symbol): Boolean = {
if (settings.debug.value) log("shouldLoad: " + receiver + "." + method)
- ((method.isFinal && isMonadMethod(method) && isHigherOrderMethod(method))
+ ((method.isEffectivelyFinal && isMonadMethod(method) && isHigherOrderMethod(method))
|| (receiver.enclosingPackage == definitions.ScalaRunTimeModule.enclosingPackage)
|| (receiver == definitions.PredefModule.moduleClass)
|| (method.hasAnnotation(ScalaInlineAttr)))
@@ -426,8 +446,17 @@ abstract class Inliners extends SubComponent {
case Some(b) =>
callsNonPublic = b
case None =>
+ // Avoid crashing the compiler if there are open blocks.
+ callee.code.blocks filterNot (_.closed) foreach { b =>
+ currentIClazz.cunit.warning(callee.symbol.pos,
+ "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
+ " caller = " + caller + ", callee = " + callee
+ )
+ return false
+ }
+
breakable {
- for (b <- callee.code.blocks; i <- b.toList)
+ for (b <- callee.code.blocks; i <- b)
i match {
case CALL_METHOD(m, style) =>
if (m.hasFlag(Flags.PRIVATE) ||
@@ -470,7 +499,7 @@ abstract class Inliners extends SubComponent {
}
private def lookupImpl(meth: Symbol, clazz: Symbol): Symbol = {
- //println("\t\tlooking up " + meth + " in " + clazz.fullNameString + " meth.owner = " + meth.owner)
+ //println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
if (meth.owner == clazz
|| clazz == definitions.NullClass
|| clazz == definitions.NothingClass) meth
@@ -505,7 +534,7 @@ abstract class Inliners extends SubComponent {
var score = 0
if (callee.code.blocks.length <= SMALL_METHOD_SIZE) score = score + 1
if (caller.code.blocks.length <= SMALL_METHOD_SIZE
- && ((caller.code.blocks.length + callee.code.blocks.length) > SMALL_METHOD_SIZE)) {
+ && ((caller.code.blocks.length + callee.code.blocks.length - 1) > SMALL_METHOD_SIZE)) {
score -= 1
if (settings.debug.value)
log("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
@@ -526,13 +555,10 @@ abstract class Inliners extends SubComponent {
}
} /* class Inliner */
- /** Is the given class a subtype of a function trait? */
+ /** Is the given class a closure? */
def isClosureClass(cls: Symbol): Boolean = {
- val res = cls.isFinal && cls.hasFlag(Flags.SYNTHETIC) && !cls.isModuleClass &&
- cls.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t;
- definitions.FunctionClass exists sym.==
- }
+ val res = (cls.isFinal && cls.hasFlag(Flags.SYNTHETIC)
+ && !cls.isModuleClass && cls.isAnonymousFunction)
res
}
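As a quick illustration of the warnNoInline path added above (a hypothetical user-level sketch, not part of this patch): a call site whose target is marked @inline but cannot be inlined now triggers a warning, assuming the inliner is enabled (e.g. via -optimise):

    class Greeter {
      @inline def greet(name: String): String = "hello, " + name  // inlining requested
    }
    // If greet's bytecode is not available at the call site, or inlining it is unsafe,
    // compiling a caller now reports roughly:
    //   warning: Could not inline required method greet because bytecode was not available.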
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 80a068dcdf..6aef637902 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -18,10 +18,14 @@ abstract class Changes {
abstract class Change
+ private lazy val annotationsChecked =
+ List(definitions.getClass("scala.specialized")) // Any others that should be checked?
+
/** Are the new modifiers more restrictive than the old ones? */
private def moreRestrictive(from: Long, to: Long): Boolean =
((((to & PRIVATE) != 0L) && (from & PRIVATE) == 0L)
- || (((to & PROTECTED) != 0L) && (from & PROTECTED) == 0L))
+ || (((to & PROTECTED) != 0L) && (from & PROTECTED) == 0L)) ||
+ ((from & IMPLICIT) != (to & IMPLICIT))
/** An entity in source code, either a class or a member definition.
* Name is fully-qualified.
@@ -35,18 +39,26 @@ abstract class Changes {
case class Changed(e: Entity)(implicit val reason: String) extends Change {
override def toString = "Changed(" + e + ")[" + reason + "]"
}
+ case class ParentChanged(e: Entity) extends Change
+
+ private val changedTypeParams = new mutable.HashSet[String]
- private def sameSymbol(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.fullNameString == sym2.fullNameString
+ private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean =
+ if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName
+ private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
+ sym1.flags == sym2.flags
+ private def sameAnnotations(sym1: Symbol, sym2: Symbol): Boolean =
+ annotationsChecked.forall(a =>
+ (sym1.hasAnnotation(a) == sym2.hasAnnotation(a)))
- private def sameType(tp1: Type, tp2: Type) = {
+ private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = {
def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]"
val res = sameType0(tp1, tp2)
-// if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2))
+ //if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2))
res
}
- private def sameType0(tp1: Type, tp2: Type): Boolean = ((tp1, tp2) match {
+ private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match {
/*case (ErrorType, _) => false
case (WildcardType, _) => false
case (_, ErrorType) => false
@@ -65,9 +77,18 @@ abstract class Changes {
case (ConstantType(value1), ConstantType(value2)) =>
value1 == value2
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- sameType(pre1, pre2) && sameSymbol(sym1, sym2) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- sameTypes(args1, args2))
+ val testSymbols =
+ if (!sameSymbol(sym1, sym2)) {
+ val v = (!strict && sym1.isType && sym2.isType && sameType(sym1.info, sym2.info))
+ if (v) changedTypeParams += sym1.fullName
+ v
+ } else
+ !sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName)
+
+ testSymbols && sameType(pre1, pre2) &&
+ (sym1.variance == sym2.variance) &&
+ ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
+ sameTypes(args1, args2))
// @M! normalize reduces higher-kinded case to PolyType's
case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
@@ -84,35 +105,28 @@ abstract class Changes {
}
}
sameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
-
- case (MethodType(params1, res1), MethodType(params2, res2)) =>
+ case (mt1 @ MethodType(params1, res1), mt2 @ MethodType(params2, res2)) =>
// new dependent types: probably fix this, use substSym as done for PolyType
- (sameTypes(tp1.paramTypes, tp2.paramTypes) &&
- sameType(res1, res2) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
-
+ sameTypes(tp1.paramTypes, tp2.paramTypes) &&
+ (tp1.params corresponds tp2.params)((t1, t2) => sameSymbol(t1, t2, true) && sameFlags(t1, t2)) &&
+ sameType(res1, res2) &&
+ mt1.isImplicit == mt2.isImplicit
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- (tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
- ((p1, p2) => sameType(p1.info, p2.info)) &&
- sameType(res1, res2))
+ sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- (tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
- ((p1, p2) => sameType(p1.info, p2.info)) &&
- sameType(res1, res2))
+ sameTypeParams(tparams1, tparams2)(false) && sameType(res1, res2)(false)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
- sameType(lo1, lo2) && sameType(hi1, hi2)
+ sameType(lo1, lo2) && sameType(hi1, hi2)
case (BoundedWildcardType(bounds), _) =>
bounds containsType tp2
case (_, BoundedWildcardType(bounds)) =>
bounds containsType tp1
-
case (AnnotatedType(_,_,_), _) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
+ sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
case (_, AnnotatedType(_,_,_)) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
-
+ annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
+ sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
case (_: SingletonType, _: SingletonType) =>
var origin1 = tp1
while (origin1.underlying.isInstanceOf[SingletonType]) {
@@ -133,52 +147,65 @@ abstract class Changes {
((tp1n ne tp1) || (tp2n ne tp2)) && sameType(tp1n, tp2n)
}
- def sameTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- (tps1.length == tps2.length
- && List.forall2(tps1, tps2)(sameType))
+ private def sameTypeParams(tparams1: List[Symbol], tparams2: List[Symbol])(implicit strict: Boolean) =
+ sameTypes(tparams1 map (_.info), tparams2 map (_.info)) &&
+ sameTypes(tparams1 map (_.tpe), tparams2 map (_.tpe)) &&
+ (tparams1 corresponds tparams2)((t1, t2) => sameAnnotations(t1, t2))
- /** Return the list of changes between 'from' and 'to'.
+ private def sameTypes(tps1: List[Type], tps2: List[Type])(implicit strict: Boolean) =
+ (tps1 corresponds tps2)(sameType(_, _))
+
+ /** Return the list of changes between 'from' and 'toSym.info'.
*/
- def changeSet(from: Symbol, to: Symbol): List[Change] = {
+ def changeSet(from: Type, toSym: Symbol): List[Change] = {
implicit val defaultReason = "types"
-// println("changeSet " + from + "(" + from.info + ")"
-// + " vs " + to + "(" + to.info + ")")
+ implicit val defaultStrictTypeRefTest = true
+
+ val to = toSym.info
+ changedTypeParams.clear
+ def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE)
val cs = new mutable.ListBuffer[Change]
- if ((from.info.parents zip to.info.parents) exists { case (t1, t2) => !sameType(t1, t2) })
- cs += Changed(toEntity(from))(from.info.parents.zip(to.info.parents).toString)
- if (from.typeParams != to.typeParams)
- cs += Changed(toEntity(from))(" tparams: " + from.typeParams.zip(to.typeParams))
+ if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) })
+ cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString)
+ if (!sameTypeParams(from.typeParams, to.typeParams)(false))
+ cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams))
// new members not yet visited
val newMembers = mutable.HashSet[Symbol]()
- newMembers ++= to.info.decls.iterator
+ newMembers ++= to.decls.iterator filter omitSymbols
- for (o <- from.info.decls.iterator;
- val n = to.info.decl(o.name)) {
+ for (o <- from.decls.iterator filter omitSymbols) {
+ val n = to.decl(o.name)
newMembers -= n
- if (!o.hasFlag(Flags.PRIVATE | Flags.LOCAL | Flags.LIFTED)) {
- if (o.isClass)
- cs ++= changeSet(o, n)
- else if (n == NoSymbol)
- cs += Removed(toEntity(o))
- else {
- val newSym = n.suchThat(ov => sameType(ov.tpe, o.tpe))
- if (newSym == NoSymbol || moreRestrictive(o.flags, newSym.flags)) {
- cs += Changed(toEntity(o))(n + " changed from " + o.tpe + " to " + n.tpe + " flags: " + Flags.flagsToString(o.flags))
- } else
- newMembers -= newSym
- }
+ if (o.isClass)
+ cs ++= changeSet(o.info, n)
+ else if (n == NoSymbol)
+ cs += Removed(toEntity(o))
+ else {
+ val newSym =
+ o match {
+ case _:TypeSymbol if o.isAliasType =>
+ n.suchThat(ov => sameType(ov.info, o.info))
+ case _ =>
+ n.suchThat(ov => sameType(ov.tpe, o.tpe))
+ }
+ if (newSym == NoSymbol || moreRestrictive(o.flags, newSym.flags))
+ cs += Changed(toEntity(o))(n + " changed from " + o.tpe + " to " + n.tpe + " flags: " + Flags.flagsToString(o.flags))
+ else
+ newMembers -= newSym
}
- }
- cs ++= (newMembers map (Added compose toEntity))
+ }: Unit // Give the type explicitly until #2281 is fixed
+ cs ++= (newMembers map (Added compose toEntity))
cs.toList
}
def removeChangeSet(sym: Symbol): Change = Removed(toEntity(sym))
+ def changeChangeSet(sym: Symbol, msg: String): Change = Changed(toEntity(sym))(msg)
+ def parentChangeSet(sym: Symbol): Change = ParentChanged(toEntity(sym))
private def toEntity(sym: Symbol): Entity =
- if (sym.isClass) Class(sym.fullNameString)
- else Definition(sym.fullNameString)
+ if (sym.isClass) Class(sym.fullName)
+ else Definition(sym.fullName)
}
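To see what the extended moreRestrictive check above buys (a hypothetical before/after pair, not part of this patch): flipping the implicit modifier on a member between runs now differs in the IMPLICIT flag, so changeSet reports the member as changed and its dependents are recompiled:

    // previous run
    object Conversions { def asList(s: String): List[Char] = s.toList }
    // current run
    object Conversions { implicit def asList(s: String): List[Char] = s.toList }
    // (from & IMPLICIT) != (to & IMPLICIT), so the member is reported as Changed.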
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index c5ac031406..aa784e9c87 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -1,6 +1,6 @@
package scala.tools.nsc
-package dependencies;
-import util.SourceFile;
+package dependencies
+import util.SourceFile
import io.AbstractFile
import collection._
import symtab.Flags
@@ -8,19 +8,24 @@ import symtab.Flags
trait DependencyAnalysis extends SubComponent with Files {
import global._
- val phaseName = "dependencyAnalysis";
+ val phaseName = "dependencyAnalysis"
def off = settings.make.value == "all"
- def newPhase(prev : Phase) = new AnalysisPhase(prev)
+ def newPhase(prev: Phase) = new AnalysisPhase(prev)
lazy val maxDepth = settings.make.value match {
case "changed" => 0
- case "transitive" => Int.MaxValue
+ case "transitive" | "transitivenocp" => Int.MaxValue
case "immediate" => 1
}
- def nameToFile(src: AbstractFile, name : String) =
+ def shouldCheckClasspath = settings.make.value != "transitivenocp"
+
+ // todo: order-insensitive checking, and also check timestamps?
+ def validateClasspath(cp1: String, cp2: String): Boolean = cp1 == cp2
+
+ def nameToFile(src: AbstractFile, name: String) =
settings.outputDirs.outputDirFor(src)
.lookupPathUnchecked(name.toString.replace(".", java.io.File.separator) + ".class", false)
@@ -34,7 +39,7 @@ trait DependencyAnalysis extends SubComponent with Files {
def dependenciesFile: Option[AbstractFile] = depFile
def classpath = settings.classpath.value
- def newDeps = new FileDependencies(classpath);
+ def newDeps = new FileDependencies(classpath)
var dependencies = newDeps
@@ -43,13 +48,19 @@ trait DependencyAnalysis extends SubComponent with Files {
/** Top level definitions per source file. */
val definitions: mutable.Map[AbstractFile, List[Symbol]] =
new mutable.HashMap[AbstractFile, List[Symbol]] {
- override def default(f : AbstractFile) = Nil
+ override def default(f: AbstractFile) = Nil
}
/** External references used by source file. */
val references: mutable.Map[AbstractFile, immutable.Set[String]] =
new mutable.HashMap[AbstractFile, immutable.Set[String]] {
- override def default(f : AbstractFile) = immutable.Set()
+ override def default(f: AbstractFile) = immutable.Set()
+ }
+
+ /** External references for inherited members used in the source file */
+ val inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] =
+ new mutable.HashMap[AbstractFile, immutable.Set[Inherited]] {
+ override def default(f: AbstractFile) = immutable.Set()
}
/** Write dependencies to the current file. */
@@ -60,15 +71,14 @@ trait DependencyAnalysis extends SubComponent with Files {
/** Load dependencies from the given file and save the file reference for
* future saves.
*/
- def loadFrom(f: AbstractFile, toFile: String => AbstractFile) : Boolean = {
+ def loadFrom(f: AbstractFile, toFile: String => AbstractFile): Boolean = {
dependenciesFile = f
FileDependencies.readFrom(f, toFile) match {
case Some(fd) =>
- val success = fd.classpath == classpath
+ val success = if (shouldCheckClasspath) validateClasspath(fd.classpath, classpath) else true
dependencies = if (success) fd else {
- if(settings.debug.value){
- println("Classpath has changed. Nuking dependencies");
- }
+ if (settings.debug.value)
+ println("Classpath has changed. Nuking dependencies")
newDeps
}
@@ -77,15 +87,13 @@ trait DependencyAnalysis extends SubComponent with Files {
}
}
- def filter(files : List[SourceFile]) : List[SourceFile] =
+ def filter(files: List[SourceFile]): List[SourceFile] =
if (off) files
- else if (dependencies.isEmpty){
- if(settings.debug.value){
- println("No known dependencies. Compiling everything");
- }
+ else if (dependencies.isEmpty) {
+ println("No known dependencies. Compiling " +
+ (if (settings.debug.value) files.mkString(", ") else "everything"))
files
- }
- else {
+ } else {
val (direct, indirect) = dependencies.invalidatedFiles(maxDepth);
val filtered = files.filter(x => {
val f = x.file.absolute
@@ -94,16 +102,21 @@ trait DependencyAnalysis extends SubComponent with Files {
filtered match {
case Nil => println("No changes to recompile");
case x => println("Recompiling " + (
- if(settings.debug.value) x.mkString(", ")
- else x.length + " files")
+ if(settings.debug.value) x.mkString(", ") else x.length + " files")
)
}
filtered
}
- class AnalysisPhase(prev : Phase) extends StdPhase(prev){
+ case class Inherited(qualifier: String, member: Name)
+
+ class AnalysisPhase(prev: Phase) extends StdPhase(prev) {
+
+ override def cancelled(unit: CompilationUnit) =
+ super.cancelled(unit) && !unit.isJava
+
def apply(unit : global.CompilationUnit) {
- val f = unit.source.file.file;
+ val f = unit.source.file.file
// When we're passed strings by the interpreter
// they have no source file. We simply ignore this case
// as irrelevant to dependency analysis.
@@ -111,22 +124,31 @@ trait DependencyAnalysis extends SubComponent with Files {
val source: AbstractFile = unit.source.file;
for (d <- unit.icode){
val name = d.toString
- dependencies.emits(source, nameToFile(unit.source.file, name))
d.symbol match {
- case _ : ModuleClassSymbol =>
+ case s : ModuleClassSymbol =>
+ val isTopLevelModule =
+ atPhase (currentRun.picklerPhase.next) {
+ !s.isImplClass && !s.isNestedClass
+ }
+ if (isTopLevelModule && (s.companionModule != NoSymbol)) {
+ dependencies.emits(source, nameToFile(unit.source.file, name))
+ }
dependencies.emits(source, nameToFile(unit.source.file, name + "$"))
case _ =>
+ dependencies.emits(source, nameToFile(unit.source.file, name))
}
}
+ dependencies.reset(source)
for (d <- unit.depends; if (d.sourceFile != null)){
- dependencies.depends(source, d.sourceFile);
+ dependencies.depends(source, d.sourceFile)
}
}
// find all external references in this compilation unit
val file = unit.source.file
references += file -> immutable.Set.empty[String]
+ inherited += file -> immutable.Set.empty[Inherited]
val buf = new mutable.ListBuffer[Symbol]
@@ -137,18 +159,70 @@ trait DependencyAnalysis extends SubComponent with Files {
&& (!tree.symbol.isPackage)
&& (!tree.symbol.hasFlag(Flags.JAVA))
&& ((tree.symbol.sourceFile eq null)
- || (tree.symbol.sourceFile.path != file.path))) {
- references += file -> (references(file) + tree.symbol.fullNameString)
+ || (tree.symbol.sourceFile.path != file.path))
+ && (!tree.symbol.isClassConstructor)) {
+ updateReferences(tree.symbol.fullName)
+ atPhase(currentRun.uncurryPhase.prev) {
+ checkType(tree.symbol.tpe)
+ }
}
+
tree match {
- case cdef: ClassDef if !cdef.symbol.hasFlag(Flags.PACKAGE) =>
- buf += cdef.symbol
+ case cdef: ClassDef if !cdef.symbol.hasFlag(Flags.PACKAGE) &&
+ !cdef.symbol.isAnonymousFunction =>
+ if (cdef.symbol != NoSymbol) buf += cdef.symbol
+ atPhase(currentRun.erasurePhase.prev) {
+ for (s <- cdef.symbol.info.decls)
+ s match {
+ case ts: TypeSymbol if !ts.isClass =>
+ checkType(s.tpe)
+ case _ =>
+ }
+ }
super.traverse(tree)
- case _ =>
+ case ddef: DefDef =>
+ atPhase(currentRun.typerPhase.prev) {
+ checkType(ddef.symbol.tpe)
+ }
+ super.traverse(tree)
+ case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
+ if (!a.symbol.isConstructor &&
+ !a.symbol.owner.isPackageClass &&
+ !isSameType(q.tpe, a.symbol.owner.tpe))
+ inherited += file ->
+ (inherited(file) + Inherited(q.symbol.tpe.resultType.safeToString, n))
+ super.traverse(tree)
+ case _ =>
super.traverse(tree)
}
}
+
+ def checkType(tpe: Type): Unit =
+ tpe match {
+ case t: MethodType =>
+ checkType(t.resultType)
+ for (s <- t.params) checkType(s.tpe)
+
+ case t: TypeRef =>
+ if (t.sym.isAliasType) {
+ updateReferences(t.typeSymbolDirect.fullName)
+ checkType(t.typeSymbolDirect.info)
+ }
+ updateReferences(t.typeSymbol.fullName)
+ for (tp <- t.args) checkType(tp)
+
+ case t: PolyType =>
+ checkType(t.resultType)
+ updateReferences(t.typeSymbol.fullName)
+
+ case t =>
+ updateReferences(t.typeSymbol.fullName)
+ }
+
+ def updateReferences(s: String): Unit =
+ references += file -> (references(file) + s)
+
}).apply(unit.body)
definitions(unit.source.file) = buf.toList
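A short usage note on the new make policy handled above (hedged; only the value recognised by settings.make here is taken from this patch): -make:transitivenocp invalidates dependents transitively just like -make:transitive, but shouldCheckClasspath is false, so a previously saved dependencies file is reused even if the classpath string has changed, e.g.:

    scalac -make:transitivenocp src/*.scala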
diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala
index 501936ee4e..194351a13f 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Files.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Files.scala
@@ -1,43 +1,48 @@
package scala.tools.nsc
-package dependencies;
+package dependencies
import java.io.{InputStream, OutputStream, PrintStream, InputStreamReader, BufferedReader}
-import io.{AbstractFile, PlainFile}
+import io.{AbstractFile, PlainFile, VirtualFile}
+
+import scala.collection._
-import scala.collection._;
trait Files { self : SubComponent =>
- class FileDependencies(val classpath : String) {
+ class FileDependencies(val classpath: String) {
+ import FileDependencies._
- class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]]{
+ class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]] {
override def default(key: AbstractFile) = {
- this(key) = new mutable.HashSet[AbstractFile];
- this(key);
+ this(key) = new mutable.HashSet[AbstractFile]
+ this(key)
}
}
val dependencies = new Tracker
- val targets = new Tracker;
+ val targets = new Tracker
def isEmpty = dependencies.isEmpty && targets.isEmpty
def emits(source: AbstractFile, result: AbstractFile) =
- targets(source) += result;
+ targets(source) += result
def depends(from: AbstractFile, on: AbstractFile) =
- dependencies(from) += on;
+ dependencies(from) += on
- def reset(file: AbstractFile) = dependencies -= file;
+ def reset(file: AbstractFile) = dependencies -= file
- def cleanEmpty() = {
- dependencies.foreach({case (key, value) => value.retain(_.exists)})
- dependencies.retain((key, value) => key.exists && !value.isEmpty)
+ def cleanEmpty = {
+ dependencies foreach {case (_, value) =>
+ value retain (x => x.exists && (x ne removedFile))}
+ dependencies retain ((key, value) => key.exists && !value.isEmpty)
+ targets foreach {case (_, value) => value retain (_.exists)}
+ targets retain ((key, value) => key.exists && !value.isEmpty)
}
def containsFile(f: AbstractFile) = targets.contains(f.absolute)
- def invalidatedFiles(maxDepth : Int) = {
- val direct = new mutable.HashSet[AbstractFile];
+ def invalidatedFiles(maxDepth: Int) = {
+ val direct = new mutable.HashSet[AbstractFile]
for ((file, products) <- targets) {
// This looks a bit odd. It may seem like one should invalidate a file
@@ -49,34 +54,33 @@ trait Files { self : SubComponent =>
val indirect = dependentFiles(maxDepth, direct)
- for ((source, targets) <- targets;
- if direct(source) || indirect(source)){
- targets.foreach(_.delete);
- targets -= source;
+ for ((source, targets) <- targets
+ if direct(source) || indirect(source) || (source eq removedFile)) {
+ targets foreach (_.delete)
+ targets -= source
}
- (direct, indirect);
+ (direct, indirect)
}
/** Return the set of files that depend on the given changed files.
* It computes the transitive closure up to the given depth.
*/
def dependentFiles(depth: Int, changed: Set[AbstractFile]): Set[AbstractFile] = {
- val indirect = new mutable.HashSet[AbstractFile];
- val newInvalidations = new mutable.HashSet[AbstractFile];
+ val indirect = new mutable.HashSet[AbstractFile]
+ val newInvalidations = new mutable.HashSet[AbstractFile]
- def invalid(file: AbstractFile) = indirect(file) || changed(file);
+ def invalid(file: AbstractFile) =
+ indirect(file) || changed(file) || (file eq removedFile)
- def go(i : Int) : Unit = if(i > 0){
- newInvalidations.clear;
- for((target, depends) <- dependencies;
- if !invalid(target);
- d <- depends){
+ def go(i: Int) : Unit = if(i > 0) {
+ newInvalidations.clear
+ for((target, depends) <- dependencies if !invalid(target);
+ d <- depends)
newInvalidations(target) ||= invalid(d)
- }
- indirect ++= newInvalidations;
- if(!newInvalidations.isEmpty) go(i - 1);
- else ()
+
+ indirect ++= newInvalidations
+ if (!newInvalidations.isEmpty) go(i - 1)
}
go(depth)
@@ -84,56 +88,77 @@ trait Files { self : SubComponent =>
indirect --= changed
}
- def writeTo(file: AbstractFile, fromFile : AbstractFile => String) {
+ def writeTo(file: AbstractFile, fromFile: AbstractFile => String): Unit =
writeToFile(file)(out => writeTo(new PrintStream(out), fromFile))
- }
- def writeTo(print : PrintStream, fromFile : AbstractFile => String) : Unit = {
- cleanEmpty();
- def emit(tracker : Tracker){
- for ((f, ds) <- tracker;
- d <- ds){
- print.println(fromFile(f) + " -> " + fromFile(d));
- }
- }
-
- print.println(classpath);
- print.println(FileDependencies.Separator)
- emit(dependencies);
- print.println(FileDependencies.Separator)
- emit(targets);
+ def writeTo(print: PrintStream, fromFile: AbstractFile => String): Unit = {
+ def emit(tracker: Tracker) =
+ for ((f, ds) <- tracker; d <- ds) print.println(fromFile(f) + arrow + fromFile(d))
+ cleanEmpty
+ print.println(classpath)
+ print.println(separator)
+ emit(dependencies)
+ print.println(separator)
+ emit(targets)
}
}
- object FileDependencies{
- val Separator = "-------";
-
- def readFrom(file: AbstractFile, toFile : String => AbstractFile): Option[FileDependencies] = readFromFile(file) { in =>
- val reader = new BufferedReader(new InputStreamReader(in))
- val it = new FileDependencies(reader.readLine)
- reader.readLine
- var line : String = null
- while ({line = reader.readLine; (line != null) && (line != Separator)}){
- line.split(" -> ") match {
- case Array(from, on) => it.depends(toFile(from), toFile(on));
- case x => global.inform("Parse error: Unrecognised string " + line); return None
+ object FileDependencies {
+ private val separator:String = "-------"
+ private val arrow = " -> "
+ private val removedFile = new VirtualFile("removed")
+
+ private def validLine(l: String) = (l != null) && (l != separator)
+
+ def readFrom(file: AbstractFile, toFile: String => AbstractFile): Option[FileDependencies] =
+ readFromFile(file) { in =>
+ val reader = new BufferedReader(new InputStreamReader(in))
+ val it = new FileDependencies(reader.readLine)
+
+ def readLines(valid: Boolean)(f: (AbstractFile, AbstractFile) => Unit): Boolean = {
+ var continue = valid
+ var line: String = null
+ while (continue && {line = reader.readLine; validLine(line)}) {
+ line.split(arrow) match {
+ case Array(from, on) => f(toFile(from), toFile(on))
+ case _ =>
+ global.inform("Parse error: Unrecognised string " + line)
+ continue = false
+ }
+ }
+ continue
}
- }
- while ({line = reader.readLine; (line != null) && (line != Separator)}){
- line.split(" -> ") match {
- case Array(source, target) => it.emits(toFile(source), toFile(target));
- case x => global.inform("Parse error: Unrecognised string " + line); return None
- }
+ reader.readLine
+
+ val dResult = readLines(true)(
+ (_, _) match {
+ case (null, _) => // fromFile is removed, it's ok
+ case (fromFile, null) =>
+ // onFile is removed, should recompile fromFile
+ it.depends(fromFile, removedFile)
+ case (fromFile, onFile) => it.depends(fromFile, onFile)
+ })
+
+ readLines(dResult)(
+ (_, _) match {
+ case (null, null) =>
+ // both source and target were removed; that's ok
+ case (null, targetFile) =>
+ // source is removed, should remove relative target later
+ it.emits(removedFile, targetFile)
+ case (_, null) =>
+ // it may have been cleaned externally, or removed during the last phase
+ case (sourceFile, targetFile) => it.emits(sourceFile, targetFile)
+ })
+
+ Some(it)
}
-
- Some(it)
- }
}
def writeToFile[T](file: AbstractFile)(f: OutputStream => T) : T = {
- val out = file.output
+ val out = file.bufferedOutput
try {
f(out)
} finally {
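For reference, a minimal sketch of the on-disk layout produced by writeTo and consumed by readFrom above (hypothetical paths; the classpath line, separator and arrow follow the constants defined in FileDependencies):

    /home/user/project/classes:/opt/scala/lib/scala-library.jar
    -------
    src/A.scala -> src/B.scala
    -------
    src/A.scala -> classes/A.class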
diff --git a/src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala b/src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala
deleted file mode 100644
index 8a1261a2f6..0000000000
--- a/src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala
+++ /dev/null
@@ -1,307 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
- * @author Sean McDirmid
- */
-// $Id$
-
-package scala.tools.nsc
-package doc
-
-import scala.collection.mutable
-import java.util.zip.ZipFile
-
-import symtab.Flags._
-import scala.xml._
-
-/**
- * @author Sean McDirmid
- */
-abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToXML {
- import global._
- import definitions.{AnyClass, AnyRefClass}
-
- lazy val additions = new mutable.LinkedHashSet[Symbol]
- lazy val additions0 = new ModelAdditions(global) {
- override def addition(sym: global.Symbol) = {
- super.addition(sym)
- sym match {
- case sym : global.ClassSymbol => additions += sym.asInstanceOf[Symbol]
- case sym : global.ModuleSymbol => additions += sym.asInstanceOf[Symbol]
- case sym : global.TypeSymbol => additions += sym.asInstanceOf[Symbol]
- case _ =>
- }
- }
- def init {}
- }
-
- /** Add all top-level entities in ModelAdditions to allClasses */
- def addAdditionsToClasses() {
- additions0.init
- for (sym <- additions) {
- val packSym = sym.enclosingPackage
- if (packSym != NoSymbol) {
- val pack = Package(packSym)
- if (!(allClasses contains pack)) {
- // don't emit an addition unless its package
- // is already being scaladoced
- } else {
- val addition: Option[ClassOrObject] =
- if (sym.isClass)
- Some(new TopLevelClass(sym))
- else if (sym.isModule)
- Some(new TopLevelObject(sym))
- else if (sym == definitions.AnyRefClass) {
- // AnyRef is the only top-level type alias, so handle
- // it specially instead of introducing general support for
- // top-level aliases
- Some(new TopLevelClass(sym))
- }
- else
- None
-
- addition match {
- case None =>
- //println("skipping: " + sym) //DEBUG
- case Some(addition) =>
- allClasses(pack) += addition
- }
- }
- } else {
- //println("no package found for: "+sym) //DEBUG
- }
- }
- }
-
- def process(units: Iterator[CompilationUnit]) {
-
- assert(global.definitions != null)
-
- def g(pkg: Package, clazz: ClassOrObject) {
- if (isAccessible(clazz.sym)) {
- allClasses(pkg) += clazz
- clazz.decls.map(_._2).foreach {
- case clazz : ClassOrObject => g(pkg, clazz)
- case _ =>
- }
- }
- }
- def f(pkg: Package, tree: Tree) {
- if (tree != EmptyTree && tree.hasSymbol) {
- val sym = tree.symbol
- if (sym != NoSymbol && !sym.hasFlag(symtab.Flags.PRIVATE)) tree match {
- case tree : PackageDef =>
- val pkg1 = new Package(sym.asInstanceOf[ModuleSymbol])
- tree.stats.foreach(stat => f(pkg1, stat))
- case tree : ClassDef =>
- assert(pkg != null)
- g(pkg, new TopLevelClass(sym.asInstanceOf[ClassSymbol]))
- case tree : ModuleDef =>
- assert(pkg != null)
- g(pkg, new TopLevelObject(sym.asInstanceOf[ModuleSymbol]))
- case _ =>
- }
- }
- }
- units.foreach(unit => f(null, unit.body))
- addAdditionsToClasses()
-
- for (p <- allClasses; d <- p._2) {
- symbols += d.sym
- for (pp <- d.sym.tpe.parents) subClasses(pp.typeSymbol) += d
- }
- copyResources
- lazy val packages0 = sort(allClasses.keySet)
- new AllPackagesFrame with Frame { def packages = packages0 }
- new PackagesContentFrame with Frame { def packages = packages0 }
- new NavigationFrame with Frame { }
- new ListClassFrame with Frame {
- def classes = for (p <- allClasses; d <- p._2) yield d
- object organized extends mutable.LinkedHashMap[(List[String],Boolean),List[ClassOrObject]] {
- override def default(key : (List[String],Boolean)) = Nil;
- classes.foreach(cls => {
- val path = cls.path.map(_.name);
- this((path,cls.isInstanceOf[Clazz])) = cls :: this((path,cls.isInstanceOf[Clazz]));
- });
- }
-
- def title = "List of all classes and objects"
- def path = "all-classes"
- def navLabel = null // "root-page"
- // override protected def navSuffix = ".html";
- override def optional(cls: ClassOrObject): NodeSeq = {
- val path = cls.path.map(_.name)
- val key = (cls.path.map(_.name), cls.isInstanceOf[Clazz])
- assert(!organized(key).isEmpty);
-
- ((if (!organized(key).tail.isEmpty) Text(" (" +{
- //Console.println("CONFLICT: " + path + " " + organized(key));
- val str = cls.path(0).sym.owner.fullNameString('.');
- val idx = str.lastIndexOf('.');
- if (idx == -1) str;
- else str.substring(idx + 1);
- }+ ")");
- else NodeSeq.Empty) ++ super.optional(cls))//(NodeSeq.builderFactory)
- }
-
- }
- for ((pkg0, classes0) <- allClasses) {
- new ListClassFrame with Frame {
- def title =
- "List of classes and objects in package " + pkg0.fullName('.')
- def classes = classes0
- def path = pkgPath(pkg0.sym) + NAME_SUFFIX_PACKAGE
- def navLabel = pkg0.fullName('.')
- }
- new PackageContentFrame with Frame {
- def classes = classes0
- def pkg = pkg0
- }
- for (clazz0 <- classes0) {
- new ClassContentFrame with Frame {
- def clazz = clazz0
- def title =
- clazz0.kind + " " + clazz0.name + " in " + (clazz0.sym.owner.fullNameString('.'));
- }
- }
- }
- new RootFrame with Frame
- }
- override def longList(entity: ClassOrObject, category: Category)(implicit from: Frame) : NodeSeq = category match {
- case Classes | Objects => NodeSeq.Empty
- case _ => super.longList(entity, category)
- }
-
- trait Frame extends super.Frame {
- def longHeader(entity : Entity) = DefaultDocDriver.this.longHeader(entity)(this)
- def shortHeader(entity : Entity) = DefaultDocDriver.this.shortHeader(entity)(this)
- }
-
- import DocUtil._
- override def classBody(entity: ClassOrObject)(implicit from: Frame): NodeSeq =
- (((subClasses.get(entity.sym) match {
- case Some(symbols) =>
- (<dl>
- <dt style="margin:10px 0 0 20px;"><b>Direct Known Subclasses:</b></dt>
- <dd>{symbols.mkXML("",", ","")(cls => {
- aref(urlFor(cls.sym), cls.path.map(_.name).mkString("",".",""));
- })}</dd>
- </dl><hr/>);
- case None =>
- NodeSeq.Empty
- }): NodeSeq)++super.classBody(entity))//(NodeSeq.builderFactory)
-
- protected def urlFor(sym: Symbol)(implicit frame: Frame) = frame.urlFor(sym)
-
- override protected def decodeTag(tag: String): String = tag match {
- case "exception" => "Throws"
- case "ex" => "Examples"
- case "param" => "Parameters"
- case "pre" => "Precondition"
- case "return" => "Returns"
- case "note" => "Notes"
- case "see" => "See Also"
- case tag => super.decodeTag(tag)
- }
-
- override protected def decodeOption(tag: String, option: String): NodeSeq = tag match {
- case "throws" if additions0.exceptions.contains(option) =>
- val (sym, s) = additions0.exceptions(option)
- val path = "../" //todo: fix path
- val href = path + sym.fullNameString('/') +
- (if (sym.isModule || sym.isModuleClass) NAME_SUFFIX_OBJECT else "") +
- "#" + s
- (<a href={href}>{option}</a>) ++ {Text(" - ")};
- case _ =>
- super.decodeOption(tag,option)
- }
-
- object roots extends mutable.LinkedHashMap[String,String];
- roots("classes") = "http://java.sun.com/j2se/1.5.0/docs/api";
- roots("rt") = roots("classes");
- private val SCALA_API_ROOT = "http://www.scala-lang.org/docu/files/api/";
- roots("scala-library") = SCALA_API_ROOT;
-
- private def keyFor(file: ZipFile): String = {
- var name = file.getName
- var idx = name.lastIndexOf(java.io.File.pathSeparator)
- if (idx == -1) idx = name.lastIndexOf('/')
- if (idx != -1) name = name.substring(idx + 1)
- if (name endsWith ".jar") name.substring(0, name.length - (".jar").length)
- else null
- }
-
- // <code>{Text(string + " - ")}</code>;
- override def hasLink0(sym: Symbol): Boolean = {
- if (sym == NoSymbol) return false;
- if (sym == AnyRefClass) {
- // AnyRefClass is a type alias, so the following logic
- // does not work. AnyClass should have a link in
- // the same cases as AnyRefClass, so test it instead.
- return hasLink(AnyClass)
- }
- if (super.hasLink0(sym) && symbols.contains(sym))
- return true;
- if (SyntheticClasses contains sym)
- return true;
- if (sym.toplevelClass == NoSymbol) return false;
- val clazz = sym.toplevelClass.asInstanceOf[ClassSymbol];
- import scala.tools.nsc.io._;
- clazz.classFile match {
- case file : ZipArchive#FileEntry =>
- val key = keyFor(file.archive);
- if (key != null && roots.contains(key)) return true;
- case null =>
- case _ =>
- }
- false
- }
-
- def aref(href: String, label: String)(implicit frame: Frame) =
- frame.aref(href, "_self", label)
-
- protected def anchor(entity: Symbol)(implicit frame: Frame): NodeSeq =
- (<a name={Text(frame.docName(entity))}></a>)
-
- object symbols extends mutable.LinkedHashSet[Symbol]
-
- object allClasses extends mutable.LinkedHashMap[Package, mutable.LinkedHashSet[ClassOrObject]] {
- override def default(pkg: Package): mutable.LinkedHashSet[ClassOrObject] = {
- object ret extends mutable.LinkedHashSet[ClassOrObject]
- this(pkg) = ret
- ret
- }
- }
-
- object subClasses extends mutable.LinkedHashMap[Symbol, mutable.LinkedHashSet[ClassOrObject]] {
- override def default(key: Symbol) = {
- val ret = new mutable.LinkedHashSet[ClassOrObject]
- this(key) = ret
- ret
- }
- }
-
- override def rootFor(sym: Symbol): String = {
- assert(sym != NoSymbol)
- if (sym == definitions.AnyRefClass) {
- // AnyRefClass is a type alias, so the following logic
- // does not work. AnyClass should have the same root,
- // so use it instead.
- return rootFor(definitions.AnyClass)
- }
- if (sym.toplevelClass == NoSymbol) return super.rootFor(sym)
- if (symbols.contains(sym.toplevelClass)) return super.rootFor(sym)
- if (SyntheticClasses contains sym)
- return SCALA_API_ROOT
- val clazz = sym.toplevelClass.asInstanceOf[ClassSymbol]
- import scala.tools.nsc.io._;
- clazz.classFile match {
- case file : ZipArchive#FileEntry =>
- val key = keyFor(file.archive)
- if (key != null && roots.contains(key)) {
- return roots(key) + '/'
- }
- case _ =>
- }
- super.rootFor(sym)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/DocDriver.scala b/src/compiler/scala/tools/nsc/doc/DocDriver.scala
deleted file mode 100644
index 3593775e35..0000000000
--- a/src/compiler/scala/tools/nsc/doc/DocDriver.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
- * @author Sean McDirmid
- */
-// $Id$
-
-package scala.tools.nsc
-package doc
-
-/**
- * This is an abstract class for documentation plugins.
- *
- * @author Geoffrey Washburn
- */
-abstract class DocDriver {
- val global: Global
- import global._
- def settings: doc.Settings
-
- def process(units: Iterator[CompilationUnit]): Unit
-}
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
new file mode 100644
index 0000000000..8b800381b8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -0,0 +1,66 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2010 LAMP/EPFL */
+
+
+package scala.tools.nsc
+package doc
+
+import reporters.Reporter
+
+/** A documentation processor controls the process of generating Scala documentation, which is as follows.
+ *
+ * * A simplified compiler instance (with only the front-end phases enabled) is created, and additional
+ * ''sourceless'' comments are registered.
+ * * Documentable files are compiled, thereby filling the compiler's symbol table.
+ * * A documentation model is extracted from the post-compilation symbol table.
+ * * A generator is used to transform the model into the correct final format (HTML).
+ *
+ * A processor contains a single compiler instantiated from the processor's `settings`. Each call to `document`
+ * uses the same compiler instance with the same symbol table. In particular, this implies that the scaladoc site
+ * obtained from a call to `document` will contain documentation about files compiled during previous calls to the same
+ * processor's `document` method.
+ *
+ * @param reporter The reporter to which both documentation and compilation errors will be reported.
+ * @param settings The settings to be used by the documenter and compiler for generating documentation.
+ *
+ * @author Gilles Dubochet */
+class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor =>
+
+ /** The unique compiler instance used by this processor and constructed from its `settings`. */
+ object compiler extends Global(settings, reporter) with interactive.RangePositions {
+ override protected def computeInternalPhases() {
+ phasesSet += syntaxAnalyzer
+ phasesSet += analyzer.namerFactory
+ phasesSet += analyzer.packageObjects
+ phasesSet += analyzer.typerFactory
+ phasesSet += superAccessors
+ phasesSet += pickler
+ phasesSet += refchecks
+ }
+ override def onlyPresentation = true
+ lazy val addSourceless = {
+ val sless = new SourcelessComments { val global = compiler }
+ docComments ++= sless.comments
+ }
+ }
+
+ /** Creates a scaladoc site for all symbols defined in this call's `files`, as well as those defined in `files` of
+ * previous calls to the same processor.
+ * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */
+ def universe(files: List[String]): Option[Universe] = {
+ (new compiler.Run()) compile files
+ compiler.addSourceless
+ assert(settings.docformat.value == "html")
+ if (!reporter.hasErrors) {
+ val modelFactory = (new model.ModelFactory(compiler, settings) with model.comment.CommentFactory with model.TreeFactory)
+ println("model contains " + modelFactory.templatesCount + " documentable templates")
+ Some(modelFactory.makeModel)
+ }
+ else None
+ }
+
+ /** Generate document(s) for all `files` containing scaladoc documentation.
+ * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */
+ def document(files: List[String]): Unit =
+ universe(files) foreach { docModel => (new html.HtmlFactory(docModel)).generate }
+
+}
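A minimal usage sketch for the new DocFactory (hedged: the reporter and settings construction below are typical assumptions, not part of this patch; only DocFactory and its document method come from the code above):

    import scala.tools.nsc.doc.{ DocFactory, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    val docSettings = new Settings(msg => println(msg))  // assumed error-handler constructor
    val reporter    = new ConsoleReporter(docSettings)
    // Compiles the sources and generates the HTML scaladoc site.
    new DocFactory(reporter, docSettings) document List("src/Example.scala")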
diff --git a/src/compiler/scala/tools/nsc/doc/DocUtil.scala b/src/compiler/scala/tools/nsc/doc/DocUtil.scala
deleted file mode 100644
index c01d9566f8..0000000000
--- a/src/compiler/scala/tools/nsc/doc/DocUtil.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
- * @author Sean McDirmid
- */
-// $Id$
-
-package scala.tools.nsc
-package doc
-
-import java.io.StringReader
-import org.xml.sax.InputSource
-
-import scala.collection.immutable.{ListMap, TreeSet}
-import scala.xml._
-
-object DocUtil
-{
- def load(str: String): NodeSeq =
- if ((str == null) || (str.length == 0))
- NodeSeq.Empty
- else {
- val xmlSrc =
- if (str.matches("^(<!--.*-->)*<[^>]+>.*<[^>]+>(<!--.*-->)*$")) str
- else "<div>" + str + "</div>"
- XML.load(new StringReader(xmlSrc))
- }
-
- def br(nodes: NodeSeq): NodeSeq = nodes ++ (<br/>)
- def hr(nodes: NodeSeq): NodeSeq = nodes ++ (<hr/>)
-
- trait UrlContext {
- def relative: String
-
- def aref(href0: String, target: String, text: String): NodeSeq = {
- if (href0 == null) return Text(text);
-
- val href = {
- if (href0.startsWith("http:") || href0.startsWith("file:")) "";
- else relative
- } + Utility.escape(href0)
- if ((target ne null) && target.indexOf('<') != -1) throw new Error(target)
-
- val t0 = Text(text)
- if (target ne null)
- (<a href={href} target={target}>{t0}</a>);
- else
- (<a href={href}>{t0}</a>);
- }
-
- // can't use platform default here or the generated XML may end up all MacRoman
- val encoding = Properties.sourceEncoding
- val generator = System.getProperty("doc.generator", "scaladoc (" + Properties.versionString + ")")
- val header =
- (<meta http-equiv="content-type" content={"text/html; charset=" + encoding}/>
- <meta name="generator" content={generator}/>
- <link rel="stylesheet" type="text/css" href={ relative + "style.css"}/>
- <script type="text/javascript" src={relative + "script.js"}></script>);
-
- def body0(hasBody: Boolean, nodes: NodeSeq): NodeSeq =
- if (!hasBody) nodes else (<body onload="init()">{nodes}</body>);
-
- val dtype = "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">"
-
- def page(title: String, body: NodeSeq, hasBody: Boolean): NodeSeq =
- (<html>
- <head><title>{Text(if (title eq null) "null title" else title)}</title>
- {header}
- </head>
- {body0(hasBody, body)}
- </html>)
- } // UrlContext
-
- def div0(title: String): NodeSeq =
- (<div class="doctitle-larger">{Text(title)}</div>);
-
- def merge[T](ts0: TreeSet[T], ts1: TreeSet[T]): TreeSet[T] = ts0 ++ ts1
-
- def merge[T,S](ts0: ListMap[T,TreeSet[S]], ts1: ListMap[T,TreeSet[S]]): ListMap[T,TreeSet[S]] = {
- (ts1 foldLeft ts0) { case (xs, (k, v)) =>
- if (xs contains k) xs.updated(k, xs(k) ++ v)
- else xs.updated(k, v)
- }
- }
-
- implicit def coerceIterable[T](list : Iterable[T]) = NodeWrapper(list.iterator)
- implicit def coerceIterator[T](list : Iterator[T]) = NodeWrapper(list)
-
- case class NodeWrapper[T](list: Iterator[T]) {
- def interleave(xs: Seq[NodeSeq], sep: NodeSeq): NodeSeq =
- if (xs.isEmpty) NodeSeq.Empty
- else if (xs.size == 1) xs.head
- else xs.head ++ sep ++ interleave(xs.tail, sep)
-
- def mkXML(begin: NodeSeq, separator: NodeSeq, end: NodeSeq)(f: T => NodeSeq): NodeSeq =
- begin ++ interleave(list.toSeq map f, separator) ++ end
-
- def mkXML(begin: String, separator: String, end: String)(f: T => NodeSeq): NodeSeq =
- this.mkXML(Text(begin), Text(separator), Text(end))(f)
-
- def surround(open: String, close: String)(f: T => NodeSeq) =
- if (list.hasNext) mkXML(open, ", ", close)(f)
- else NodeSeq.Empty
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelAdditions.scala b/src/compiler/scala/tools/nsc/doc/ModelAdditions.scala
deleted file mode 100644
index e3186fd3fe..0000000000
--- a/src/compiler/scala/tools/nsc/doc/ModelAdditions.scala
+++ /dev/null
@@ -1,412 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
- * @author Sean McDirmid
- */
-// $Id$
-
-package scala.tools.nsc
-package doc
-
-/**
- * @author Stephane Micheloud, Sean McDirmid, Geoffrey Washburn
- * @version 1.0
- */
-class ModelAdditions(val global: Global) {
- import global._
- import definitions._
- def addition(sym: global.Symbol) {}
-
- addition(NothingClass);
- comments(NothingClass) = """
- <p>
- Class <code>Nothing</code> is - together with class <a href="Null.html">
- <code>Null</code></a> - at the bottom of the
- <a href="http://scala-lang.org" target="_top">Scala</a> type
- hierarchy.
- </p>
- <p>
- Type <code>Nothing</code> is a subtype of every other type
- (including <a href="Null.html"><code>Null</code></a>); there
- exist <em>no instances</em> of this type. Even though type
- <code>Nothing</code> is empty, it is nevertheless useful as a
- type parameter. For instance, the <a href="http://scala-lang.org"
- target="_top">Scala</a> library defines a value
- <a href="Nil$object.html"><code>Nil</code></a> of type
- <code><a href="List.html">List</a>[Nothing]</code>. Because lists
- are covariant in <a href="http://scala-lang.org" target="_top">Scala</a>,
- this makes <a href="Nil$object.html"><code>Nil</code></a> an
- instance of <code><a href="List.html">List</a>[T]</code>, for
- any element type <code>T</code>.
- </p>"""
-
- addition(NullClass);
- comments(NullClass) = """
- <p>
- Class <code>Null</code> is - together with class <a href="Nothing.html">
- <code>Nothing</code> - at the bottom of the
- <a href="http://scala-lang.org" target="_top">Scala</a> type
- hierarchy.
- </p>
- <p>
- Type <code>Null</code> is a subtype of all reference types; its
- only instance is the <code>null</code> reference.
- Since <code>Null</code> is not a subtype of value types,
- <code>null</code> is not a member of any such type. For instance,
- it is not possible to assign <code>null</code> to a variable of
- type <a href="Int.html"><code>Int</code></a>.
- </p>"""
-
- /*******************************************************************/
- /* Documentation for Any */
-
- addition(AnyClass);
- comments(AnyClass) = """
- <p>
- Class <code>Any</code> is the root of the <a
- href="http://scala-lang.org/"
- target="_top">Scala</a> class hierarchy. Every class in a
- <a href="http://scala-lang.org/" target="_top">Scala</a> execution
- environment inherits directly or indirectly from this class.
- Class <code>Any</code> has two direct subclasses:
- <a href="AnyRef.html"><code>AnyRef</code></a> and
- <a href="AnyVal.html"><code>AnyVal</code></a>.
- </p>"""
-
- addition(Any_equals);
- comments(Any_equals) = """
- This method is used to compare the receiver object (<code>this</code>)
- with the argument object (<code>arg0</code>) for equivalence.
-
- <p>
- The default implementations of this method is an <a
- href="http://en.wikipedia.org/wiki/Equivalence_relation">equivalence
- relation</a>:
- <ul>
- <li>It is reflexive: for any instance <code>x</code> of type <code>Any</code>,
- <code>x.equals(x)</code> should return <code>true</code>.</li>
- <li>It is symmetric: for any instances <code>x</code> and <code>y</code> of type
- <code>Any</code>, <code>x.equals(y)</code> should return <code>true</code> if and only
- if <code>y.equals(x)</code> returns <code>true</code>.</li>
- <li>It is transitive: for any instances
- <code>x</code>, <code>y</code>, and <code>z</code> of type <code>AnyRef</code>
- if <code>x.equals(y)</code> returns <code>true</code> and
- <code>y.equals(z)</code> returns
- <code>true</code>, then <code>x.equals(z)</code> should return <code>true</code>.</li>
- </ul>
- </p>
-
- <p>
- If you override this method, you should verify that
- your implementation remains an equivalence relation.
- Additionally, when overriding this method it is often necessary to
- override <code>hashCode</code> to ensure that objects that are
- "equal" (<code>o1.equals(o2)</code> returns <code>true</code>)
- hash to the same <a href="Int.html"><code>Int</code></a>
- (<code>o1.hashCode.equals(o2.hashCode)</code>).
-
- @param arg0 the object to compare against this object for equality.
- @return <code>true</code> if the receiver object is equivalent to the argument; <code>false</code> otherwise.
- </p>
- """
-
- addition(Any_==);
- comments(Any_==) = """
- <code>o == arg0</code> is the same as <code>o.equals(arg0)</code>.
- <p>
- @param arg0 the object to compare against this object for equality.
- @return <code>true</code> if the receiver object is equivalent to the argument; <code>false</code> otherwise.
- </p>
- """
-
- addition(Any_!=);
- comments(Any_!=) = """
- <code>o != arg0</code> is the same as <code>!(o == (arg0))</code>.
- <p>
- @param arg0 the object to compare against this object for dis-equality.
- @return <code>false</code> if the receiver object is equivalent to the argument; <code>true</code> otherwise.
- </p>
- """
-
- addition(Any_toString);
- comments(Any_toString) = """
- Returns a string representation of the object.
- <p>
- The default representation is platform dependent.
-
- @return a string representation of the object.
- </p>
- """
-
- addition(Any_asInstanceOf);
- comments(Any_asInstanceOf) = """
- This method is used to cast the receiver object to be of type <code>T0</code>.
-
- <p>Note that the success of a cast at runtime is modulo Scala's
- erasure semantics. Therefore the expression
- <code>1.asInstanceOf[String]</code> will throw a
- <code>ClassCastException</code> at runtime, while the expression
- <code>List(1).asInstanceOf[List[String]]</code> will not. In the
- latter example, because the type argument is erased as part of
- compilation it is not possible to check whether the contents of
- the list are of the requested typed.
-
- @throws ClassCastException if the receiver object is not an
- instance of erasure of type <code>T0</code>.
- @return the receiver object.
- </p> """
-
- addition(Any_isInstanceOf);
- comments(Any_isInstanceOf) = """
- This method is used to test whether the dynamic type of the receiver object is <code>T0</code>.
-
- <p>Note that the test result of the test is modulo Scala's erasure
- semantics. Therefore the expression
- <code>1.isInstanceOf[String]</code> will return
- <code>false</code>, while the expression
- <code>List(1).isInstanceOf[List[String]]</code> will return
- <code>true</code>. In the latter example, because the type
- argument is erased as part of compilation it is not possible to
- check whether the contents of the list are of the requested typed.
-
- @return <code>true</code> if the receiver object is an
- instance of erasure of type <code>T0</code>; <code>false</code> otherwise.
- """
-
- addition(Any_hashCode);
- comments(Any_hashCode) = """
- Returns a hash code value for the object.
-
- <p>
- The default hashing algorithm is platform dependent.
-
- Note that it is allowed for two objects to have identical hash
- codes (<code>o1.hashCode.equals(o2.hashCode)</code>) yet not be
- equal (<code>o1.equals(o2)</code> returns <code>false</code>). A
- degenerate implementation could always return <code>0</code>.
- However, it is required that if two objects are equal
- (<code>o1.equals(o2)</code> returns <code>true</code>) that they
- have identical hash codes
- (<code>o1.hashCode.equals(o2.hashCode)</code>). Therefore, when
- overriding this method, be sure to verify that the behavior is
- consistent with the <code>equals</code> method.
- </p>
-
- <p>
- @return the hash code value for the object.
- </p> """
-
- /*******************************************************************/
- /* Documentation for AnyRef */
-
- addition(AnyRefClass);
- comments(AnyRefClass) = """
- <p>
- Class <code>AnyRef</code> is the root class of all
- <em>reference types</em>.
- </p>"""
-
- addition(Object_==);
- comments(Object_==) = """
- <code>o == arg0</code> is the same as <code>if (o eq null) arg0 eq null else o.equals(arg0)</code>.
- <p>
- @param arg0 the object to compare against this object for equality.
- @return <code>true</code> if the receiver object is equivalent to the argument; <code>false</code> otherwise.
- </p>
- """
-
- addition(Object_ne);
- comments(Object_ne) = """
- <code>o.ne(arg0)</code> is the same as <code>!(o.eq(arg0))</code>.
- <p>
- @param arg0 the object to compare against this object for reference dis-equality.
- @return <code>false</code> if the argument is not a reference to the receiver object; <code>true</code> otherwise.
- </p>
- """
-
-
- addition(Object_finalize);
- comments(Object_finalize) = """
- This method is called by the garbage collector on the receiver object when garbage
- collection determines that there are no more references to the object.
- <p>
- The details of when and if the <code>finalize</code> method are
- invoked, as well as the interaction between <code>finalize</code>
- and non-local returns and exceptions, are all platform dependent.
- </p>
- """
-
- addition(Object_clone);
- comments(Object_clone) = """
- This method creates and returns a copy of the receiver object.
-
- <p>
- The default implementation of the <code>clone</code> method is platform dependent.
-
- @return a copy of the receiver object.
- </p>
- """
-
- addition(Object_getClass);
- comments(Object_getClass) = """
- Returns a representation that corresponds to the dynamic class of the receiver object.
-
- <p>
- The nature of the representation is platform dependent.
-
- @return a representation that corresponds to the dynamic class of the receiver object.
- </p>
- """
-
- addition(Object_notify);
- comments(Object_notify) = """
- Wakes up a single thread that is waiting on the receiver object's monitor.
- """
-
- addition(Object_notifyAll);
- comments(Object_notifyAll) = """
- Wakes up all threads that are waiting on the receiver object's monitor.
- """
-
- addition(Object_eq);
- comments(Object_eq) = """
- This method is used to test whether the argument (<code>arg0</code>) is a reference to the
- receiver object (<code>this</code>).
-
- <p>
- The <code>eq</code> method implements an
- <a href="http://en.wikipedia.org/wiki/Equivalence_relation">equivalence relation</a> on non-null instances of
- <code>AnyRef</code>:
- <ul>
- <li>It is reflexive: for any non-null instance <code>x</code> of type <code>AnyRef</code>,
- <code>x.eq(x)</code> returns <code>true</code>.</li>
- <li>It is symmetric: for any non-null instances <code>x</code> and <code>y</code> of type
- <code>AnyRef</code>, <code>x.eq(y)</code> returns <code>true</code> if and only
- if <code>y.eq(x)</code> returns <code>true</code>.</li>
- <li>It is transitive: for any non-null instances
- <code>x</code>, <code>y</code>, and <code>z</code> of type <code>AnyRef</code>
- if <code>x.eq(y)</code> returns <code>true</code> and
- <code>y.eq(z)</code> returns
- <code>true</code>, then <code>x.eq(z)</code> returns <code>true</code>.</li>
- </ul>
- Additionally, the <code>eq</code> method has three other properties.
- <ul>
- <li>It is consistent: for any non-null instances <code>x</code> and <code>y</code> of type <code>AnyRef</code>,
- multiple invocations of <code>x.eq(y)</code> consistently returns <code>true</code>
- or consistently returns <code>false</code>.</li>
- <li>For any non-null instance <code>x</code> of type <code>AnyRef</code>,
- <code>x.eq(null)</code> and <code>null.eq(x)</code> returns <code>false</code>.</li>
- <li><code>null.eq(null)</code> returns <code>true</code>.</li>
- </ul>
- </p>
-
- <p> When overriding the <code>equals</code> or
- <code>hashCode</code> methods, it is important to ensure that
- their behavior is consistent with reference equality. Therefore,
- if two objects are references to each other (<code>o1 eq
- o2</code>), they should be equal to each other (<code>o1 ==
- o2</code>) and they should hash to the same value
- (<code>o1.hashCode == o2.hashCode</code>).</p>
-
- @param arg0 the object to compare against this object for reference equality.
- @return <code>true</code> if the argument is a reference to the receiver object; <code>false</code> otherwise.
- </p>
- """
-
- /*******************************************************************/
-
- addition(AnyValClass);
- comments(AnyValClass) = """
- <p>
- Class <code>AnyVal</code> is the root class of all
- <em>value types</em>.
- </p>
- <p>
-    <code>AnyVal</code> has a fixed number of subclasses, which
- describe values which are not implemented as objects in the
- underlying host system.
- </p>
- <p>
- Classes <a href="Double.html"><code>Double</code></a>,
- <a href="Float.html"><code>Float</code></a>,
- <a href="Long.html"><code>Long</code></a>,
- <a href="Int.html"><code>Int</code></a>,
- <a href="Char.html"><code>Char</code></a>,
- <a href="Short.html"><code>Short</code></a>, and
- <a href="Byte.html"><code>Byte</code></a> are together called
- <em>numeric value types</em>.
- Classes <a href="Byte.html"><code>Byte</code></a>,
-    <a href="Short.html"><code>Short</code></a>, and
- <a href="Char.html"><code>Char</code></a>
- are called <em>subrange types</em>. Subrange types, as well as
- <a href="Int.html"><code>Int</code></a> and
- <a href="Long.html"><code>Long</code></a> are called
- <em>integer types</em>, whereas
- <a href="Float.html"><code>Float</code></a> and
- <a href="Double.html"><code>Double</code></a> are called
- <em>floating point types</em>.
- </p>"""
-
- addition(BooleanClass)
- comments(BooleanClass) = """
- <p>
- Class <code>Boolean</code> has only two values: <code>true</code>
- and <code>false</code>.
- </p>"""
-
- def numericValDescr(sym: Symbol) = {
- val maxValue = "MAX_" + sym.name.toString().toUpperCase()
- val minValue = "MIN_" + sym.name.toString().toUpperCase()
- addition(sym)
- comments(sym) = """
- <p>
- Class <code>""" + sym.name + """</code> belongs to the value
- classes whose instances are not represented as objects by the
- underlying host system. There is an implicit conversion from
- instances of <code>""" + sym.name + """</code> to instances of
- <a href="runtime/Rich""" + sym.name + """.html"><code>runtime.Rich""" + sym.name + """</code></a> which
- provides useful non-primitive operations. All value classes inherit
- from class <a href="AnyVal.html"><code>AnyVal</code></a>.
- </p>
- <p>
- Values <code>""" + maxValue + """</code> and <code>""" + minValue + """</code>
-    are defined in object <a href="Math$object.html">scala.Math</a>.
- </p>"""
- }
- (ByteClass :: CharClass :: DoubleClass :: LongClass ::
- FloatClass :: IntClass :: ShortClass :: Nil).foreach(numericValDescr);
-
- addition(UnitClass);
- comments(UnitClass) = """
- <p>
- Class <code>Unit</code> has only one value: <code>()</code>.
- </p>"""
-
- addition(UnitClass);
-/*
- def boxedValDescr(what: String) = {
- val sym = definitions.getClass("java.lang." + what)
- addition(sym)
- comments(sym) = """
- <p>
- Class <code>""" + sym.name + """</code> implements the
- boxing/unboxing from/to value types.
- </p>
- <p>
- Boxing and unboxing enable value types to be treated as objects;
- they provide a unified view of the type system wherein a value
- of any type can ultimately be treated as an object.
- </p>"""
- };
- //("Float" :: "Long" :: "Number" :: "Integer" :: Nil).foreach(boxedValDescr);
-*/
- object exceptions extends collection.JavaConversions.JMapWrapper[String,(Symbol,String)](
- new java.util.TreeMap()) {
- def f(name: String) {
- this("Predef." + name) = (definitions.PredefModule, name)
- }
- f("IndexOutOfBoundsException")
- f("NoSuchElementException")
- f("NullPointerException")
- f("UnsupportedOperationException")
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala b/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
deleted file mode 100644
index 94793dda2e..0000000000
--- a/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
+++ /dev/null
@@ -1,453 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
- * @author Sean McDirmid
- */
-// $Id$
-
-package scala.tools.nsc
-package doc
-
-import scala.collection.mutable
-import compat.Platform.{EOL => LINE_SEPARATOR}
-
-
-/** This class attempts to reverse engineer source code intent from compiler
- * symbol objects.
- *
- * @author Sean McDirmid
- */
-trait ModelExtractor {
- val global: Global
- import global._
- def settings: doc.Settings
-
- def assert(b: Boolean) {
- if (!b) throw new Error
- }
-
- def assert(b: Boolean, message: Any) {
- if (!b) throw new Error(message.toString)
- }
-
- case class Tag(tag: String, option: String, body: String)
-
- case class Comment(body: String, attributes: List[Tag]) {
- def decodeAttributes = {
- val map = new mutable.LinkedHashMap[String, List[(String, String)]] {
- override def default(key: String) = Nil
- }
- attributes.foreach(a => {
- map(a.tag) = map(a.tag) ::: List((a.option, a.body))
- });
- map
- }
- }
- protected def decode(sym: Symbol) =
- if (sym == definitions.ScalaObjectClass || sym == definitions.ObjectClass)
- definitions.AnyRefClass
- else sym match {
- case sym: ModuleClassSymbol => sym.sourceModule
- case sym => sym
- }
-
- protected def decodeComment(comment0: String): Comment = {
- val comment = { // discard outmost comment delimiters if present
- val begin = if (comment0 startsWith "/**") 3 else 0
- val end = comment0.length - (if (comment0 endsWith "*/") 2 else 0)
- comment0.substring(begin, end)
- }
- val tok = new java.util.StringTokenizer(comment, LINE_SEPARATOR)
- val buf = new StringBuilder
- type AttrDescr = (String, String, StringBuilder)
- val attributes = new collection.mutable.ListBuffer[AttrDescr]
- var attr: AttrDescr = null
- while (tok.hasMoreTokens) {
- val s = tok.nextToken.replaceFirst("\\p{Space}?\\*", "")
- val mat1 = pat1.matcher(s)
- if (mat1.matches) {
- attr = (mat1.group(1), null, new StringBuilder(mat1.group(2)))
- //if (kind != CONSTRUCTOR)
- attributes += attr
- } else {
- val mat2 = pat2.matcher(s)
- if (mat2.matches) {
- attr = (mat2.group(1), mat2.group(2), new StringBuilder(mat2.group(3)))
- //if (kind != CLASS)
- attributes += attr
- } else if (attr ne null)
- attr._3.append(s + LINE_SEPARATOR)
- else
- buf.append(s + LINE_SEPARATOR)
- }
- }
- Comment(buf.toString, attributes.toList.map({x => Tag(x._1,x._2,x._3.toString)}))
- }
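
To make the behaviour of decodeComment concrete, here is a minimal, self-contained sketch that classifies the lines of a sample comment with the same pat1/pat2 patterns defined near the end of this trait; the object name, the sample text, and the printed labels are illustrative only.

object DecodeCommentSketch {
  import java.util.regex.Pattern
  // The same one-argument and two-argument tag patterns defined near the end of this trait.
  val pat1 = Pattern.compile(
    "[ \t]*@(author|deprecated|owner|pre|return|see|since|todo|version|ex|note)[ \t]*(.*)")
  val pat2 = Pattern.compile(
    "[ \t]*@(exception|param|throws)[ \t]+(\\p{Graph}*)[ \t]*(.*)")

  def main(args: Array[String]) {
    val raw = "/** Sums two numbers.\n *  @param x the first operand\n *  @return the sum\n */"
    val inner = raw.stripPrefix("/**").stripSuffix("*/")
    for (line0 <- inner.split("\n"); line = line0.replaceFirst("\\p{Space}?\\*", "")) {
      val (m1, m2) = (pat1.matcher(line), pat2.matcher(line))
      if (m2.matches) println("two-argument tag @" + m2.group(1) + " " + m2.group(2) + ": " + m2.group(3))
      else if (m1.matches) println("one-argument tag @" + m1.group(1) + ": " + m1.group(2))
      else println("body: " + line)
    }
    // Prints the description line as body text, @param as a two-argument tag
    // (name "x", body "the first operand"), and @return as a one-argument tag.
  }
}
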
-
- sealed abstract class Entity(val sym: Symbol) {
- private[ModelExtractor] def sym0 = sym
-
- override def toString = sym.toString
- def comment: Option[String] = global.comments.get(sym)
- // comments decoded, now what?
- def attributes = sym.annotations
- def decodeComment: Option[Comment] = {
- val comment0 = this.comment
- if (comment0.isEmpty) None
- else Some(ModelExtractor.this.decodeComment(comment0.get.trim))
- }
- protected def accessQualified(core: String, qual: String) = core match {
- case "public" => "" // assert(qual == null); "";
- case core => core + (if (qual == null) "" else "[" + qual + "]")
- }
-
- def flagsString = {
- import symtab.Flags
- //val isLocal = sym.hasFlag(Flags.LOCAL)
- val x =
- if (sym hasFlag Flags.PRIVATE) "private"
- else if (sym hasFlag Flags.PROTECTED) "protected"
- else "public"
- var string = accessQualified(x,
- if (sym hasFlag Flags.LOCAL) "this"
- else if (sym.privateWithin != null && sym.privateWithin != NoSymbol)
- sym.privateWithin.nameString
- else null
- )
- def f(flag: Int, str: String) {
- if (sym hasFlag flag) string = string + " " + str
- }
- f(Flags.IMPLICIT, "implicit")
- f(Flags.SEALED, "sealed")
- f(Flags.OVERRIDE, "override")
- f(Flags.CASE, "case")
- if (!sym.isTrait) f(Flags.ABSTRACT, "abstract")
- if (!sym.isModule) f(Flags.FINAL, "final")
- if (!sym.isTrait) f(Flags.DEFERRED, "abstract")
- string.trim
- }
- def listName = name
- def name = sym.nameString
- def fullName(sep: Char) = sym.fullNameString(sep)
- def kind: String
- def header { }
- def typeParams: List[TypeParam] = Nil
- def valueParams: List[List[ValueParam]] = Nil
- def resultType: Option[Type] = None
- def parents: Iterable[Type] = Nil
- def lo: Option[Type] = sym.info match {
- case TypeBounds(lo, hi) if decode(lo.typeSymbol) != definitions.NothingClass => Some(lo)
- case _ => None
- }
- def hi: Option[Type] = sym.info match {
- case TypeBounds(lo, hi) if decode(hi.typeSymbol) != definitions.AnyClass => Some(hi)
- case _ => None
- }
- def variance = {
- import symtab.Flags._
- if (sym hasFlag COVARIANT) "+"
- else if (sym hasFlag CONTRAVARIANT) "-"
- else ""
- }
- def overridden: Iterable[Symbol] = Nil
- }
-
- class ValueParam(sym: Symbol) extends Entity(sym) {
- override def resultType = Some(sym.tpe)
- //def kind = if (sym.isPublic) "val" else "";
- def kind = ""
- }
-
- class ConstructorParam(sym: Symbol) extends ValueParam(sym) {
- override protected def accessQualified(core: String, qual: String) = core match {
- case "public" => "val"
- case "protected" => super.accessQualified(core,qual) + " val"
- case "private" if qual == "this" => ""
- case core => super.accessQualified(core, qual)
- }
- }
-
- def ValueParam(sym: Symbol) = new ValueParam(sym)
- class TypeParam(sym: Symbol) extends Entity(sym) {
- def kind = ""
- }
- def TypeParam(sym: Symbol) = new TypeParam(sym)
-
- trait Clazz extends ClassOrObject {
- private def csym = sym.asInstanceOf[TypeSymbol]
- override def typeParams = csym.typeParams.map(TypeParam)
- override def valueParams = {
- if (constructorArgs.isEmpty) Nil
- else constructorArgs.valuesIterator.toList :: Nil
- }
- def isTrait = csym.isTrait
- override def kind = if (sym.isTrait) "trait" else "class"
- }
-
- trait Object extends ClassOrObject {
- override def kind = "object"
- }
-
- case class Package(override val sym: Symbol) extends Entity(sym) {
- override def kind = "package"
- override def name = fullName('.')
- }
-
- trait TopLevel extends ClassOrObject
- class TopLevelClass (sym: Symbol) extends Entity(sym) with TopLevel with Clazz
- class TopLevelObject(sym: Symbol) extends Entity(sym) with TopLevel with Object {
- override def attributes = sym.moduleClass.annotations
- }
-
- def compare(pathA: List[ClassOrObject], pathB: List[ClassOrObject]): Int = {
- var pA = pathA
- var pB = pathB
- while (true) {
- if (pA.isEmpty) return -1
- if (pB.isEmpty) return +1
- val diff = pA.head.name compare pB.head.name
- if (diff != 0) return diff
- pA = pA.tail
- pB = pB.tail
- }
- 0
- }
-
- def isAccessible(sym: Symbol): Boolean = {
- import symtab.Flags._
- settings.memberaccess.value match {
- case "private" => sym.isPublic || (sym hasFlag PROTECTED) || (sym hasFlag PRIVATE)
- case "protected" => sym.isPublic || (sym hasFlag PROTECTED)
- case "public" => sym.isPublic
- case _ => false
- }
- }
-
- trait ClassOrObject extends Entity {
- def path: List[ClassOrObject] = this :: Nil
- override def listName = path map (_.name) mkString "."
-
- object freshParents extends mutable.LinkedHashSet[Type] {
- this ++= sym.tpe.parents
- this.toList foreach (this --= _.parents)
- }
- object constructorArgs extends mutable.LinkedHashMap[Symbol, ValueParam] {
- import symtab.Flags._
- sym.constrParamAccessors.filter(arg => ! (arg hasFlag SYNTHETIC)).foreach(arg => {
- val str = flagsToString(arg.flags)
- assert((arg hasFlag PRIVATE) && (arg hasFlag LOCAL), arg)
- val argName = arg.name.toString.trim
- val actual = sym.tpe.decls.iterator.find(e => {
- val eName = e.name.toString.trim;
- argName == eName && {
- val str = flagsToString(e.flags);
- !e.hasFlag(LOCAL);
- }
- });
- val param = actual getOrElse arg
- this(param) = new ConstructorParam(param)
- });
- }
- object decls extends mutable.LinkedHashMap[Symbol, Member] {
- sym.tpe.decls.iterator.foreach(e => {
- if (!constructorArgs.contains(e)) {
- val m = Member(e)
- if (!m.isEmpty && !this.contains(e)) this.put(e, m.get)
- }
- });
- }
- def members0(f: Symbol => Boolean) = decls.filterKeys(f).valuesIterator.toList
- def members(c: Category): Iterable[Member] = members0(c.f)
- object inherited extends mutable.LinkedHashMap[Symbol, List[Member]]() {
- override def default(tpe: Symbol) = Nil
- for (m <- sym.tpe.members if !sym.tpe.decls.iterator.contains(m) &&
- (Values.f(m) || Methods.f(m))) {
- val o = m.overridingSymbol(sym)
- if (o == NoSymbol) {
- val parent = decode(m.enclClass)
- val mo = Member(m)
- if (!mo.isEmpty) {
- this(parent) = mo.get :: this(parent)
- }
- }
- }
- }
- override def parents = freshParents
- abstract class Member(sym: Symbol) extends Entity(sym) {
- private def overriding = sym.allOverriddenSymbols
- override def comment = super.comment match {
- case ret @ Some(comment) =>
- ret
- case None =>
- val o = overriding.find(comments.contains)
- o.map(comments.apply)
- }
- }
- abstract class ValDef(sym: Symbol) extends Member(sym) {
- override def resultType = Some(resultType0)
- protected def resultType0: Type
- override def overridden: Iterable[Symbol] = {
- var ret: mutable.LinkedHashSet[Symbol] = null
- for (parent <- ClassOrObject.this.parents) {
- val sym0 = sym.overriddenSymbol(parent.typeSymbol)
- if (sym0 != NoSymbol) {
- if (ret == null) ret = new mutable.LinkedHashSet[Symbol];
- ret += sym0
- }
- }
- if (ret == null) Nil else ret
- }
- }
- case class Def(override val sym : TermSymbol) extends ValDef(sym) {
- override def resultType0 = sym.tpe.finalResultType
- override def typeParams = sym.tpe.typeParams.map(TypeParam)
- override def valueParams = methodArgumentNames.get(sym) match {
- case Some(argss) if argss.length > 1 || (!argss.isEmpty && !argss(0).isEmpty) =>
- argss map (_.map(ValueParam))
- case _ =>
- var i = 0
- val ret = for (tpe <- sym.tpe.paramTypes) yield {
- val ret = sym.newValueParameter(sym.pos, newTermName("arg" + i));
- ret setInfo tpe
- i += 1
- ValueParam(ret)
- }
- if (ret.isEmpty) Nil
- else ret :: Nil
- }
- override def kind = "def"
- }
- case class Val(override val sym: TermSymbol) extends ValDef(sym) {
- import symtab.Flags._
- def resultType0: Type = sym.tpe
- override def kind: String =
- if (sym hasFlag ACCESSOR) {
- val setterName = nme.getterToSetter(sym.name)
- val setter = sym.owner.info.decl(setterName)
- val lazyMod = if (sym hasFlag LAZY) "lazy " else ""
- lazyMod + (if (setter == NoSymbol) "val" else "var")
- } else {
- assert(sym hasFlag JAVA)
- if (sym hasFlag FINAL) "val" else "var"
- }
- }
-
- case class AbstractType(override val sym: Symbol) extends Member(sym) {
- override def kind = "type"
- }
-
- abstract class NestedClassOrObject(override val sym: Symbol) extends Member(sym) with ClassOrObject {
- override def path: List[ClassOrObject] = ClassOrObject.this.path ::: super.path
- }
-
- case class NestedClass(override val sym: ClassSymbol) extends NestedClassOrObject(sym) with Clazz
-
- case class NestedObject(override val sym: ModuleSymbol) extends NestedClassOrObject(sym) with Object {
- override def attributes = sym.moduleClass.annotations
- }
-
- def isVisible(sym: Symbol): Boolean = {
- import symtab.Flags._
- if (sym.isLocalClass) return false
- if (sym.isLocal) return false
- if (sym.isPrivateLocal) return false
- // the next line used to return !inIDE - now it returns true. The underlying
- // logic being applied here is somewhat mysterious (if PRIVATE return isVisible == true?)
- // but changing it causes the docgenerator.scala test case to break, so I leave as-is.
- if (sym hasFlag PRIVATE) return true
- if (sym hasFlag SYNTHETIC) return false
- if (sym hasFlag BRIDGE) return false
- if ((sym.nameString indexOf "$") != -1) return false
- if ((sym hasFlag CASE) && sym.isMethod) return false
- true
- }
-
- def Member(sym: Symbol): Option[Member] = {
- import global._
- import symtab.Flags
- if (!isVisible(sym))
- None
- else if (!isAccessible(sym))
- None
- else if (sym hasFlag Flags.ACCESSOR) {
- if (sym.isSetter) return None;
- assert(sym.isGetter);
- Some[Member](new Val(sym.asInstanceOf[TermSymbol]))
- }
- else if (sym.isValue && !sym.isMethod && !sym.isModule) {
- if (!sym.hasFlag(Flags.JAVA)) {
- Console.println("SYM: " + sym + " " + sym.fullNameString('.'))
- Console.println("FLA: " + Flags.flagsToString(sym.flags))
- }
- assert(sym hasFlag Flags.JAVA)
- Some[Member](new Val(sym.asInstanceOf[TermSymbol]))
- }
- else if (sym.isValue && !sym.isModule) {
- val str = Flags.flagsToString(sym.flags)
- assert(sym.isMethod)
- Some[Member](new Def(sym.asInstanceOf[TermSymbol]))
- }
- else if (sym.isAliasType || sym.isAbstractType)
- Some(new AbstractType(sym))
- else if (sym.isClass)
- Some(new NestedClass(sym.asInstanceOf[ClassSymbol]))
- else if (sym.isModule)
- Some(new NestedObject(sym.asInstanceOf[ModuleSymbol]))
- else
- None
- }
-
- }
- case class Category(label: String)(g: Symbol => Boolean) {
- val f = g
- def plural = label + "s"
- }
- val Constructors = new Category("Additional Constructor")(e => e.isConstructor && !e.isPrimaryConstructor) {
- // override def plural = "Additional Constructors";
- }
- val Objects = Category("Object")(_.isModule);
- val Classes = new Category("Class")(sym => sym.isClass || (sym == definitions.AnyRefClass)) {
- override def plural = "Classes"
- }
- val Values = new Category("Value")(e => e.isValue && e.hasFlag(symtab.Flags.ACCESSOR)) {
- override def plural = "Values and Variables"
- }
- val Methods = Category("Method")(e => e.isValue && e.isMethod && !e.isConstructor && !e.hasFlag(symtab.Flags.ACCESSOR));
- val Types = Category("Type")(e => e.isAliasType || e.isAbstractType);
-
- val categories = Constructors :: Types :: Values :: Methods :: Classes :: Objects :: Nil;
-
-
- import java.util.regex.Pattern
- // patterns for standard tags with 1 and 2 arguments
- private val pat1 = Pattern.compile(
- "[ \t]*@(author|deprecated|owner|pre|return|see|since|todo|version|ex|note)[ \t]*(.*)")
- private val pat2 = Pattern.compile(
- "[ \t]*@(exception|param|throws)[ \t]+(\\p{Graph}*)[ \t]*(.*)")
-
- def sort[E <: Entity](entities: Iterable[E]): Iterable[E] = {
- val set = new collection.immutable.TreeSet[E]()(new Ordering[E] {
- def compare(eA : E, eB: E): Int = {
- if (eA eq eB) return 0;
- (eA, eB) match {
- case (eA: ClassOrObject, eB: ClassOrObject) =>
- val diff = ModelExtractor.this.compare(eA.path, eB.path)
- if (diff!= 0) return diff
- case _ =>
- }
- if (eA.getClass != eB.getClass) {
- val diff = eA.getClass.getName.compare(eB.getClass.getName)
- assert(diff != 0)
- return diff
- }
- if (!eA.sym0.isPackage) {
- val diff = eA.sym0.nameString compare eB.sym0.nameString
- if (diff != 0) return diff
- }
- val diff0 = eA.sym0.fullNameString compare eB.sym0.fullNameString
- assert(diff0 != 0)
- diff0
- }
- })
- set ++ entities
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelFrames.scala b/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
deleted file mode 100644
index 970a179cc8..0000000000
--- a/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
+++ /dev/null
@@ -1,396 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
- * @author Sean McDirmid
- */
-// $Id$
-
-package scala.tools.nsc
-package doc
-
-import java.io.{File, FileWriter}
-import scala.util.NameTransformer
-import scala.collection.mutable
-import scala.compat.Platform.{EOL => LINE_SEPARATOR}
-import scala.xml.{NodeSeq, Text, Unparsed, Utility}
-
-/** This class provides HTML document framing functionality.
- *
- * @author Sean McDirmid, Stephane Micheloud
- */
-trait ModelFrames extends ModelExtractor {
- import DocUtil._
- def settings: doc.Settings
- import global.definitions.{AnyClass, AnyRefClass}
-
- val SyntheticClasses = new scala.collection.mutable.HashSet[global.Symbol];
- {
- import global.definitions._
- global.definitions.init
- SyntheticClasses ++= List(
- NothingClass, NullClass, AnyClass, AnyRefClass, AnyValClass,
- //value classes
- BooleanClass, ByteClass, CharClass, IntClass, LongClass, ShortClass,
- FloatClass, DoubleClass, UnitClass)
- }
-
- val outdir = settings.outdir.value
- val windowTitle = settings.windowtitle.value
- val docTitle = load(settings.doctitle.value)
-
- val stylesheetSetting = settings.stylesheetfile
-
- def pageHeader = load(settings.pageheader.value)
- def pageFooter = load(settings.pagefooter.value)
- def pageTop = load(settings.pagetop.value)
- def pageBottom = load(settings.pagebottom.value)
-
- def contentFrame = "contentFrame"
- def classesFrame = "classesFrame"
- def modulesFrame = "modulesFrame"
-
- protected val FILE_EXTENSION_HTML = ".html"
- protected val NAME_SUFFIX_OBJECT = "$object"
- protected val NAME_SUFFIX_PACKAGE = "$package"
-
- def rootTitle = (<div class="page-title">{docTitle}</div>);
- def rootDesc =
- (<p>{load("This document is the API specification for " + windowTitle)}</p>);
-
- final def hasLink(sym: global.Symbol): Boolean =
- if (sym == global.NoSymbol) false
- else if (hasLink0(sym)) true
- else hasLink(decode(sym.owner))
-
- def hasLink0(sym: global.Symbol): Boolean = true
-
- abstract class Frame extends UrlContext {
- { // just save.
- save(page(title, body, hasBody));
- }
- def path: String // relative to outdir
- def relative: String = {
- if (path eq null) return "foo"
- assert(path ne null)
- var idx = 0
- var ct = new StringBuilder
- while (idx != -1) {
- idx = path.indexOf('/', idx)
- //System.err.println(path + " idx=" + idx)
- ct.append(if (idx != -1) "../" else "")
- idx += (if (idx == -1) 0 else 1)
- }
- ct.toString
- }
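
For clarity, a small standalone sketch of the relative helper above (same logic, hypothetical object name), showing the output it produces for a nested page and for a top-level page.

object RelativePathSketch {
  def relative(path: String): String = {          // same logic as the method above
    var idx = 0
    val ct = new StringBuilder
    while (idx != -1) {
      idx = path.indexOf('/', idx)
      ct.append(if (idx != -1) "../" else "")
      idx += (if (idx == -1) 0 else 1)
    }
    ct.toString
  }
  def main(args: Array[String]) {
    println(relative("scala/collection/Seq$content"))  // "../../" -- two levels below the root
    println(relative("modules"))                       // ""       -- already at the root
  }
}
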
- def save(nodes: NodeSeq) = {
- val path = this.path
- if (path.startsWith("http://")) throw new Error("frame: " + this)
- val path0 = outdir + File.separator + path + FILE_EXTENSION_HTML
- //if (settings.debug.value) inform("Writing XML nodes to " + path0)
- val file = new File(path0)
- val parent = file.getParentFile()
- if (!parent.exists()) parent.mkdirs()
- val writer = new FileWriter(file)
- val str = dtype + LINE_SEPARATOR + nodes.toString()
- writer.write(str, 0, str.length())
- writer.close()
- }
- protected def body: NodeSeq
- protected def title: String
- protected def hasBody = true
-
- //def urlFor(entity: Entity, target: String): NodeSeq
- def urlFor(entity: Entity): String = {
- val ret = this.urlFor(entity.sym)
- assert(ret != null);
- ret
- }
- def link(entity: Entity, target: String) = aref(urlFor(entity), target, entity.name)
- protected def shortHeader(entity: Entity): NodeSeq
- protected def longHeader(entity: Entity): NodeSeq
- import global._
- import symtab.Flags
-
- def urlFor(sym: Symbol): String = sym match {
- case psym : ModuleSymbol if psym.isPackage =>
- urlFor0(sym, sym) + FILE_EXTENSION_HTML
- case sym if !hasLink(sym) =>
- null
- case sym if sym == AnyRefClass =>
- urlFor0(sym, sym) + FILE_EXTENSION_HTML
- case msym: ModuleSymbol =>
- urlFor0(sym, sym) + FILE_EXTENSION_HTML
- case csym: ClassSymbol =>
- urlFor0(sym, sym) + FILE_EXTENSION_HTML
- case _ =>
- val cnt = urlFor(decode(sym.owner))
- if (cnt == null) null else cnt + "#" + docName(sym)
- }
-
- def docName(sym: Symbol): String = {
- def javaParams(paramTypes: List[Type]): String = {
- def javaName(pt: Type): String = {
- val s = pt.toString
- val matVal = patVal.matcher(s)
- if (matVal.matches) matVal.group(1).toLowerCase
- else s.replaceAll("\\$", ".")
- }
- paramTypes.map(pt => javaName(pt)).mkString("(", ",", ")")
- }
- def scalaParams(paramTypes: List[Type]): String = {
- def scalaName(pt: Type): String = pt.toString.replaceAll(" ", "")
- paramTypes.map(pt => scalaName(pt)).mkString("(", ",", ")")
- }
- java.net.URLEncoder.encode(sym.nameString +
- (sym.tpe match {
- case MethodType(params, _) =>
- val paramTypes = params map (_.tpe)
- if (sym hasFlag Flags.JAVA) javaParams(paramTypes)
- else scalaParams(paramTypes)
- case PolyType(_, MethodType(params, _)) =>
- val paramTypes = params map (_.tpe)
- if (sym hasFlag Flags.JAVA) javaParams(paramTypes)
- else scalaParams(paramTypes)
- case _ => ""
- }), encoding)
- }
-
- def urlFor0(sym: Symbol, orig: Symbol): String =
- (if (sym == NoSymbol) "XXX"
- else if (sym.owner.isPackageClass) rootFor(sym) + pkgPath(sym)
- else urlFor0(decode(sym.owner), orig) + "." + NameTransformer.encode(Utility.escape(sym.nameString))
- ) +
- (sym match {
- case msym: ModuleSymbol =>
- if (msym hasFlag Flags.PACKAGE) NAME_SUFFIX_PACKAGE
- else NAME_SUFFIX_OBJECT
- case csym: ClassSymbol if csym.isModuleClass =>
- if (csym hasFlag Flags.PACKAGE) NAME_SUFFIX_PACKAGE
- else NAME_SUFFIX_OBJECT
- case _ =>
- ""
- })
- }
- def pkgPath(sym : global.Symbol) = sym.fullNameString('/') match {
- case "<empty>" => "_empty_"
- case path => path
- }
-
- protected def rootFor(sym: global.Symbol) = ""
-
- abstract class AllPackagesFrame extends Frame {
- override lazy val path = "modules"
- override lazy val title = "List of all packages"
- def packages: Iterable[Package]
- override def body: NodeSeq =
- (<div>
- <div class="doctitle-larger">{windowTitle}</div>
- <a href="all-classes.html" target={classesFrame} onclick="resetKind();">{"All objects and classes"}</a>
- </div>
- <div class="kinds">Packages</div>
- <ul class="list">{sort(packages).mkXML("","\n","")(pkg => {
- (<li><a href={urlFor(pkg)} target={classesFrame} onclick="resetKind();">
- {pkg.fullName('.')}</a></li>)
- })}
- </ul>);
- }
- abstract class PackagesContentFrame extends Frame {
- lazy val path = "root-content"
- lazy val title = "All Packages"
- def packages : Iterable[Package]
- //def modules: TreeMap[String, ModuleClassSymbol]
- def body: NodeSeq =
- {rootTitle} ++ {rootDesc} ++ (<hr/>) ++
- (<table cellpadding="3" class="member" summary="">
- <tr><td colspan="2" class="title">Package Summary</td></tr>
- {sort(packages).mkXML("","\n","")(pkg => (<tr><td class="signature">
- <code>package
- {aref(pkgPath(pkg.sym) + "$content.html", "_self", pkg.fullName('.'))}
- </code>
- </td></tr>))}
- </table>);
- }
-
- val classFrameKinds = Classes :: Objects :: Nil;
- abstract class ListClassFrame extends Frame {
- def classes: Iterable[ClassOrObject]
- def navLabel: String
- private def navPath = {
- val p = path;
- (if (p endsWith NAME_SUFFIX_PACKAGE)
- p.substring(0, p.length() - NAME_SUFFIX_PACKAGE.length());
- else p) + navSuffix;
- }
- protected def navSuffix = "$content.html"
-
- def body: NodeSeq = {
- val nav = if (navLabel == null) NodeSeq.Empty else
- (<table class="navigation" summary="">
- <tr><td valign="top" class="navigation-links">
- {aref(navPath, contentFrame, navLabel)}
- </td></tr>
- </table>);
- val ids = new mutable.LinkedHashSet[String]
- def idFor(kind: Category, t: Entity)(seq : NodeSeq): NodeSeq = {
- val ch = t.listName.charAt(0);
- val id = kind.plural + "_" + ch;
- if (ids contains id) (<li>{seq}</li>);
- else {
- ids += id;
- (<li id={id}>{seq}</li>)
- };
- }
- val body = (<div>{classFrameKinds.mkXML("","\n","")(kind => {
- val classes = sort(this.classes.filter(e => kind.f(e.sym)));
- if (classes.isEmpty) NodeSeq.Empty; else
- (<div id={kind.plural} class="kinds">{Text(kind.plural)}</div>
- <ul class="list">
- {classes.mkXML("","\n","")(cls => {
- idFor(kind, cls)(
- aref(urlFor(cls), contentFrame, cls.listName) ++ optional(cls)
- );
- })}
- </ul>);
- })}</div>);
- nav ++ body
- }
- def optional(cls: ClassOrObject): NodeSeq = NodeSeq.Empty
- }
-
- abstract class PackageContentFrame extends Frame {
- override def path = pkgPath(pkg.sym) + "$content"
- override def title = "All classes and objects in " + pkg.fullName('.')
- protected def pkg: Package
- protected def classes: Iterable[ClassOrObject]
- def body: NodeSeq =
- {rootTitle} ++ {rootDesc} ++ {classFrameKinds.mkXML("","\n","")(kind => {
- val classes = sort(this.classes.filter(e => kind.f(e.sym) && e.isInstanceOf[TopLevel]));
- if (classes.isEmpty) NodeSeq.Empty else
- (<table cellpadding="3" class="member" summary="">
- <tr><td colspan="2" class="title">{kind.label} Summary</td></tr>
- {classes.mkXML("","\n","")(shortHeader)}
- </table>)
- })};
- }
-
- abstract class ClassContentFrame extends Frame {
- def clazz: ClassOrObject
- def body: NodeSeq =
- (<xml:group>
- {pageHeader}{navigation}{pageTop}
- {header0}{longHeader(clazz)}
- {pageBottom}{navigation}{pageFooter}
- </xml:group>);
- final def path = urlFor0(clazz.sym, clazz.sym)
- private def navigation: NodeSeq =
- (<table class="navigation" summary="">
- <tr>
- <td valign="top" class="navigation-links">
- <!-- <table><tr></tr></table> -->
- </td>
- <td align="right" valign="top" style="white-space:nowrap;" rowspan="2">
- <div class="doctitle-larger">{windowTitle}</div>
- </td>
- </tr>
- <tr><td></td></tr>
- </table>);
- private def header0: NodeSeq = {
- val owner = decode(clazz.sym.owner)
- (<xml:group>
- <div class="entity">
- {aref(urlFor(owner), "_self", owner.fullNameString('.'))}
- <br/>
- <span class="entity">{Text(clazz.kind)} {Text(clazz.name)}</span>
- </div><hr/>
- <div class="source">
- {
- if (SyntheticClasses contains clazz.sym)
- Text("[Source: none]")
- else {
- val name = owner.fullNameString('/') + (if (owner.isPackage) "/" + clazz.name else "")
- Text("[source: ") ++
- (<a class={name} href=""><code>{name + ".scala"}</code></a>) ++
- Text("]")
- }
- }
- </div><hr/>
- </xml:group>)
- }
- }
-
- val index =
- (<frameset cols="25%, 75%">
- <frameset rows="50%, 28, 50%">
- <frame src="modules.html" name={modulesFrame}></frame>
- <frame src="nav-classes.html" name="navigationFrame"></frame>
- <frame src="all-classes.html" name={classesFrame}></frame>
- </frameset>
- <frame src="root-content.html" name={contentFrame}></frame>
- </frameset>);
-
- val root = (<b></b>);
-
- abstract class RootFrame extends Frame {
- def title = windowTitle
- def body = index
- def path = "index"
- override def hasBody = false
- }
-
- val indexChars = 'A' :: 'B' :: 'C' :: 'D' :: 'E' :: 'G' :: 'I' :: 'L' :: 'M' :: 'P' :: 'R' :: 'T' :: 'V' :: 'X' :: Nil;
-
- abstract class NavigationFrame extends Frame {
- def title="navigation"
- def path="nav-classes"
- override def body0(hasBody: Boolean, nodes: NodeSeq): NodeSeq =
- if (!hasBody) nodes
- else (<body style="margin:1px 0 0 1px; padding:1px 0 0 1px;">{nodes}</body>);
- def body =
- (<form>
- <select id="kinds" onchange="gotoKind()">
- <option value="#Classes" selected="selected">Classes</option>
- <option value="#Objects">Objects</option>
- </select>
- <span id="alphabet" style="font-family:Courier;word-spacing:-8px;">{
- indexChars.mkXML("","\n","")(c => {
- (<a href={Unparsed("javascript:gotoName(\'" + c + "\')")}>{c}</a>)
- });
- }
- </span>
- </form>)
- }
-
- def copyResources = {
- import java.io._
- val loader = this.getClass().getClassLoader()
- def basename(path: String): String = {
- val pos = path lastIndexOf System.getProperty("file.separator", "/")
- if (pos != -1) path.substring(pos + 1) else path
- }
- def copyResource(name: String, isFile: Boolean) = try {
- val (in, outfile) =
- if (isFile)
- (new FileInputStream(name), basename(name))
- else {
- // The name of a resource is a '/'-separated path name that identifies the resource.
- (loader.getResourceAsStream("scala/tools/nsc/doc/" + name), name)
- }
- val out = new FileOutputStream(new File(outdir + File.separator + outfile))
- val buf = new Array[Byte](1024)
- var len = 0
- while (len != -1) {
- out.write(buf, 0, len)
- len = in.read(buf)
- }
- in.close()
- out.close()
- } catch {
- case _ =>
- System.err.println("Resource file '" + name + "' not found")
- }
- copyResource(stylesheetSetting.value, !stylesheetSetting.isDefault)
- copyResource("script.js", false)
- }
-
- private val patVal = java.util.regex.Pattern.compile(
- "scala\\.(Byte|Boolean|Char|Double|Float|Int|Long|Short)")
-}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelToXML.scala b/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
deleted file mode 100644
index 19d67ab2fd..0000000000
--- a/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
+++ /dev/null
@@ -1,368 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
- * @author Sean McDirmid
- */
-// $Id$
-
-package scala.tools.nsc
-package doc
-
-import scala.xml._
-
-/** This class has functionality to format source code models as XML blocks.
- *
- * @author Sean McDirmid, Stephane Micheloud
- */
-trait ModelToXML extends ModelExtractor {
- import global._
- import definitions.AnyRefClass
- import DocUtil._
- // decode entity into XML.
- type Frame
-
- protected def urlFor(sym: Symbol)(implicit frame: Frame): String
- protected def anchor(sym: Symbol)(implicit frame: Frame): NodeSeq
-
- def aref(href: String, label: String)(implicit frame: Frame): NodeSeq
-/*
- def link(entity: Symbol)(implicit frame: Frame): NodeSeq = {
- val url = urlFor(entity)
- // nothing to do but be verbose.
- if (url == null)
- Text(entity.owner.fullNameString('.') + '.' + entity.nameString)
- else
- aref(url, entity.nameString)
- }
-*/
- def link(entity: Symbol, label: String)(implicit frame: Frame): NodeSeq = {
- val url = urlFor(entity)
- if (url == null) { // external link (handled by script.js)
- val (href, attr) =
- if (entity.isClass || (entity==AnyRefClass))
- ("", entity.owner.fullNameString('/') + '/' + entity.nameString)
- else
- ("#" + entity.nameString, entity.owner.fullNameString('/'))
- val name = entity.owner.fullNameString('.') + '.' + entity.nameString
- <a href={Utility.escape(href)} class={attr} target="contentFrame">{name}</a>;
- }
- else
- aref(url, label)
- }
-
- def link(entity: Symbol)(implicit frame: Frame): NodeSeq =
- link(entity, entity.nameString)
-
- def link(tpe: Type)(implicit frame: Frame): NodeSeq = {
- if (!tpe.typeArgs.isEmpty) {
- if (definitions.isFunctionType(tpe)) {
- val (args,r) = tpe.normalize.typeArgs.splitAt(tpe.normalize.typeArgs.length - 1);
- args.mkXML("(", ", ", ")")(link) ++ Text(" => ") ++ link(r.head);
- } else if (definitions.isRepeatedParamType(tpe)) {
- assert(tpe.typeArgs.length == 1)
- link(tpe.typeArgs(0)) ++ Text("*")
- } else if (tpe.typeSymbol == definitions.ByNameParamClass) {
- assert(tpe.typeArgs.length == 1)
- Text("=> ") ++ link(tpe.typeArgs(0))
- } else if (tpe.typeSymbol.name.toString.startsWith("Tuple") &&
- tpe.typeSymbol.owner.name == nme.scala_.toTypeName) {
- tpe.typeArgs.mkXML("(", ", ", ")")(link)
- } else
- link(decode(tpe.typeSymbol)) ++ tpe.typeArgs.surround("[", "]")(link)
- } else tpe match {
- case PolyType(tparams,result) =>
- link(result) ++ tparams.surround("[", "]")(link)
- case RefinedType(parents,_) =>
- val parents1 =
- if ((parents.length > 1) &&
- (parents.head.typeSymbol eq definitions.ObjectClass)) parents.tail;
- else parents;
- parents1.mkXML(Text(""), <code> with </code>, Text(""))(link);
- case _ =>
- if (tpe.typeSymbol == NoSymbol) {
- throw new Error(tpe + " has no type class " + tpe.getClass)
- }
- link(decode(tpe.typeSymbol))
- }
- }
-
- private def printIf[T](what: Option[T], before: String, after: String)(f: T => NodeSeq): NodeSeq =
- if (what.isEmpty) Text("")
- else Text(before) ++ f(what.get) ++ Text(after)
-
- def bodyFor(entity: Entity)(implicit frame: Frame): NodeSeq = try {
- var seq = {entity.typeParams.surround("[", "]")(e => {
- Text(e.variance) ++ <em>{e.name}</em> ++
- {printIf(e.hi, " <: ", "")(link)} ++
- {printIf(e.lo, " >: ", "")(link)}
- })} ++ printIf(entity.hi, " <: ", "")(link) ++
- printIf(entity.lo, " >: ", "")(link);
- {entity.valueParams.foreach(xs => {
- seq = seq ++ xs.mkXML("(", ", ", ")")(arg =>
- {
- val str = arg.flagsString.trim
- if (str.length == 0) NodeSeq.Empty
- else <code>{Text(str)} </code>
- } ++
- <em>{arg.name}</em> ++ (try {
-
- Text(" : ") ++ link(arg.resultType.get)
- } catch {
- case e : Throwable => System.err.println("ARG " + arg + " in " + entity); throw e
- })
- );
- seq
- })};
- seq ++ {printIf(entity.resultType, " : ", "")(tpe => link(tpe))}
- } catch {
- case e => System.err.println("generating for " + entity); throw e
- }
-
- def extendsFor(entity: Entity)(implicit frame: Frame): NodeSeq = {
- if (entity.parents.isEmpty) NodeSeq.Empty
- else <code> extends </code>++
- entity.parents.mkXML(Text(""), <code> with </code>, Text(""))(link);
- }
-
- def parse(str: String): NodeSeq = {
- new SpecialNode {
- def label = "#PCDATA"
- def buildString(sb: StringBuilder): StringBuilder = {
- sb.append(str.trim)
- sb
- }
- }
- }
-
- def longHeader(entity: Entity)(implicit frame: Frame): NodeSeq = Group({
- anchor(entity.sym) ++ <dl>
- <dt>
- {attrsFor(entity)}
- <code>{Text(entity.flagsString)}</code>
- <code>{Text(entity.kind)}</code>
- <em>{entity.sym.nameString}</em>{bodyFor(entity)}
- </dt>
- <dd>{extendsFor(entity)}</dd>
- </dl>;
- } ++ {
- val cmnt = entity.decodeComment
- if (cmnt.isEmpty) NodeSeq.Empty
- else longComment(entity, cmnt.get)
- } ++ (entity match {
- case entity: ClassOrObject => classBody(entity)
- case _ => NodeSeq.Empty
- }) ++ {
- val overridden = entity.overridden
- if (overridden.isEmpty)
- NodeSeq.Empty
- else {
- <dl>
- <dt style="margin:10px 0 0 20px;">
- <b>Overrides</b>
- </dt>
- <dd>
- { overridden.mkXML("",", ", "")(sym => link(decode(sym.owner)) ++ Text(".") ++ link(sym))
- }
- </dd>
- </dl>
- }
- } ++ <hr/>);
-
- def longComment(entity: Entity, cmnt: Comment)(implicit frame: Frame): NodeSeq = {
- val attrs = <dl>{
- var seq: NodeSeq = NodeSeq.Empty
- cmnt.decodeAttributes.foreach{
- case (tag, xs) =>
- seq = seq ++ <dt style="margin:10px 0 0 20px;">
- <b>{decodeTag(tag)}</b></dt> ++ {xs.flatMap{
- case (option,body) => <dd>{
- if (option == null) NodeSeq.Empty;
- else decodeOption(tag, option);
- }{ tag match {
- case "see" => resolveSee(entity.sym, body.trim)
- case _ => parse(body)
- }}</dd>
- }}
- };
- seq
- }</dl>;
- <xml:group>
- <dl><dd>{parse(cmnt.body)}</dd></dl>
- {attrs}
- </xml:group>
- }
-
- /**
- * Try to be smart about @see elements. If the body looks like a link, turn it into
- * a link. If it can be resolved in the symbol table, turn it into a link to the referenced
- * entity.
- */
- private def resolveSee(owner: Symbol, body: String)(implicit frame: Frame): NodeSeq = {
- /** find a class either in the root package, in the current class or in the current package. */
- def findClass(clsName: String): Symbol = {
- try { definitions.getClass(clsName) } catch {
- case f: FatalError =>
- try { definitions.getMember(owner, clsName.toTypeName) } catch {
- case f: FatalError =>
- definitions.getMember(owner.enclosingPackage, clsName.toTypeName)
- }
- }
- }
-
- if (body.startsWith("http://")
- || body.startsWith("https://")
- || body.startsWith("www")) {
- // a link
- body.split(" ") match {
- case Seq(href, txt, rest @ _*) =>
- <a href={href}>{txt}{rest}</a>
- case _ =>
- <a href={body}>{body}</a>
- }
- } else try {
- // treat it like a class or member reference
- body.split("#") match {
- case Seq(clazz, member) =>
- val clazzSym = if (clazz.length == 0) owner.enclClass else findClass(clazz)
- link(definitions.getMember(clazzSym, member), body)
- case Seq(clazz, _*) =>
- link(findClass(clazz), body)
- case _ =>
- parse(body)
- }
- } catch {
- case f: FatalError =>
- log("Error resolving @see: " + f.toString)
- parse(body)
- }
- }
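
The following compiler-free approximation illustrates the three @see cases handled above (external URL, Class#member reference, plain text). The classify helper and its labels are invented for illustration; the real method resolves names against the compiler's symbol table instead of returning strings.

object ResolveSeeSketch {
  def classify(body: String): String =
    if (body.startsWith("http://") || body.startsWith("https://") || body.startsWith("www"))
      "external link: " + body
    else body.split("#") match {
      case Array(clazz, member) =>
        "member " + member + " of " + (if (clazz.isEmpty) "the enclosing class" else clazz)
      case _ =>
        "plain text: " + body
    }
  def main(args: Array[String]) {
    println(classify("http://www.scala-lang.org"))     // external link
    println(classify("scala.collection.Seq#apply"))    // member of a named class
    println(classify("#toString"))                     // member of the enclosing class
    println(classify("an ordinary remark"))            // plain text
  }
}
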
-
- def classBody(entity: ClassOrObject)(implicit from: Frame): NodeSeq =
- <xml:group>
- {categories.mkXML("","\n","")(c => shortList(entity, c)) : NodeSeq}
- {categories.mkXML("","\n","")(c => longList(entity, c)) : NodeSeq}
- </xml:group>;
-
- def longList(entity: ClassOrObject, category: Category)(implicit from: Frame): NodeSeq = {
- val xs = entity.members(category)
- if (!xs.iterator.hasNext)
- NodeSeq.Empty
- else Group(
- <table cellpadding="3" class="member-detail" summary="">
- <tr><td class="title">{Text(category.label)} Details</td></tr>
- </table>
- <div>{xs.mkXML("","\n","")(m => longHeader(m))}</div>)
- }
-
- def shortList(entity: ClassOrObject, category: Category)(implicit from: Frame): NodeSeq = {
- val xs = entity.members(category)
- var seq: NodeSeq = NodeSeq.Empty
- if (xs.iterator.hasNext) {
- // alphabetic
- val set = new scala.collection.immutable.TreeSet[entity.Member]()(new Ordering[entity.Member] {
- def compare(mA : entity.Member, mB: entity.Member): Int =
- if (mA eq mB) 0
- else {
- val diff = mA.name compare mB.name
- if (diff != 0) diff
- else {
- val diff0 = mA.hashCode - mB.hashCode
- assert(diff0 != 0, mA.name)
- diff0
- }
- }
- })++xs
- seq = seq ++ <table cellpadding="3" class="member" summary="">
- <tr><td colspan="2" class="title">{Text(category.label + " Summary")}</td></tr>
- {set.mkXML("","\n","")(mmbr => shortHeader(mmbr))}
- </table>
- }
- // list inherited members...if any.
- for ((tpe,members) <- entity.inherited) {
- val members0 = members.filter(m => category.f(m.sym));
- if (!members0.isEmpty) seq = seq ++ <table cellpadding="3" class="inherited" summary="">
- <tr><td colspan="2" class="title">
- {Text(category.plural + " inherited from ") ++ link(tpe)}
- </td></tr>
- <tr><td colspan="2" class="signature">
- {members0.mkXML((""), (", "), (""))(m => {
- link(decode(m.sym)) ++
- (if (m.sym.hasFlag(symtab.Flags.ABSTRACT) || m.sym.hasFlag(symtab.Flags.DEFERRED)) {
- Text(" (abstract)");
- } else NodeSeq.Empty);
- })}
- </td></tr>
- </table>
- }
- seq;
- }
-
- protected def decodeOption(tag: String, string: String): NodeSeq =
- <code>{Text(string + " - ")}</code>;
-
- protected def decodeTag(tag: String): String = tag.capitalize
-
- def shortHeader(entity: Entity)(implicit from: Frame): NodeSeq =
- <tr>
- <td valign="top" class="modifiers">
- <code>{Text(entity.flagsString)} {Text(entity.kind)}</code>
- </td>
- <td class="signature">
- <em>{link(decode(entity.sym))}</em>
- {bodyFor(entity) ++ extendsFor(entity)}
- {
- entity.resultType match {
- case Some(PolyType(_, ConstantType(v))) => Text(" = " + v.escapedStringValue)
- case _ => NodeSeq.Empty
- }
- }
- {
- val cmnt = entity.decodeComment
- if (cmnt.isEmpty) NodeSeq.Empty
- else shortComment(cmnt.get)
- }
- </td>
- </tr>
-
- import java.util.regex.Pattern
- // pattern detecting first line of comment (see ticket #224)
- private val pat = Pattern.compile("[ \t]*(/\\*)[ \t]*")
-
- /** Ticket #224
- * Write the first sentence as a short summary of the method, as scaladoc
- * automatically places it in the method summary table (and index).
- * (see http://java.sun.com/j2se/javadoc/writingdoccomments/)
- */
- def shortComment(cmnt: Comment): NodeSeq = {
- val lines = cmnt.body split "<p>"
- val first =
- if (lines.length < 2)
- lines(0)
- else {
- val line0 = lines(0)
- val mat = pat matcher line0
- if (mat.matches()) line0 + lines(1)
- else line0
- }
- <div>{parse(first/*cmnt.body*/)}</div>
- }
-
- def attrsFor(entity: Entity)(implicit from: Frame): NodeSeq = {
- def attrFor(attr: AnnotationInfo): Node = {
- val buf = new StringBuilder
- val AnnotationInfo(tpe, args, nvPairs) = attr
- val name = link(decode(tpe.typeSymbol))
- if (!args.isEmpty)
- buf.append(args.mkString("(", ",", ")"))
- if (!nvPairs.isEmpty)
- for (((name, value), index) <- nvPairs.zipWithIndex) {
- if (index > 0)
- buf.append(", ")
- buf.append(name).append(" = ").append(value)
- }
- Group(name ++ Text(buf.toString))
- }
- def toGroup(x: AnnotationInfo): Node = Group(Text("@") ++ attrFor(x) ++ <br/>)
- if (entity.sym.hasFlag(symtab.Flags.CASE)) NodeSeq.Empty
- else NodeSeq fromSeq (entity.attributes map toGroup)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 1c878f1022..e567602fae 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package doc
@@ -10,21 +9,30 @@ package doc
import java.io.File
import java.lang.System
+/** An extended version of compiler settings, with additional Scaladoc-specific options.
+ * @param error A function that prints a string to the appropriate error stream. */
class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
- /** scaladoc specific options */
- val memberaccess = ChoiceSetting ("-access", "Show only public, protected/public (default) or all classes and members",
- List("public", "protected", "private"), "protected")
- val pagebottom = StringSetting ("-bottom", "pagebottom", "Include bottom text for each page", "")
- val doccharset = StringSetting ("-charset", "doccharset", "Charset for cross-platform viewing of generated documentation.", "")
- val doctitle = StringSetting ("-doctitle", "doctitle", "Include title for the overview page", "Scala 2<br/>API Specification")
- val pagefooter = StringSetting ("-footer", "pagefooter", "Include footer text for each page", "")
- val pageheader = StringSetting ("-header", "pageheader", "Include header text for each page", "")
- val linksource = BooleanSetting ("-linksource", "Generate source in HTML")
- val nocomment = BooleanSetting ("-nocomment", "Suppress description and tags, generate only declarations.")
- val stylesheetfile = StringSetting ("-stylesheetfile", "stylesheetfile", "File to change style of the generated documentation", "style.css")
- val pagetop = StringSetting ("-top", "pagetop", "Include top text for each page", "")
- val windowtitle = StringSetting ("-windowtitle", "windowtitle", "Specify window title of generated HTML documentation", "Scala 2")
+
+ /** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
+ * `html`. */
+ val docformat = ChoiceSetting ("-doc-format", "Selects in which format documentation is rendered", List("html"), "html")
+
+ /** A setting that defines the overall title of the documentation, typically the name of the library being
+ * documented. ''Note:'' This setting is currently not used. */
+ val doctitle = StringSetting ("-doc-title", "doc-title", "The overall name of the Scaladoc site", "")
+
+ /** A setting that defines the overall version number of the documentation, typically the version of the library being
+ * documented. ''Note:'' This setting is currently not used. */
+ val docversion = StringSetting ("-doc-version", "doc-version", "An optional version number, to be appended to the title", "")
+
+ /** A setting that defines a URL to be concatenated with source locations and shown as a link to source files.
+ * If needed, the sourcepath option can be used to exclude the undesired initial part of the link to sources. */
+ val docsourceurl = StringSetting ("-doc-source-url", "url", "A URL pattern used to build links to template sources; use variables, for example: €{TPL_NAME} ('Seq'), €{TPL_OWNER} ('scala.collection'), €{FILE_PATH} ('scala/collection/Seq')", "")
+
+ val useStupidTypes = BooleanSetting ("-Yuse-stupid-types", "Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!")
// working around issue described in r18708.
suppressVTWarn.value = true
+
+ // TODO: add a new setting for whether or not to document sourceless entities (e.g., Any, Unit, etc)
}
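
As a rough usage sketch, the new settings can be populated programmatically before running Scaladoc; the option fields below come from this diff, while the error handler, the title, the version, and the source URL pattern are placeholders.

object ScaladocSettingsSketch {
  def main(args: Array[String]) {
    val docSettings = new scala.tools.nsc.doc.Settings(msg => Console.err.println(msg))
    docSettings.doctitle.value     = "My Library"   // placeholder title
    docSettings.docversion.value   = "0.1"          // placeholder version
    // placeholder pattern using the €{...} variables documented above
    docSettings.docsourceurl.value = "http://example.com/src/€{FILE_PATH}.scala"
    println(docSettings.docformat.value)            // "html" -- currently the only supported format
  }
}
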
diff --git a/src/compiler/scala/tools/nsc/doc/SourcelessComments.scala b/src/compiler/scala/tools/nsc/doc/SourcelessComments.scala
new file mode 100644
index 0000000000..46e45a861b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/SourcelessComments.scala
@@ -0,0 +1,249 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2010 LAMP/EPFL */
+
+package scala.tools.nsc
+package doc
+
+import scala.collection._
+
+/**
+ * A class that provides comments for all symbols which pre-exist in Scala (Any, Nothing, ...).
+ * It also contains a HashSet of the given symbols.
+ * The comments are to be added to a HashMap called comments, which resides in the Global.scala file.
+ * @author Manohar Jonnalagedda, Stephane Micheloud, Sean McDirmid, Geoffrey Washburn
+ * @version 1.0 */
+abstract class SourcelessComments {
+
+ val global: Global
+
+ import global._
+ import definitions._
+
+ lazy val comments = {
+
+ val comment = mutable.HashMap.empty[Symbol, DocComment]
+
+ comment(NothingClass) = new DocComment("""
+ /** Class `Nothing` is - together with class [[scala.Null]] - at the bottom of Scala's type hierarchy.
+ *
+ * Type `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist ''no instances'' of
+ * this type. Even though type `Nothing` is empty, it is nevertheless useful as a type parameter. For instance,
+ * the Scala library defines a value [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists
+ * are covariant in Scala, this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any
+ * element of type `T`. */
+ """)
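
A brief illustration of the point made above, with a hypothetical fail helper: Nil is a List[Nothing] and, by covariance, also a List[String], and an expression of type Nothing conforms to any expected type.

object NothingExample {
  def fail(msg: String): Nothing = throw new RuntimeException(msg)   // hypothetical helper
  def main(args: Array[String]) {
    val names: List[String] = Nil                              // Nil: List[Nothing] conforms to List[String]
    val size = if (names.isEmpty) 0 else fail("unreachable")   // Nothing conforms to Int
    println(size)                                              // 0
  }
}
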
+
+ comment(NullClass) = new DocComment("""
+ /** Class `Null` is - together with class [[scala.Nothing]] - at the bottom of the Scala type hierarchy.
+ *
+ * Type `Null` is a subtype of all reference types; its only instance is the `null` reference. Since `Null` is
+ * not a subtype of value types, `null` is not a member of any such type. For instance, it is not possible to
+ * assign `null` to a variable of type [[scala.Int]]. */
+ """)
+
+ /*******************************************************************/
+ /* Documentation for Any */
+
+ comment(AnyClass) = new DocComment("""
+ /** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala execution environment inherits
+ * directly or indirectly from this class. Class `Any` has two direct subclasses: [[scala.AnyRef]] and
+ * [[scala.AnyVal]]. */
+ """)
+
+ comment(Any_equals) = new DocComment("""
+ /** This method is used to compare the receiver object (`this`) with the argument object (`arg0`) for equivalence.
+ *
+     * The default implementation of this method is an [http://en.wikipedia.org/wiki/Equivalence_relation equivalence
+ * relation]:
+ * * It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`.
+ * * It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and
+ * only if `y.equals(x)` returns `true`.
+     *  * It is transitive: for any instances `x`, `y`, and `z` of type `Any`, if `x.equals(y)` returns `true` and
+ * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`.
+ *
+ * If you override this method, you should verify that your implementation remains an equivalence relation.
+ * Additionally, when overriding this method it is often necessary to override `hashCode` to ensure that objects
+ * that are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]]
+ * (`o1.hashCode.equals(o2.hashCode)`).
+ *
+ * @param arg0 the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. */
+ """)
+
+ comment(Any_==) = new DocComment("""
+ /** `o == arg0` is the same as `o.equals(arg0)`.
+ *
+ * @param arg0 the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. */
+ """)
+
+ comment(Any_!=) = new DocComment("""
+ /** `o != arg0` is the same as `!(o == (arg0))`.
+ *
+ * @param arg0 the object to compare against this object for dis-equality.
+ * @return `false` if the receiver object is equivalent to the argument; `true` otherwise. */
+ """)
+
+ comment(Any_toString) = new DocComment("""
+ /** Returns a string representation of the object.
+ *
+ * The default representation is platform dependent.
+ *
+ * @return a string representation of the object. */
+ """)
+
+ comment(Any_asInstanceOf) = new DocComment("""
+ /** This method is used to cast the receiver object to be of type `T0`.
+ *
+ * Note that the success of a cast at runtime is modulo Scala's erasure semantics. Therefore the expression
+ * `1.asInstanceOf[String]` will throw a `ClassCastException` at runtime, while the expression
+ * `List(1).asInstanceOf[List[String]]` will not. In the latter example, because the type argument is erased as
+     * part of compilation, it is not possible to check whether the contents of the list are of the requested type.
+ *
+ * @throws ClassCastException if the receiver object is not an instance of erasure of type `T0`.
+ * @return the receiver object. */
+ """)
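
A small example of the erasure caveat described above: the cast of the list does not fail at runtime, whereas the cast of the Int value does.

object AsInstanceOfExample {
  def main(args: Array[String]) {
    val xs = List(1).asInstanceOf[List[String]]   // no exception: the type argument is erased
    println(xs.length)                            // 1
    try { 1.asInstanceOf[String] }                // fails immediately
    catch { case e: ClassCastException => println("cast failed: " + e.getMessage) }
  }
}
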
+
+ comment(Any_isInstanceOf) = new DocComment("""
+ /** This method is used to test whether the dynamic type of the receiver object is `T0`.
+ *
+     * Note that the result of the test is modulo Scala's erasure semantics. Therefore the expression
+ * `1.isInstanceOf[String]` will return `false`, while the expression `List(1).isInstanceOf[List[String]]` will
+ * return `true`. In the latter example, because the type argument is erased as part of compilation it is not
+     * possible to check whether the contents of the list are of the requested type.
+ *
+ * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. */
+ """)
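
A small example of the erased type test described above; the second test reports true even though the list contains an Int (the compiler emits an unchecked warning).

object IsInstanceOfExample {
  def main(args: Array[String]) {
    println(1.isInstanceOf[String])              // false: an Int is not a String
    println(List(1).isInstanceOf[List[String]])  // true: the element type has been erased
  }
}
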
+
+ comment(Any_hashCode) = new DocComment("""
+ /** Returns a hash code value for the object.
+ *
+ * The default hashing algorithm is platform dependent.
+ *
+ * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet
+ * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`.
+     * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`), they have
+ * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure
+ * to verify that the behavior is consistent with the `equals` method.
+ *
+ * @return the hash code value for the object. */
+ """)
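
A sketch of the contract described above, using a hypothetical Point class: since the two instances are equal, they must also produce identical hash codes.

class Point(val x: Int, val y: Int) {
  override def equals(other: Any): Boolean = other match {
    case that: Point => this.x == that.x && this.y == that.y
    case _           => false
  }
  override def hashCode: Int = 41 * (41 + x) + y   // consistent with equals
}
object HashCodeExample {
  def main(args: Array[String]) {
    val p1 = new Point(1, 2)
    val p2 = new Point(1, 2)
    println(p1 == p2)                    // true
    println(p1.hashCode == p2.hashCode)  // true, as the contract requires
  }
}

A case class would generate an equivalent equals/hashCode pair automatically.
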
+
+ /*******************************************************************/
+ /* Documentation for AnyRef */
+
+ comment(AnyRefClass) = new DocComment("""
+ /** Class `AnyRef` is the root class of all ''reference types''. */
+ """)
+
+ comment(Object_==) = new DocComment("""
+ /** `o == arg0` is the same as `if (o eq null) arg0 eq null else o.equals(arg0)`.
+ *
+ * @param arg0 the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. */
+ """)
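
A short example of the null-safe behaviour described above: because `==` first checks the receiver for null, no NullPointerException is thrown.

object NullSafeEqualsExample {
  def main(args: Array[String]) {
    val s: String = null
    println(s == "abc")  // false -- the receiver is null, so equals is never called
    println(s == null)   // true
  }
}
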
+
+ comment(Object_ne) = new DocComment("""
+ /** `o.ne(arg0)` is the same as `!(o.eq(arg0))`.
+ *
+ * @param arg0 the object to compare against this object for reference dis-equality.
+ * @return `false` if the argument is not a reference to the receiver object; `true` otherwise. */
+ """)
+
+
+ comment(Object_finalize) = new DocComment("""
+ /** This method is called by the garbage collector on the receiver object when garbage collection determines that
+ * there are no more references to the object.
+ *
+     * The details of when and if the `finalize` method is invoked, as well as the interaction between `finalize`
+ * and non-local returns and exceptions, are all platform dependent. */
+ """)
+
+ comment(Object_clone) = new DocComment("""
+ /** This method creates and returns a copy of the receiver object.
+ *
+ * The default implementation of the `clone` method is platform dependent.
+ *
+ * @return a copy of the receiver object. */
+ """)
+
+ comment(Object_getClass) = new DocComment("""
+ /** Returns a representation that corresponds to the dynamic class of the receiver object.
+ *
+ * The nature of the representation is platform dependent.
+ *
+ * @return a representation that corresponds to the dynamic class of the receiver object. */
+ """)
+
+ comment(Object_notify) = new DocComment("""
+ /** Wakes up a single thread that is waiting on the receiver object's monitor. */
+ """)
+
+ comment(Object_notifyAll) = new DocComment("""
+ /** Wakes up all threads that are waiting on the receiver object's monitor. */
+ """)
+
+ comment(Object_eq) = new DocComment("""
+ /** This method is used to test whether the argument (`arg0`) is a reference to the
+ * receiver object (`this`).
+ *
+ * The `eq` method implements an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on
+ * non-null instances of `AnyRef`:
+ * * It is reflexive: for any non-null instance `x` of type `AnyRef`, `x.eq(x)` returns `true`.
+ * * It is symmetric: for any non-null instances `x` and `y` of type `AnyRef`, `x.eq(y)` returns `true` if and
+ * only if `y.eq(x)` returns `true`.
+ * * It is transitive: for any non-null instances `x`, `y`, and `z` of type `AnyRef` if `x.eq(y)` returns `true`
+ * and `y.eq(z)` returns `true`, then `x.eq(z)` returns `true`.
+ *
+ * Additionally, the `eq` method has three other properties.
+ * * It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of
+ * `x.eq(y)` consistently return `true` or consistently return `false`.
+ * * For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` return `false`.
+ * * `null.eq(null)` returns `true`.
+ *
+ * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is
+ * consistent with reference equality. Therefore, if two objects are references to each other (`o1 eq o2`), they
+ * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`).
+ *
+ * @param arg0 the object to compare against this object for reference equality.
+ * @return `true` if the argument is a reference to the receiver object; `false` otherwise. */
+ """)
+
+ /*******************************************************************/
+
+ comment(AnyValClass) = new DocComment("""
+ /** Class `AnyVal` is the root class of all ''value types''.
+ *
+ * `AnyVal` has a fixed number of subclasses, which describe values which are not implemented as objects in the
+ * underlying host system.
+ *
+ * Classes [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], [[scala.Short]],
+ * and [[scala.Byte]] are together called ''numeric value types''. Classes [[scala.Byte]], [[scala.Short]], and
+ * [[scala.Char]] are called ''subrange types''. Subrange types, as well as [[scala.Int]] and [[scala.Long]] are
+ * called ''integer types'', whereas [[scala.Float]] and [[scala.Double]] are called ''floating point types''. */
+ """)
+
+ comment(BooleanClass) = new DocComment("""
+ /** Class `Boolean` has only two values: `true` and `false`. */
+ """)
+
+ comment(UnitClass) = new DocComment("""
+ /** Class `Unit` has only one value: `()`. */
+ """)
+
+ List(ByteClass, CharClass, DoubleClass, LongClass, FloatClass, IntClass, ShortClass) foreach { sym =>
+ val maxValue = "MAX_" + sym.name.toString().toUpperCase()
+ val minValue = "MIN_" + sym.name.toString().toUpperCase()
+ comment(sym) = new DocComment("""
+ /** Class `""" + sym.name + """` belongs to the value classes whose instances are not represented as objects by
+ * the underlying host system. There is an implicit conversion from instances of `""" + sym.name + """` to
+ * instances of [[scala.runtime.Rich""" + sym.name + """]] which provides useful non-primitive operations.
+ * All value classes inherit from class [[scala.AnyVal]].
+ *
+ * Values `""" + maxValue + """` and `""" + minValue + """` are defined in object [[scala.Math]]. */
+ """)
+ }
+
+ comment
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/compiler/scala/tools/nsc/doc/Universe.scala
new file mode 100644
index 0000000000..71b4a4a4b0
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/Universe.scala
@@ -0,0 +1,11 @@
+package scala.tools.nsc.doc
+
+/**
+ * Class to hold common dependencies across Scaladoc classes.
+ * @author Pedro Furlanetto
+ * @author Gilles Dubochet
+ */
+trait Universe {
+ def settings: Settings
+ def rootPackage: model.Package
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
new file mode 100644
index 0000000000..3c8286809e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -0,0 +1,79 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+
+import model._
+
+import java.io.{ File => JFile }
+import io.{ Streamable, Directory }
+import scala.collection._
+
+/** A class that can generate Scaladoc sites to some fixed root folder.
+ * @author David Bernard
+ * @author Gilles Dubochet */
+class HtmlFactory(val universe: Universe) {
+
+ /** The character encoding to be used for generated Scaladoc sites. This value is currently always UTF-8. */
+ def encoding: String = "UTF-8"
+
+ def siteRoot: JFile = new JFile(universe.settings.outdir.value)
+
+ /** Generates the Scaladoc site for a model into the site root. A scaladoc site is a set of HTML and related files
+ * that document a model extracted from a compiler run.
+ * The model is obtained from this factory's `universe`. */
+ def generate : Unit = {
+
+ def copyResource(subPath: String) {
+ val bytes = new Streamable.Bytes {
+ val inputStream = getClass.getResourceAsStream("/scala/tools/nsc/doc/html/resource/" + subPath)
+ assert(inputStream != null)
+ }.toByteArray
+ val dest = Directory(siteRoot) / subPath
+ dest.parent.createDirectory()
+ val out = dest.toFile.bufferedOutput()
+ try out.write(bytes, 0, bytes.length)
+ finally out.close()
+ }
+
+ copyResource("lib/jquery.js")
+ copyResource("lib/jquery-ui.js")
+ copyResource("lib/jquery.layout.js")
+ copyResource("lib/tools.tooltip.js")
+ copyResource("lib/scheduler.js")
+ copyResource("lib/index.css")
+ copyResource("lib/index.js")
+ copyResource("lib/template.css")
+ copyResource("lib/template.js")
+ copyResource("lib/class.png")
+ copyResource("lib/class_big.png")
+ copyResource("lib/object.png")
+ copyResource("lib/object_big.png")
+ copyResource("lib/trait.png")
+ copyResource("lib/trait_big.png")
+ copyResource("lib/package.png")
+ copyResource("lib/package_big.png")
+ copyResource("lib/filter_box_left.png")
+ copyResource("lib/filter_box_right.png")
+ copyResource("lib/remove.png")
+
+ new page.Index(universe) writeFor this
+
+ val written = mutable.HashSet.empty[DocTemplateEntity]
+
+ def writeTemplate(tpl: DocTemplateEntity): Unit =
+ if (!(written contains tpl)) {
+ new page.Template(tpl) writeFor this
+ written += tpl
+ tpl.templates map (writeTemplate(_))
+ }
+
+ writeTemplate(universe.rootPackage)
+
+ }
+
+}
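For orientation, a minimal sketch of how this factory is driven, assuming a `universe: Universe` has already been produced by a Scaladoc model run (that wiring is outside this file):

```scala
// Emitting the whole site is a single call: it copies the static resources
// into the output directory and then writes the index and template pages.
val factory = new scala.tools.nsc.doc.html.HtmlFactory(universe)
factory.generate
```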
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
new file mode 100644
index 0000000000..47d7503534
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -0,0 +1,239 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+
+import model._
+import comment._
+
+import xml.{XML, NodeSeq}
+import xml.dtd.{DocType, PublicID}
+import scala.collection._
+import scala.reflect.NameTransformer
+import java.nio.channels.Channels
+import java.io.{FileOutputStream, File}
+
+/** An html page that is part of a Scaladoc site.
+ * @author David Bernard
+ * @author Gilles Dubochet */
+abstract class HtmlPage { thisPage =>
+
+ /** The path of this page, relative to the API site. `path.tail` is a list of folder names leading to this page (from
+ * closest package to one-above-root package), `path.head` is the file name of this page. Note that `path` has a
+ * length of at least one. */
+ def path: List[String]
+
+ /** The title of this page. */
+ protected def title: String
+
+ /** Additional header elements (links, scripts, meta tags, etc.) required for this page. */
+ protected def headers: NodeSeq
+
+ /** The body of this page. */
+ protected def body: NodeSeq
+
+ /** Writes this page as a file. The file's location is relative to the generator's site root, and the encoding is
+ * also defined by the generator.
+ * @param site The generator that is writing this page. */
+ def writeFor(site: HtmlFactory): Unit = {
+ val doctype =
+ DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil)
+ val html =
+ <html>
+ <head>
+ <title>{ title }</title>
+ <meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
+ { headers }
+ </head>
+ { body }
+ </html>
+ val pageFile = new File(site.siteRoot, absoluteLinkTo(thisPage.path))
+ val pageFolder = pageFile.getParentFile
+ if (!pageFolder.exists) pageFolder.mkdirs()
+ val fos = new FileOutputStream(pageFile.getPath)
+ val w = Channels.newWriter(fos.getChannel, site.encoding)
+ try {
+ w.write("<?xml version='1.0' encoding='" + site.encoding + "'?>\n")
+ w.write( doctype.toString + "\n")
+ w.write(xml.Xhtml.toXhtml(html))
+ }
+ finally {
+ w.close()
+ fos.close()
+ }
+ //XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
+ }
+
+ def templateToPath(tpl: TemplateEntity): List[String] = {
+ def doName(tpl: TemplateEntity): String =
+ NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
+ def downPacks(pack: Package): List[String] =
+ if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
+ def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
+ tpl.inTemplate match {
+ case inPkg: Package => (nme + ".html", inPkg)
+ case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl)
+ }
+ }
+ val (file, pack) =
+ tpl match {
+ case p: Package => ("package.html", p)
+ case _ => downInner(doName(tpl), tpl)
+ }
+ file :: downPacks(pack)
+ }
+
+ /** A relative link from this page to some destination class entity.
+ * @param destClass The class or object entity that the link will point to. */
+ def relativeLinkTo(destClass: TemplateEntity): String =
+ relativeLinkTo(templateToPath(destClass))
+
+ /** A relative link from this page to some destination page in the Scaladoc site.
+ * @param destPage The page that the link will point to. */
+ def relativeLinkTo(destPage: HtmlPage): String = {
+ relativeLinkTo(destPage.path)
+ }
+
+ /** A relative link from this page to some destination path.
+ * @param destPath The path that the link will point to. */
+ def relativeLinkTo(destPath: List[String]): String = {
+ def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
+ case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point
+ relativize(fs, ts)
+ case (fss, tss) =>
+ List.fill(fss.length - 1)("..") ::: tss
+ }
+ relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
+ }
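A worked example of the two helpers above may help; the entities are hypothetical, but the path arithmetic follows the code as written (`path` stores the file name first, then the packages from innermost to outermost):

```scala
// templateToPath for the object scala.collection.immutable.Nil yields:
//   List("Nil$.html", "immutable", "collection", "scala")
//
// relativeLinkTo reverses both paths, drops the common package prefix and
// climbs with ".." for every folder left on the source side:
//   from List("List.html", "immutable", "collection", "scala")
//   to   List("Option.html", "scala")
//   relativize(List("scala", "collection", "immutable", "List.html"),
//              List("scala", "Option.html"))
//   == List("..", "..", "Option.html")   // rendered as "../../Option.html"
```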
+
+ def absoluteLinkTo(destPath: List[String]): String = {
+ destPath.reverse.mkString("/")
+ }
+
+ /** Transforms an optional comment into a styled HTML tree representing its body if it is defined, or into an empty
+ * node sequence if it is not. */
+ def commentToHtml(comment: Option[Comment]): NodeSeq =
+ (comment map (commentToHtml(_))) getOrElse NodeSeq.Empty
+
+ /** Transforms a comment into a styled HTML tree representing its body. */
+ def commentToHtml(comment: Comment): NodeSeq =
+ bodyToHtml(comment.body)
+
+ def bodyToHtml(body: Body): NodeSeq =
+ body.blocks flatMap (blockToHtml(_))
+
+ def blockToHtml(block: Block): NodeSeq = block match {
+ case Title(in, 1) => <h3>{ inlineToHtml(in) }</h3>
+ case Title(in, 2) => <h4>{ inlineToHtml(in) }</h4>
+ case Title(in, 3) => <h5>{ inlineToHtml(in) }</h5>
+ case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
+ case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
+ case Code(data) => <pre>{ xml.Text(data) }</pre>
+ case UnorderedList(items) =>
+ <ul>{ listItemsToHtml(items) }</ul>
+ case OrderedList(items, listStyle) =>
+ <ol class={ listStyle }>{ listItemsToHtml(items) }</ol>
+ case DefinitionList(items) =>
+ <dl>{items map { case (t, d) => <dt>{ inlineToHtml(t) }</dt><dd>{ blockToHtml(d) }</dd> } }</dl>
+ case HorizontalRule() =>
+ <hr/>
+ }
+
+ def listItemsToHtml(items: Seq[Block]) =
+ items.foldLeft(xml.NodeSeq.Empty){ (xmlList, item) =>
+ item match {
+ case OrderedList(_, _) | UnorderedList(_) => // html requires sub ULs to be put into the last LI
+ xmlList.init ++ <li>{ xmlList.last.child ++ blockToHtml(item) }</li>
+ case Paragraph(inline) =>
+ xmlList :+ <li>{ inlineToHtml(inline) }</li> // LIs are blocks, no need to use Ps
+ case block =>
+ xmlList :+ <li>{ blockToHtml(block) }</li>
+ }
+ }
+
+ def inlineToHtml(inl: Inline): NodeSeq = inl match {
+ case Chain(items) => items flatMap (inlineToHtml(_))
+ case Italic(in) => <i>{ inlineToHtml(in) }</i>
+ case Bold(in) => <b>{ inlineToHtml(in) }</b>
+ case Underline(in) => <u>{ inlineToHtml(in) }</u>
+ case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
+ case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
+ case Link(raw, title) => <a href={ raw }>{ inlineToHtml(title) }</a>
+ case EntityLink(entity) => templateToHtml(entity)
+ case Monospace(text) => <code>{ xml.Text(text) }</code>
+ case Text(text) => xml.Text(text)
+ case Summary(in) => inlineToHtml(in)
+ case HtmlTag(tag) => xml.Unparsed(tag)
+ }
+
+ def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = {
+ val string = tpe.name
+ def toLinksOut(inPos: Int, starts: List[Int]): NodeSeq = {
+ if (starts.isEmpty && (inPos == string.length))
+ NodeSeq.Empty
+ else if (starts.isEmpty)
+ xml.Text(string.slice(inPos, string.length))
+ else if (inPos == starts.head)
+ toLinksIn(inPos, starts)
+ else {
+ xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
+ }
+ }
+ def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = {
+ val (tpl, width) = tpe.refEntity(inPos)
+ (tpl match {
+ case dtpl:DocTemplateEntity if hasLinks =>
+ <a href={ relativeLinkTo(dtpl) } class="extype" name={ dtpl.qualifiedName }>{
+ string.slice(inPos, inPos + width)
+ }</a>
+ case tpl =>
+ <span class="extype" name={ tpl.qualifiedName }>{ string.slice(inPos, inPos + width) }</span>
+ }) ++ toLinksOut(inPos + width, starts.tail)
+ }
+ if (hasLinks)
+ toLinksOut(0, tpe.refEntity.keySet.toList)
+ else
+ xml.Text(string)
+ }
+
+ def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
+ case Nil => NodeSeq.Empty
+ case tpe :: Nil => typeToHtml(tpe, hasLinks)
+ case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep)
+ }
+
+ /** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
+ def templateToHtml(tpl: TemplateEntity) = tpl match {
+ case dTpl: DocTemplateEntity =>
+ <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ dTpl.name }</a>
+ case ndTpl: NoDocTemplate =>
+ xml.Text(ndTpl.name)
+ }
+
+ /** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
+ def templatesToHtml(tplss: List[TemplateEntity], sep: NodeSeq): NodeSeq = tplss match {
+ case Nil => NodeSeq.Empty
+ case tpl :: Nil => templateToHtml(tpl)
+ case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep)
+ }
+
+ def docEntityKindToString(ety: DocTemplateEntity) =
+ if (ety.isTrait) "trait"
+ else if (ety.isCaseClass) "case class"
+ else if (ety.isClass) "class"
+ else if (ety.isObject) "object"
+ else if (ety.isPackage) "package"
+ else "class" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+
+ /** Returns the "_big" image name corresponding to the DocTemplateEntity (upper-left icon). */
+ def docEntityKindToBigImage(ety: DocTemplateEntity) =
+ if (ety.isTrait) "trait_big.png"
+ else if (ety.isClass) "class_big.png"
+ else if (ety.isObject) "object_big.png"
+ else if (ety.isPackage) "package_big.png"
+ else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
new file mode 100644
index 0000000000..23ba7f0aaf
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -0,0 +1,127 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+package page
+
+import model._
+
+import scala.collection._
+import scala.xml._
+
+class Index(universe: Universe) extends HtmlPage {
+
+ def path = List("index.html")
+
+ def title = {
+ val s = universe.settings
+ ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
+ }
+
+ val headers =
+ <xml:group>
+ <link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.layout.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
+ </xml:group>
+
+ val body =
+ <body>
+ <div id="library">
+ <img class='class icon' width="13" height="13" src={ relativeLinkTo{List("class.png", "lib")} }/>
+ <img class='trait icon' width="13" height="13" src={ relativeLinkTo{List("trait.png", "lib")} }/>
+ <img class='object icon' width="13" height="13" src={ relativeLinkTo{List("object.png", "lib")} }/>
+ <img class='package icon' width="13" height="13" src={ relativeLinkTo{List("package.png", "lib")} }/>
+ </div>
+ { browser }
+ <div id="content" class="ui-layout-center">
+ <iframe name="template" src={ relativeLinkTo{List("package.html")} }/>
+ </div>
+ </body>
+
+ def browser =
+ <div id="browser" class="ui-layout-west">
+ <div id="filter"></div>
+ <div class="pack" id="tpl">{
+ def isExcluded(dtpl: DocTemplateEntity) = {
+ val qname = dtpl.qualifiedName
+ ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
+ qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
+ ) && !(
+ qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
+ qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
+ qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
+ qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
+ qname == "scala.runtime.AbstractFunction2"
+ )
+ )
+ }
+ def packageElem(pack: model.Package): NodeSeq = {
+ <xml:group>
+ { if (!pack.isRootPackage)
+ <h3><a class="tplshow" href={ relativeLinkTo(pack) }>{ pack.qualifiedName }</a></h3>
+ else NodeSeq.Empty
+ }
+ <ol class="templates">{
+ val tpls: Map[String, Seq[DocTemplateEntity]] =
+ (pack.templates filter (t => !t.isPackage && !isExcluded(t) )) groupBy (_.name)
+
+ val placeholderSeq: NodeSeq = <div class="placeholder"></div>
+
+ def createLink(entity: DocTemplateEntity, includePlaceholder: Boolean, includeText: Boolean) = {
+ val entityType = docEntityKindToString(entity)
+ val linkContent = (
+ { if (includePlaceholder) placeholderSeq else NodeSeq.Empty }
+ ++
+ { if (includeText) <span class="tplLink">{ Text(packageQualifiedName(entity)) }</span> else NodeSeq.Empty }
+ )
+ <a class="tplshow" href={ relativeLinkTo(entity) }><span class={ entityType }>({ Text(entityType) })</span>{ linkContent }</a>
+ }
+
+ for (tn <- tpls.keySet.toSeq sortBy (_.toLowerCase)) yield {
+ val entities = tpls(tn)
+ val row = (entities find (e => e.isPackage || e.isObject), entities find (e => e.isTrait || e.isClass))
+
+ val itemContents = row match {
+ case (Some(obj), None) => createLink(obj, includePlaceholder = true, includeText = true)
+
+ case (maybeObj, Some(template)) =>
+ val firstLink = maybeObj match {
+ case Some(obj) => createLink(obj, includePlaceholder = false, includeText = false)
+ case None => placeholderSeq
+ }
+
+ firstLink ++ createLink(template, includePlaceholder = false, includeText = true)
+
+ case _ => // FIXME: this default case should not be necessary. For some reason AnyRef is not a package, object, trait, or class
+ val entry = entities.head
+ placeholderSeq ++ createLink(entry, includePlaceholder = false, includeText = true)
+ }
+
+ <li title={ entities.head.qualifiedName }>{
+ itemContents
+ }</li>
+ }
+ }</ol>
+ <ol class="packages"> {
+ for (sp <- pack.packages sortBy (_.name.toLowerCase)) yield
+ <li class="pack" title={ sp.qualifiedName }>{ packageElem(sp) }</li>
+ }</ol>
+ </xml:group>
+ }
+ packageElem(universe.rootPackage)
+ }</div>
+ </div>
+
+ def packageQualifiedName(ety: DocTemplateEntity): String =
+ if (ety.inTemplate.isPackage) ety.name else (packageQualifiedName(ety.inTemplate) + "." + ety.name)
+
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
new file mode 100644
index 0000000000..22568e0a88
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
@@ -0,0 +1,129 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+package page
+
+import model._
+import comment._
+import xml.{NodeSeq, Unparsed}
+import java.io.File
+
+class Source(sourceFile: File) extends HtmlPage {
+
+ val path = List("source.html")
+
+ val title = "Scaladoc: page source"
+
+ val headers =
+ NodeSeq.Empty
+
+ val body =
+ <body>
+ <h1>Page source is not implemented yet</h1>
+ </body>
+
+ /*
+
+
+ def readTextFromSrcDir(subPath: String) :Option[String] = {
+ readTextFromFile(new File(sourceDir, subPath))
+ }
+
+ def readTextFromFile(f : File) :Option[String] = {
+ if (f.exists) {
+ Some(Source.fromFile(f)(Codec.default).getLines().mkString(""))
+ } else {
+ None
+ }
+ }
+
+
+ def writeTextToFile(f : File, txt : String, header: Option[String], footer: Option[String]) {
+ val out = new FileOutputStream(f)
+ try {
+ val enc = "UTF-8"
+ header.foreach(s => out.write(s.getBytes(enc)))
+ out.write(txt.getBytes(enc))
+ footer.foreach(s => out.write(s.getBytes(enc)))
+ } finally {
+ try {
+ out.close()
+ } catch {
+ case _ => //ignore
+ }
+ }
+ }
+
+ trait SourceHtmlizer {
+ def scalaToHtml(src :File) : Option[File]
+ }
+
+ lazy val sourceHtmlizer : SourceHtmlizer = {
+ if (cfg.htmlizeSource) {
+ new SourceHtmlizer {
+
+ val inDir: File = cfg.sourcedir
+ val outDir: File = cfg.outputdir
+
+ private def relativize(uri: URI, from: URI) = linkHelper.relativize(uri, from).getOrElse("__notFound__" + uri.getPath)
+
+ def header(dest: URI) = Some("""
+ <html>
+ <head>
+ <link href='""" + relativize(new URI("site:/_highlighter/SyntaxHighlighter.css"), dest) + """' rel='stylesheet' type='text/css'/>
+ <script language='javascript' src='""" + relativize(new URI("site:/_highlighter/shAll.js"), dest) + """'></script>
+ </head>
+ <body>
+ <pre name="code" class="scala" style="width:100%">
+ """)
+
+ def footer(dest: URI) = Some("""</pre>
+ <script language='javascript'>
+ dp.SyntaxHighlighter.ClipboardSwf = '""" + relativize(new URI("site:/_highlighter/clipboard.swf"), dest) + """';
+ dp.SyntaxHighlighter.HighlightAll('code');
+ </script>
+ </body>
+ </html>
+ """)
+
+ //TODO: escape the source code
+ def scalaToHtml(src :File) = {
+ val dest = new File(outDir, fileHelper.relativePathUnderDir(src, inDir) + ".html")
+ if (!dest.exists || dest.lastModified < src.lastModified) {
+
+ //we need to verify whether the directory we are trying to write to has already been created or not
+ if(!dest.getParentFile.exists) dest.getParentFile.mkdirs
+
+ val uri = linkHelper.uriFor(dest).get
+ var txt = fileHelper.readTextFromFile(src).getOrElse("")
+ txt = txt.replace("<", "&lt;")
+ fileHelper.writeTextToFile(dest, txt, header(uri), footer(uri))
+ }
+ Some(dest)
+ }
+
+ def copyResources() {
+ val loader = this.getClass().getClassLoader()
+ val buf = new Array[Byte](1024)
+ def copyResource(name: String) = fileHelper.copyResource("/scala/tools/nsc/doc/html/resource/", name, outDir, loader, buf)
+ copyResource("_highlighter/clipboard.swf")
+ copyResource("_highlighter/shAll.js")
+ copyResource("_highlighter/SyntaxHighlighter.css")
+ }
+
+ copyResources()
+ }
+ } else {
+ new SourceHtmlizer {
+ def scalaToHtml(src :File) = None
+ }
+ }
+ }
+ */
+
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
new file mode 100644
index 0000000000..3001a1dc5a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -0,0 +1,540 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author David Bernard, Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package html
+package page
+
+import model._
+
+import scala.xml.{NodeSeq, Text}
+import scala.collection.mutable.HashSet
+
+class Template(tpl: DocTemplateEntity) extends HtmlPage {
+
+ val path =
+ templateToPath(tpl)
+
+ val title =
+ tpl.qualifiedName
+
+ val headers =
+ <xml:group>
+ <link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
+ </xml:group>
+
+ val valueMembers =
+ (tpl.methods ::: tpl.values ::: (tpl.templates filter { tpl => tpl.isObject || tpl.isPackage })) sortBy (_.name)
+
+ val typeMembers =
+ (tpl.abstractTypes ::: tpl.aliasTypes ::: (tpl.templates filter { tpl => tpl.isTrait || tpl.isClass })) sortBy (_.name)
+
+ val constructors = (tpl match {
+ case cls: Class => cls.constructors
+ case _ => Nil
+ }) sortBy (_.name)
+
+ /* For the body, there is a special case for AnyRef; otherwise AnyRef appears like a package/object.
+ * This problem should be fixed; this implementation is just a patch.
+ */
+ val body =
+ <body class={ if (tpl.isTrait || tpl.isClass || tpl.qualifiedName == "scala.AnyRef") "type" else "value" } onload="windowTitle();">
+
+ { if (tpl.isRootPackage || tpl.inTemplate.isRootPackage)
+ NodeSeq.Empty
+ else
+ <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, xml.Text(".")) }</p>
+ }
+
+ <div id="definition">
+ <img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/>
+ <h1>{ if (tpl.isRootPackage) "root package" else tpl.name }</h1>
+ </div>
+
+ { signature(tpl, true) }
+ { memberToCommentHtml(tpl, true) }
+
+ <div id="template">
+
+ <div id="mbrsel">
+ <div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>
+ { if (tpl.linearization.isEmpty) NodeSeq.Empty else
+ <div id="order">
+ <span class="filtertype">Ordering</span>
+ <ol><li class="alpha in">Alphabetic</li><li class="inherit out">By inheritance</li></ol>
+ </div>
+ }
+ { if (tpl.linearization.isEmpty) NodeSeq.Empty else
+ <div id="ancestors">
+ <span class="filtertype">Inherited</span>
+ <ol><li class="hideall">Hide All</li><li class="showall">Show all</li></ol>
+ <ol id="linearization">{ (tpl :: tpl.linearizationTemplates) map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
+ </div>
+ }
+ {
+ <div id="visbl">
+ <span class="filtertype">Visibility</span>
+ <ol><li class="public in">Public</li><li class="all out">All</li></ol>
+ </div>
+ }
+ {
+ <div id="impl">
+ <span class="filtertype">Impl.</span>
+ <ol><li class="concrete in">Concrete</li><li class="abstract in">Abstract</li></ol>
+ </div>
+ }
+ </div>
+
+ { if (constructors.isEmpty) NodeSeq.Empty else
+ <div id="constructors" class="members">
+ <h3>Instance constructors</h3>
+ <ol>{ constructors map (memberToHtml(_)) }</ol>
+ </div>
+ }
+
+ { if (typeMembers.isEmpty) NodeSeq.Empty else
+ <div id="types" class="types members">
+ <h3>Type Members</h3>
+ <ol>{ typeMembers map (memberToHtml(_)) }</ol>
+ </div>
+ }
+
+ { if (valueMembers.isEmpty) NodeSeq.Empty else
+ <div id="values" class="values members">
+ <h3>Value Members</h3>
+ <ol>{ valueMembers map (memberToHtml(_)) }</ol>
+ </div>
+ }
+
+ {
+ NodeSeq fromSeq (for ((superTpl, superType) <- tpl.linearization) yield
+ <div class="parent" name={ superTpl.qualifiedName }>
+ <h3>Inherited from {
+ if (tpl.universe.settings.useStupidTypes.value)
+ superTpl match {
+ case dtpl: DocTemplateEntity =>
+ val sig = signature(dtpl, false, true) \ "_"
+ sig
+ case tpl: TemplateEntity =>
+ tpl.name
+ }
+ else
+ typeToHtml(superType, true)
+ }</h3>
+ </div>
+ )
+ }
+
+ </div>
+
+ <div id="tooltip" ></div>
+
+ </body>
+
+ def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
+ def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
+ case None => ""
+ case Some(tpe) => pre ++ tpe.toString
+ }
+ bound0(hi, "<:") ++ bound0(lo, ">:")
+ }
+
+ def tparamsToString(tpss: List[TypeParam]): String =
+ if (tpss.isEmpty) "" else {
+ def tparam0(tp: TypeParam): String =
+ tp.variance + tp.name + boundsToString(tp.hi, tp.lo)
+ def tparams0(tpss: List[TypeParam]): String = (tpss: @unchecked) match {
+ case tp :: Nil => tparam0(tp)
+ case tp :: tps => tparam0(tp) ++ ", " ++ tparams0(tps)
+ }
+ "[" + tparams0(tpss) + "]"
+ }
+
+ def defParamsToString(d: MemberEntity with Def):String = {
+ val namess = for( ps <- d.valueParams ) yield
+ for( p <- ps ) yield p.resultType.name
+ tparamsToString(d.typeParams) + namess.foldLeft("") { (s,names) => s + (names mkString("(",",",")")) }
+ }
+
+ def memberToHtml(mbr: MemberEntity): NodeSeq = {
+ val defParamsString = mbr match {
+ case d:MemberEntity with Def => defParamsToString(d)
+ case _ => ""
+ }
+ <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
+ data-isabs={ mbr.isAbstract.toString }>
+ <a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
+ { signature(mbr, false) }
+ { memberToCommentHtml(mbr, false) }
+ </li>
+ }
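As a hedged worked example of the anchor ids built above (the member shown is hypothetical): for a method `def indexOf[B](elem: B, from: Int): Int`,

```scala
// tparamsToString(typeParams)  == "[B]"
// parameter-name fold          == "(B,Int)"
// anchor id in memberToHtml    == "indexOf[B](B,Int):Int"
//   (mbr.name + defParamsString + ":" + mbr.resultType.name)
```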
+
+ def memberToCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
+ mbr match {
+ case dte: DocTemplateEntity if isSelf =>
+ // comment of class itself
+ <div id="comment" class="fullcomment">{ memberToCommentBodyHtml(mbr, isSelf = true) }</div>
+ case dte: DocTemplateEntity if mbr.comment.isDefined =>
+ // comment of inner, documented class (only short comment, full comment is on the class' own page)
+ memberToInlineCommentHtml(mbr, isSelf)
+ case _ =>
+ // comment of non-class member or non-documentented inner class
+ val commentBody = memberToCommentBodyHtml(mbr, isSelf = false)
+ if (commentBody.isEmpty)
+ NodeSeq.Empty
+ else {
+ <xml:group>
+ { memberToShortCommentHtml(mbr, isSelf) }
+ <div class="fullcomment">{ memberToUseCaseCommentHtml(mbr, isSelf) }{ memberToCommentBodyHtml(mbr, isSelf) }</div>
+ </xml:group>
+ }
+ }
+
+ def memberToUseCaseCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
+ mbr match {
+ case nte: NonTemplateMemberEntity if nte.isUseCase =>
+ inlineToHtml(comment.Text("[use case] "))
+ case _ => NodeSeq.Empty
+ }
+
+ def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
+ if (mbr.comment.isEmpty)
+ NodeSeq.Empty
+ else
+ <p class="shortcomment cmt">{ memberToUseCaseCommentHtml(mbr, isSelf) }{ inlineToHtml(mbr.comment.get.short) }</p>
+
+ def memberToInlineCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
+ <p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
+
+ def memberToCommentBodyHtml(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq =
+ NodeSeq.Empty ++
+ { if (mbr.comment.isEmpty) NodeSeq.Empty else
+ <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
+ } ++
+ { val prs: List[ParameterEntity] = mbr match {
+ case cls: Class => cls.typeParams ::: cls.valueParams.flatten
+ case trt: Trait => trt.typeParams
+ case dfe: Def => dfe.typeParams ::: dfe.valueParams.flatten
+ case ctr: Constructor => ctr.valueParams.flatten
+ case _ => Nil
+ }
+ def mbrCmt = mbr.comment.get
+ def paramCommentToHtml(prs: List[ParameterEntity]): NodeSeq = prs match {
+ case Nil =>
+ NodeSeq.Empty
+ case (tp: TypeParam) :: rest =>
+ val paramEntry: NodeSeq = {
+ <dt class="tparam">{ tp.name }</dt><dd class="cmt">{ bodyToHtml(mbrCmt.typeParams(tp.name)) }</dd>
+ }
+ paramEntry ++ paramCommentToHtml(rest)
+ case (vp: ValueParam) :: rest =>
+ val paramEntry: NodeSeq = {
+ <dt class="param">{ vp.name }</dt><dd class="cmt">{ bodyToHtml(mbrCmt.valueParams(vp.name)) }</dd>
+ }
+ paramEntry ++ paramCommentToHtml(rest)
+ }
+ if (mbr.comment.isEmpty) NodeSeq.Empty
+ else {
+ val cmtedPrs = prs filter {
+ case tp: TypeParam => mbrCmt.typeParams isDefinedAt tp.name
+ case vp: ValueParam => mbrCmt.valueParams isDefinedAt vp.name
+ }
+ if (cmtedPrs.isEmpty && mbrCmt.result.isEmpty) NodeSeq.Empty
+ else
+ <dl class="paramcmts block">{
+ paramCommentToHtml(cmtedPrs) ++ (
+ mbrCmt.result match {
+ case None => NodeSeq.Empty
+ case Some(cmt) =>
+ <dt>returns</dt><dd class="cmt">{ bodyToHtml(cmt) }</dd>
+ })
+ }</dl>
+ }
+ } ++
+ { val fvs: List[comment.Paragraph] = visibility(mbr).toList ::: mbr.flags
+ if (fvs.isEmpty || isReduced) NodeSeq.Empty else
+ <div class="block">
+ attributes: { fvs map { fv => { inlineToHtml(fv.text) ++ xml.Text(" ") } } }
+ </div>
+ } ++
+ { tpl.companion match {
+ case Some(companion) if (isSelf && !isReduced) =>
+ <div class="block">
+ go to: <a href={relativeLinkTo(companion)}>companion</a>
+ </div>
+ case _ =>
+ NodeSeq.Empty
+ }
+ } ++
+ { val inDefTpls = mbr.inDefinitionTemplates
+ if ((inDefTpls.tail.isEmpty && (inDefTpls.head == mbr.inTemplate)) || isReduced) NodeSeq.Empty else {
+ <div class="block">
+ definition classes: { templatesToHtml(inDefTpls, xml.Text(" → ")) }
+ </div>
+ }
+ } ++
+ { mbr match {
+ case dtpl: DocTemplateEntity if (isSelf && !dtpl.linearization.isEmpty && !isReduced) =>
+ <div class="block">
+ linear super types: { typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", ")) }
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ } ++
+ { mbr match {
+ case dtpl: DocTemplateEntity if (isSelf && !dtpl.subClasses.isEmpty && !isReduced) =>
+ <div class="block">
+ known subclasses: { templatesToHtml(dtpl.subClasses, xml.Text(", ")) }
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ } ++
+ { mbr match {
+ case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
+ <div class="block">
+ self type: { typeToHtml(dtpl.selfType.get, hasLinks = true) }
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ } ++
+ { mbr match {
+ case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
+ val (absFile, line) = dtpl.inSource.get
+ <div class="block">
+ source: { <a href={ dtpl.sourceUrl.get.toString }>{ Text(absFile.file.getName) }</a> }
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ } ++
+ { if (mbr.deprecation.isEmpty || isReduced) NodeSeq.Empty else
+ <div class="block"><ol>deprecated:
+ { <li>{ bodyToHtml(mbr.deprecation.get) }</li> }
+ </ol></div>
+ } ++
+ { mbr.comment match {
+ case Some(comment) =>
+ <xml:group>
+ { if(!comment.version.isEmpty && !isReduced)
+ <div class="block"><ol>version
+ { for(body <- comment.version.toList) yield <li>{bodyToHtml(body)}</li> }
+ </ol></div>
+ else NodeSeq.Empty
+ }
+ { if(!comment.since.isEmpty && !isReduced)
+ <div class="block"><ol>since
+ { for(body <- comment.since.toList) yield <li>{bodyToHtml(body)}</li> }
+ </ol></div>
+ else NodeSeq.Empty
+ }
+ { if(!comment.see.isEmpty && !isReduced)
+ <div class="block"><ol>see also:
+ { val seeXml:List[scala.xml.NodeSeq]=(for(see <- comment.see ) yield <li>{bodyToHtml(see)}</li> )
+ seeXml.reduceLeft(_ ++ Text(", ") ++ _)
+ }
+ </ol></div>
+ else NodeSeq.Empty
+ }
+ </xml:group>
+ case None => NodeSeq.Empty
+ }
+ }
+
+ def kindToString(mbr: MemberEntity): String = mbr match {
+ case tpl: DocTemplateEntity => docEntityKindToString(tpl)
+ case ctor: Constructor => "new"
+ case tme: MemberEntity =>
+ ( if (tme.isImplicit) "implicit " else "" ) +
+ ( if (tme.isDef) "def"
+ else if (tme.isVal) "val"
+ else if (tme.isLazyVal) "lazy val"
+ else if (tme.isVar) "var"
+ else "type")
+ }
+
+ def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
+ def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match {
+ case None => NodeSeq.Empty
+ case Some(tpe) => xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
+ }
+ bound0(lo, " >: ") ++ bound0(hi, " <: ")
+ }
+
+ def visibility(mbr: MemberEntity): Option[comment.Paragraph] = {
+ import comment._
+ import comment.{ Text => CText }
+ mbr.visibility match {
+ case PrivateInInstance() =>
+ Some(Paragraph(CText("private[this]")))
+ case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
+ Some(Paragraph(CText("private")))
+ case PrivateInTemplate(owner) =>
+ Some(Paragraph(Chain(List(CText("private["), EntityLink(owner), CText("]")))))
+ case ProtectedInInstance() =>
+ Some(Paragraph(CText("protected[this]")))
+ case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
+ Some(Paragraph(CText("protected")))
+ case ProtectedInTemplate(owner) =>
+ Some(Paragraph(Chain(List(CText("protected["), EntityLink(owner), CText("]")))))
+ case Public() =>
+ None
+ }
+ }
+
+ /** name, tparams, params, result */
+ def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+ def inside(hasLinks: Boolean): NodeSeq =
+ <xml:group>
+ <span class="kind">{ kindToString(mbr) }</span>
+ <span class="symbol">
+ <span class={"name" + (if (mbr.deprecation.isDefined) " deprecated" else "") }>{ if (mbr.isConstructor) tpl.name else mbr.name }</span>
+ {
+ def tparamsToHtml(mbr: Entity): NodeSeq = mbr match {
+ case hk: HigherKinded =>
+ val tpss = hk.typeParams
+ if (tpss.isEmpty) NodeSeq.Empty else {
+ def tparam0(tp: TypeParam): NodeSeq =
+ <span name={ tp.name }>{ tp.variance + tp.name }{ tparamsToHtml(tp) }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
+ def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
+ case tp :: Nil => tparam0(tp)
+ case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
+ }
+ <span class="tparams">[{ tparams0(tpss) }]</span>
+ }
+ case _ => NodeSeq.Empty
+ }
+ tparamsToHtml(mbr)
+ }
+ { if (isReduced) NodeSeq.Empty else {
+ def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
+ def param0(vl: ValueParam): NodeSeq =
+ // notice the }{ in the next lines; they are necessary to avoid undesired whitespace in the output
+ <span name={ vl.name }>{ Text(vl.name + ": ") }{ typeToHtml(vl.resultType, hasLinks) }{
+ if(!vl.defaultValue.isEmpty) {
+ defaultValueToHtml(vl.defaultValue.get);
+ }
+ else NodeSeq.Empty
+ }</span>
+ def params0(vlss: List[ValueParam]): NodeSeq = vlss match {
+ case Nil => NodeSeq.Empty
+ case vl :: Nil => param0(vl)
+ case vl :: vls => param0(vl) ++ Text(", ") ++ params0(vls)
+ }
+ def implicitCheck(vlss: List[ValueParam]): NodeSeq = vlss match {
+ case vl :: vls => if(vl.isImplicit) { <span class="implicit">implicit </span> } else Text("")
+ case _ => Text("")
+ }
+ vlsss map { vlss => <span class="params">({implicitCheck(vlss) ++ params0(vlss) })</span> }
+ }
+ mbr match {
+ case cls: Class => paramsToHtml(cls.valueParams)
+ case ctr: Constructor => paramsToHtml(ctr.valueParams)
+ case dfe: Def => paramsToHtml(dfe.valueParams)
+ case _ => NodeSeq.Empty
+ }
+ }}
+ { if (isReduced) NodeSeq.Empty else {
+ mbr match {
+ case tpl: DocTemplateEntity if (!tpl.isPackage) =>
+ tpl.parentType match {
+ case Some(st) => <span class="result"> extends { typeToHtml(st, hasLinks) }</span>
+ case None =>NodeSeq.Empty
+ }
+ case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) =>
+ <span class="result">: { typeToHtml(tme.resultType, hasLinks) }</span>
+ case abt: AbstractType =>
+ val b2s = boundsToHtml(abt.hi, abt.lo, hasLinks)
+ if (b2s != NodeSeq.Empty)
+ <span class="result">{ b2s }</span>
+ else NodeSeq.Empty
+ case alt: AliasType =>
+ <span class="result"> = { typeToHtml(alt.alias, hasLinks) }</span>
+ case _ => NodeSeq.Empty
+ }
+ }}
+ </span>
+ </xml:group>
+ mbr match {
+ case dte: DocTemplateEntity if !isSelf =>
+ <h4 class="signature"><a href={ relativeLinkTo(dte) }>{ inside(hasLinks = false) }</a></h4>
+ case _ if isSelf =>
+ <h4 id="signature" class="signature">{ inside(hasLinks = true) }</h4>
+ case _ =>
+ <h4 class="signature">{ inside(hasLinks = true) }</h4>
+ }
+ }
+
+ /** Renders the default value of a parameter as HTML, turning references to documented entities into links. */
+ def defaultValueToHtml(defVal:TreeEntity):NodeSeq = {
+ var index = 0
+ val str = defVal.expression
+ val length = str.length
+ var myXml: NodeSeq = NodeSeq.Empty
+ for( x <- defVal.refs) {
+ val from = x._1
+ val to = x._2._2
+ if (index < from) {
+ myXml ++= stringToXml(str.substring(index,from))
+ index = from
+ }
+
+ if (index == from) {
+ val member:Entity = x._2._1
+ member match {
+ case mbr: DocTemplateEntity =>
+ val link = relativeLinkTo(mbr)
+ myXml ++= <span class="name"><a href={link}>{str.substring(from, to)}</a></span>
+ case mbr: MemberEntity =>
+ val anchor = "#" + mbr.name + defParamsString(mbr) + ":" + mbr.resultType.name
+ val link = relativeLinkTo(mbr.inTemplate)
+ myXml ++= <span class="name"><a href={link ++ anchor}>{str.substring(from, to)}</a></span>
+ case _ => assert(false, "unexpected case in defaultValueToHtml")
+ }
+ index = to
+ }
+ }
+ // function used in the MemberEntity case above
+ def defParamsString(mbr: Entity):String = mbr match {
+ case d:MemberEntity with Def => defParamsToString(d)
+ case _ => ""
+ }
+
+ if (index <= length-1) myXml ++= stringToXml(str.substring(index, length ))
+
+ Text(" =") ++
+ {
+ if(length< 7) <span class="symbol">{ myXml }</span>
+ else <span class="defval" name={ myXml }>{ " ..." }</span>
+ }
+ }
+
+ /** Makes the text look good in the HTML page: newlines and basic indentation.
+ * You must change this function if you want to improve the pretty-printing of default values.
+ */
+ def stringToXml(text: String): NodeSeq = {
+ var goodLookingXml: NodeSeq = NodeSeq.Empty
+ var indent = 0
+ for(c<-text) c match {
+ case '{' => indent+=1
+ goodLookingXml ++= Text("{")
+ case '}' => indent-=1
+ goodLookingXml ++= Text("}")
+ case '\n' =>
+ goodLookingXml++= <br/> ++ indentation
+ case _ => goodLookingXml ++= Text(c.toString)
+ }
+ def indentation:NodeSeq = {
+ var indentXml = NodeSeq.Empty
+ for (x<- 1 to indent) indentXml ++= Text("&nbsp;&nbsp;")
+ indentXml
+ }
+ goodLookingXml
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
new file mode 100644
index 0000000000..25a8b4c185
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
new file mode 100644
index 0000000000..51e85957fe
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
new file mode 100644
index 0000000000..4127dbf3c2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
new file mode 100644
index 0000000000..4d740f3b17
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
new file mode 100644
index 0000000000..942736e44d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
new file mode 100644
index 0000000000..63a1ae8349
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
new file mode 100644
index 0000000000..aef3f341aa
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -0,0 +1,204 @@
+* {
+ color: inherit;
+ font-size: 10pt;
+ text-decoration: none;
+ font-family: sans-serif;
+ border-width: 0px;
+ padding: 0px;
+ margin: 0px;
+}
+
+a {
+ cursor: pointer;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+h1 {
+ display: none;
+}
+
+#library {
+ display: none;
+}
+
+#browser {
+ top: 0px;
+ left: 0px;
+ bottom: 0px;
+ width: 100%;
+ display: block;
+ position: fixed;
+}
+
+#filter {
+ position: absolute;
+ display: block;
+ padding: 5px;
+ right: 0;
+ left: 0;
+ top: 0;
+ background-color: #B78E99;
+}
+
+#textfilter {
+ position: relative;
+ display: block;
+ height: 20px;
+ margin-bottom: 5px;
+}
+
+#textfilter > .pre {
+ display: block;
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_left.png");
+}
+
+#textfilter > .input {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 20px;
+ left: 20px;
+}
+
+#textfilter > .input > input {
+ height: 16px;
+ padding: 2px;
+ font-weight: bold;
+ color: #993300;
+ background-color: white;
+ width: 100%;
+}
+
+#textfilter > .post {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_right.png");
+}
+
+#focusfilter {
+ position: relative;
+ display: block;
+ padding: 5px;
+ background-color: pink;
+}
+
+#focusfilter .focuscoll {
+ font-weight: bold;
+}
+
+#focusfilter img {
+ bottom: -2px;
+ position: relative;
+}
+
+#kindfilter {
+ position: relative;
+ display: block;
+ padding: 5px;
+ background-color: #F0F8FF;
+}
+
+#tpl {
+ display: block;
+ position: absolute;
+ overflow: auto;
+ right: 0;
+ left: 0;
+ bottom: 0;
+ top: 5px;
+}
+
+#tpl .packhide {
+ display: block;
+ float: right;
+ font-weight: normal;
+ color: white;
+ padding: 1px 4px 1px 4px;
+}
+
+#tpl .packfocus {
+ display: block;
+ float: right;
+ font-weight: normal;
+ color: white;
+ padding: 1px 0.5em 1px 4px;
+}
+
+#tpl .packages > li > h3 {
+ display: block;
+ background-color: #2C475C;
+ color: white;
+ padding: 1px 4px 1px 0.5em;
+ font-weight: bold;
+ display: block;
+}
+
+#tpl ol > li {
+ display: block;
+}
+
+#tpl .templates > li {
+ padding-left: 0.5em;
+}
+
+#tpl ol > li .icon {
+ padding-right: 5px;
+ bottom: -2px;
+ position: relative;
+}
+
+#tpl .templates div.placeholder {
+ padding-right: 5px;
+ width: 13px;
+ display: inline-block;
+}
+
+#tpl .templates span.tplLink {
+ padding-left: 8px;
+}
+
+#content {
+ border-left-width: 1px;
+ border-left-color: black;
+ border-left-style: solid;
+ right: 0px;
+ left: 0px;
+ bottom: 0px;
+ top: 0px;
+ position: fixed;
+ margin-left: 300px;
+ display: block;
+}
+
+#content > iframe {
+ display: block;
+ height: 100%;
+ width: 100%;
+}
+
+.ui-layout-pane {
+ background: #FFF;
+ overflow: auto;
+}
+
+.ui-layout-resizer {
+ background: #DDD;
+}
+
+.ui-layout-toggler {
+ background: #AAA;
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
new file mode 100644
index 0000000000..21c09bf5e1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -0,0 +1,290 @@
+// © 2009–2010 EPFL/LAMP
+// code by Gilles Dubochet with contributions by Johannes Rudolph and "spiros"
+
+var topLevelTemplates = undefined;
+var topLevelPackages = undefined;
+
+var scheduler = undefined;
+var domCache = undefined;
+
+var kindFilterState = undefined;
+var focusFilterState = undefined;
+
+var title = $(document).attr('title')
+
+$(document).ready(function() {
+ $('body').layout({ west__size: '20%' });
+ $('iframe').bind("load", function(){
+ var subtitle = $(this).contents().find('title').text();
+ $(document).attr('title', (title ? title + " - " : "") + subtitle);
+ });
+
+ // workaround for IE's iframe sizing lack of smartness
+ if($.browser.msie) {
+ function fixIFrame() {
+ $('iframe').height($(window).height() )
+ }
+ $('iframe').bind("load",fixIFrame)
+ $('iframe').bind("resize",fixIFrame)
+ }
+
+ scheduler = new Scheduler();
+ scheduler.addLabel("init", 1);
+ scheduler.addLabel("focus", 2);
+ scheduler.addLabel("kind", 3);
+ scheduler.addLabel("filter", 4);
+
+ scheduler.addForAll = function(labelName, elems, fn) {
+ var idx = 0;
+ var elem = undefined;
+ while (idx < elems.length) {
+ elem = elems[idx];
+ scheduler.add(labelName, function(elem0) { fn(elem0); }, undefined, [elem]);
+ idx = idx + 1;
+ }
+ }
+
+ domCache = new DomCache();
+ domCache.update();
+
+ prepareEntityList();
+
+ configureTextFilter();
+ configureKindFilter();
+ configureEntityList();
+
+});
+
+function configureEntityList() {
+ kindFilterSync();
+ configureHideFilter();
+ configureFocusFilter();
+ textFilter();
+}
+
+/* The DomCache class holds a series of pointers to interesting parts of the page's DOM tree. Generally, any DOM
+ accessor should be reduced to the context of a relevant entity from the cache. This is crucial to maintaining
+ decent performance of the page. */
+function DomCache() {
+ var cache = this;
+ this.packs = undefined;
+ this.liPacks = undefined;
+ this.update = function() {
+ cache.packs = $(".pack");
+ cache.liPacks = cache.packs.filter("li");
+ }
+}
+
+/* Updates the list of entities (i.e. the content of the #tpl element) from the raw form generated by Scaladoc to a
+ form suitable for display. In particular, it adds class and object etc. icons, and it configures links to open in
+ the right frame. Furthermore, it sets the two reference top-level entities lists (topLevelTemplates and
+ topLevelPackages) to serve as reference for resetting the list when needed.
+ Be advised: this function should only be called once, on page load. */
+function prepareEntityList() {
+ var classIcon = $("#library > img.class");
+ var traitIcon = $("#library > img.trait");
+ var objectIcon = $("#library > img.object");
+ var packageIcon = $("#library > img.package");
+ scheduler.addForAll("init", domCache.packs, function(pack) {
+ var packTemplates = $("> ol.templates > li", pack);
+ $("> h3 > a.tplshow", pack).add("> a.tplshow", packTemplates).attr("target", "template");
+ $("span.class", packTemplates).each(function() { $(this).replaceWith(classIcon.clone()); });
+ $("span.trait", packTemplates).each(function() { $(this).replaceWith(traitIcon.clone()); });
+ $("span.object", packTemplates).each(function() { $(this).replaceWith(objectIcon.clone()); });
+ $("span.package", packTemplates).each(function() { $(this).replaceWith(packageIcon.clone()); });
+ });
+ scheduler.add("init", function() {
+ topLevelTemplates = $("#tpl > ol.templates").clone();
+ topLevelPackages = $("#tpl > ol.packages").clone();
+ });
+}
+
+/* Configures the text filter */
+function configureTextFilter() {
+ scheduler.add("init", function() {
+ $("#filter").append("<div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>");
+ var input = $("#textfilter input");
+ resizeFilterBlock();
+ input.bind("keyup", function(event) {
+ if (event.keyCode == 27) { // escape
+ input.attr("value", "");
+ }
+ textFilter();
+ });
+ input.focus(function(event) { input.select(); });
+ });
+ scheduler.add("init", function() {
+ $("#textfilter > .post").click(function(){
+ $("#textfilter input").attr("value", "");
+ textFilter();
+ });
+ });
+}
+
+// Filters all focused templates and packages. This function should be made less-blocking.
+// The query string is read from the #textfilter input field.
+function textFilter() {
+ scheduler.clear("filter");
+ scheduler.add("filter", function() {
+ var query = $("#textfilter input").attr("value")
+ var queryRegExp;
+ if (query.toLowerCase() != query) {
+ // Regexp that matches CamelCase subbits: "BiSe" is
+ // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ...
+ queryRegExp = new RegExp(query.replace(/([A-Z])/g,"[a-z]*$1"));
+ }
+ else { // if query is all lower case make a normal case insensitive search
+ queryRegExp = new RegExp(query, "i");
+ }
+ scheduler.addForAll("filter", domCache.packs, function(pack0) {
+ var pack = $(pack0);
+ $("> ol.templates > li", pack).each(function(){
+ var item = $(this).attr("title");
+ if (item == "" || queryRegExp.test(item)) {
+ $(this).show();
+ $(this).removeClass("hide");
+ }
+ else {
+ $(this).addClass("hide");
+ $(this).hide();
+ }
+ });
+ if ($("> ol > li:not(.hide)", pack).length > 0) {
+ pack.show();
+ pack.removeClass("hide");
+ }
+ else {
+ pack.addClass("hide");
+ pack.hide();
+ }
+ if ($("> ol.templates > li:not(.hide)", pack).length > 0) {
+ $("> h3", pack).show();
+ $("> .packhide", pack).show();
+ $("> .packfocus", pack).show();
+ }
+ else {
+ $("> h3", pack).hide();
+ $("> .packhide", pack).hide();
+ $("> .packfocus", pack).hide();
+ }
+ });
+ });
+}
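The camel-case rewrite in `textFilter` above is compact; this small Scala sketch (illustrative only, mirroring the JavaScript `replace`) shows the pattern it builds and what that pattern matches:

```scala
object CamelCaseFilterDemo extends App {
  // Mirrors: query.replace(/([A-Z])/g, "[a-z]*$1")
  def camelCasePattern(query: String): String =
    query.replaceAll("([A-Z])", "[a-z]*$1")

  val pattern = camelCasePattern("BiSe")               // "[a-z]*Bi[a-z]*Se"
  println(pattern)
  println(pattern.r.findFirstIn("BitSet").isDefined)   // true
  println(pattern.r.findFirstIn("ABitSet").isDefined)  // true
}
```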
+
+/* Configures the hide tool by adding the hide link to all packages. */
+function configureHideFilter() {
+ scheduler.addForAll("init", domCache.liPacks, function(pack) {
+ $(pack).prepend("<a class='packhide'>hide</a>");
+ $("> a.packhide", pack).click(function(event) {
+ var packhide = $(this)
+ var action = packhide.text();
+ if (action == "hide") {
+ $("~ ol", packhide).hide();
+ packhide.text("show");
+ }
+ else {
+ $("~ ol", packhide).show();
+ packhide.text("hide");
+ }
+ return false;
+ });
+ });
+}
+
+/* Configures the focus tool by adding the focus bar in the filter box (initially hidden), and by adding the focus
+ link to all packages. */
+function configureFocusFilter() {
+ scheduler.add("init", function() {
+ focusFilterState = null;
+ if ($("#focusfilter").length == 0) {
+ $("#filter").append("<div id='focusfilter'>focused on <span class='focuscoll'></span> <a class='focusremove'><img class='icon' src='lib/remove.png'/></a></div>");
+ $("#focusfilter > .focusremove").click(function(event) {
+ scheduler.clear("filter");
+ scheduler.add("focus", function() {
+ $("#tpl > ol.templates").replaceWith(topLevelTemplates.clone());
+ $("#tpl > ol.packages").replaceWith(topLevelPackages.clone());
+ domCache.update();
+ $("#focusfilter").hide();
+ $("#kindfilter").show();
+ resizeFilterBlock();
+ focusFilterState = null;
+ configureEntityList();
+ });
+ });
+ $("#focusfilter").hide();
+ resizeFilterBlock();
+ }
+ });
+ scheduler.addForAll("init", domCache.liPacks, function(pack) {
+ $(pack).prepend("<a class='packfocus'>focus</a>");
+ $("> a.packfocus", pack).click(function(event) {
+ focusFilter($(this).parent());
+ return false;
+ });
+ });
+}
+
+/* Focuses the entity index on a specific package. To do so, it copies the sub-templates and sub-packages of the
+   focused package into the top-level templates and packages position of the index. The original top-level lists
+   are kept in topLevelTemplates and topLevelPackages so that the index can be restored later.
+   @param package The <li> element that corresponds to the package in the entity index */
+function focusFilter(package) {
+ scheduler.add("focus", function() {
+ scheduler.clear("filter");
+ var currentFocus = package.attr("title");
+ $("#focusfilter > .focuscoll").empty();
+ $("#focusfilter > .focuscoll").append(currentFocus);
+ var packTemplates = $("> ol.templates", package);
+ var packPackages = $("> ol.packages", package);
+ $("#tpl > ol.templates").replaceWith(packTemplates);
+ $("#tpl > ol.packages").replaceWith(packPackages);
+ domCache.update();
+ $("#focusfilter").show();
+ $("#kindfilter").hide();
+ resizeFilterBlock();
+ focusFilterState = package;
+ kindFilterSync();
+ });
+}
+
+function configureKindFilter() {
+ scheduler.add("init", function() {
+ kindFilterState = "all";
+ $("#filter").append("<div id='kindfilter'><a>display packages only</a></div>");
+ $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
+ resizeFilterBlock();
+ });
+}
+
+function kindFilter(kind) {
+ if (kind == "packs") {
+ kindFilterState = "packs";
+ kindFilterSync();
+ $("#kindfilter > a").replaceWith("<a>display all entities</a>");
+ $("#kindfilter > a").click(function(event) { kindFilter("all"); });
+ }
+ else {
+ kindFilterState = "all";
+ kindFilterSync();
+ $("#kindfilter > a").replaceWith("<a>display packages only</a>");
+ $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
+ }
+}
+
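+/* The three filters combine through shared state: the text query (applied by textFilter above), focusFilterState
+   (the focused package <li>, or null when nothing is focused) and kindFilterState ("all" or "packs"). A focused
+   view always shows templates, so kindFilterSync only hides them when no focus is active, and it re-queues
+   textFilter so that the text query is applied again to whatever remains visible. */
+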
+/* Applies the kind filter. */
+function kindFilterSync() {
+ scheduler.add("kind", function () {
+ if (kindFilterState == "all" || focusFilterState != null)
+ scheduler.addForAll("kind", domCache.packs, function(pack0) {
+ $("> ol.templates", pack0).show();
+ });
+ else
+ scheduler.addForAll("kind", domCache.packs, function(pack0) {
+ $("> ol.templates", pack0).hide();
+ });
+ textFilter();
+ });
+}
+
+function resizeFilterBlock() {
+ $("#tpl").css("top", $("#filter").outerHeight(true));
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
new file mode 100644
index 0000000000..c4078dde52
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
@@ -0,0 +1,401 @@
+/*!
+ * jQuery UI 1.9m2
+ *
+ * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
+ * Dual licensed under the MIT (MIT-LICENSE.txt)
+ * and GPL (GPL-LICENSE.txt) licenses.
+ *
+ * http://docs.jquery.com/UI
+ */
+(function(c){c.ui=c.ui||{};if(!c.ui.version){c.extend(c.ui,{version:"1.9m2",plugin:{add:function(a,b,d){a=c.ui[a].prototype;for(var e in d){a.plugins[e]=a.plugins[e]||[];a.plugins[e].push([b,d[e]])}},call:function(a,b,d){if((b=a.plugins[b])&&a.element[0].parentNode)for(var e=0;e<b.length;e++)a.options[b[e][0]]&&b[e][1].apply(a.element,d)}},contains:function(a,b){return document.compareDocumentPosition?a.compareDocumentPosition(b)&16:a!==b&&a.contains(b)},hasScroll:function(a,b){if(c(a).css("overflow")==
+"hidden")return false;b=b&&b=="left"?"scrollLeft":"scrollTop";var d=false;if(a[b]>0)return true;a[b]=1;d=a[b]>0;a[b]=0;return d},isOverAxis:function(a,b,d){return a>b&&a<b+d},isOver:function(a,b,d,e,f,g){return c.ui.isOverAxis(a,d,f)&&c.ui.isOverAxis(b,e,g)},keyCode:{ALT:18,BACKSPACE:8,CAPS_LOCK:20,COMMA:188,COMMAND:91,COMMAND_LEFT:91,COMMAND_RIGHT:93,CONTROL:17,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,INSERT:45,LEFT:37,MENU:93,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,
+NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SHIFT:16,SPACE:32,TAB:9,UP:38,WINDOWS:91}});c.fn.extend({_focus:c.fn.focus,focus:function(a,b){return typeof a==="number"?this.each(function(){var d=this;setTimeout(function(){c(d).focus();b&&b.call(d)},a)}):this._focus.apply(this,arguments)},enableSelection:function(){return this.attr("unselectable","off").css("MozUserSelect","")},disableSelection:function(){return this.attr("unselectable","on").css("MozUserSelect",
+"none")},scrollParent:function(){var a;a=c.browser.msie&&/(static|relative)/.test(this.css("position"))||/absolute/.test(this.css("position"))?this.parents().filter(function(){return/(relative|absolute|fixed)/.test(c.curCSS(this,"position",1))&&/(auto|scroll)/.test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(this,"overflow-x",1))}).eq(0):this.parents().filter(function(){return/(auto|scroll)/.test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(this,"overflow-x",
+1))}).eq(0);return/fixed/.test(this.css("position"))||!a.length?c(document):a},zIndex:function(a){if(a!==undefined)return this.css("zIndex",a);if(this.length){a=c(this[0]);for(var b;a.length&&a[0]!==document;){b=a.css("position");if(b=="absolute"||b=="relative"||b=="fixed"){b=parseInt(a.css("zIndex"));if(!isNaN(b)&&b!=0)return b}a=a.parent()}}return 0}});c.extend(c.expr[":"],{data:function(a,b,d){return!!c.data(a,d[3])},focusable:function(a){var b=a.nodeName.toLowerCase(),d=c.attr(a,"tabindex");return(/input|select|textarea|button|object/.test(b)?
+!a.disabled:"a"==b||"area"==b?a.href||!isNaN(d):!isNaN(d))&&!c(a)["area"==b?"parents":"closest"](":hidden").length},tabbable:function(a){var b=c.attr(a,"tabindex");return(isNaN(b)||b>=0)&&c(a).is(":focusable")}})}})(jQuery);
+(function(b){var j=b.fn.remove;b.fn.remove=function(a,c){return this.each(function(){if(!c)if(!a||b.filter(a,[this]).length)b("*",this).add(this).each(function(){b(this).triggerHandler("remove")});return j.call(b(this),a,c)})};b.widget=function(a,c,d){var e=a.split(".")[0],g;a=a.split(".")[1];g=e+"-"+a;if(!d){d=c;c=b.Widget}b.expr[":"][g]=function(f){return!!b.data(f,a)};b[e]=b[e]||{};b[e][a]=function(f,i){arguments.length&&this._createWidget(f,i)};var h=new c;h.options=b.extend({},h.options);b[e][a].prototype=
+b.extend(true,h,{namespace:e,widgetName:a,widgetEventPrefix:b[e][a].prototype.widgetEventPrefix||a,widgetBaseClass:g,base:c.prototype},d);b.widget.bridge(a,b[e][a])};b.widget.bridge=function(a,c){b.fn[a]=function(d){var e=typeof d==="string",g=Array.prototype.slice.call(arguments,1),h=this;d=!e&&g.length?b.extend.apply(null,[true,d].concat(g)):d;if(e&&d.substring(0,1)==="_")return h;e?this.each(function(){var f=b.data(this,a),i=f&&b.isFunction(f[d])?f[d].apply(f,g):f;if(i!==f&&i!==undefined){h=i;
+return false}}):this.each(function(){var f=b.data(this,a);if(f){d&&f.option(d);f._init()}else b.data(this,a,new c(d,this))});return h}};b.Widget=function(a,c){arguments.length&&this._createWidget(a,c)};b.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",options:{disabled:false},_createWidget:function(a,c){this.element=b(c).data(this.widgetName,this);this.options=b.extend(true,{},this.options,b.metadata&&b.metadata.get(c)[this.widgetName],a);var d=this;this.element.bind("remove."+this.widgetName,
+function(){d.destroy()});this._create();this._init()},_create:function(){},_init:function(){},_super:function(a){return this.base[a].apply(this,Array.prototype.slice.call(arguments,1))},_superApply:function(a,c){return this.base[a].apply(this,c)},destroy:function(){this.element.unbind("."+this.widgetName).removeData(this.widgetName);this.widget().unbind("."+this.widgetName).removeAttr("aria-disabled").removeClass(this.widgetBaseClass+"-disabled ui-state-disabled")},widget:function(){return this.element},
+option:function(a,c){var d=a,e=this;if(arguments.length===0)return b.extend({},e.options);if(typeof a==="string"){if(c===undefined)return this.options[a];d={};d[a]=c}b.each(d,function(g,h){e._setOption(g,h)});return e},_setOption:function(a,c){this.options[a]=c;if(a==="disabled")this.widget()[c?"addClass":"removeClass"](this.widgetBaseClass+"-disabled ui-state-disabled").attr("aria-disabled",c);return this},enable:function(){return this._setOption("disabled",false)},disable:function(){return this._setOption("disabled",
+true)},_trigger:function(a,c,d){var e=this.options[a];c=b.Event(c);c.type=(a===this.widgetEventPrefix?a:this.widgetEventPrefix+a).toLowerCase();d=d||{};if(c.originalEvent){a=b.event.props.length;for(var g;a;){g=b.event.props[--a];c[g]=c.originalEvent[g]}}this.element.trigger(c,d);return!(b.isFunction(e)&&e.call(this.element[0],c,d)===false||c.isDefaultPrevented())}}})(jQuery);
+(function(c){c.widget("ui.mouse",{options:{cancel:":input,option",distance:1,delay:0},_mouseInit:function(){var a=this;this.element.bind("mousedown."+this.widgetName,function(b){return a._mouseDown(b)}).bind("click."+this.widgetName,function(b){if(a._preventClickEvent){a._preventClickEvent=false;b.stopImmediatePropagation();return false}});this.started=false},_mouseDestroy:function(){this.element.unbind("."+this.widgetName)},_mouseDown:function(a){a.originalEvent=a.originalEvent||{};if(!a.originalEvent.mouseHandled){this._mouseStarted&&
+this._mouseUp(a);this._mouseDownEvent=a;var b=this,e=a.which==1,f=typeof this.options.cancel=="string"?c(a.target).parents().add(a.target).filter(this.options.cancel).length:false;if(!e||f||!this._mouseCapture(a))return true;this.mouseDelayMet=!this.options.delay;if(!this.mouseDelayMet)this._mouseDelayTimer=setTimeout(function(){b.mouseDelayMet=true},this.options.delay);if(this._mouseDistanceMet(a)&&this._mouseDelayMet(a)){this._mouseStarted=this._mouseStart(a)!==false;if(!this._mouseStarted){a.preventDefault();
+return true}}this._mouseMoveDelegate=function(d){return b._mouseMove(d)};this._mouseUpDelegate=function(d){return b._mouseUp(d)};c(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate);c.browser.safari||a.preventDefault();return a.originalEvent.mouseHandled=true}},_mouseMove:function(a){if(c.browser.msie&&!a.button)return this._mouseUp(a);if(this._mouseStarted){this._mouseDrag(a);return a.preventDefault()}if(this._mouseDistanceMet(a)&&
+this._mouseDelayMet(a))(this._mouseStarted=this._mouseStart(this._mouseDownEvent,a)!==false)?this._mouseDrag(a):this._mouseUp(a);return!this._mouseStarted},_mouseUp:function(a){c(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate);if(this._mouseStarted){this._mouseStarted=false;this._preventClickEvent=a.target==this._mouseDownEvent.target;this._mouseStop(a)}return false},_mouseDistanceMet:function(a){return Math.max(Math.abs(this._mouseDownEvent.pageX-
+a.pageX),Math.abs(this._mouseDownEvent.pageY-a.pageY))>=this.options.distance},_mouseDelayMet:function(){return this.mouseDelayMet},_mouseStart:function(){},_mouseDrag:function(){},_mouseStop:function(){},_mouseCapture:function(){return true}})})(jQuery);
+(function(d){d.widget("ui.draggable",d.ui.mouse,{widgetEventPrefix:"drag",options:{addClasses:true,appendTo:"parent",axis:false,connectToSortable:false,containment:false,cursor:"auto",cursorAt:false,grid:false,handle:false,helper:"original",iframeFix:false,opacity:false,refreshPositions:false,revert:false,revertDuration:500,scope:"default",scroll:true,scrollSensitivity:20,scrollSpeed:20,snap:false,snapMode:"both",snapTolerance:20,stack:false,zIndex:false},_create:function(){if(this.options.helper==
+"original"&&!/^(?:r|a|f)/.test(this.element.css("position")))this.element[0].style.position="relative";this.options.addClasses&&this.element.addClass("ui-draggable");this.options.disabled&&this.element.addClass("ui-draggable-disabled");this._mouseInit()},destroy:function(){if(this.element.data("draggable")){this.element.removeData("draggable").unbind(".draggable").removeClass("ui-draggable ui-draggable-dragging ui-draggable-disabled");this._mouseDestroy();return this}},_mouseCapture:function(a){var b=
+this.options;if(this.helper||b.disabled||d(a.target).is(".ui-resizable-handle"))return false;this.handle=this._getHandle(a);if(!this.handle)return false;return true},_mouseStart:function(a){var b=this.options;this.helper=this._createHelper(a);this._cacheHelperProportions();if(d.ui.ddmanager)d.ui.ddmanager.current=this;this._cacheMargins();this.cssPosition=this.helper.css("position");this.scrollParent=this.helper.scrollParent();this.offset=this.positionAbs=this.element.offset();this.offset={top:this.offset.top-
+this.margins.top,left:this.offset.left-this.margins.left};d.extend(this.offset,{click:{left:a.pageX-this.offset.left,top:a.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this.position=this._generatePosition(a);this.originalPageX=a.pageX;this.originalPageY=a.pageY;b.cursorAt&&this._adjustOffsetFromHelper(b.cursorAt);b.containment&&this._setContainment();if(this._trigger("start",a)===false){this._clear();return false}this._cacheHelperProportions();
+d.ui.ddmanager&&!b.dropBehaviour&&d.ui.ddmanager.prepareOffsets(this,a);this.helper.addClass("ui-draggable-dragging");this._mouseDrag(a,true);return true},_mouseDrag:function(a,b){this.position=this._generatePosition(a);this.positionAbs=this._convertPositionTo("absolute");if(!b){b=this._uiHash();if(this._trigger("drag",a,b)===false){this._mouseUp({});return false}this.position=b.position}if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+"px";if(!this.options.axis||
+this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";d.ui.ddmanager&&d.ui.ddmanager.drag(this,a);return false},_mouseStop:function(a){var b=false;if(d.ui.ddmanager&&!this.options.dropBehaviour)b=d.ui.ddmanager.drop(this,a);if(this.dropped){b=this.dropped;this.dropped=false}if(!this.element[0]||!this.element[0].parentNode)return false;if(this.options.revert=="invalid"&&!b||this.options.revert=="valid"&&b||this.options.revert===true||d.isFunction(this.options.revert)&&this.options.revert.call(this.element,
+b)){var c=this;d(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){c._trigger("stop",a)!==false&&c._clear()})}else this._trigger("stop",a)!==false&&this._clear();return false},cancel:function(){this.helper.is(".ui-draggable-dragging")?this._mouseUp({}):this._clear();return this},_getHandle:function(a){var b=!this.options.handle||!d(this.options.handle,this.element).length?true:false;d(this.options.handle,this.element).find("*").andSelf().each(function(){if(this==
+a.target)b=true});return b},_createHelper:function(a){var b=this.options;a=d.isFunction(b.helper)?d(b.helper.apply(this.element[0],[a])):b.helper=="clone"?this.element.clone():this.element;a.parents("body").length||a.appendTo(b.appendTo=="parent"?this.element[0].parentNode:b.appendTo);a[0]!=this.element[0]&&!/(fixed|absolute)/.test(a.css("position"))&&a.css("position","absolute");return a},_adjustOffsetFromHelper:function(a){if(typeof a=="string")a=a.split(" ");if(d.isArray(a))a={left:+a[0],top:+a[1]||
+0};if("left"in a)this.offset.click.left=a.left+this.margins.left;if("right"in a)this.offset.click.left=this.helperProportions.width-a.right+this.margins.left;if("top"in a)this.offset.click.top=a.top+this.margins.top;if("bottom"in a)this.offset.click.top=this.helperProportions.height-a.bottom+this.margins.top},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var a=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],
+this.offsetParent[0])){a.left+=this.scrollParent.scrollLeft();a.top+=this.scrollParent.scrollTop()}if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&d.browser.msie)a={top:0,left:0};return{top:a.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:a.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var a=this.element.position();return{top:a.top-
+(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:a.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.element.css("marginLeft"),10)||0,top:parseInt(this.element.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var a=this.options;if(a.containment==
+"parent")a.containment=this.helper[0].parentNode;if(a.containment=="document"||a.containment=="window")this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,d(a.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(d(a.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(a.containment)&&
+a.containment.constructor!=Array){var b=d(a.containment)[0];if(b){a=d(a.containment).offset();var c=d(b).css("overflow")!="hidden";this.containment=[a.left+(parseInt(d(b).css("borderLeftWidth"),10)||0)+(parseInt(d(b).css("paddingLeft"),10)||0)-this.margins.left,a.top+(parseInt(d(b).css("borderTopWidth"),10)||0)+(parseInt(d(b).css("paddingTop"),10)||0)-this.margins.top,a.left+(c?Math.max(b.scrollWidth,b.offsetWidth):b.offsetWidth)-(parseInt(d(b).css("borderLeftWidth"),10)||0)-(parseInt(d(b).css("paddingRight"),
+10)||0)-this.helperProportions.width-this.margins.left,a.top+(c?Math.max(b.scrollHeight,b.offsetHeight):b.offsetHeight)-(parseInt(d(b).css("borderTopWidth"),10)||0)-(parseInt(d(b).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}}else if(a.containment.constructor==Array)this.containment=a.containment},_convertPositionTo:function(a,b){if(!b)b=this.position;a=a=="absolute"?1:-1;var c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],
+this.offsetParent[0]))?this.offsetParent:this.scrollParent,f=/(html|body)/i.test(c[0].tagName);return{top:b.top+this.offset.relative.top*a+this.offset.parent.top*a-(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():f?0:c.scrollTop())*a),left:b.left+this.offset.relative.left*a+this.offset.parent.left*a-(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():
+f?0:c.scrollLeft())*a)}},_generatePosition:function(a){var b=this.options,c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,f=/(html|body)/i.test(c[0].tagName),e=a.pageX,g=a.pageY;if(this.originalPosition){if(this.containment){if(a.pageX-this.offset.click.left<this.containment[0])e=this.containment[0]+this.offset.click.left;if(a.pageY-this.offset.click.top<this.containment[1])g=this.containment[1]+
+this.offset.click.top;if(a.pageX-this.offset.click.left>this.containment[2])e=this.containment[2]+this.offset.click.left;if(a.pageY-this.offset.click.top>this.containment[3])g=this.containment[3]+this.offset.click.top}if(b.grid){g=this.originalPageY+Math.round((g-this.originalPageY)/b.grid[1])*b.grid[1];g=this.containment?!(g-this.offset.click.top<this.containment[1]||g-this.offset.click.top>this.containment[3])?g:!(g-this.offset.click.top<this.containment[1])?g-b.grid[1]:g+b.grid[1]:g;e=this.originalPageX+
+Math.round((e-this.originalPageX)/b.grid[0])*b.grid[0];e=this.containment?!(e-this.offset.click.left<this.containment[0]||e-this.offset.click.left>this.containment[2])?e:!(e-this.offset.click.left<this.containment[0])?e-b.grid[0]:e+b.grid[0]:e}}return{top:g-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollTop():f?0:c.scrollTop()),left:e-this.offset.click.left-
+this.offset.relative.left-this.offset.parent.left+(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():f?0:c.scrollLeft())}},_clear:function(){this.helper.removeClass("ui-draggable-dragging");this.helper[0]!=this.element[0]&&!this.cancelHelperRemoval&&this.helper.remove();this.helper=null;this.cancelHelperRemoval=false},_trigger:function(a,b,c){c=c||this._uiHash();d.ui.plugin.call(this,a,[b,c]);if(a=="drag")this.positionAbs=
+this._convertPositionTo("absolute");return d.Widget.prototype._trigger.call(this,a,b,c)},plugins:{},_uiHash:function(){return{helper:this.helper,position:this.position,originalPosition:this.originalPosition,offset:this.positionAbs}}});d.extend(d.ui.draggable,{version:"1.9m2"});d.ui.plugin.add("draggable","connectToSortable",{start:function(a,b){var c=d(this).data("draggable"),f=c.options,e=d.extend({},b,{item:c.element});c.sortables=[];d(f.connectToSortable).each(function(){var g=d.data(this,"sortable");
+if(g&&!g.options.disabled){c.sortables.push({instance:g,shouldRevert:g.options.revert});g._refreshItems();g._trigger("activate",a,e)}})},stop:function(a,b){var c=d(this).data("draggable"),f=d.extend({},b,{item:c.element});d.each(c.sortables,function(){if(this.instance.isOver){this.instance.isOver=0;c.cancelHelperRemoval=true;this.instance.cancelHelperRemoval=false;if(this.shouldRevert)this.instance.options.revert=true;this.instance._mouseStop(a);this.instance.options.helper=this.instance.options._helper;
+c.options.helper=="original"&&this.instance.currentItem.css({top:"auto",left:"auto"})}else{this.instance.cancelHelperRemoval=false;this.instance._trigger("deactivate",a,f)}})},drag:function(a,b){var c=d(this).data("draggable"),f=this;d.each(c.sortables,function(){this.instance.positionAbs=c.positionAbs;this.instance.helperProportions=c.helperProportions;this.instance.offset.click=c.offset.click;if(this.instance._intersectsWith(this.instance.containerCache)){if(!this.instance.isOver){this.instance.isOver=
+1;this.instance.currentItem=d(f).clone().appendTo(this.instance.element).data("sortable-item",true);this.instance.options._helper=this.instance.options.helper;this.instance.options.helper=function(){return b.helper[0]};a.target=this.instance.currentItem[0];this.instance._mouseCapture(a,true);this.instance._mouseStart(a,true,true);this.instance.offset.click.top=c.offset.click.top;this.instance.offset.click.left=c.offset.click.left;this.instance.offset.parent.left-=c.offset.parent.left-this.instance.offset.parent.left;
+this.instance.offset.parent.top-=c.offset.parent.top-this.instance.offset.parent.top;c._trigger("toSortable",a);c.dropped=this.instance.element;c.currentItem=c.element;this.instance.fromOutside=c}this.instance.currentItem&&this.instance._mouseDrag(a)}else if(this.instance.isOver){this.instance.isOver=0;this.instance.cancelHelperRemoval=true;this.instance.options.revert=false;this.instance._trigger("out",a,this.instance._uiHash(this.instance));this.instance._mouseStop(a,true);this.instance.options.helper=
+this.instance.options._helper;this.instance.currentItem.remove();this.instance.placeholder&&this.instance.placeholder.remove();c._trigger("fromSortable",a);c.dropped=false}})}});d.ui.plugin.add("draggable","cursor",{start:function(){var a=d("body"),b=d(this).data("draggable").options;if(a.css("cursor"))b._cursor=a.css("cursor");a.css("cursor",b.cursor)},stop:function(){var a=d(this).data("draggable").options;a._cursor&&d("body").css("cursor",a._cursor)}});d.ui.plugin.add("draggable","iframeFix",{start:function(){var a=
+d(this).data("draggable").options;d(a.iframeFix===true?"iframe":a.iframeFix).each(function(){d('<div class="ui-draggable-iframeFix" style="background: #fff;"></div>').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1E3}).css(d(this).offset()).appendTo("body")})},stop:function(){d("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)})}});d.ui.plugin.add("draggable","opacity",{start:function(a,b){a=d(b.helper);b=d(this).data("draggable").options;
+if(a.css("opacity"))b._opacity=a.css("opacity");a.css("opacity",b.opacity)},stop:function(a,b){a=d(this).data("draggable").options;a._opacity&&d(b.helper).css("opacity",a._opacity)}});d.ui.plugin.add("draggable","scroll",{start:function(){var a=d(this).data("draggable");if(a.scrollParent[0]!=document&&a.scrollParent[0].tagName!="HTML")a.overflowOffset=a.scrollParent.offset()},drag:function(a){var b=d(this).data("draggable"),c=b.options,f=false;if(b.scrollParent[0]!=document&&b.scrollParent[0].tagName!=
+"HTML"){if(!c.axis||c.axis!="x")if(b.overflowOffset.top+b.scrollParent[0].offsetHeight-a.pageY<c.scrollSensitivity)b.scrollParent[0].scrollTop=f=b.scrollParent[0].scrollTop+c.scrollSpeed;else if(a.pageY-b.overflowOffset.top<c.scrollSensitivity)b.scrollParent[0].scrollTop=f=b.scrollParent[0].scrollTop-c.scrollSpeed;if(!c.axis||c.axis!="y")if(b.overflowOffset.left+b.scrollParent[0].offsetWidth-a.pageX<c.scrollSensitivity)b.scrollParent[0].scrollLeft=f=b.scrollParent[0].scrollLeft+c.scrollSpeed;else if(a.pageX-
+b.overflowOffset.left<c.scrollSensitivity)b.scrollParent[0].scrollLeft=f=b.scrollParent[0].scrollLeft-c.scrollSpeed}else{if(!c.axis||c.axis!="x")if(a.pageY-d(document).scrollTop()<c.scrollSensitivity)f=d(document).scrollTop(d(document).scrollTop()-c.scrollSpeed);else if(d(window).height()-(a.pageY-d(document).scrollTop())<c.scrollSensitivity)f=d(document).scrollTop(d(document).scrollTop()+c.scrollSpeed);if(!c.axis||c.axis!="y")if(a.pageX-d(document).scrollLeft()<c.scrollSensitivity)f=d(document).scrollLeft(d(document).scrollLeft()-
+c.scrollSpeed);else if(d(window).width()-(a.pageX-d(document).scrollLeft())<c.scrollSensitivity)f=d(document).scrollLeft(d(document).scrollLeft()+c.scrollSpeed)}f!==false&&d.ui.ddmanager&&!c.dropBehaviour&&d.ui.ddmanager.prepareOffsets(b,a)}});d.ui.plugin.add("draggable","snap",{start:function(){var a=d(this).data("draggable"),b=a.options;a.snapElements=[];d(b.snap.constructor!=String?b.snap.items||":data(draggable)":b.snap).each(function(){var c=d(this),f=c.offset();this!=a.element[0]&&a.snapElements.push({item:this,
+width:c.outerWidth(),height:c.outerHeight(),top:f.top,left:f.left})})},drag:function(a,b){for(var c=d(this).data("draggable"),f=c.options,e=f.snapTolerance,g=b.offset.left,n=g+c.helperProportions.width,m=b.offset.top,o=m+c.helperProportions.height,h=c.snapElements.length-1;h>=0;h--){var i=c.snapElements[h].left,k=i+c.snapElements[h].width,j=c.snapElements[h].top,l=j+c.snapElements[h].height;if(i-e<g&&g<k+e&&j-e<m&&m<l+e||i-e<g&&g<k+e&&j-e<o&&o<l+e||i-e<n&&n<k+e&&j-e<m&&m<l+e||i-e<n&&n<k+e&&j-e<o&&
+o<l+e){if(f.snapMode!="inner"){var p=Math.abs(j-o)<=e,q=Math.abs(l-m)<=e,r=Math.abs(i-n)<=e,s=Math.abs(k-g)<=e;if(p)b.position.top=c._convertPositionTo("relative",{top:j-c.helperProportions.height,left:0}).top-c.margins.top;if(q)b.position.top=c._convertPositionTo("relative",{top:l,left:0}).top-c.margins.top;if(r)b.position.left=c._convertPositionTo("relative",{top:0,left:i-c.helperProportions.width}).left-c.margins.left;if(s)b.position.left=c._convertPositionTo("relative",{top:0,left:k}).left-c.margins.left}var t=
+p||q||r||s;if(f.snapMode!="outer"){p=Math.abs(j-m)<=e;q=Math.abs(l-o)<=e;r=Math.abs(i-g)<=e;s=Math.abs(k-n)<=e;if(p)b.position.top=c._convertPositionTo("relative",{top:j,left:0}).top-c.margins.top;if(q)b.position.top=c._convertPositionTo("relative",{top:l-c.helperProportions.height,left:0}).top-c.margins.top;if(r)b.position.left=c._convertPositionTo("relative",{top:0,left:i}).left-c.margins.left;if(s)b.position.left=c._convertPositionTo("relative",{top:0,left:k-c.helperProportions.width}).left-c.margins.left}if(!c.snapElements[h].snapping&&
+(p||q||r||s||t))c.options.snap.snap&&c.options.snap.snap.call(c.element,a,d.extend(c._uiHash(),{snapItem:c.snapElements[h].item}));c.snapElements[h].snapping=p||q||r||s||t}else{c.snapElements[h].snapping&&c.options.snap.release&&c.options.snap.release.call(c.element,a,d.extend(c._uiHash(),{snapItem:c.snapElements[h].item}));c.snapElements[h].snapping=false}}}});d.ui.plugin.add("draggable","stack",{start:function(){var a=d(this).data("draggable").options;a=d.makeArray(d(a.stack)).sort(function(c,f){return(parseInt(d(c).css("zIndex"),
+10)||0)-(parseInt(d(f).css("zIndex"),10)||0)});if(a.length){var b=parseInt(a[0].style.zIndex)||0;d(a).each(function(c){this.style.zIndex=b+c});this[0].style.zIndex=b+a.length}}});d.ui.plugin.add("draggable","zIndex",{start:function(a,b){a=d(b.helper);b=d(this).data("draggable").options;if(a.css("zIndex"))b._zIndex=a.css("zIndex");a.css("zIndex",b.zIndex)},stop:function(a,b){a=d(this).data("draggable").options;a._zIndex&&d(b.helper).css("zIndex",a._zIndex)}})})(jQuery);
+(function(d){d.widget("ui.droppable",{widgetEventPrefix:"drop",options:{accept:"*",activeClass:false,addClasses:true,greedy:false,hoverClass:false,scope:"default",tolerance:"intersect"},_create:function(){var a=this.options,b=a.accept;this.isover=0;this.isout=1;this.accept=d.isFunction(b)?b:function(c){return c.is(b)};this.proportions={width:this.element[0].offsetWidth,height:this.element[0].offsetHeight};d.ui.ddmanager.droppables[a.scope]=d.ui.ddmanager.droppables[a.scope]||[];d.ui.ddmanager.droppables[a.scope].push(this);
+a.addClasses&&this.element.addClass("ui-droppable")},destroy:function(){for(var a=d.ui.ddmanager.droppables[this.options.scope],b=0;b<a.length;b++)a[b]==this&&a.splice(b,1);this.element.removeClass("ui-droppable ui-droppable-disabled").removeData("droppable").unbind(".droppable");return this},_setOption:function(a,b){if(a=="accept")this.accept=d.isFunction(b)?b:function(c){return c.is(b)};d.Widget.prototype._setOption.apply(this,arguments)},_activate:function(a){var b=d.ui.ddmanager.current;this.options.activeClass&&
+this.element.addClass(this.options.activeClass);b&&this._trigger("activate",a,this.ui(b))},_deactivate:function(a){var b=d.ui.ddmanager.current;this.options.activeClass&&this.element.removeClass(this.options.activeClass);b&&this._trigger("deactivate",a,this.ui(b))},_over:function(a){var b=d.ui.ddmanager.current;if(!(!b||(b.currentItem||b.element)[0]==this.element[0]))if(this.accept.call(this.element[0],b.currentItem||b.element)){this.options.hoverClass&&this.element.addClass(this.options.hoverClass);
+this._trigger("over",a,this.ui(b))}},_out:function(a){var b=d.ui.ddmanager.current;if(!(!b||(b.currentItem||b.element)[0]==this.element[0]))if(this.accept.call(this.element[0],b.currentItem||b.element)){this.options.hoverClass&&this.element.removeClass(this.options.hoverClass);this._trigger("out",a,this.ui(b))}},_drop:function(a,b){var c=b||d.ui.ddmanager.current;if(!c||(c.currentItem||c.element)[0]==this.element[0])return false;var e=false;this.element.find(":data(droppable)").not(".ui-draggable-dragging").each(function(){var g=
+d.data(this,"droppable");if(g.options.greedy&&!g.options.disabled&&g.options.scope==c.options.scope&&g.accept.call(g.element[0],c.currentItem||c.element)&&d.ui.intersect(c,d.extend(g,{offset:g.element.offset()}),g.options.tolerance)){e=true;return false}});if(e)return false;if(this.accept.call(this.element[0],c.currentItem||c.element)){this.options.activeClass&&this.element.removeClass(this.options.activeClass);this.options.hoverClass&&this.element.removeClass(this.options.hoverClass);this._trigger("drop",
+a,this.ui(c));return this.element}return false},ui:function(a){return{draggable:a.currentItem||a.element,helper:a.helper,position:a.position,offset:a.positionAbs}}});d.extend(d.ui.droppable,{version:"1.9m2"});d.ui.intersect=function(a,b,c){if(!b.offset)return false;var e=(a.positionAbs||a.position.absolute).left,g=e+a.helperProportions.width,f=(a.positionAbs||a.position.absolute).top,h=f+a.helperProportions.height,i=b.offset.left,k=i+b.proportions.width,j=b.offset.top,l=j+b.proportions.height;
+switch(c){case "fit":return i<e&&g<k&&j<f&&h<l;case "intersect":return i<e+a.helperProportions.width/2&&g-a.helperProportions.width/2<k&&j<f+a.helperProportions.height/2&&h-a.helperProportions.height/2<l;case "pointer":return d.ui.isOver((a.positionAbs||a.position.absolute).top+(a.clickOffset||a.offset.click).top,(a.positionAbs||a.position.absolute).left+(a.clickOffset||a.offset.click).left,j,i,b.proportions.height,b.proportions.width);case "touch":return(f>=j&&f<=l||h>=j&&h<=l||f<j&&h>l)&&(e>=i&&
+e<=k||g>=i&&g<=k||e<i&&g>k);default:return false}};d.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(a,b){var c=d.ui.ddmanager.droppables[a.options.scope]||[],e=b?b.type:null,g=(a.currentItem||a.element).find(":data(droppable)").andSelf(),f=0;a:for(;f<c.length;f++)if(!(c[f].options.disabled||a&&!c[f].accept.call(c[f].element[0],a.currentItem||a.element))){for(var h=0;h<g.length;h++)if(g[h]==c[f].element[0]){c[f].proportions.height=0;continue a}c[f].visible=c[f].element.css("display")!=
+"none";if(c[f].visible){c[f].offset=c[f].element.offset();c[f].proportions={width:c[f].element[0].offsetWidth,height:c[f].element[0].offsetHeight};e=="mousedown"&&c[f]._activate.call(c[f],b)}}},drop:function(a,b){var c=false;d.each(d.ui.ddmanager.droppables[a.options.scope]||[],function(){if(this.options){if(!this.options.disabled&&this.visible&&d.ui.intersect(a,this,this.options.tolerance))c=c||this._drop.call(this,b);if(!this.options.disabled&&this.visible&&this.accept.call(this.element[0],a.currentItem||
+a.element)){this.isout=1;this.isover=0;this._deactivate.call(this,b)}}});return c},drag:function(a,b){a.options.refreshPositions&&d.ui.ddmanager.prepareOffsets(a,b);d.each(d.ui.ddmanager.droppables[a.options.scope]||[],function(){if(!(this.options.disabled||this.greedyChild||!this.visible)){var c=d.ui.intersect(a,this,this.options.tolerance);if(c=!c&&this.isover==1?"isout":c&&this.isover==0?"isover":null){var e;if(this.options.greedy){var g=this.element.parents(":data(droppable):eq(0)");if(g.length){e=
+d.data(g[0],"droppable");e.greedyChild=c=="isover"?1:0}}if(e&&c=="isover"){e.isover=0;e.isout=1;e._out.call(e,b)}this[c]=1;this[c=="isout"?"isover":"isout"]=0;this[c=="isover"?"_over":"_out"].call(this,b);if(e&&c=="isout"){e.isout=0;e.isover=1;e._over.call(e,b)}}}})}}})(jQuery);
+(function(e){e.widget("ui.resizable",e.ui.mouse,{widgetEventPrefix:"resize",options:{alsoResize:false,animate:false,animateDuration:"slow",animateEasing:"swing",aspectRatio:false,autoHide:false,containment:false,ghost:false,grid:false,handles:"e,s,se",helper:false,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:1E3},_create:function(){var b=this,a=this.options;this.element.addClass("ui-resizable");e.extend(this,{_aspectRatio:!!a.aspectRatio,aspectRatio:a.aspectRatio,originalElement:this.element,
+_proportionallyResizeElements:[],_helper:a.helper||a.ghost||a.animate?a.helper||"ui-resizable-helper":null});if(this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)){/relative/.test(this.element.css("position"))&&e.browser.opera&&this.element.css({position:"relative",top:"auto",left:"auto"});this.element.wrap(e('<div class="ui-wrapper" style="overflow: hidden;"></div>').css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),
+top:this.element.css("top"),left:this.element.css("left")}));this.element=this.element.parent().data("resizable",this.element.data("resizable"));this.elementIsWrapper=true;this.element.css({marginLeft:this.originalElement.css("marginLeft"),marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom")});this.originalElement.css({marginLeft:0,marginTop:0,marginRight:0,marginBottom:0});this.originalResizeStyle=
+this.originalElement.css("resize");this.originalElement.css("resize","none");this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"}));this.originalElement.css({margin:this.originalElement.css("margin")});this._proportionallyResize()}this.handles=a.handles||(!e(".ui-resizable-handle",this.element).length?"e,s,se":{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",
+nw:".ui-resizable-nw"});if(this.handles.constructor==String){if(this.handles=="all")this.handles="n,e,s,w,se,sw,ne,nw";var c=this.handles.split(",");this.handles={};for(var d=0;d<c.length;d++){var f=e.trim(c[d]),g=e('<div class="ui-resizable-handle '+("ui-resizable-"+f)+'"></div>');/sw|se|ne|nw/.test(f)&&g.css({zIndex:++a.zIndex});"se"==f&&g.addClass("ui-icon ui-icon-gripsmall-diagonal-se");this.handles[f]=".ui-resizable-"+f;this.element.append(g)}}this._renderAxis=function(h){h=h||this.element;for(var i in this.handles){if(this.handles[i].constructor==
+String)this.handles[i]=e(this.handles[i],this.element).show();if(this.elementIsWrapper&&this.originalElement[0].nodeName.match(/textarea|input|select|button/i)){var j=e(this.handles[i],this.element),k=0;k=/sw|ne|nw|se|n|s/.test(i)?j.outerHeight():j.outerWidth();j=["padding",/ne|nw|n/.test(i)?"Top":/se|sw|s/.test(i)?"Bottom":/^e$/.test(i)?"Right":"Left"].join("");h.css(j,k);this._proportionallyResize()}e(this.handles[i])}};this._renderAxis(this.element);this._handles=e(".ui-resizable-handle",this.element).disableSelection();
+this._handles.mouseover(function(){if(!b.resizing){if(this.className)var h=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i);b.axis=h&&h[1]?h[1]:"se"}});if(a.autoHide){this._handles.hide();e(this.element).addClass("ui-resizable-autohide").hover(function(){e(this).removeClass("ui-resizable-autohide");b._handles.show()},function(){if(!b.resizing){e(this).addClass("ui-resizable-autohide");b._handles.hide()}})}this._mouseInit()},destroy:function(){this._mouseDestroy();var b=function(c){e(c).removeClass("ui-resizable ui-resizable-disabled ui-resizable-resizing").removeData("resizable").unbind(".resizable").find(".ui-resizable-handle").remove()};
+if(this.elementIsWrapper){b(this.element);var a=this.element;a.after(this.originalElement.css({position:a.css("position"),width:a.outerWidth(),height:a.outerHeight(),top:a.css("top"),left:a.css("left")})).remove()}this.originalElement.css("resize",this.originalResizeStyle);b(this.originalElement);return this},_mouseCapture:function(b){var a=false;for(var c in this.handles)if(e(this.handles[c])[0]==b.target)a=true;return!this.options.disabled&&a},_mouseStart:function(b){var a=this.options,c=this.element.position(),
+d=this.element;this.resizing=true;this.documentScroll={top:e(document).scrollTop(),left:e(document).scrollLeft()};if(d.is(".ui-draggable")||/absolute/.test(d.css("position")))d.css({position:"absolute",top:c.top,left:c.left});e.browser.opera&&/relative/.test(d.css("position"))&&d.css({position:"relative",top:"auto",left:"auto"});this._renderProxy();c=m(this.helper.css("left"));var f=m(this.helper.css("top"));if(a.containment){c+=e(a.containment).scrollLeft()||0;f+=e(a.containment).scrollTop()||0}this.offset=
+this.helper.offset();this.position={left:c,top:f};this.size=this._helper?{width:d.outerWidth(),height:d.outerHeight()}:{width:d.width(),height:d.height()};this.originalSize=this._helper?{width:d.outerWidth(),height:d.outerHeight()}:{width:d.width(),height:d.height()};this.originalPosition={left:c,top:f};this.sizeDiff={width:d.outerWidth()-d.width(),height:d.outerHeight()-d.height()};this.originalMousePosition={left:b.pageX,top:b.pageY};this.aspectRatio=typeof a.aspectRatio=="number"?a.aspectRatio:
+this.originalSize.width/this.originalSize.height||1;a=e(".ui-resizable-"+this.axis).css("cursor");e("body").css("cursor",a=="auto"?this.axis+"-resize":a);d.addClass("ui-resizable-resizing");this._propagate("start",b);return true},_mouseDrag:function(b){var a=this.helper,c=this.originalMousePosition,d=this._change[this.axis];if(!d)return false;c=d.apply(this,[b,b.pageX-c.left||0,b.pageY-c.top||0]);if(this._aspectRatio||b.shiftKey)c=this._updateRatio(c,b);c=this._respectSize(c,b);this._propagate("resize",
+b);a.css({top:this.position.top+"px",left:this.position.left+"px",width:this.size.width+"px",height:this.size.height+"px"});!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize();this._updateCache(c);this._trigger("resize",b,this.ui());return false},_mouseStop:function(b){this.resizing=false;var a=this.options,c=this;if(this._helper){var d=this._proportionallyResizeElements,f=d.length&&/textarea/i.test(d[0].nodeName);d=f&&e.ui.hasScroll(d[0],"left")?0:c.sizeDiff.height;
+f={width:c.size.width-(f?0:c.sizeDiff.width),height:c.size.height-d};d=parseInt(c.element.css("left"),10)+(c.position.left-c.originalPosition.left)||null;var g=parseInt(c.element.css("top"),10)+(c.position.top-c.originalPosition.top)||null;a.animate||this.element.css(e.extend(f,{top:g,left:d}));c.helper.height(c.size.height);c.helper.width(c.size.width);this._helper&&!a.animate&&this._proportionallyResize()}e("body").css("cursor","auto");this.element.removeClass("ui-resizable-resizing");this._propagate("stop",
+b);this._helper&&this.helper.remove();return false},_updateCache:function(b){this.offset=this.helper.offset();if(l(b.left))this.position.left=b.left;if(l(b.top))this.position.top=b.top;if(l(b.height))this.size.height=b.height;if(l(b.width))this.size.width=b.width},_updateRatio:function(b){var a=this.position,c=this.size,d=this.axis;if(b.height)b.width=c.height*this.aspectRatio;else if(b.width)b.height=c.width/this.aspectRatio;if(d=="sw"){b.left=a.left+(c.width-b.width);b.top=null}if(d=="nw"){b.top=
+a.top+(c.height-b.height);b.left=a.left+(c.width-b.width)}return b},_respectSize:function(b){var a=this.options,c=this.axis,d=l(b.width)&&a.maxWidth&&a.maxWidth<b.width,f=l(b.height)&&a.maxHeight&&a.maxHeight<b.height,g=l(b.width)&&a.minWidth&&a.minWidth>b.width,h=l(b.height)&&a.minHeight&&a.minHeight>b.height;if(g)b.width=a.minWidth;if(h)b.height=a.minHeight;if(d)b.width=a.maxWidth;if(f)b.height=a.maxHeight;var i=this.originalPosition.left+this.originalSize.width,j=this.position.top+this.size.height,
+k=/sw|nw|w/.test(c);c=/nw|ne|n/.test(c);if(g&&k)b.left=i-a.minWidth;if(d&&k)b.left=i-a.maxWidth;if(h&&c)b.top=j-a.minHeight;if(f&&c)b.top=j-a.maxHeight;if((a=!b.width&&!b.height)&&!b.left&&b.top)b.top=null;else if(a&&!b.top&&b.left)b.left=null;return b},_proportionallyResize:function(){if(this._proportionallyResizeElements.length)for(var b=this.helper||this.element,a=0;a<this._proportionallyResizeElements.length;a++){var c=this._proportionallyResizeElements[a];if(!this.borderDif){var d=[c.css("borderTopWidth"),
+c.css("borderRightWidth"),c.css("borderBottomWidth"),c.css("borderLeftWidth")],f=[c.css("paddingTop"),c.css("paddingRight"),c.css("paddingBottom"),c.css("paddingLeft")];this.borderDif=e.map(d,function(g,h){g=parseInt(g,10)||0;h=parseInt(f[h],10)||0;return g+h})}e.browser.msie&&(e(b).is(":hidden")||e(b).parents(":hidden").length)||c.css({height:b.height()-this.borderDif[0]-this.borderDif[2]||0,width:b.width()-this.borderDif[1]-this.borderDif[3]||0})}},_renderProxy:function(){var b=this.options;this.elementOffset=
+this.element.offset();if(this._helper){this.helper=this.helper||e('<div style="overflow:hidden;"></div>');var a=e.browser.msie&&e.browser.version<7,c=a?1:0;a=a?2:-1;this.helper.addClass(this._helper).css({width:this.element.outerWidth()+a,height:this.element.outerHeight()+a,position:"absolute",left:this.elementOffset.left-c+"px",top:this.elementOffset.top-c+"px",zIndex:++b.zIndex});this.helper.appendTo("body").disableSelection()}else this.helper=this.element},_change:{e:function(b,a){return{width:this.originalSize.width+
+a}},w:function(b,a){return{left:this.originalPosition.left+a,width:this.originalSize.width-a}},n:function(b,a,c){return{top:this.originalPosition.top+c,height:this.originalSize.height-c}},s:function(b,a,c){return{height:this.originalSize.height+c}},se:function(b,a,c){return e.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[b,a,c]))},sw:function(b,a,c){return e.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[b,a,c]))},ne:function(b,a,c){return e.extend(this._change.n.apply(this,
+arguments),this._change.e.apply(this,[b,a,c]))},nw:function(b,a,c){return e.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[b,a,c]))}},_propagate:function(b,a){e.ui.plugin.call(this,b,[a,this.ui()]);b!="resize"&&this._trigger(b,a,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}});e.extend(e.ui.resizable,
+{version:"1.9m2"});e.ui.plugin.add("resizable","alsoResize",{start:function(){var b=e(this).data("resizable").options,a=function(c){e(c).each(function(){var d=e(this);d.data("resizable-alsoresize",{width:parseInt(d.width(),10),height:parseInt(d.height(),10),left:parseInt(d.css("left"),10),top:parseInt(d.css("top"),10),position:d.css("position")})})};if(typeof b.alsoResize=="object"&&!b.alsoResize.parentNode)if(b.alsoResize.length){b.alsoResize=b.alsoResize[0];a(b.alsoResize)}else e.each(b.alsoResize,
+function(c){a(c)});else a(b.alsoResize)},resize:function(b,a){var c=e(this).data("resizable");b=c.options;var d=c.originalSize,f=c.originalPosition,g={height:c.size.height-d.height||0,width:c.size.width-d.width||0,top:c.position.top-f.top||0,left:c.position.left-f.left||0},h=function(i,j){e(i).each(function(){var k=e(this),r=e(this).data("resizable-alsoresize"),q={},s=j&&j.length?j:k.parents(a.originalElement[0]).length?["width","height"]:["width","height","top","left"];e.each(s,function(n,p){if((n=
+(r[p]||0)+(g[p]||0))&&n>=0)q[p]=n||null});if(e.browser.opera&&/relative/.test(k.css("position"))){c._revertToRelativePosition=true;k.css({position:"absolute",top:"auto",left:"auto"})}k.css(q)})};typeof b.alsoResize=="object"&&!b.alsoResize.nodeType?e.each(b.alsoResize,function(i,j){h(i,j)}):h(b.alsoResize)},stop:function(){var b=e(this).data("resizable"),a=function(c){e(c).each(function(){var d=e(this);d.css({position:d.data("resizable-alsoresize").position})})};if(b._revertToRelativePosition){b._revertToRelativePosition=
+false;typeof o.alsoResize=="object"&&!o.alsoResize.nodeType?e.each(o.alsoResize,function(c){a(c)}):a(o.alsoResize)}e(this).removeData("resizable-alsoresize")}});e.ui.plugin.add("resizable","animate",{stop:function(b){var a=e(this).data("resizable"),c=a.options,d=a._proportionallyResizeElements,f=d.length&&/textarea/i.test(d[0].nodeName),g=f&&e.ui.hasScroll(d[0],"left")?0:a.sizeDiff.height;f={width:a.size.width-(f?0:a.sizeDiff.width),height:a.size.height-g};g=parseInt(a.element.css("left"),10)+(a.position.left-
+a.originalPosition.left)||null;var h=parseInt(a.element.css("top"),10)+(a.position.top-a.originalPosition.top)||null;a.element.animate(e.extend(f,h&&g?{top:h,left:g}:{}),{duration:c.animateDuration,easing:c.animateEasing,step:function(){var i={width:parseInt(a.element.css("width"),10),height:parseInt(a.element.css("height"),10),top:parseInt(a.element.css("top"),10),left:parseInt(a.element.css("left"),10)};d&&d.length&&e(d[0]).css({width:i.width,height:i.height});a._updateCache(i);a._propagate("resize",
+b)}})}});e.ui.plugin.add("resizable","containment",{start:function(){var b=e(this).data("resizable"),a=b.element,c=b.options.containment;if(a=c instanceof e?c.get(0):/parent/.test(c)?a.parent().get(0):c){b.containerElement=e(a);if(/document/.test(c)||c==document){b.containerOffset={left:0,top:0};b.containerPosition={left:0,top:0};b.parentData={element:e(document),left:0,top:0,width:e(document).width(),height:e(document).height()||document.body.parentNode.scrollHeight}}else{var d=e(a),f=[];e(["Top",
+"Right","Left","Bottom"]).each(function(i,j){f[i]=m(d.css("padding"+j))});b.containerOffset=d.offset();b.containerPosition=d.position();b.containerSize={height:d.innerHeight()-f[3],width:d.innerWidth()-f[1]};c=b.containerOffset;var g=b.containerSize.height,h=b.containerSize.width;h=e.ui.hasScroll(a,"left")?a.scrollWidth:h;g=e.ui.hasScroll(a)?a.scrollHeight:g;b.parentData={element:a,left:c.left,top:c.top,width:h,height:g}}}},resize:function(b){var a=e(this).data("resizable"),c=a.options,d=a.containerOffset,
+f=a.position;b=a._aspectRatio||b.shiftKey;var g={top:0,left:0},h=a.containerElement;if(h[0]!=document&&/static/.test(h.css("position")))g=d;if(f.left<(a._helper?d.left:0)){a.size.width+=a._helper?a.position.left-d.left:a.position.left-g.left;if(b)a.size.height=a.size.width/c.aspectRatio;a.position.left=c.helper?d.left:0}if(f.top<(a._helper?d.top:0)){a.size.height+=a._helper?a.position.top-d.top:a.position.top;if(b)a.size.width=a.size.height*c.aspectRatio;a.position.top=a._helper?d.top:0}a.offset.left=
+a.parentData.left+a.position.left;a.offset.top=a.parentData.top+a.position.top;c=Math.abs((a._helper?a.offset.left-g.left:a.offset.left-g.left)+a.sizeDiff.width);d=Math.abs((a._helper?a.offset.top-g.top:a.offset.top-d.top)+a.sizeDiff.height);f=a.containerElement.get(0)==a.element.parent().get(0);g=/relative|absolute/.test(a.containerElement.css("position"));if(f&&g)c-=a.parentData.left;if(c+a.size.width>=a.parentData.width){a.size.width=a.parentData.width-c;if(b)a.size.height=a.size.width/a.aspectRatio}if(d+
+a.size.height>=a.parentData.height){a.size.height=a.parentData.height-d;if(b)a.size.width=a.size.height*a.aspectRatio}},stop:function(){var b=e(this).data("resizable"),a=b.options,c=b.containerOffset,d=b.containerPosition,f=b.containerElement,g=e(b.helper),h=g.offset(),i=g.outerWidth()-b.sizeDiff.width;g=g.outerHeight()-b.sizeDiff.height;b._helper&&!a.animate&&/relative/.test(f.css("position"))&&e(this).css({left:h.left-d.left-c.left,width:i,height:g});b._helper&&!a.animate&&/static/.test(f.css("position"))&&
+e(this).css({left:h.left-d.left-c.left,width:i,height:g})}});e.ui.plugin.add("resizable","ghost",{start:function(){var b=e(this).data("resizable"),a=b.options,c=b.size;b.ghost=b.originalElement.clone();b.ghost.css({opacity:0.25,display:"block",position:"relative",height:c.height,width:c.width,margin:0,left:0,top:0}).addClass("ui-resizable-ghost").addClass(typeof a.ghost=="string"?a.ghost:"");b.ghost.appendTo(b.helper)},resize:function(){var b=e(this).data("resizable");b.ghost&&b.ghost.css({position:"relative",
+height:b.size.height,width:b.size.width})},stop:function(){var b=e(this).data("resizable");b.ghost&&b.helper&&b.helper.get(0).removeChild(b.ghost.get(0))}});e.ui.plugin.add("resizable","grid",{resize:function(){var b=e(this).data("resizable"),a=b.options,c=b.size,d=b.originalSize,f=b.originalPosition,g=b.axis;a.grid=typeof a.grid=="number"?[a.grid,a.grid]:a.grid;var h=Math.round((c.width-d.width)/(a.grid[0]||1))*(a.grid[0]||1);a=Math.round((c.height-d.height)/(a.grid[1]||1))*(a.grid[1]||1);if(/^(se|s|e)$/.test(g)){b.size.width=
+d.width+h;b.size.height=d.height+a}else if(/^(ne)$/.test(g)){b.size.width=d.width+h;b.size.height=d.height+a;b.position.top=f.top-a}else{if(/^(sw)$/.test(g)){b.size.width=d.width+h;b.size.height=d.height+a}else{b.size.width=d.width+h;b.size.height=d.height+a;b.position.top=f.top-a}b.position.left=f.left-h}}});var m=function(b){return parseInt(b,10)||0},l=function(b){return!isNaN(parseInt(b,10))}})(jQuery);
+(function(e){e.widget("ui.selectable",e.ui.mouse,{options:{appendTo:"body",autoRefresh:true,distance:0,filter:"*",tolerance:"touch"},_create:function(){var c=this;this.element.addClass("ui-selectable");this.dragged=false;var f;this.refresh=function(){f=e(c.options.filter,c.element[0]);f.each(function(){var d=e(this),b=d.offset();e.data(this,"selectable-item",{element:this,$element:d,left:b.left,top:b.top,right:b.left+d.outerWidth(),bottom:b.top+d.outerHeight(),startselected:false,selected:d.hasClass("ui-selected"),
+selecting:d.hasClass("ui-selecting"),unselecting:d.hasClass("ui-unselecting")})})};this.refresh();this.selectees=f.addClass("ui-selectee");this._mouseInit();this.helper=e("<div class='ui-selectable-helper'></div>")},destroy:function(){this.selectees.removeClass("ui-selectee").removeData("selectable-item");this.element.removeClass("ui-selectable ui-selectable-disabled").removeData("selectable").unbind(".selectable");this._mouseDestroy();return this},_mouseStart:function(c){var f=this;this.opos=[c.pageX,
+c.pageY];if(!this.options.disabled){var d=this.options;this.selectees=e(d.filter,this.element[0]);this._trigger("start",c);e(d.appendTo).append(this.helper);this.helper.css({left:c.clientX,top:c.clientY,width:0,height:0});d.autoRefresh&&this.refresh();this.selectees.filter(".ui-selected").each(function(){var b=e.data(this,"selectable-item");b.startselected=true;if(!c.metaKey){b.$element.removeClass("ui-selected");b.selected=false;b.$element.addClass("ui-unselecting");b.unselecting=true;f._trigger("unselecting",
+c,{unselecting:b.element})}});e(c.target).parents().andSelf().each(function(){var b=e.data(this,"selectable-item");if(b){var g=!c.metaKey||!b.$element.hasClass("ui-selected");b.$element.removeClass(g?"ui-unselecting":"ui-selected").addClass(g?"ui-selecting":"ui-unselecting");b.unselecting=!g;b.selecting=g;(b.selected=g)?f._trigger("selecting",c,{selecting:b.element}):f._trigger("unselecting",c,{unselecting:b.element});return false}})}},_mouseDrag:function(c){var f=this;this.dragged=true;if(!this.options.disabled){var d=
+this.options,b=this.opos[0],g=this.opos[1],h=c.pageX,i=c.pageY;if(b>h){var j=h;h=b;b=j}if(g>i){j=i;i=g;g=j}this.helper.css({left:b,top:g,width:h-b,height:i-g});this.selectees.each(function(){var a=e.data(this,"selectable-item");if(!(!a||a.element==f.element[0])){var k=false;if(d.tolerance=="touch")k=!(a.left>h||a.right<b||a.top>i||a.bottom<g);else if(d.tolerance=="fit")k=a.left>b&&a.right<h&&a.top>g&&a.bottom<i;if(k){if(a.selected){a.$element.removeClass("ui-selected");a.selected=false}if(a.unselecting){a.$element.removeClass("ui-unselecting");
+a.unselecting=false}if(!a.selecting){a.$element.addClass("ui-selecting");a.selecting=true;f._trigger("selecting",c,{selecting:a.element})}}else{if(a.selecting)if(c.metaKey&&a.startselected){a.$element.removeClass("ui-selecting");a.selecting=false;a.$element.addClass("ui-selected");a.selected=true}else{a.$element.removeClass("ui-selecting");a.selecting=false;if(a.startselected){a.$element.addClass("ui-unselecting");a.unselecting=true}f._trigger("unselecting",c,{unselecting:a.element})}if(a.selected)if(!c.metaKey&&
+!a.startselected){a.$element.removeClass("ui-selected");a.selected=false;a.$element.addClass("ui-unselecting");a.unselecting=true;f._trigger("unselecting",c,{unselecting:a.element})}}}});return false}},_mouseStop:function(c){var f=this;this.dragged=false;e(".ui-unselecting",this.element[0]).each(function(){var d=e.data(this,"selectable-item");d.$element.removeClass("ui-unselecting");d.unselecting=false;d.startselected=false;f._trigger("unselected",c,{unselected:d.element})});e(".ui-selecting",this.element[0]).each(function(){var d=
+e.data(this,"selectable-item");d.$element.removeClass("ui-selecting").addClass("ui-selected");d.selecting=false;d.selected=true;d.startselected=true;f._trigger("selected",c,{selected:d.element})});this._trigger("stop",c);this.helper.remove();return false}});e.extend(e.ui.selectable,{version:"1.9m2"})})(jQuery);
+(function(d){d.widget("ui.sortable",d.ui.mouse,{widgetEventPrefix:"sort",options:{appendTo:"parent",axis:false,connectWith:false,containment:false,cursor:"auto",cursorAt:false,dropOnEmpty:true,forcePlaceholderSize:false,forceHelperSize:false,grid:false,handle:false,helper:"original",items:"> *",opacity:false,placeholder:false,revert:false,scroll:true,scrollSensitivity:20,scrollSpeed:20,scope:"default",tolerance:"intersect",zIndex:1E3},_create:function(){this.containerCache={};this.element.addClass("ui-sortable");
+this.refresh();this.floating=this.items.length?/left|right/.test(this.items[0].item.css("float")):false;this.offset=this.element.offset();this._mouseInit()},destroy:function(){this.element.removeClass("ui-sortable ui-sortable-disabled").removeData("sortable").unbind(".sortable");this._mouseDestroy();for(var a=this.items.length-1;a>=0;a--)this.items[a].item.removeData("sortable-item");return this},_setOption:function(a,b){if(a==="disabled"){this.options[a]=b;this.widget()[b?"addClass":"removeClass"]("ui-sortable-disabled")}else this._superApply("_setOption",
+arguments)},_mouseCapture:function(a,b){if(this.reverting)return false;if(this.options.disabled||this.options.type=="static")return false;this._refreshItems(a);var c=null,e=this;d(a.target).parents().each(function(){if(d.data(this,"sortable-item")==e){c=d(this);return false}});if(d.data(a.target,"sortable-item")==e)c=d(a.target);if(!c)return false;if(this.options.handle&&!b){var f=false;d(this.options.handle,c).find("*").andSelf().each(function(){if(this==a.target)f=true});if(!f)return false}this.currentItem=
+c;this._removeCurrentsFromItems();return true},_mouseStart:function(a,b,c){b=this.options;var e=this;this.currentContainer=this;this.refreshPositions();this.helper=this._createHelper(a);this._cacheHelperProportions();this._cacheMargins();this.scrollParent=this.helper.scrollParent();this.offset=this.currentItem.offset();this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left};this.helper.css("position","absolute");this.cssPosition=this.helper.css("position");d.extend(this.offset,
+{click:{left:a.pageX-this.offset.left,top:a.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this._generatePosition(a);this.originalPageX=a.pageX;this.originalPageY=a.pageY;b.cursorAt&&this._adjustOffsetFromHelper(b.cursorAt);this.domPosition={prev:this.currentItem.prev()[0],parent:this.currentItem.parent()[0]};this.helper[0]!=this.currentItem[0]&&this.currentItem.hide();this._createPlaceholder();b.containment&&this._setContainment();
+if(b.cursor){if(d("body").css("cursor"))this._storedCursor=d("body").css("cursor");d("body").css("cursor",b.cursor)}if(b.opacity){if(this.helper.css("opacity"))this._storedOpacity=this.helper.css("opacity");this.helper.css("opacity",b.opacity)}if(b.zIndex){if(this.helper.css("zIndex"))this._storedZIndex=this.helper.css("zIndex");this.helper.css("zIndex",b.zIndex)}if(this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML")this.overflowOffset=this.scrollParent.offset();this._trigger("start",
+a,this._uiHash());this._preserveHelperProportions||this._cacheHelperProportions();if(!c)for(c=this.containers.length-1;c>=0;c--)this.containers[c]._trigger("activate",a,e._uiHash(this));if(d.ui.ddmanager)d.ui.ddmanager.current=this;d.ui.ddmanager&&!b.dropBehaviour&&d.ui.ddmanager.prepareOffsets(this,a);this.dragging=true;this.helper.addClass("ui-sortable-helper");this._mouseDrag(a);return true},_mouseDrag:function(a){this.position=this._generatePosition(a);this.positionAbs=this._convertPositionTo("absolute");
+if(!this.lastPositionAbs)this.lastPositionAbs=this.positionAbs;if(this.options.scroll){var b=this.options,c=false;if(this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"){if(this.overflowOffset.top+this.scrollParent[0].offsetHeight-a.pageY<b.scrollSensitivity)this.scrollParent[0].scrollTop=c=this.scrollParent[0].scrollTop+b.scrollSpeed;else if(a.pageY-this.overflowOffset.top<b.scrollSensitivity)this.scrollParent[0].scrollTop=c=this.scrollParent[0].scrollTop-b.scrollSpeed;if(this.overflowOffset.left+
+this.scrollParent[0].offsetWidth-a.pageX<b.scrollSensitivity)this.scrollParent[0].scrollLeft=c=this.scrollParent[0].scrollLeft+b.scrollSpeed;else if(a.pageX-this.overflowOffset.left<b.scrollSensitivity)this.scrollParent[0].scrollLeft=c=this.scrollParent[0].scrollLeft-b.scrollSpeed}else{if(a.pageY-d(document).scrollTop()<b.scrollSensitivity)c=d(document).scrollTop(d(document).scrollTop()-b.scrollSpeed);else if(d(window).height()-(a.pageY-d(document).scrollTop())<b.scrollSensitivity)c=d(document).scrollTop(d(document).scrollTop()+
+b.scrollSpeed);if(a.pageX-d(document).scrollLeft()<b.scrollSensitivity)c=d(document).scrollLeft(d(document).scrollLeft()-b.scrollSpeed);else if(d(window).width()-(a.pageX-d(document).scrollLeft())<b.scrollSensitivity)c=d(document).scrollLeft(d(document).scrollLeft()+b.scrollSpeed)}c!==false&&d.ui.ddmanager&&!b.dropBehaviour&&d.ui.ddmanager.prepareOffsets(this,a)}this.positionAbs=this._convertPositionTo("absolute");if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+
+"px";if(!this.options.axis||this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";for(b=this.items.length-1;b>=0;b--){c=this.items[b];var e=c.item[0],f=this._intersectsWithPointer(c);if(f)if(e!=this.currentItem[0]&&this.placeholder[f==1?"next":"prev"]()[0]!=e&&!d.ui.contains(this.placeholder[0],e)&&(this.options.type=="semi-dynamic"?!d.ui.contains(this.element[0],e):true)){this.direction=f==1?"down":"up";if(this.options.tolerance=="pointer"||this._intersectsWithSides(c))this._rearrange(a,
+c);else break;this._trigger("change",a,this._uiHash());break}}this._contactContainers(a);d.ui.ddmanager&&d.ui.ddmanager.drag(this,a);this._trigger("sort",a,this._uiHash());this.lastPositionAbs=this.positionAbs;return false},_mouseStop:function(a,b){if(a){d.ui.ddmanager&&!this.options.dropBehaviour&&d.ui.ddmanager.drop(this,a);if(this.options.revert){var c=this;b=c.placeholder.offset();c.reverting=true;d(this.helper).animate({left:b.left-this.offset.parent.left-c.margins.left+(this.offsetParent[0]==
+document.body?0:this.offsetParent[0].scrollLeft),top:b.top-this.offset.parent.top-c.margins.top+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollTop)},parseInt(this.options.revert,10)||500,function(){c._clear(a)})}else this._clear(a,b);return false}},cancel:function(){var a=this;if(this.dragging){this._mouseUp();this.options.helper=="original"?this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper"):this.currentItem.show();for(var b=this.containers.length-1;b>=0;b--){this.containers[b]._trigger("deactivate",
+null,a._uiHash(this));if(this.containers[b].containerCache.over){this.containers[b]._trigger("out",null,a._uiHash(this));this.containers[b].containerCache.over=0}}}this.placeholder[0].parentNode&&this.placeholder[0].parentNode.removeChild(this.placeholder[0]);this.options.helper!="original"&&this.helper&&this.helper[0].parentNode&&this.helper.remove();d.extend(this,{helper:null,dragging:false,reverting:false,_noFinalSort:null});this.domPosition.prev?d(this.domPosition.prev).after(this.currentItem):
+d(this.domPosition.parent).prepend(this.currentItem);return this},serialize:function(a){var b=this._getItemsAsjQuery(a&&a.connected),c=[];a=a||{};d(b).each(function(){var e=(d(a.item||this).attr(a.attribute||"id")||"").match(a.expression||/(.+)[-=_](.+)/);if(e)c.push((a.key||e[1]+"[]")+"="+(a.key&&a.expression?e[1]:e[2]))});return c.join("&")},toArray:function(a){var b=this._getItemsAsjQuery(a&&a.connected),c=[];a=a||{};b.each(function(){c.push(d(a.item||this).attr(a.attribute||"id")||"")});return c},
+_intersectsWith:function(a){var b=this.positionAbs.left,c=b+this.helperProportions.width,e=this.positionAbs.top,f=e+this.helperProportions.height,g=a.left,h=g+a.width,i=a.top,k=i+a.height,j=this.offset.click.top,l=this.offset.click.left;j=e+j>i&&e+j<k&&b+l>g&&b+l<h;return this.options.tolerance=="pointer"||this.options.forcePointerForContainers||this.options.tolerance!="pointer"&&this.helperProportions[this.floating?"width":"height"]>a[this.floating?"width":"height"]?j:g<b+this.helperProportions.width/
+2&&c-this.helperProportions.width/2<h&&i<e+this.helperProportions.height/2&&f-this.helperProportions.height/2<k},_intersectsWithPointer:function(a){var b=d.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,a.top,a.height);a=d.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,a.left,a.width);b=b&&a;a=this._getDragVerticalDirection();var c=this._getDragHorizontalDirection();if(!b)return false;return this.floating?c&&c=="right"||a=="down"?2:1:a&&(a=="down"?2:1)},_intersectsWithSides:function(a){var b=
+d.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,a.top+a.height/2,a.height);a=d.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,a.left+a.width/2,a.width);var c=this._getDragVerticalDirection(),e=this._getDragHorizontalDirection();return this.floating&&e?e=="right"&&a||e=="left"&&!a:c&&(c=="down"&&b||c=="up"&&!b)},_getDragVerticalDirection:function(){var a=this.positionAbs.top-this.lastPositionAbs.top;return a!=0&&(a>0?"down":"up")},_getDragHorizontalDirection:function(){var a=
+this.positionAbs.left-this.lastPositionAbs.left;return a!=0&&(a>0?"right":"left")},refresh:function(a){this._refreshItems(a);this.refreshPositions();return this},_connectWith:function(){var a=this.options;return a.connectWith.constructor==String?[a.connectWith]:a.connectWith},_getItemsAsjQuery:function(a){var b=[],c=[],e=this._connectWith();if(e&&a)for(a=e.length-1;a>=0;a--)for(var f=d(e[a]),g=f.length-1;g>=0;g--){var h=d.data(f[g],"sortable");if(h&&h!=this&&!h.options.disabled)c.push([d.isFunction(h.options.items)?
+h.options.items.call(h.element):d(h.options.items,h.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),h])}c.push([d.isFunction(this.options.items)?this.options.items.call(this.element,null,{options:this.options,item:this.currentItem}):d(this.options.items,this.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),this]);for(a=c.length-1;a>=0;a--)c[a][0].each(function(){b.push(this)});return d(b)},_removeCurrentsFromItems:function(){for(var a=this.currentItem.find(":data(sortable-item)"),
+b=0;b<this.items.length;b++)for(var c=0;c<a.length;c++)a[c]==this.items[b].item[0]&&this.items.splice(b,1)},_refreshItems:function(a){this.items=[];this.containers=[this];var b=this.items,c=[[d.isFunction(this.options.items)?this.options.items.call(this.element[0],a,{item:this.currentItem}):d(this.options.items,this.element),this]],e=this._connectWith();if(e)for(var f=e.length-1;f>=0;f--)for(var g=d(e[f]),h=g.length-1;h>=0;h--){var i=d.data(g[h],"sortable");if(i&&i!=this&&!i.options.disabled){c.push([d.isFunction(i.options.items)?
+i.options.items.call(i.element[0],a,{item:this.currentItem}):d(i.options.items,i.element),i]);this.containers.push(i)}}for(f=c.length-1;f>=0;f--){a=c[f][1];e=c[f][0];h=0;for(g=e.length;h<g;h++){i=d(e[h]);i.data("sortable-item",a);b.push({item:i,instance:a,width:0,height:0,left:0,top:0})}}},refreshPositions:function(a){if(this.offsetParent&&this.helper)this.offset.parent=this._getParentOffset();for(var b=this.items.length-1;b>=0;b--){var c=this.items[b],e=this.options.toleranceElement?d(this.options.toleranceElement,
+c.item):c.item;if(!a){c.width=e.outerWidth();c.height=e.outerHeight()}e=e.offset();c.left=e.left;c.top=e.top}if(this.options.custom&&this.options.custom.refreshContainers)this.options.custom.refreshContainers.call(this);else for(b=this.containers.length-1;b>=0;b--){e=this.containers[b].element.offset();this.containers[b].containerCache.left=e.left;this.containers[b].containerCache.top=e.top;this.containers[b].containerCache.width=this.containers[b].element.outerWidth();this.containers[b].containerCache.height=
+this.containers[b].element.outerHeight()}return this},_createPlaceholder:function(a){var b=a||this,c=b.options;if(!c.placeholder||c.placeholder.constructor==String){var e=c.placeholder;c.placeholder={element:function(){var f=d(document.createElement(b.currentItem[0].nodeName)).addClass(e||b.currentItem[0].className+" ui-sortable-placeholder").removeClass("ui-sortable-helper")[0];if(!e)f.style.visibility="hidden";return f},update:function(f,g){if(!(e&&!c.forcePlaceholderSize)){g.height()||g.height(b.currentItem.innerHeight()-
+parseInt(b.currentItem.css("paddingTop")||0,10)-parseInt(b.currentItem.css("paddingBottom")||0,10));g.width()||g.width(b.currentItem.innerWidth()-parseInt(b.currentItem.css("paddingLeft")||0,10)-parseInt(b.currentItem.css("paddingRight")||0,10))}}}}b.placeholder=d(c.placeholder.element.call(b.element,b.currentItem));b.currentItem.after(b.placeholder);c.placeholder.update(b,b.placeholder)},_contactContainers:function(a){for(var b=null,c=null,e=this.containers.length-1;e>=0;e--)if(!d.ui.contains(this.currentItem[0],
+this.containers[e].element[0]))if(this._intersectsWith(this.containers[e].containerCache)){if(!(b&&d.ui.contains(this.containers[e].element[0],b.element[0]))){b=this.containers[e];c=e}}else if(this.containers[e].containerCache.over){this.containers[e]._trigger("out",a,this._uiHash(this));this.containers[e].containerCache.over=0}if(b)if(this.containers.length===1){this.containers[c]._trigger("over",a,this._uiHash(this));this.containers[c].containerCache.over=1}else if(this.currentContainer!=this.containers[c]){b=
+1E4;e=null;for(var f=this.positionAbs[this.containers[c].floating?"left":"top"],g=this.items.length-1;g>=0;g--)if(d.ui.contains(this.containers[c].element[0],this.items[g].item[0])){var h=this.items[g][this.containers[c].floating?"left":"top"];if(Math.abs(h-f)<b){b=Math.abs(h-f);e=this.items[g]}}if(e||this.options.dropOnEmpty){this.currentContainer=this.containers[c];e?this._rearrange(a,e,null,true):this._rearrange(a,null,this.containers[c].element,true);this._trigger("change",a,this._uiHash());this.containers[c]._trigger("change",
+a,this._uiHash(this));this.options.placeholder.update(this.currentContainer,this.placeholder);this.containers[c]._trigger("over",a,this._uiHash(this));this.containers[c].containerCache.over=1}}},_createHelper:function(a){var b=this.options;a=d.isFunction(b.helper)?d(b.helper.apply(this.element[0],[a,this.currentItem])):b.helper=="clone"?this.currentItem.clone():this.currentItem;a.parents("body").length||d(b.appendTo!="parent"?b.appendTo:this.currentItem[0].parentNode)[0].appendChild(a[0]);if(a[0]==
+this.currentItem[0])this._storedCSS={width:this.currentItem[0].style.width,height:this.currentItem[0].style.height,position:this.currentItem.css("position"),top:this.currentItem.css("top"),left:this.currentItem.css("left")};if(a[0].style.width==""||b.forceHelperSize)a.width(this.currentItem.width());if(a[0].style.height==""||b.forceHelperSize)a.height(this.currentItem.height());return a},_adjustOffsetFromHelper:function(a){if(typeof a=="string")a=a.split(" ");if(d.isArray(a))a={left:+a[0],top:+a[1]||
+0};if("left"in a)this.offset.click.left=a.left+this.margins.left;if("right"in a)this.offset.click.left=this.helperProportions.width-a.right+this.margins.left;if("top"in a)this.offset.click.top=a.top+this.margins.top;if("bottom"in a)this.offset.click.top=this.helperProportions.height-a.bottom+this.margins.top},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var a=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],
+this.offsetParent[0])){a.left+=this.scrollParent.scrollLeft();a.top+=this.scrollParent.scrollTop()}if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&d.browser.msie)a={top:0,left:0};return{top:a.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:a.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var a=this.currentItem.position();return{top:a.top-
+(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:a.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.currentItem.css("marginLeft"),10)||0,top:parseInt(this.currentItem.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var a=this.options;
+if(a.containment=="parent")a.containment=this.helper[0].parentNode;if(a.containment=="document"||a.containment=="window")this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,d(a.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(d(a.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(a.containment)){var b=
+d(a.containment)[0];a=d(a.containment).offset();var c=d(b).css("overflow")!="hidden";this.containment=[a.left+(parseInt(d(b).css("borderLeftWidth"),10)||0)+(parseInt(d(b).css("paddingLeft"),10)||0)-this.margins.left,a.top+(parseInt(d(b).css("borderTopWidth"),10)||0)+(parseInt(d(b).css("paddingTop"),10)||0)-this.margins.top,a.left+(c?Math.max(b.scrollWidth,b.offsetWidth):b.offsetWidth)-(parseInt(d(b).css("borderLeftWidth"),10)||0)-(parseInt(d(b).css("paddingRight"),10)||0)-this.helperProportions.width-
+this.margins.left,a.top+(c?Math.max(b.scrollHeight,b.offsetHeight):b.offsetHeight)-(parseInt(d(b).css("borderTopWidth"),10)||0)-(parseInt(d(b).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}},_convertPositionTo:function(a,b){if(!b)b=this.position;a=a=="absolute"?1:-1;var c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,e=/(html|body)/i.test(c[0].tagName);return{top:b.top+
+this.offset.relative.top*a+this.offset.parent.top*a-(d.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():e?0:c.scrollTop())*a),left:b.left+this.offset.relative.left*a+this.offset.parent.left*a-(d.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():e?0:c.scrollLeft())*a)}},_generatePosition:function(a){var b=this.options,c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],
+this.offsetParent[0]))?this.offsetParent:this.scrollParent,e=/(html|body)/i.test(c[0].tagName);if(this.cssPosition=="relative"&&!(this.scrollParent[0]!=document&&this.scrollParent[0]!=this.offsetParent[0]))this.offset.relative=this._getRelativeOffset();var f=a.pageX,g=a.pageY;if(this.originalPosition){if(this.containment){if(a.pageX-this.offset.click.left<this.containment[0])f=this.containment[0]+this.offset.click.left;if(a.pageY-this.offset.click.top<this.containment[1])g=this.containment[1]+this.offset.click.top;
+if(a.pageX-this.offset.click.left>this.containment[2])f=this.containment[2]+this.offset.click.left;if(a.pageY-this.offset.click.top>this.containment[3])g=this.containment[3]+this.offset.click.top}if(b.grid){g=this.originalPageY+Math.round((g-this.originalPageY)/b.grid[1])*b.grid[1];g=this.containment?!(g-this.offset.click.top<this.containment[1]||g-this.offset.click.top>this.containment[3])?g:!(g-this.offset.click.top<this.containment[1])?g-b.grid[1]:g+b.grid[1]:g;f=this.originalPageX+Math.round((f-
+this.originalPageX)/b.grid[0])*b.grid[0];f=this.containment?!(f-this.offset.click.left<this.containment[0]||f-this.offset.click.left>this.containment[2])?f:!(f-this.offset.click.left<this.containment[0])?f-b.grid[0]:f+b.grid[0]:f}}return{top:g-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(d.browser.safari&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollTop():e?0:c.scrollTop()),left:f-this.offset.click.left-this.offset.relative.left-this.offset.parent.left+
+(d.browser.safari&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():e?0:c.scrollLeft())}},_rearrange:function(a,b,c,e){c?c[0].appendChild(this.placeholder[0]):b.item[0].parentNode.insertBefore(this.placeholder[0],this.direction=="down"?b.item[0]:b.item[0].nextSibling);this.counter=this.counter?++this.counter:1;var f=this,g=this.counter;window.setTimeout(function(){g==f.counter&&f.refreshPositions(!e)},0)},_clear:function(a,b){this.reverting=false;var c=[];!this._noFinalSort&&
+this.currentItem[0].parentNode&&this.placeholder.before(this.currentItem);this._noFinalSort=null;if(this.helper[0]==this.currentItem[0]){for(var e in this._storedCSS)if(this._storedCSS[e]=="auto"||this._storedCSS[e]=="static")this._storedCSS[e]="";this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper")}else this.currentItem.show();this.fromOutside&&!b&&c.push(function(f){this._trigger("receive",f,this._uiHash(this.fromOutside))});if((this.fromOutside||this.domPosition.prev!=this.currentItem.prev().not(".ui-sortable-helper")[0]||
+this.domPosition.parent!=this.currentItem.parent()[0])&&!b)c.push(function(f){this._trigger("update",f,this._uiHash())});if(!d.ui.contains(this.element[0],this.currentItem[0])){b||c.push(function(f){this._trigger("remove",f,this._uiHash())});for(e=this.containers.length-1;e>=0;e--)if(d.ui.contains(this.containers[e].element[0],this.currentItem[0])&&!b){c.push(function(f){return function(g){f._trigger("receive",g,this._uiHash(this))}}.call(this,this.containers[e]));c.push(function(f){return function(g){f._trigger("update",
+g,this._uiHash(this))}}.call(this,this.containers[e]))}}for(e=this.containers.length-1;e>=0;e--){b||c.push(function(f){return function(g){f._trigger("deactivate",g,this._uiHash(this))}}.call(this,this.containers[e]));if(this.containers[e].containerCache.over){c.push(function(f){return function(g){f._trigger("out",g,this._uiHash(this))}}.call(this,this.containers[e]));this.containers[e].containerCache.over=0}}this._storedCursor&&d("body").css("cursor",this._storedCursor);this._storedOpacity&&this.helper.css("opacity",
+this._storedOpacity);if(this._storedZIndex)this.helper.css("zIndex",this._storedZIndex=="auto"?"":this._storedZIndex);this.dragging=false;if(this.cancelHelperRemoval){if(!b){this._trigger("beforeStop",a,this._uiHash());for(e=0;e<c.length;e++)c[e].call(this,a);this._trigger("stop",a,this._uiHash())}return false}b||this._trigger("beforeStop",a,this._uiHash());this.placeholder[0].parentNode.removeChild(this.placeholder[0]);this.helper[0]!=this.currentItem[0]&&this.helper.remove();this.helper=null;if(!b){for(e=
+0;e<c.length;e++)c[e].call(this,a);this._trigger("stop",a,this._uiHash())}this.fromOutside=false;return true},_trigger:function(){d.Widget.prototype._trigger.apply(this,arguments)===false&&this.cancel()},_uiHash:function(a){var b=a||this;return{helper:b.helper,placeholder:b.placeholder||d([]),position:b.position,originalPosition:b.originalPosition,offset:b.positionAbs,item:b.currentItem,sender:a?a.element:null}}});d.extend(d.ui.sortable,{version:"1.9m2"})})(jQuery);
+jQuery.effects||function(f){function k(c){var a;if(c&&c.constructor==Array&&c.length==3)return c;if(a=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(c))return[parseInt(a[1],10),parseInt(a[2],10),parseInt(a[3],10)];if(a=/rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(c))return[parseFloat(a[1])*2.55,parseFloat(a[2])*2.55,parseFloat(a[3])*2.55];if(a=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(c))return[parseInt(a[1],
+16),parseInt(a[2],16),parseInt(a[3],16)];if(a=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(c))return[parseInt(a[1]+a[1],16),parseInt(a[2]+a[2],16),parseInt(a[3]+a[3],16)];if(/rgba\(0, 0, 0, 0\)/.exec(c))return l.transparent;return l[f.trim(c).toLowerCase()]}function q(c,a){var b;do{b=f.curCSS(c,a);if(b!=""&&b!="transparent"||f.nodeName(c,"body"))break;a="backgroundColor"}while(c=c.parentNode);return k(b)}function m(){var c=document.defaultView?document.defaultView.getComputedStyle(this,null):this.currentStyle,
+a={},b,d;if(c&&c.length&&c[0]&&c[c[0]])for(var e=c.length;e--;){b=c[e];if(typeof c[b]=="string"){d=b.replace(/\-(\w)/g,function(g,h){return h.toUpperCase()});a[d]=c[b]}}else for(b in c)if(typeof c[b]==="string")a[b]=c[b];return a}function n(c){var a,b;for(a in c){b=c[a];if(b==null||f.isFunction(b)||a in r||/scrollbar/.test(a)||!/color/i.test(a)&&isNaN(parseFloat(b)))delete c[a]}return c}function s(c,a){var b={_:0},d;for(d in a)if(c[d]!=a[d])b[d]=a[d];return b}function j(c,a,b,d){if(typeof c=="object"){d=
+a;b=null;a=c;c=a.effect}if(f.isFunction(a)){d=a;b=null;a={}}if(f.isFunction(b)){d=b;b=null}if(typeof a=="number"||f.fx.speeds[a]){d=b;b=a;a={}}a=a||{};b=b||a.duration;b=f.fx.off?0:typeof b=="number"?b:f.fx.speeds[b]||f.fx.speeds._default;d=d||a.complete;return[c,a,b,d]}f.effects={};f.each(["backgroundColor","borderBottomColor","borderLeftColor","borderRightColor","borderTopColor","color","outlineColor"],function(c,a){f.fx.step[a]=function(b){if(!b.colorInit){b.start=q(b.elem,a);b.end=k(b.end);b.colorInit=
+true}b.elem.style[a]="rgb("+Math.max(Math.min(parseInt(b.pos*(b.end[0]-b.start[0])+b.start[0],10),255),0)+","+Math.max(Math.min(parseInt(b.pos*(b.end[1]-b.start[1])+b.start[1],10),255),0)+","+Math.max(Math.min(parseInt(b.pos*(b.end[2]-b.start[2])+b.start[2],10),255),0)+")"}});var l={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,
+183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,
+165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0],transparent:[255,255,255]},o=["add","remove","toggle"],r={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};f.effects.animateClass=function(c,a,b,d){if(f.isFunction(b)){d=b;b=null}return this.each(function(){var e=f(this),g=e.attr("style")||" ",h=n(m.call(this)),p,t=e.attr("className");f.each(o,function(u,
+i){c[i]&&e[i+"Class"](c[i])});p=n(m.call(this));e.attr("className",t);e.animate(s(h,p),a,b,function(){f.each(o,function(u,i){c[i]&&e[i+"Class"](c[i])});if(typeof e.attr("style")=="object"){e.attr("style").cssText="";e.attr("style").cssText=g}else e.attr("style",g);d&&d.apply(this,arguments)})})};f.fn.extend({_addClass:f.fn.addClass,addClass:function(c,a,b,d){return a?f.effects.animateClass.apply(this,[{add:c},a,b,d]):this._addClass(c)},_removeClass:f.fn.removeClass,removeClass:function(c,a,b,d){return a?
+f.effects.animateClass.apply(this,[{remove:c},a,b,d]):this._removeClass(c)},_toggleClass:f.fn.toggleClass,toggleClass:function(c,a,b,d,e){return typeof a=="boolean"||a===undefined?b?f.effects.animateClass.apply(this,[a?{add:c}:{remove:c},b,d,e]):this._toggleClass(c,a):f.effects.animateClass.apply(this,[{toggle:c},a,b,d])},switchClass:function(c,a,b,d,e){return f.effects.animateClass.apply(this,[{add:a,remove:c},b,d,e])}});f.extend(f.effects,{version:"1.9m2",save:function(c,a){for(var b=0;b<a.length;b++)a[b]!==
+null&&c.data("ec.storage."+a[b],c[0].style[a[b]])},restore:function(c,a){for(var b=0;b<a.length;b++)a[b]!==null&&c.css(a[b],c.data("ec.storage."+a[b]))},setMode:function(c,a){if(a=="toggle")a=c.is(":hidden")?"show":"hide";return a},getBaseline:function(c,a){var b;switch(c[0]){case "top":b=0;break;case "middle":b=0.5;break;case "bottom":b=1;break;default:b=c[0]/a.height}switch(c[1]){case "left":c=0;break;case "center":c=0.5;break;case "right":c=1;break;default:c=c[1]/a.width}return{x:c,y:b}},createWrapper:function(c){if(c.parent().is(".ui-effects-wrapper"))return c.parent();
+var a={width:c.outerWidth(true),height:c.outerHeight(true),"float":c.css("float")},b=f("<div></div>").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0});c.wrap(b);b=c.parent();if(c.css("position")=="static"){b.css({position:"relative"});c.css({position:"relative"})}else{f.extend(a,{position:c.css("position"),zIndex:c.css("z-index")});f.each(["top","left","bottom","right"],function(d,e){a[e]=c.css(e);if(isNaN(parseInt(a[e],10)))a[e]="auto"});
+c.css({position:"relative",top:0,left:0})}return b.css(a).show()},removeWrapper:function(c){if(c.parent().is(".ui-effects-wrapper"))return c.parent().replaceWith(c);return c},setTransition:function(c,a,b,d){d=d||{};f.each(a,function(e,g){unit=c.cssUnit(g);if(unit[0]>0)d[g]=unit[0]*b+unit[1]});return d}});f.fn.extend({effect:function(c){var a=j.apply(this,arguments);a={options:a[1],duration:a[2],callback:a[3]};var b=f.effects[c];return b&&!f.fx.off?b.call(this,a):this},_show:f.fn.show,show:function(c){if(!c||
+typeof c=="number"||f.fx.speeds[c])return this._show.apply(this,arguments);else{var a=j.apply(this,arguments);a[1].mode="show";return this.effect.apply(this,a)}},_hide:f.fn.hide,hide:function(c){if(!c||typeof c=="number"||f.fx.speeds[c])return this._hide.apply(this,arguments);else{var a=j.apply(this,arguments);a[1].mode="hide";return this.effect.apply(this,a)}},__toggle:f.fn.toggle,toggle:function(c){if(!c||typeof c=="number"||f.fx.speeds[c]||typeof c=="boolean"||f.isFunction(c))return this.__toggle.apply(this,
+arguments);else{var a=j.apply(this,arguments);a[1].mode="toggle";return this.effect.apply(this,a)}},cssUnit:function(c){var a=this.css(c),b=[];f.each(["em","px","%","pt"],function(d,e){if(a.indexOf(e)>0)b=[parseFloat(a),e]});return b}});f.easing.jswing=f.easing.swing;f.extend(f.easing,{def:"easeOutQuad",swing:function(c,a,b,d,e){return f.easing[f.easing.def](c,a,b,d,e)},easeInQuad:function(c,a,b,d,e){return d*(a/=e)*a+b},easeOutQuad:function(c,a,b,d,e){return-d*(a/=e)*(a-2)+b},easeInOutQuad:function(c,
+a,b,d,e){if((a/=e/2)<1)return d/2*a*a+b;return-d/2*(--a*(a-2)-1)+b},easeInCubic:function(c,a,b,d,e){return d*(a/=e)*a*a+b},easeOutCubic:function(c,a,b,d,e){return d*((a=a/e-1)*a*a+1)+b},easeInOutCubic:function(c,a,b,d,e){if((a/=e/2)<1)return d/2*a*a*a+b;return d/2*((a-=2)*a*a+2)+b},easeInQuart:function(c,a,b,d,e){return d*(a/=e)*a*a*a+b},easeOutQuart:function(c,a,b,d,e){return-d*((a=a/e-1)*a*a*a-1)+b},easeInOutQuart:function(c,a,b,d,e){if((a/=e/2)<1)return d/2*a*a*a*a+b;return-d/2*((a-=2)*a*a*a-2)+
+b},easeInQuint:function(c,a,b,d,e){return d*(a/=e)*a*a*a*a+b},easeOutQuint:function(c,a,b,d,e){return d*((a=a/e-1)*a*a*a*a+1)+b},easeInOutQuint:function(c,a,b,d,e){if((a/=e/2)<1)return d/2*a*a*a*a*a+b;return d/2*((a-=2)*a*a*a*a+2)+b},easeInSine:function(c,a,b,d,e){return-d*Math.cos(a/e*(Math.PI/2))+d+b},easeOutSine:function(c,a,b,d,e){return d*Math.sin(a/e*(Math.PI/2))+b},easeInOutSine:function(c,a,b,d,e){return-d/2*(Math.cos(Math.PI*a/e)-1)+b},easeInExpo:function(c,a,b,d,e){return a==0?b:d*Math.pow(2,
+10*(a/e-1))+b},easeOutExpo:function(c,a,b,d,e){return a==e?b+d:d*(-Math.pow(2,-10*a/e)+1)+b},easeInOutExpo:function(c,a,b,d,e){if(a==0)return b;if(a==e)return b+d;if((a/=e/2)<1)return d/2*Math.pow(2,10*(a-1))+b;return d/2*(-Math.pow(2,-10*--a)+2)+b},easeInCirc:function(c,a,b,d,e){return-d*(Math.sqrt(1-(a/=e)*a)-1)+b},easeOutCirc:function(c,a,b,d,e){return d*Math.sqrt(1-(a=a/e-1)*a)+b},easeInOutCirc:function(c,a,b,d,e){if((a/=e/2)<1)return-d/2*(Math.sqrt(1-a*a)-1)+b;return d/2*(Math.sqrt(1-(a-=2)*
+a)+1)+b},easeInElastic:function(c,a,b,d,e){c=1.70158;var g=0,h=d;if(a==0)return b;if((a/=e)==1)return b+d;g||(g=e*0.3);if(h<Math.abs(d)){h=d;c=g/4}else c=g/(2*Math.PI)*Math.asin(d/h);return-(h*Math.pow(2,10*(a-=1))*Math.sin((a*e-c)*2*Math.PI/g))+b},easeOutElastic:function(c,a,b,d,e){c=1.70158;var g=0,h=d;if(a==0)return b;if((a/=e)==1)return b+d;g||(g=e*0.3);if(h<Math.abs(d)){h=d;c=g/4}else c=g/(2*Math.PI)*Math.asin(d/h);return h*Math.pow(2,-10*a)*Math.sin((a*e-c)*2*Math.PI/g)+d+b},easeInOutElastic:function(c,
+a,b,d,e){c=1.70158;var g=0,h=d;if(a==0)return b;if((a/=e/2)==2)return b+d;g||(g=e*0.3*1.5);if(h<Math.abs(d)){h=d;c=g/4}else c=g/(2*Math.PI)*Math.asin(d/h);if(a<1)return-0.5*h*Math.pow(2,10*(a-=1))*Math.sin((a*e-c)*2*Math.PI/g)+b;return h*Math.pow(2,-10*(a-=1))*Math.sin((a*e-c)*2*Math.PI/g)*0.5+d+b},easeInBack:function(c,a,b,d,e,g){if(g==undefined)g=1.70158;return d*(a/=e)*a*((g+1)*a-g)+b},easeOutBack:function(c,a,b,d,e,g){if(g==undefined)g=1.70158;return d*((a=a/e-1)*a*((g+1)*a+g)+1)+b},easeInOutBack:function(c,
+a,b,d,e,g){if(g==undefined)g=1.70158;if((a/=e/2)<1)return d/2*a*a*(((g*=1.525)+1)*a-g)+b;return d/2*((a-=2)*a*(((g*=1.525)+1)*a+g)+2)+b},easeInBounce:function(c,a,b,d,e){return d-f.easing.easeOutBounce(c,e-a,0,d,e)+b},easeOutBounce:function(c,a,b,d,e){return(a/=e)<1/2.75?d*7.5625*a*a+b:a<2/2.75?d*(7.5625*(a-=1.5/2.75)*a+0.75)+b:a<2.5/2.75?d*(7.5625*(a-=2.25/2.75)*a+0.9375)+b:d*(7.5625*(a-=2.625/2.75)*a+0.984375)+b},easeInOutBounce:function(c,a,b,d,e){if(a<e/2)return f.easing.easeInBounce(c,a*2,0,
+d,e)*0.5+b;return f.easing.easeOutBounce(c,a*2-e,0,d,e)*0.5+d*0.5+b}})}(jQuery);
+(function(b){b.effects.blind=function(c){return this.queue(function(){var a=b(this),g=["position","top","left"],f=b.effects.setMode(a,c.options.mode||"hide"),d=c.options.direction||"vertical";b.effects.save(a,g);a.show();var e=b.effects.createWrapper(a).css({overflow:"hidden"}),h=d=="vertical"?"height":"width";d=d=="vertical"?e.height():e.width();f=="show"&&e.css(h,0);var i={};i[h]=f=="show"?d:0;e.animate(i,c.duration,c.options.easing,function(){f=="hide"&&a.hide();b.effects.restore(a,g);b.effects.removeWrapper(a);
+c.callback&&c.callback.apply(a[0],arguments);a.dequeue()})})}})(jQuery);
+(function(e){e.effects.bounce=function(b){return this.queue(function(){var a=e(this),l=["position","top","left"],h=e.effects.setMode(a,b.options.mode||"effect"),d=b.options.direction||"up",c=b.options.distance||20,m=b.options.times||5,i=b.duration||250;/show|hide/.test(h)&&l.push("opacity");e.effects.save(a,l);a.show();e.effects.createWrapper(a);var f=d=="up"||d=="down"?"top":"left";d=d=="up"||d=="left"?"pos":"neg";c=b.options.distance||(f=="top"?a.outerHeight({margin:true})/3:a.outerWidth({margin:true})/
+3);if(h=="show")a.css("opacity",0).css(f,d=="pos"?-c:c);if(h=="hide")c/=m*2;h!="hide"&&m--;if(h=="show"){var g={opacity:1};g[f]=(d=="pos"?"+=":"-=")+c;a.animate(g,i/2,b.options.easing);c/=2;m--}for(g=0;g<m;g++){var j={},k={};j[f]=(d=="pos"?"-=":"+=")+c;k[f]=(d=="pos"?"+=":"-=")+c;a.animate(j,i/2,b.options.easing).animate(k,i/2,b.options.easing);c=h=="hide"?c*2:c/2}if(h=="hide"){g={opacity:0};g[f]=(d=="pos"?"-=":"+=")+c;a.animate(g,i/2,b.options.easing,function(){a.hide();e.effects.restore(a,l);e.effects.removeWrapper(a);
+b.callback&&b.callback.apply(this,arguments)})}else{j={};k={};j[f]=(d=="pos"?"-=":"+=")+c;k[f]=(d=="pos"?"+=":"-=")+c;a.animate(j,i/2,b.options.easing).animate(k,i/2,b.options.easing,function(){e.effects.restore(a,l);e.effects.removeWrapper(a);b.callback&&b.callback.apply(this,arguments)})}a.queue("fx",function(){a.dequeue()});a.dequeue()})}})(jQuery);
+(function(b){b.effects.clip=function(e){return this.queue(function(){var a=b(this),i=["position","top","left","height","width"],f=b.effects.setMode(a,e.options.mode||"hide"),c=e.options.direction||"vertical";b.effects.save(a,i);a.show();var d=b.effects.createWrapper(a).css({overflow:"hidden"});d=a[0].tagName=="IMG"?d:a;var g={size:c=="vertical"?"height":"width",position:c=="vertical"?"top":"left"};c=c=="vertical"?d.height():d.width();if(f=="show"){d.css(g.size,0);d.css(g.position,c/2)}var h={};h[g.size]=
+f=="show"?c:0;h[g.position]=f=="show"?0:c/2;d.animate(h,{queue:false,duration:e.duration,easing:e.options.easing,complete:function(){f=="hide"&&a.hide();b.effects.restore(a,i);b.effects.removeWrapper(a);e.callback&&e.callback.apply(a[0],arguments);a.dequeue()}})})}})(jQuery);
+(function(c){c.effects.drop=function(d){return this.queue(function(){var a=c(this),h=["position","top","left","opacity"],e=c.effects.setMode(a,d.options.mode||"hide"),b=d.options.direction||"left";c.effects.save(a,h);a.show();c.effects.createWrapper(a);var f=b=="up"||b=="down"?"top":"left";b=b=="up"||b=="left"?"pos":"neg";var g=d.options.distance||(f=="top"?a.outerHeight({margin:true})/2:a.outerWidth({margin:true})/2);if(e=="show")a.css("opacity",0).css(f,b=="pos"?-g:g);var i={opacity:e=="show"?1:
+0};i[f]=(e=="show"?b=="pos"?"+=":"-=":b=="pos"?"-=":"+=")+g;a.animate(i,{queue:false,duration:d.duration,easing:d.options.easing,complete:function(){e=="hide"&&a.hide();c.effects.restore(a,h);c.effects.removeWrapper(a);d.callback&&d.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
+(function(j){j.effects.explode=function(a){return this.queue(function(){var c=a.options.pieces?Math.round(Math.sqrt(a.options.pieces)):3,d=a.options.pieces?Math.round(Math.sqrt(a.options.pieces)):3;a.options.mode=a.options.mode=="toggle"?j(this).is(":visible")?"hide":"show":a.options.mode;var b=j(this).show().css("visibility","hidden"),g=b.offset();g.top-=parseInt(b.css("marginTop"),10)||0;g.left-=parseInt(b.css("marginLeft"),10)||0;for(var h=b.outerWidth(true),i=b.outerHeight(true),e=0;e<c;e++)for(var f=
+0;f<d;f++)b.clone().appendTo("body").wrap("<div></div>").css({position:"absolute",visibility:"visible",left:-f*(h/d),top:-e*(i/c)}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:h/d,height:i/c,left:g.left+f*(h/d)+(a.options.mode=="show"?(f-Math.floor(d/2))*(h/d):0),top:g.top+e*(i/c)+(a.options.mode=="show"?(e-Math.floor(c/2))*(i/c):0),opacity:a.options.mode=="show"?0:1}).animate({left:g.left+f*(h/d)+(a.options.mode=="show"?0:(f-Math.floor(d/2))*(h/d)),top:g.top+
+e*(i/c)+(a.options.mode=="show"?0:(e-Math.floor(c/2))*(i/c)),opacity:a.options.mode=="show"?1:0},a.duration||500);setTimeout(function(){a.options.mode=="show"?b.css({visibility:"visible"}):b.css({visibility:"visible"}).hide();a.callback&&a.callback.apply(b[0]);b.dequeue();j("div.ui-effects-explode").remove()},a.duration||500)})}})(jQuery);
+(function(b){b.effects.fade=function(a){return this.queue(function(){var c=b(this),d=b.effects.setMode(c,a.options.mode||"hide");c.animate({opacity:d},{queue:false,duration:a.duration,easing:a.options.easing,complete:function(){a.callback&&a.callback.apply(this,arguments);c.dequeue()}})})}})(jQuery);
+(function(c){c.effects.fold=function(a){return this.queue(function(){var b=c(this),j=["position","top","left"],d=c.effects.setMode(b,a.options.mode||"hide"),g=a.options.size||15,h=!!a.options.horizFirst,k=a.duration?a.duration/2:c.fx.speeds._default/2;c.effects.save(b,j);b.show();var e=c.effects.createWrapper(b).css({overflow:"hidden"}),f=d=="show"!=h,l=f?["width","height"]:["height","width"];f=f?[e.width(),e.height()]:[e.height(),e.width()];var i=/([0-9]+)%/.exec(g);if(i)g=parseInt(i[1],10)/100*
+f[d=="hide"?0:1];if(d=="show")e.css(h?{height:0,width:g}:{height:g,width:0});h={};i={};h[l[0]]=d=="show"?f[0]:g;i[l[1]]=d=="show"?f[1]:0;e.animate(h,k,a.options.easing).animate(i,k,a.options.easing,function(){d=="hide"&&b.hide();c.effects.restore(b,j);c.effects.removeWrapper(b);a.callback&&a.callback.apply(b[0],arguments);b.dequeue()})})}})(jQuery);
+(function(b){b.effects.highlight=function(c){return this.queue(function(){var a=b(this),e=["backgroundImage","backgroundColor","opacity"],d=b.effects.setMode(a,c.options.mode||"show"),f={backgroundColor:a.css("backgroundColor")};if(d=="hide")f.opacity=0;b.effects.save(a,e);a.show().css({backgroundImage:"none",backgroundColor:c.options.color||"#ffff99"}).animate(f,{queue:false,duration:c.duration,easing:c.options.easing,complete:function(){d=="hide"&&a.hide();b.effects.restore(a,e);d=="show"&&!b.support.opacity&&
+this.style.removeAttribute("filter");c.callback&&c.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
+(function(d){d.effects.pulsate=function(a){return this.queue(function(){var b=d(this),c=d.effects.setMode(b,a.options.mode||"show");times=(a.options.times||5)*2-1;duration=a.duration?a.duration/2:d.fx.speeds._default/2;isVisible=b.is(":visible");animateTo=0;if(!isVisible){b.css("opacity",0).show();animateTo=1}if(c=="hide"&&isVisible||c=="show"&&!isVisible)times--;for(c=0;c<times;c++){b.animate({opacity:animateTo},duration,a.options.easing);animateTo=(animateTo+1)%2}b.animate({opacity:animateTo},duration,
+a.options.easing,function(){animateTo==0&&b.hide();a.callback&&a.callback.apply(this,arguments)});b.queue("fx",function(){b.dequeue()}).dequeue()})}})(jQuery);
+(function(c){c.effects.puff=function(b){return this.queue(function(){var a=c(this),e=c.effects.setMode(a,b.options.mode||"hide"),g=parseInt(b.options.percent,10)||150,h=g/100,i={height:a.height(),width:a.width()};c.extend(b.options,{fade:true,mode:e,percent:e=="hide"?g:100,from:e=="hide"?i:{height:i.height*h,width:i.width*h}});a.effect("scale",b.options,b.duration,b.callback);a.dequeue()})};c.effects.scale=function(b){return this.queue(function(){var a=c(this),e=c.extend(true,{},b.options),g=c.effects.setMode(a,
+b.options.mode||"effect"),h=parseInt(b.options.percent,10)||(parseInt(b.options.percent,10)==0?0:g=="hide"?0:100),i=b.options.direction||"both",f=b.options.origin;if(g!="effect"){e.origin=f||["middle","center"];e.restore=true}f={height:a.height(),width:a.width()};a.from=b.options.from||(g=="show"?{height:0,width:0}:f);h={y:i!="horizontal"?h/100:1,x:i!="vertical"?h/100:1};a.to={height:f.height*h.y,width:f.width*h.x};if(b.options.fade){if(g=="show"){a.from.opacity=0;a.to.opacity=1}if(g=="hide"){a.from.opacity=
+1;a.to.opacity=0}}e.from=a.from;e.to=a.to;e.mode=g;a.effect("size",e,b.duration,b.callback);a.dequeue()})};c.effects.size=function(b){return this.queue(function(){var a=c(this),e=["position","top","left","width","height","overflow","opacity"],g=["position","top","left","overflow","opacity"],h=["width","height","overflow"],i=["fontSize"],f=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"],k=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"],p=c.effects.setMode(a,
+b.options.mode||"effect"),n=b.options.restore||false,m=b.options.scale||"both",l=b.options.origin,j={height:a.height(),width:a.width()};a.from=b.options.from||j;a.to=b.options.to||j;if(l){l=c.effects.getBaseline(l,j);a.from.top=(j.height-a.from.height)*l.y;a.from.left=(j.width-a.from.width)*l.x;a.to.top=(j.height-a.to.height)*l.y;a.to.left=(j.width-a.to.width)*l.x}var d={from:{y:a.from.height/j.height,x:a.from.width/j.width},to:{y:a.to.height/j.height,x:a.to.width/j.width}};if(m=="box"||m=="both"){if(d.from.y!=
+d.to.y){e=e.concat(f);a.from=c.effects.setTransition(a,f,d.from.y,a.from);a.to=c.effects.setTransition(a,f,d.to.y,a.to)}if(d.from.x!=d.to.x){e=e.concat(k);a.from=c.effects.setTransition(a,k,d.from.x,a.from);a.to=c.effects.setTransition(a,k,d.to.x,a.to)}}if(m=="content"||m=="both")if(d.from.y!=d.to.y){e=e.concat(i);a.from=c.effects.setTransition(a,i,d.from.y,a.from);a.to=c.effects.setTransition(a,i,d.to.y,a.to)}c.effects.save(a,n?e:g);a.show();c.effects.createWrapper(a);a.css("overflow","hidden").css(a.from);
+if(m=="content"||m=="both"){f=f.concat(["marginTop","marginBottom"]).concat(i);k=k.concat(["marginLeft","marginRight"]);h=e.concat(f).concat(k);a.find("*[width]").each(function(){child=c(this);n&&c.effects.save(child,h);var o={height:child.height(),width:child.width()};child.from={height:o.height*d.from.y,width:o.width*d.from.x};child.to={height:o.height*d.to.y,width:o.width*d.to.x};if(d.from.y!=d.to.y){child.from=c.effects.setTransition(child,f,d.from.y,child.from);child.to=c.effects.setTransition(child,
+f,d.to.y,child.to)}if(d.from.x!=d.to.x){child.from=c.effects.setTransition(child,k,d.from.x,child.from);child.to=c.effects.setTransition(child,k,d.to.x,child.to)}child.css(child.from);child.animate(child.to,b.duration,b.options.easing,function(){n&&c.effects.restore(child,h)})})}a.animate(a.to,{queue:false,duration:b.duration,easing:b.options.easing,complete:function(){a.to.opacity===0&&a.css("opacity",a.from.opacity);p=="hide"&&a.hide();c.effects.restore(a,n?e:g);c.effects.removeWrapper(a);b.callback&&
+b.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
+(function(d){d.effects.shake=function(a){return this.queue(function(){var b=d(this),j=["position","top","left"];d.effects.setMode(b,a.options.mode||"effect");var c=a.options.direction||"left",e=a.options.distance||20,l=a.options.times||3,f=a.duration||a.options.duration||140;d.effects.save(b,j);b.show();d.effects.createWrapper(b);var g=c=="up"||c=="down"?"top":"left",h=c=="up"||c=="left"?"pos":"neg";c={};var i={},k={};c[g]=(h=="pos"?"-=":"+=")+e;i[g]=(h=="pos"?"+=":"-=")+e*2;k[g]=(h=="pos"?"-=":"+=")+
+e*2;b.animate(c,f,a.options.easing);for(e=1;e<l;e++)b.animate(i,f,a.options.easing).animate(k,f,a.options.easing);b.animate(i,f,a.options.easing).animate(c,f/2,a.options.easing,function(){d.effects.restore(b,j);d.effects.removeWrapper(b);a.callback&&a.callback.apply(this,arguments)});b.queue("fx",function(){b.dequeue()});b.dequeue()})}})(jQuery);
+(function(c){c.effects.slide=function(d){return this.queue(function(){var a=c(this),h=["position","top","left"],e=c.effects.setMode(a,d.options.mode||"show"),b=d.options.direction||"left";c.effects.save(a,h);a.show();c.effects.createWrapper(a).css({overflow:"hidden"});var f=b=="up"||b=="down"?"top":"left";b=b=="up"||b=="left"?"pos":"neg";var g=d.options.distance||(f=="top"?a.outerHeight({margin:true}):a.outerWidth({margin:true}));if(e=="show")a.css(f,b=="pos"?-g:g);var i={};i[f]=(e=="show"?b=="pos"?
+"+=":"-=":b=="pos"?"-=":"+=")+g;a.animate(i,{queue:false,duration:d.duration,easing:d.options.easing,complete:function(){e=="hide"&&a.hide();c.effects.restore(a,h);c.effects.removeWrapper(a);d.callback&&d.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
+(function(e){e.effects.transfer=function(a){return this.queue(function(){var b=e(this),c=e(a.options.to),d=c.offset();c={top:d.top,left:d.left,height:c.innerHeight(),width:c.innerWidth()};d=b.offset();var f=e('<div class="ui-effects-transfer"></div>').appendTo(document.body).addClass(a.options.className).css({top:d.top,left:d.left,height:b.innerHeight(),width:b.innerWidth(),position:"absolute"}).animate(c,a.duration,a.options.easing,function(){f.remove();a.callback&&a.callback.apply(b[0],arguments);
+b.dequeue()})})}})(jQuery);
+(function(d){d.widget("ui.accordion",{options:{active:0,animated:"slide",autoHeight:true,clearStyle:false,collapsible:false,event:"click",fillSpace:false,header:"> li > :first-child,> :not(li):even",icons:{header:"ui-icon-triangle-1-e",headerSelected:"ui-icon-triangle-1-s"},navigation:false,navigationFilter:function(){return this.href.toLowerCase()==location.href.toLowerCase()}},_create:function(){var a=this.options,b=this;this.running=0;this.element.addClass("ui-accordion ui-widget ui-helper-reset");
+this.element.children("li").addClass("ui-accordion-li-fix");this.headers=this.element.find(a.header).addClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all").bind("mouseenter.accordion",function(){d(this).addClass("ui-state-hover")}).bind("mouseleave.accordion",function(){d(this).removeClass("ui-state-hover")}).bind("focus.accordion",function(){d(this).addClass("ui-state-focus")}).bind("blur.accordion",function(){d(this).removeClass("ui-state-focus")});this.headers.next().addClass("ui-accordion-content ui-helper-reset ui-widget-content ui-corner-bottom");
+if(a.navigation){var c=this.element.find("a").filter(a.navigationFilter);if(c.length){var f=c.closest(".ui-accordion-header");this.active=f.length?f:c.closest(".ui-accordion-content").prev()}}this.active=this._findActive(this.active||a.active).toggleClass("ui-state-default").toggleClass("ui-state-active").toggleClass("ui-corner-all").toggleClass("ui-corner-top");this.active.next().addClass("ui-accordion-content-active");this._createIcons();this.resize();this.element.attr("role","tablist");this.headers.attr("role",
+"tab").bind("keydown",function(g){return b._keydown(g)}).next().attr("role","tabpanel");this.headers.not(this.active||"").attr("aria-expanded","false").attr("tabIndex","-1").next().hide();this.active.length?this.active.attr("aria-expanded","true").attr("tabIndex","0"):this.headers.eq(0).attr("tabIndex","0");d.browser.safari||this.headers.find("a").attr("tabIndex","-1");a.event&&this.headers.bind(a.event+".accordion",function(g){b._clickHandler.call(b,g,this);g.preventDefault()})},_createIcons:function(){var a=
+this.options;if(a.icons){d("<span/>").addClass("ui-icon "+a.icons.header).prependTo(this.headers);this.active.find(".ui-icon").toggleClass(a.icons.header).toggleClass(a.icons.headerSelected);this.element.addClass("ui-accordion-icons")}},_destroyIcons:function(){this.headers.children(".ui-icon").remove();this.element.removeClass("ui-accordion-icons")},destroy:function(){var a=this.options;this.element.removeClass("ui-accordion ui-widget ui-helper-reset").removeAttr("role").unbind(".accordion").removeData("accordion");
+this.headers.unbind(".accordion").removeClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all ui-state-active ui-corner-top").removeAttr("role").removeAttr("aria-expanded").removeAttr("tabIndex");this.headers.find("a").removeAttr("tabIndex");this._destroyIcons();var b=this.headers.next().css("display","").removeAttr("role").removeClass("ui-helper-reset ui-widget-content ui-corner-bottom ui-accordion-content ui-accordion-content-active");if(a.autoHeight||a.fillHeight)b.css("height",
+"");return this},_setOption:function(a,b){this._superApply("_setOption",arguments);a=="active"&&this.activate(b);if(a=="icons"){this._destroyIcons();b&&this._createIcons()}},_keydown:function(a){var b=d.ui.keyCode;if(!(this.options.disabled||a.altKey||a.ctrlKey)){var c=this.headers.length,f=this.headers.index(a.target),g=false;switch(a.keyCode){case b.RIGHT:case b.DOWN:g=this.headers[(f+1)%c];break;case b.LEFT:case b.UP:g=this.headers[(f-1+c)%c];break;case b.SPACE:case b.ENTER:this._clickHandler({target:a.target},
+a.target);a.preventDefault()}if(g){d(a.target).attr("tabIndex","-1");d(g).attr("tabIndex","0");g.focus();return false}return true}},resize:function(){var a=this.options,b;if(a.fillSpace){if(d.browser.msie){var c=this.element.parent().css("overflow");this.element.parent().css("overflow","hidden")}b=this.element.parent().height();d.browser.msie&&this.element.parent().css("overflow",c);this.headers.each(function(){b-=d(this).outerHeight(true)});this.headers.next().each(function(){d(this).height(Math.max(0,
+b-d(this).innerHeight()+d(this).height()))}).css("overflow","auto")}else if(a.autoHeight){b=0;this.headers.next().each(function(){b=Math.max(b,d(this).height())}).height(b)}return this},activate:function(a){this.options.active=a;a=this._findActive(a)[0];this._clickHandler({target:a},a);return this},_findActive:function(a){return a?typeof a=="number"?this.headers.filter(":eq("+a+")"):this.headers.not(this.headers.not(a)):a===false?d([]):this.headers.filter(":eq(0)")},_clickHandler:function(a,b){var c=
+this.options;if(!c.disabled)if(a.target){a=d(a.currentTarget||b);b=a[0]==this.active[0];c.active=c.collapsible&&b?false:d(".ui-accordion-header",this.element).index(a);if(!(this.running||!c.collapsible&&b)){this.active.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").find(".ui-icon").removeClass(c.icons.headerSelected).addClass(c.icons.header);if(!b){a.removeClass("ui-state-default ui-corner-all").addClass("ui-state-active ui-corner-top").find(".ui-icon").removeClass(c.icons.header).addClass(c.icons.headerSelected);
+a.next().addClass("ui-accordion-content-active")}e=a.next();f=this.active.next();g={options:c,newHeader:b&&c.collapsible?d([]):a,oldHeader:this.active,newContent:b&&c.collapsible?d([]):e,oldContent:f};c=this.headers.index(this.active[0])>this.headers.index(a[0]);this.active=b?d([]):a;this._toggle(e,f,g,b,c)}}else if(c.collapsible){this.active.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").find(".ui-icon").removeClass(c.icons.headerSelected).addClass(c.icons.header);
+this.active.next().addClass("ui-accordion-content-active");var f=this.active.next(),g={options:c,newHeader:d([]),oldHeader:c.active,newContent:d([]),oldContent:f},e=this.active=d([]);this._toggle(e,f,g)}},_toggle:function(a,b,c,f,g){var e=this.options,k=this;this.toShow=a;this.toHide=b;this.data=c;var i=function(){if(k)return k._completed.apply(k,arguments)};this._trigger("changestart",null,this.data);this.running=b.size()===0?a.size():b.size();if(e.animated){c={};c=e.collapsible&&f?{toShow:d([]),
+toHide:b,complete:i,down:g,autoHeight:e.autoHeight||e.fillSpace}:{toShow:a,toHide:b,complete:i,down:g,autoHeight:e.autoHeight||e.fillSpace};if(!e.proxied)e.proxied=e.animated;if(!e.proxiedDuration)e.proxiedDuration=e.duration;e.animated=d.isFunction(e.proxied)?e.proxied(c):e.proxied;e.duration=d.isFunction(e.proxiedDuration)?e.proxiedDuration(c):e.proxiedDuration;f=d.ui.accordion.animations;var h=e.duration,j=e.animated;if(j&&!f[j]&&!d.easing[j])j="slide";f[j]||(f[j]=function(l){this.slide(l,{easing:j,
+duration:h||700})});f[j](c)}else{if(e.collapsible&&f)a.toggle();else{b.hide();a.show()}i(true)}b.prev().attr("aria-expanded","false").attr("tabIndex","-1").blur();a.prev().attr("aria-expanded","true").attr("tabIndex","0").focus()},_completed:function(a){var b=this.options;this.running=a?0:--this.running;if(!this.running){b.clearStyle&&this.toShow.add(this.toHide).css({height:"",overflow:""});this.toHide.removeClass("ui-accordion-content-active");this._trigger("change",null,this.data)}}});d.extend(d.ui.accordion,
+{version:"1.9m2",animations:{slide:function(a,b){a=d.extend({easing:"swing",duration:300},a,b);if(a.toHide.size())if(a.toShow.size()){var c=a.toShow.css("overflow"),f=0,g={},e={},k;b=a.toShow;k=b[0].style.width;b.width(parseInt(b.parent().width(),10)-parseInt(b.css("paddingLeft"),10)-parseInt(b.css("paddingRight"),10)-(parseInt(b.css("borderLeftWidth"),10)||0)-(parseInt(b.css("borderRightWidth"),10)||0));d.each(["height","paddingTop","paddingBottom"],function(i,h){e[h]="hide";i=(""+d.css(a.toShow[0],
+h)).match(/^([\d+-.]+)(.*)$/);g[h]={value:i[1],unit:i[2]||"px"}});a.toShow.css({height:0,overflow:"hidden"}).show();a.toHide.filter(":hidden").each(a.complete).end().filter(":visible").animate(e,{step:function(i,h){if(h.prop=="height")f=h.end-h.start===0?0:(h.now-h.start)/(h.end-h.start);a.toShow[0].style[h.prop]=f*g[h.prop].value+g[h.prop].unit},duration:a.duration,easing:a.easing,complete:function(){a.autoHeight||a.toShow.css("height","");a.toShow.css("width",k);a.toShow.css({overflow:c});a.complete()}})}else a.toHide.animate({height:"hide"},
+a);else a.toShow.animate({height:"show"},a)},bounceslide:function(a){this.slide(a,{easing:a.down?"easeOutBounce":"swing",duration:a.down?1E3:200})}}})})(jQuery);
+(function(e){e.widget("ui.autocomplete",{options:{minLength:1,delay:300},_create:function(){var a=this,d=this.element[0].ownerDocument;this.element.addClass("ui-autocomplete-input").attr("autocomplete","off").attr({role:"textbox","aria-autocomplete":"list","aria-haspopup":"true"}).bind("keydown.autocomplete",function(c){var b=e.ui.keyCode;switch(c.keyCode){case b.PAGE_UP:a._move("previousPage",c);break;case b.PAGE_DOWN:a._move("nextPage",c);break;case b.UP:a._move("previous",c);c.preventDefault();
+break;case b.DOWN:a._move("next",c);c.preventDefault();break;case b.ENTER:case b.NUMPAD_ENTER:a.menu.active&&c.preventDefault();case b.TAB:if(!a.menu.active)return;a.menu.select(c);break;case b.ESCAPE:a.element.val(a.term);a.close(c);break;case b.LEFT:case b.RIGHT:case b.SHIFT:case b.CONTROL:case b.ALT:case b.COMMAND:case b.COMMAND_RIGHT:case b.INSERT:case b.CAPS_LOCK:case b.END:case b.HOME:break;default:clearTimeout(a.searching);a.searching=setTimeout(function(){a.search(null,c)},a.options.delay);
+break}}).bind("focus.autocomplete",function(){a.selectedItem=null;a.previous=a.element.val()}).bind("blur.autocomplete",function(c){clearTimeout(a.searching);a.closing=setTimeout(function(){a.close(c);a._change(c)},150)});this._initSource();this.response=function(){return a._response.apply(a,arguments)};this.menu=e("<ul></ul>").addClass("ui-autocomplete").appendTo("body",d).mousedown(function(){setTimeout(function(){clearTimeout(a.closing)},13)}).menu({input:e(),focus:function(c,b){b=b.item.data("item.autocomplete");
+false!==a._trigger("focus",null,{item:b})&&/^key/.test(c.originalEvent.type)&&a.element.val(b.value)},select:function(c,b){b=b.item.data("item.autocomplete");false!==a._trigger("select",c,{item:b})&&a.element.val(b.value);a.close(c);c=a.previous;if(a.element[0]!==d.activeElement){a.element.focus();a.previous=c}a.selectedItem=b},blur:function(){a.menu.element.is(":visible")&&a.element.val(a.term)}}).zIndex(this.element.zIndex()+1).css({top:0,left:0}).hide().data("menu");e.fn.bgiframe&&this.menu.element.bgiframe()},
+destroy:function(){this.element.removeClass("ui-autocomplete-input").removeAttr("autocomplete").removeAttr("role").removeAttr("aria-autocomplete").removeAttr("aria-haspopup");this.menu.element.remove();this._super("destroy")},_setOption:function(a){this._superApply("_setOption",arguments);a==="source"&&this._initSource()},_initSource:function(){var a,d;if(e.isArray(this.options.source)){a=this.options.source;this.source=function(c,b){b(e.ui.autocomplete.filter(a,c.term))}}else if(typeof this.options.source===
+"string"){d=this.options.source;this.source=function(c,b){e.getJSON(d,c,b)}}else this.source=this.options.source},search:function(a,d){a=a!=null?a:this.element.val();if(a.length<this.options.minLength)return this.close(d);clearTimeout(this.closing);if(this._trigger("search")!==false)return this._search(a)},_search:function(a){this.term=this.element.addClass("ui-autocomplete-loading").val();this.source({term:a},this.response)},_response:function(a){if(a.length){a=this._normalize(a);this._suggest(a);
+this._trigger("open")}else this.close();this.element.removeClass("ui-autocomplete-loading")},close:function(a){clearTimeout(this.closing);if(this.menu.element.is(":visible")){this._trigger("close",a);this.menu.element.hide();this.menu.deactivate()}},_change:function(a){this.previous!==this.element.val()&&this._trigger("change",a,{item:this.selectedItem})},_normalize:function(a){if(a.length&&a[0].label&&a[0].value)return a;return e.map(a,function(d){if(typeof d==="string")return{label:d,value:d};return e.extend({label:d.label||
+d.value,value:d.value||d.label},d)})},_suggest:function(a){var d=this.menu.element.empty().zIndex(this.element.zIndex()+1),c;this._renderMenu(d,a);this.menu.deactivate();this.menu.refresh();this.menu.element.show().position({my:"left top",at:"left bottom",of:this.element,collision:"none"});a=d.width("").width();c=this.element.width();d.width(Math.max(a,c))},_renderMenu:function(a,d){var c=this;e.each(d,function(b,f){c._renderItem(a,f)})},_renderItem:function(a,d){return e("<li></li>").data("item.autocomplete",
+d).append("<a>"+d.label+"</a>").appendTo(a)},_move:function(a,d){if(this.menu.element.is(":visible"))if(this.menu.first()&&/^previous/.test(a)||this.menu.last()&&/^next/.test(a)){this.element.val(this.term);this.menu.deactivate()}else this.menu[a](d);else this.search(null,d)},widget:function(){return this.menu.element}});e.extend(e.ui.autocomplete,{escapeRegex:function(a){return a.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi,"\\$1")},filter:function(a,d){var c=new RegExp(e.ui.autocomplete.escapeRegex(d),
+"i");return e.grep(a,function(b){return c.test(b.label||b.value||b)})}})})(jQuery);
+(function(a){var g,i=function(b){a(":ui-button",b.target.form).each(function(){var c=a(this).data("button");setTimeout(function(){c.refresh()},1)})},h=function(b){var c=b.name,d=b.form,e=a([]);if(c)e=d?a(d).find("[name='"+c+"']"):a("[name='"+c+"']",b.ownerDocument).filter(function(){return!this.form});return e};a.widget("ui.button",{options:{text:true,label:null,icons:{primary:null,secondary:null}},_create:function(){this.element.closest("form").unbind("reset.button").bind("reset.button",i);this._determineButtonType();
+this.hasTitle=!!this.buttonElement.attr("title");var b=this,c=this.options,d=this.type==="checkbox"||this.type==="radio",e="ui-state-hover"+(!d?" ui-state-active":"");if(c.label===null)c.label=this.buttonElement.html();if(this.element.is(":disabled"))c.disabled=true;this.buttonElement.addClass("ui-button ui-widget ui-state-default ui-corner-all").attr("role","button").bind("mouseenter.button",function(){if(!c.disabled){a(this).addClass("ui-state-hover");this===g&&a(this).addClass("ui-state-active")}}).bind("mouseleave.button",
+function(){c.disabled||a(this).removeClass(e)}).bind("focus.button",function(){a(this).addClass("ui-state-focus")}).bind("blur.button",function(){a(this).removeClass("ui-state-focus")});d&&this.element.bind("change.button",function(){b.refresh()});if(this.type==="checkbox")this.buttonElement.bind("click.button",function(){if(c.disabled)return false;a(this).toggleClass("ui-state-active");b.buttonElement.attr("aria-pressed",b.element[0].checked)});else if(this.type==="radio")this.buttonElement.bind("click.button",
+function(){if(c.disabled)return false;a(this).addClass("ui-state-active");b.buttonElement.attr("aria-pressed",true);var f=b.element[0];h(f).not(f).map(function(){return a(this).button("widget")[0]}).removeClass("ui-state-active").attr("aria-pressed",false)});else{this.buttonElement.bind("mousedown.button",function(){if(c.disabled)return false;a(this).addClass("ui-state-active");g=this;a(document).one("mouseup",function(){g=null})}).bind("mouseup.button",function(){if(c.disabled)return false;a(this).removeClass("ui-state-active")}).bind("keydown.button",
+function(f){if(c.disabled)return false;if(f.keyCode==a.ui.keyCode.SPACE||f.keyCode==a.ui.keyCode.ENTER)a(this).addClass("ui-state-active")}).bind("keyup.button",function(){a(this).removeClass("ui-state-active")});this.buttonElement.is("a")&&this.buttonElement.keyup(function(f){f.keyCode===a.ui.keyCode.SPACE&&a(this).click()})}this._setOption("disabled",c.disabled)},_determineButtonType:function(){this.type=this.element.is(":checkbox")?"checkbox":this.element.is(":radio")?"radio":this.element.is("input")?
+"input":"button";if(this.type==="checkbox"||this.type==="radio"){this.buttonElement=this.element.parents().last().find("[for="+this.element.attr("id")+"]");this.element.addClass("ui-helper-hidden-accessible");var b=this.element.is(":checked");b&&this.buttonElement.addClass("ui-state-active");this.buttonElement.attr("aria-pressed",b)}else this.buttonElement=this.element},widget:function(){return this.buttonElement},destroy:function(){this.element.removeClass("ui-helper-hidden-accessible");this.buttonElement.removeClass("ui-button ui-widget ui-state-default ui-corner-all ui-state-hover ui-state-active ui-button-icons-only ui-button-icon-only ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary ui-button-text-only").removeAttr("role").removeAttr("aria-pressed").html(this.buttonElement.find(".ui-button-text").html());
+this.hasTitle||this.buttonElement.removeAttr("title");this._super("destroy")},_setOption:function(b,c){this._superApply("_setOption",arguments);if(b==="disabled")c?this.element.attr("disabled",true):this.element.removeAttr("disabled");this._resetButton()},refresh:function(){var b=this.element.is(":disabled");b!==this.options.disabled&&this._setOption("disabled",b);if(this.type==="radio")h(this.element[0]).each(function(){a(this).is(":checked")?a(this).button("widget").addClass("ui-state-active").attr("aria-pressed",
+true):a(this).button("widget").removeClass("ui-state-active").attr("aria-pressed",false)});else if(this.type==="checkbox")this.element.is(":checked")?this.buttonElement.addClass("ui-state-active").attr("aria-pressed",true):this.buttonElement.removeClass("ui-state-active").attr("aria-pressed",false)},_resetButton:function(){if(this.type==="input")this.options.label&&this.element.val(this.options.label);else{var b=this.buttonElement.removeClass("ui-button-icons-only ui-button-icon-only ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary ui-button-text-only"),
+c=a("<span></span>").addClass("ui-button-text").html(this.options.label).appendTo(b.empty()).text(),d=this.options.icons,e=d.primary&&d.secondary;if(d.primary||d.secondary){b.addClass("ui-button-text-icon"+(e?"s":d.primary?"-primary":"-secondary"));d.primary&&b.prepend("<span class='ui-button-icon-primary ui-icon "+d.primary+"'></span>");d.secondary&&b.append("<span class='ui-button-icon-secondary ui-icon "+d.secondary+"'></span>");if(!this.options.text){b.addClass(e?"ui-button-icons-only":"ui-button-icon-only").removeClass("ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary");
+this.hasTitle||b.attr("title",c)}}else b.addClass("ui-button-text-only")}}});a.widget("ui.buttonset",{_create:function(){this.element.addClass("ui-buttonset");this._init()},_init:function(){this.refresh()},_setOption:function(b,c){b==="disabled"&&this.buttons.button("option",b,c);this._superApply("_setOption",arguments)},refresh:function(){this.buttons=this.element.find(":button, :submit, :reset, :checkbox, :radio, a, :data(button)").filter(":ui-button").button("refresh").end().not(":ui-button").button().end().map(function(){return a(this).button("widget")[0]}).removeClass("ui-corner-all ui-corner-left ui-corner-right").filter(":first").addClass("ui-corner-left").end().filter(":last").addClass("ui-corner-right").end().end()},
+destroy:function(){this.element.removeClass("ui-buttonset");this.buttons.map(function(){return a(this).button("widget")[0]}).removeClass("ui-corner-left ui-corner-right").end().button("destroy");this._super("destroy")}})})(jQuery);
+(function(d){function K(){this.debug=false;this._curInst=null;this._keyEvent=false;this._disabledInputs=[];this._inDialog=this._datepickerShowing=false;this._mainDivId="ui-datepicker-div";this._inlineClass="ui-datepicker-inline";this._appendClass="ui-datepicker-append";this._triggerClass="ui-datepicker-trigger";this._dialogClass="ui-datepicker-dialog";this._disableClass="ui-datepicker-disabled";this._unselectableClass="ui-datepicker-unselectable";this._currentClass="ui-datepicker-current-day";this._dayOverClass=
+"ui-datepicker-days-cell-over";this.regional=[];this.regional[""]={closeText:"Done",prevText:"Prev",nextText:"Next",currentText:"Today",monthNames:["January","February","March","April","May","June","July","August","September","October","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayNames:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayNamesShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],dayNamesMin:["Su",
+"Mo","Tu","We","Th","Fr","Sa"],weekHeader:"Wk",dateFormat:"mm/dd/yy",firstDay:0,isRTL:false,showMonthAfterYear:false,yearSuffix:""};this._defaults={showOn:"focus",showAnim:"fadeIn",showOptions:{},defaultDate:null,appendText:"",buttonText:"...",buttonImage:"",buttonImageOnly:false,hideIfNoPrevNext:false,navigationAsDateFormat:false,gotoCurrent:false,changeMonth:false,changeYear:false,yearRange:"c-10:c+10",showOtherMonths:false,selectOtherMonths:false,showWeek:false,calculateWeek:this.iso8601Week,shortYearCutoff:"+10",
+minDate:null,maxDate:null,duration:"fast",beforeShowDay:null,beforeShow:null,onSelect:null,onChangeMonthYear:null,onClose:null,numberOfMonths:1,showCurrentAtPos:0,stepMonths:1,stepBigMonths:12,altField:"",altFormat:"",constrainInput:true,showButtonPanel:false,autoSize:false};d.extend(this._defaults,this.regional[""]);this.dpDiv=d('<div id="'+this._mainDivId+'" class="ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all ui-helper-hidden-accessible"></div>')}function E(a,b){d.extend(a,
+b);for(var c in b)if(b[c]==null||b[c]==undefined)a[c]=b[c];return a}d.extend(d.ui,{datepicker:{version:"1.9m2"}});var y=(new Date).getTime();d.extend(K.prototype,{markerClassName:"hasDatepicker",log:function(){this.debug&&console.log.apply("",arguments)},_widgetDatepicker:function(){return this.dpDiv},setDefaults:function(a){E(this._defaults,a||{});return this},_attachDatepicker:function(a,b){var c=null;for(var e in this._defaults){var f=a.getAttribute("date:"+e);if(f){c=c||{};try{c[e]=eval(f)}catch(h){c[e]=
+f}}}e=a.nodeName.toLowerCase();f=e=="div"||e=="span";if(!a.id){this.uuid+=1;a.id="dp"+this.uuid}var i=this._newInst(d(a),f);i.settings=d.extend({},b||{},c||{});if(e=="input")this._connectDatepicker(a,i);else f&&this._inlineDatepicker(a,i)},_newInst:function(a,b){return{id:a[0].id.replace(/([^A-Za-z0-9_])/g,"\\\\$1"),input:a,selectedDay:0,selectedMonth:0,selectedYear:0,drawMonth:0,drawYear:0,inline:b,dpDiv:!b?this.dpDiv:d('<div class="'+this._inlineClass+' ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all"></div>')}},
+_connectDatepicker:function(a,b){var c=d(a);b.append=d([]);b.trigger=d([]);if(!c.hasClass(this.markerClassName)){this._attachments(c,b);c.addClass(this.markerClassName).keydown(this._doKeyDown).keypress(this._doKeyPress).keyup(this._doKeyUp).bind("setData.datepicker",function(e,f,h){b.settings[f]=h}).bind("getData.datepicker",function(e,f){return this._get(b,f)});this._autoSize(b);d.data(a,"datepicker",b)}},_attachments:function(a,b){var c=this._get(b,"appendText"),e=this._get(b,"isRTL");b.append&&
+b.append.remove();if(c){b.append=d('<span class="'+this._appendClass+'">'+c+"</span>");a[e?"before":"after"](b.append)}a.unbind("focus",this._showDatepicker);b.trigger&&b.trigger.remove();c=this._get(b,"showOn");if(c=="focus"||c=="both")a.focus(this._showDatepicker);if(c=="button"||c=="both"){c=this._get(b,"buttonText");var f=this._get(b,"buttonImage");b.trigger=d(this._get(b,"buttonImageOnly")?d("<img/>").addClass(this._triggerClass).attr({src:f,alt:c,title:c}):d('<button type="button"></button>').addClass(this._triggerClass).html(f==
+""?c:d("<img/>").attr({src:f,alt:c,title:c})));a[e?"before":"after"](b.trigger);b.trigger.click(function(){d.datepicker._datepickerShowing&&d.datepicker._lastInput==a[0]?d.datepicker._hideDatepicker():d.datepicker._showDatepicker(a[0]);return false})}},_autoSize:function(a){if(this._get(a,"autoSize")&&!a.inline){var b=new Date(2009,11,20),c=this._get(a,"dateFormat");if(c.match(/[DM]/)){var e=function(f){for(var h=0,i=0,g=0;g<f.length;g++)if(f[g].length>h){h=f[g].length;i=g}return i};b.setMonth(e(this._get(a,
+c.match(/MM/)?"monthNames":"monthNamesShort")));b.setDate(e(this._get(a,c.match(/DD/)?"dayNames":"dayNamesShort"))+20-b.getDay())}a.input.attr("size",this._formatDate(a,b).length)}},_inlineDatepicker:function(a,b){var c=d(a);if(!c.hasClass(this.markerClassName)){c.addClass(this.markerClassName).append(b.dpDiv).bind("setData.datepicker",function(e,f,h){b.settings[f]=h}).bind("getData.datepicker",function(e,f){return this._get(b,f)});d.data(a,"datepicker",b);this._setDate(b,this._getDefaultDate(b),
+true);this._updateDatepicker(b);this._updateAlternate(b)}},_dialogDatepicker:function(a,b,c,e,f){a=this._dialogInst;if(!a){this.uuid+=1;this._dialogInput=d('<input type="text" id="'+("dp"+this.uuid)+'" style="position: absolute; top: -100px; width: 0px; z-index: -10;"/>');this._dialogInput.keydown(this._doKeyDown);d("body").append(this._dialogInput);a=this._dialogInst=this._newInst(this._dialogInput,false);a.settings={};d.data(this._dialogInput[0],"datepicker",a)}E(a.settings,e||{});b=b&&b.constructor==
+Date?this._formatDate(a,b):b;this._dialogInput.val(b);this._pos=f?f.length?f:[f.pageX,f.pageY]:null;if(!this._pos)this._pos=[document.documentElement.clientWidth/2-100+(document.documentElement.scrollLeft||document.body.scrollLeft),document.documentElement.clientHeight/2-150+(document.documentElement.scrollTop||document.body.scrollTop)];this._dialogInput.css("left",this._pos[0]+20+"px").css("top",this._pos[1]+"px");a.settings.onSelect=c;this._inDialog=true;this.dpDiv.addClass(this._dialogClass);this._showDatepicker(this._dialogInput[0]);
+d.blockUI&&d.blockUI(this.dpDiv);d.data(this._dialogInput[0],"datepicker",a);return this},_destroyDatepicker:function(a){var b=d(a),c=d.data(a,"datepicker");if(b.hasClass(this.markerClassName)){var e=a.nodeName.toLowerCase();d.removeData(a,"datepicker");if(e=="input"){c.append.remove();c.trigger.remove();b.removeClass(this.markerClassName).unbind("focus",this._showDatepicker).unbind("keydown",this._doKeyDown).unbind("keypress",this._doKeyPress).unbind("keyup",this._doKeyUp)}else if(e=="div"||e=="span")b.removeClass(this.markerClassName).empty()}},
+_enableDatepicker:function(a){var b=d(a),c=d.data(a,"datepicker");if(b.hasClass(this.markerClassName)){var e=a.nodeName.toLowerCase();if(e=="input"){a.disabled=false;c.trigger.filter("button").each(function(){this.disabled=false}).end().filter("img").css({opacity:"1.0",cursor:""})}else if(e=="div"||e=="span")b.children("."+this._inlineClass).children().removeClass("ui-state-disabled");this._disabledInputs=d.map(this._disabledInputs,function(f){return f==a?null:f})}},_disableDatepicker:function(a){var b=
+d(a),c=d.data(a,"datepicker");if(b.hasClass(this.markerClassName)){var e=a.nodeName.toLowerCase();if(e=="input"){a.disabled=true;c.trigger.filter("button").each(function(){this.disabled=true}).end().filter("img").css({opacity:"0.5",cursor:"default"})}else if(e=="div"||e=="span")b.children("."+this._inlineClass).children().addClass("ui-state-disabled");this._disabledInputs=d.map(this._disabledInputs,function(f){return f==a?null:f});this._disabledInputs[this._disabledInputs.length]=a}},_isDisabledDatepicker:function(a){if(!a)return false;
+for(var b=0;b<this._disabledInputs.length;b++)if(this._disabledInputs[b]==a)return true;return false},_getInst:function(a){try{return d.data(a,"datepicker")}catch(b){throw"Missing instance data for this datepicker";}},_optionDatepicker:function(a,b,c){var e=this._getInst(a);if(arguments.length==2&&typeof b=="string")return b=="defaults"?d.extend({},d.datepicker._defaults):e?b=="all"?d.extend({},e.settings):this._get(e,b):null;var f=b||{};if(typeof b=="string"){f={};f[b]=c}if(e){this._curInst==e&&
+this._hideDatepicker();var h=this._getDateDatepicker(a,true);E(e.settings,f);this._attachments(d(a),e);this._autoSize(e);this._setDateDatepicker(a,h);this._updateDatepicker(e)}},_changeDatepicker:function(a,b,c){this._optionDatepicker(a,b,c)},_refreshDatepicker:function(a){(a=this._getInst(a))&&this._updateDatepicker(a)},_setDateDatepicker:function(a,b){if(a=this._getInst(a)){this._setDate(a,b);this._updateDatepicker(a);this._updateAlternate(a)}},_getDateDatepicker:function(a,b){(a=this._getInst(a))&&
+!a.inline&&this._setDateFromField(a,b);return a?this._getDate(a):null},_doKeyDown:function(a){var b=d.datepicker._getInst(a.target),c=true,e=b.dpDiv.is(".ui-datepicker-rtl");b._keyEvent=true;if(d.datepicker._datepickerShowing)switch(a.keyCode){case 9:d.datepicker._hideDatepicker();c=false;break;case 13:c=d("td."+d.datepicker._dayOverClass,b.dpDiv).add(d("td."+d.datepicker._currentClass,b.dpDiv));c[0]?d.datepicker._selectDay(a.target,b.selectedMonth,b.selectedYear,c[0]):d.datepicker._hideDatepicker();
+return false;case 27:d.datepicker._hideDatepicker();break;case 33:d.datepicker._adjustDate(a.target,a.ctrlKey?-d.datepicker._get(b,"stepBigMonths"):-d.datepicker._get(b,"stepMonths"),"M");break;case 34:d.datepicker._adjustDate(a.target,a.ctrlKey?+d.datepicker._get(b,"stepBigMonths"):+d.datepicker._get(b,"stepMonths"),"M");break;case 35:if(a.ctrlKey||a.metaKey)d.datepicker._clearDate(a.target);c=a.ctrlKey||a.metaKey;break;case 36:if(a.ctrlKey||a.metaKey)d.datepicker._gotoToday(a.target);c=a.ctrlKey||
+a.metaKey;break;case 37:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,e?+1:-1,"D");c=a.ctrlKey||a.metaKey;if(a.originalEvent.altKey)d.datepicker._adjustDate(a.target,a.ctrlKey?-d.datepicker._get(b,"stepBigMonths"):-d.datepicker._get(b,"stepMonths"),"M");break;case 38:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,-7,"D");c=a.ctrlKey||a.metaKey;break;case 39:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,e?-1:+1,"D");c=a.ctrlKey||a.metaKey;if(a.originalEvent.altKey)d.datepicker._adjustDate(a.target,
+a.ctrlKey?+d.datepicker._get(b,"stepBigMonths"):+d.datepicker._get(b,"stepMonths"),"M");break;case 40:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,+7,"D");c=a.ctrlKey||a.metaKey;break;default:c=false}else if(a.keyCode==36&&a.ctrlKey)d.datepicker._showDatepicker(this);else c=false;if(c){a.preventDefault();a.stopPropagation()}},_doKeyPress:function(a){var b=d.datepicker._getInst(a.target);if(d.datepicker._get(b,"constrainInput")){b=d.datepicker._possibleChars(d.datepicker._get(b,"dateFormat"));
+var c=String.fromCharCode(a.charCode==undefined?a.keyCode:a.charCode);return a.ctrlKey||c<" "||!b||b.indexOf(c)>-1}},_doKeyUp:function(a){a=d.datepicker._getInst(a.target);if(a.input.val()!=a.lastVal)try{if(d.datepicker.parseDate(d.datepicker._get(a,"dateFormat"),a.input?a.input.val():null,d.datepicker._getFormatConfig(a))){d.datepicker._setDateFromField(a);d.datepicker._updateAlternate(a);d.datepicker._updateDatepicker(a)}}catch(b){d.datepicker.log(b)}return true},_showDatepicker:function(a){a=a.target||
+a;if(a.nodeName.toLowerCase()!="input")a=d("input",a.parentNode)[0];if(!(d.datepicker._isDisabledDatepicker(a)||d.datepicker._lastInput==a)){var b=d.datepicker._getInst(a);d.datepicker._curInst&&d.datepicker._curInst!=b&&d.datepicker._curInst.dpDiv.stop(true,true);var c=d.datepicker._get(b,"beforeShow");E(b.settings,c?c.apply(a,[a,b]):{});b.lastVal=null;d.datepicker._lastInput=a;d.datepicker._setDateFromField(b);if(d.datepicker._inDialog)a.value="";if(!d.datepicker._pos){d.datepicker._pos=d.datepicker._findPos(a);
+d.datepicker._pos[1]+=a.offsetHeight}var e=false;d(a).parents().each(function(){e|=d(this).css("position")=="fixed";return!e});if(e&&d.browser.opera){d.datepicker._pos[0]-=document.documentElement.scrollLeft;d.datepicker._pos[1]-=document.documentElement.scrollTop}c={left:d.datepicker._pos[0],top:d.datepicker._pos[1]};d.datepicker._pos=null;b.dpDiv.css({position:"absolute",display:"block",top:"-1000px"});d.datepicker._updateDatepicker(b);c=d.datepicker._checkOffset(b,c,e);b.dpDiv.css({position:d.datepicker._inDialog&&
+d.blockUI?"static":e?"fixed":"absolute",display:"none",left:c.left+"px",top:c.top+"px"});if(!b.inline){c=d.datepicker._get(b,"showAnim");var f=d.datepicker._get(b,"duration"),h=function(){d.datepicker._datepickerShowing=true;var i=d.datepicker._getBorders(b.dpDiv);b.dpDiv.find("iframe.ui-datepicker-cover").css({left:-i[0],top:-i[1],width:b.dpDiv.outerWidth(),height:b.dpDiv.outerHeight()})};b.dpDiv.zIndex(d(a).zIndex()+1);d.effects&&d.effects[c]?b.dpDiv.show(c,d.datepicker._get(b,"showOptions"),f,
+h):b.dpDiv[c||"show"](c?f:null,h);if(!c||!f)h();b.input.is(":visible")&&!b.input.is(":disabled")&&b.input.focus();d.datepicker._curInst=b}}},_updateDatepicker:function(a){var b=this,c=d.datepicker._getBorders(a.dpDiv);a.dpDiv.empty().append(this._generateHTML(a)).find("iframe.ui-datepicker-cover").css({left:-c[0],top:-c[1],width:a.dpDiv.outerWidth(),height:a.dpDiv.outerHeight()}).end().find("button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a").bind("mouseout",function(){d(this).removeClass("ui-state-hover");
+this.className.indexOf("ui-datepicker-prev")!=-1&&d(this).removeClass("ui-datepicker-prev-hover");this.className.indexOf("ui-datepicker-next")!=-1&&d(this).removeClass("ui-datepicker-next-hover")}).bind("mouseover",function(){if(!b._isDisabledDatepicker(a.inline?a.dpDiv.parent()[0]:a.input[0])){d(this).parents(".ui-datepicker-calendar").find("a").removeClass("ui-state-hover");d(this).addClass("ui-state-hover");this.className.indexOf("ui-datepicker-prev")!=-1&&d(this).addClass("ui-datepicker-prev-hover");
+this.className.indexOf("ui-datepicker-next")!=-1&&d(this).addClass("ui-datepicker-next-hover")}}).end().find("."+this._dayOverClass+" a").trigger("mouseover").end();c=this._getNumberOfMonths(a);var e=c[1];e>1?a.dpDiv.addClass("ui-datepicker-multi-"+e).css("width",17*e+"em"):a.dpDiv.removeClass("ui-datepicker-multi-2 ui-datepicker-multi-3 ui-datepicker-multi-4").width("");a.dpDiv[(c[0]!=1||c[1]!=1?"add":"remove")+"Class"]("ui-datepicker-multi");a.dpDiv[(this._get(a,"isRTL")?"add":"remove")+"Class"]("ui-datepicker-rtl");
+a==d.datepicker._curInst&&d.datepicker._datepickerShowing&&a.input&&a.input.is(":visible")&&!a.input.is(":disabled")&&a.input.focus()},_getBorders:function(a){var b=function(c){return{thin:1,medium:2,thick:3}[c]||c};return[parseFloat(b(a.css("border-left-width"))),parseFloat(b(a.css("border-top-width")))]},_checkOffset:function(a,b,c){var e=a.dpDiv.outerWidth(),f=a.dpDiv.outerHeight(),h=a.input?a.input.outerWidth():0,i=a.input?a.input.outerHeight():0,g=document.documentElement.clientWidth+d(document).scrollLeft(),
+k=document.documentElement.clientHeight+d(document).scrollTop();b.left-=this._get(a,"isRTL")?e-h:0;b.left-=c&&b.left==a.input.offset().left?d(document).scrollLeft():0;b.top-=c&&b.top==a.input.offset().top+i?d(document).scrollTop():0;b.left-=Math.min(b.left,b.left+e>g&&g>e?Math.abs(b.left+e-g):0);b.top-=Math.min(b.top,b.top+f>k&&k>f?Math.abs(f+i):0);return b},_findPos:function(a){for(var b=this._get(this._getInst(a),"isRTL");a&&(a.type=="hidden"||a.nodeType!=1);)a=a[b?"previousSibling":"nextSibling"];
+a=d(a).offset();return[a.left,a.top]},_hideDatepicker:function(a){var b=this._curInst;if(!(!b||a&&b!=d.data(a,"datepicker")))if(this._datepickerShowing){a=this._get(b,"showAnim");var c=this._get(b,"duration"),e=function(){d.datepicker._tidyDialog(b);this._curInst=null};d.effects&&d.effects[a]?b.dpDiv.hide(a,d.datepicker._get(b,"showOptions"),c,e):b.dpDiv[a=="slideDown"?"slideUp":a=="fadeIn"?"fadeOut":"hide"](a?c:null,e);a||e();if(a=this._get(b,"onClose"))a.apply(b.input?b.input[0]:null,[b.input?b.input.val():
+"",b]);this._datepickerShowing=false;this._lastInput=null;if(this._inDialog){this._dialogInput.css({position:"absolute",left:"0",top:"-100px"});if(d.blockUI){d.unblockUI();d("body").append(this.dpDiv)}}this._inDialog=false}},_tidyDialog:function(a){a.dpDiv.removeClass(this._dialogClass).unbind(".ui-datepicker-calendar")},_checkExternalClick:function(a){if(d.datepicker._curInst){a=d(a.target);a[0].id!=d.datepicker._mainDivId&&a.parents("#"+d.datepicker._mainDivId).length==0&&!a.hasClass(d.datepicker.markerClassName)&&
+!a.hasClass(d.datepicker._triggerClass)&&d.datepicker._datepickerShowing&&!(d.datepicker._inDialog&&d.blockUI)&&d.datepicker._hideDatepicker()}},_adjustDate:function(a,b,c){a=d(a);var e=this._getInst(a[0]);if(!this._isDisabledDatepicker(a[0])){this._adjustInstDate(e,b+(c=="M"?this._get(e,"showCurrentAtPos"):0),c);this._updateDatepicker(e)}},_gotoToday:function(a){a=d(a);var b=this._getInst(a[0]);if(this._get(b,"gotoCurrent")&&b.currentDay){b.selectedDay=b.currentDay;b.drawMonth=b.selectedMonth=b.currentMonth;
+b.drawYear=b.selectedYear=b.currentYear}else{var c=new Date;b.selectedDay=c.getDate();b.drawMonth=b.selectedMonth=c.getMonth();b.drawYear=b.selectedYear=c.getFullYear()}this._notifyChange(b);this._adjustDate(a)},_selectMonthYear:function(a,b,c){a=d(a);var e=this._getInst(a[0]);e._selectingMonthYear=false;e["selected"+(c=="M"?"Month":"Year")]=e["draw"+(c=="M"?"Month":"Year")]=parseInt(b.options[b.selectedIndex].value,10);this._notifyChange(e);this._adjustDate(a)},_clickMonthYear:function(a){a=this._getInst(d(a)[0]);
+a.input&&a._selectingMonthYear&&!d.browser.msie&&a.input.focus();a._selectingMonthYear=!a._selectingMonthYear},_selectDay:function(a,b,c,e){var f=d(a);if(!(d(e).hasClass(this._unselectableClass)||this._isDisabledDatepicker(f[0]))){f=this._getInst(f[0]);f.selectedDay=f.currentDay=d("a",e).html();f.selectedMonth=f.currentMonth=b;f.selectedYear=f.currentYear=c;this._selectDate(a,this._formatDate(f,f.currentDay,f.currentMonth,f.currentYear))}},_clearDate:function(a){a=d(a);this._getInst(a[0]);this._selectDate(a,
+"")},_selectDate:function(a,b){a=this._getInst(d(a)[0]);b=b!=null?b:this._formatDate(a);a.input&&a.input.val(b);this._updateAlternate(a);var c=this._get(a,"onSelect");if(c)c.apply(a.input?a.input[0]:null,[b,a]);else a.input&&a.input.trigger("change");if(a.inline)this._updateDatepicker(a);else{this._hideDatepicker();this._lastInput=a.input[0];typeof a.input[0]!="object"&&a.input.focus();this._lastInput=null}},_updateAlternate:function(a){var b=this._get(a,"altField");if(b){var c=this._get(a,"altFormat")||
+this._get(a,"dateFormat"),e=this._getDate(a),f=this.formatDate(c,e,this._getFormatConfig(a));d(b).each(function(){d(this).val(f)})}},noWeekends:function(a){a=a.getDay();return[a>0&&a<6,""]},iso8601Week:function(a){a=new Date(a.getTime());a.setDate(a.getDate()+4-(a.getDay()||7));var b=a.getTime();a.setMonth(0);a.setDate(1);return Math.floor(Math.round((b-a)/864E5)/7)+1},parseDate:function(a,b,c){if(a==null||b==null)throw"Invalid arguments";b=typeof b=="object"?b.toString():b+"";if(b=="")return null;
+for(var e=(c?c.shortYearCutoff:null)||this._defaults.shortYearCutoff,f=(c?c.dayNamesShort:null)||this._defaults.dayNamesShort,h=(c?c.dayNames:null)||this._defaults.dayNames,i=(c?c.monthNamesShort:null)||this._defaults.monthNamesShort,g=(c?c.monthNames:null)||this._defaults.monthNames,k=c=-1,l=-1,u=-1,j=false,o=function(p){(p=z+1<a.length&&a.charAt(z+1)==p)&&z++;return p},m=function(p){o(p);p=new RegExp("^\\d{1,"+(p=="@"?14:p=="!"?20:p=="y"?4:p=="o"?3:2)+"}");p=b.substring(s).match(p);if(!p)throw"Missing number at position "+
+s;s+=p[0].length;return parseInt(p[0],10)},n=function(p,w,G){p=o(p)?G:w;for(w=0;w<p.length;w++)if(b.substr(s,p[w].length)==p[w]){s+=p[w].length;return w+1}throw"Unknown name at position "+s;},r=function(){if(b.charAt(s)!=a.charAt(z))throw"Unexpected literal at position "+s;s++},s=0,z=0;z<a.length;z++)if(j)if(a.charAt(z)=="'"&&!o("'"))j=false;else r();else switch(a.charAt(z)){case "d":l=m("d");break;case "D":n("D",f,h);break;case "o":u=m("o");break;case "m":k=m("m");break;case "M":k=n("M",i,g);break;
+case "y":c=m("y");break;case "@":var v=new Date(m("@"));c=v.getFullYear();k=v.getMonth()+1;l=v.getDate();break;case "!":v=new Date((m("!")-this._ticksTo1970)/1E4);c=v.getFullYear();k=v.getMonth()+1;l=v.getDate();break;case "'":if(o("'"))r();else j=true;break;default:r()}if(c==-1)c=(new Date).getFullYear();else if(c<100)c+=(new Date).getFullYear()-(new Date).getFullYear()%100+(c<=e?0:-100);if(u>-1){k=1;l=u;do{e=this._getDaysInMonth(c,k-1);if(l<=e)break;k++;l-=e}while(1)}v=this._daylightSavingAdjust(new Date(c,
+k-1,l));if(v.getFullYear()!=c||v.getMonth()+1!=k||v.getDate()!=l)throw"Invalid date";return v},ATOM:"yy-mm-dd",COOKIE:"D, dd M yy",ISO_8601:"yy-mm-dd",RFC_822:"D, d M y",RFC_850:"DD, dd-M-y",RFC_1036:"D, d M y",RFC_1123:"D, d M yy",RFC_2822:"D, d M yy",RSS:"D, d M y",TICKS:"!",TIMESTAMP:"@",W3C:"yy-mm-dd",_ticksTo1970:(718685+Math.floor(492.5)-Math.floor(19.7)+Math.floor(4.925))*24*60*60*1E7,formatDate:function(a,b,c){if(!b)return"";var e=(c?c.dayNamesShort:null)||this._defaults.dayNamesShort,f=(c?
+c.dayNames:null)||this._defaults.dayNames,h=(c?c.monthNamesShort:null)||this._defaults.monthNamesShort;c=(c?c.monthNames:null)||this._defaults.monthNames;var i=function(o){(o=j+1<a.length&&a.charAt(j+1)==o)&&j++;return o},g=function(o,m,n){m=""+m;if(i(o))for(;m.length<n;)m="0"+m;return m},k=function(o,m,n,r){return i(o)?r[m]:n[m]},l="",u=false;if(b)for(var j=0;j<a.length;j++)if(u)if(a.charAt(j)=="'"&&!i("'"))u=false;else l+=a.charAt(j);else switch(a.charAt(j)){case "d":l+=g("d",b.getDate(),2);break;
+case "D":l+=k("D",b.getDay(),e,f);break;case "o":l+=g("o",(b.getTime()-(new Date(b.getFullYear(),0,0)).getTime())/864E5,3);break;case "m":l+=g("m",b.getMonth()+1,2);break;case "M":l+=k("M",b.getMonth(),h,c);break;case "y":l+=i("y")?b.getFullYear():(b.getYear()%100<10?"0":"")+b.getYear()%100;break;case "@":l+=b.getTime();break;case "!":l+=b.getTime()*1E4+this._ticksTo1970;break;case "'":if(i("'"))l+="'";else u=true;break;default:l+=a.charAt(j)}return l},_possibleChars:function(a){for(var b="",c=false,
+e=function(h){(h=f+1<a.length&&a.charAt(f+1)==h)&&f++;return h},f=0;f<a.length;f++)if(c)if(a.charAt(f)=="'"&&!e("'"))c=false;else b+=a.charAt(f);else switch(a.charAt(f)){case "d":case "m":case "y":case "@":b+="0123456789";break;case "D":case "M":return null;case "'":if(e("'"))b+="'";else c=true;break;default:b+=a.charAt(f)}return b},_get:function(a,b){return a.settings[b]!==undefined?a.settings[b]:this._defaults[b]},_setDateFromField:function(a,b){if(a.input.val()!=a.lastVal){var c=this._get(a,"dateFormat"),
+e=a.lastVal=a.input?a.input.val():null,f,h;f=h=this._getDefaultDate(a);var i=this._getFormatConfig(a);try{f=this.parseDate(c,e,i)||h}catch(g){this.log(g);e=b?"":e}a.selectedDay=f.getDate();a.drawMonth=a.selectedMonth=f.getMonth();a.drawYear=a.selectedYear=f.getFullYear();a.currentDay=e?f.getDate():0;a.currentMonth=e?f.getMonth():0;a.currentYear=e?f.getFullYear():0;this._adjustInstDate(a)}},_getDefaultDate:function(a){return this._restrictMinMax(a,this._determineDate(a,this._get(a,"defaultDate"),new Date))},
+_determineDate:function(a,b,c){var e=function(h){var i=new Date;i.setDate(i.getDate()+h);return i},f=function(h){try{return d.datepicker.parseDate(d.datepicker._get(a,"dateFormat"),h,d.datepicker._getFormatConfig(a))}catch(i){}var g=(h.toLowerCase().match(/^c/)?d.datepicker._getDate(a):null)||new Date,k=g.getFullYear(),l=g.getMonth();g=g.getDate();for(var u=/([+-]?[0-9]+)\s*(d|D|w|W|m|M|y|Y)?/g,j=u.exec(h);j;){switch(j[2]||"d"){case "d":case "D":g+=parseInt(j[1],10);break;case "w":case "W":g+=parseInt(j[1],
+10)*7;break;case "m":case "M":l+=parseInt(j[1],10);g=Math.min(g,d.datepicker._getDaysInMonth(k,l));break;case "y":case "Y":k+=parseInt(j[1],10);g=Math.min(g,d.datepicker._getDaysInMonth(k,l));break}j=u.exec(h)}return new Date(k,l,g)};if(b=(b=b==null?c:typeof b=="string"?f(b):typeof b=="number"?isNaN(b)?c:e(b):b)&&b.toString()=="Invalid Date"?c:b){b.setHours(0);b.setMinutes(0);b.setSeconds(0);b.setMilliseconds(0)}return this._daylightSavingAdjust(b)},_daylightSavingAdjust:function(a){if(!a)return null;
+a.setHours(a.getHours()>12?a.getHours()+2:0);return a},_setDate:function(a,b,c){var e=!b,f=a.selectedMonth,h=a.selectedYear;b=this._restrictMinMax(a,this._determineDate(a,b,new Date));a.selectedDay=a.currentDay=b.getDate();a.drawMonth=a.selectedMonth=a.currentMonth=b.getMonth();a.drawYear=a.selectedYear=a.currentYear=b.getFullYear();if((f!=a.selectedMonth||h!=a.selectedYear)&&!c)this._notifyChange(a);this._adjustInstDate(a);if(a.input)a.input.val(e?"":this._formatDate(a))},_getDate:function(a){return!a.currentYear||
+a.input&&a.input.val()==""?null:this._daylightSavingAdjust(new Date(a.currentYear,a.currentMonth,a.currentDay))},_generateHTML:function(a){var b=new Date;b=this._daylightSavingAdjust(new Date(b.getFullYear(),b.getMonth(),b.getDate()));var c=this._get(a,"isRTL"),e=this._get(a,"showButtonPanel"),f=this._get(a,"hideIfNoPrevNext"),h=this._get(a,"navigationAsDateFormat"),i=this._getNumberOfMonths(a),g=this._get(a,"showCurrentAtPos"),k=this._get(a,"stepMonths"),l=i[0]!=1||i[1]!=1,u=this._daylightSavingAdjust(!a.currentDay?
+new Date(9999,9,9):new Date(a.currentYear,a.currentMonth,a.currentDay)),j=this._getMinMaxDate(a,"min"),o=this._getMinMaxDate(a,"max");g=a.drawMonth-g;var m=a.drawYear;if(g<0){g+=12;m--}if(o){var n=this._daylightSavingAdjust(new Date(o.getFullYear(),o.getMonth()-i[0]*i[1]+1,o.getDate()));for(n=j&&n<j?j:n;this._daylightSavingAdjust(new Date(m,g,1))>n;){g--;if(g<0){g=11;m--}}}a.drawMonth=g;a.drawYear=m;n=this._get(a,"prevText");n=!h?n:this.formatDate(n,this._daylightSavingAdjust(new Date(m,g-k,1)),this._getFormatConfig(a));
+n=this._canAdjustMonth(a,-1,m,g)?'<a class="ui-datepicker-prev ui-corner-all" onclick="DP_jQuery_'+y+".datepicker._adjustDate('#"+a.id+"', -"+k+", 'M');\" title=\""+n+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"e":"w")+'">'+n+"</span></a>":f?"":'<a class="ui-datepicker-prev ui-corner-all ui-state-disabled" title="'+n+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"e":"w")+'">'+n+"</span></a>";var r=this._get(a,"nextText");r=!h?r:this.formatDate(r,this._daylightSavingAdjust(new Date(m,
+g+k,1)),this._getFormatConfig(a));f=this._canAdjustMonth(a,+1,m,g)?'<a class="ui-datepicker-next ui-corner-all" onclick="DP_jQuery_'+y+".datepicker._adjustDate('#"+a.id+"', +"+k+", 'M');\" title=\""+r+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"w":"e")+'">'+r+"</span></a>":f?"":'<a class="ui-datepicker-next ui-corner-all ui-state-disabled" title="'+r+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"w":"e")+'">'+r+"</span></a>";k=this._get(a,"currentText");r=this._get(a,"gotoCurrent")&&
+a.currentDay?u:b;k=!h?k:this.formatDate(k,r,this._getFormatConfig(a));h=!a.inline?'<button type="button" class="ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all" onclick="DP_jQuery_'+y+'.datepicker._hideDatepicker();">'+this._get(a,"closeText")+"</button>":"";e=e?'<div class="ui-datepicker-buttonpane ui-widget-content">'+(c?h:"")+(this._isInRange(a,r)?'<button type="button" class="ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all" onclick="DP_jQuery_'+
+y+".datepicker._gotoToday('#"+a.id+"');\">"+k+"</button>":"")+(c?"":h)+"</div>":"";h=parseInt(this._get(a,"firstDay"),10);h=isNaN(h)?0:h;k=this._get(a,"showWeek");r=this._get(a,"dayNames");this._get(a,"dayNamesShort");var s=this._get(a,"dayNamesMin"),z=this._get(a,"monthNames"),v=this._get(a,"monthNamesShort"),p=this._get(a,"beforeShowDay"),w=this._get(a,"showOtherMonths"),G=this._get(a,"selectOtherMonths");this._get(a,"calculateWeek");for(var L=this._getDefaultDate(a),H="",C=0;C<i[0];C++){for(var M=
+"",D=0;D<i[1];D++){var I=this._daylightSavingAdjust(new Date(m,g,a.selectedDay)),t=" ui-corner-all",x="";if(l){x+='<div class="ui-datepicker-group';if(i[1]>1)switch(D){case 0:x+=" ui-datepicker-group-first";t=" ui-corner-"+(c?"right":"left");break;case i[1]-1:x+=" ui-datepicker-group-last";t=" ui-corner-"+(c?"left":"right");break;default:x+=" ui-datepicker-group-middle";t="";break}x+='">'}x+='<div class="ui-datepicker-header ui-widget-header ui-helper-clearfix'+t+'">'+(/all|left/.test(t)&&C==0?c?
+f:n:"")+(/all|right/.test(t)&&C==0?c?n:f:"")+this._generateMonthYearHeader(a,g,m,j,o,C>0||D>0,z,v)+'</div><table class="ui-datepicker-calendar"><thead><tr>';var A=k?'<th class="ui-datepicker-week-col">'+this._get(a,"weekHeader")+"</th>":"";for(t=0;t<7;t++){var q=(t+h)%7;A+="<th"+((t+h+6)%7>=5?' class="ui-datepicker-week-end"':"")+'><span title="'+r[q]+'">'+s[q]+"</span></th>"}x+=A+"</tr></thead><tbody>";A=this._getDaysInMonth(m,g);if(m==a.selectedYear&&g==a.selectedMonth)a.selectedDay=Math.min(a.selectedDay,
+A);t=(this._getFirstDayOfMonth(m,g)-h+7)%7;A=l?6:Math.ceil((t+A)/7);q=this._daylightSavingAdjust(new Date(m,g,1-t));for(var N=0;N<A;N++){x+="<tr>";var O=!k?"":'<td class="ui-datepicker-week-col">'+this._get(a,"calculateWeek")(q)+"</td>";for(t=0;t<7;t++){var F=p?p.apply(a.input?a.input[0]:null,[q]):[true,""],B=q.getMonth()!=g,J=B&&!G||!F[0]||j&&q<j||o&&q>o;O+='<td class="'+((t+h+6)%7>=5?" ui-datepicker-week-end":"")+(B?" ui-datepicker-other-month":"")+(q.getTime()==I.getTime()&&g==a.selectedMonth&&
+a._keyEvent||L.getTime()==q.getTime()&&L.getTime()==I.getTime()?" "+this._dayOverClass:"")+(J?" "+this._unselectableClass+" ui-state-disabled":"")+(B&&!w?"":" "+F[1]+(q.getTime()==u.getTime()?" "+this._currentClass:"")+(q.getTime()==b.getTime()?" ui-datepicker-today":""))+'"'+((!B||w)&&F[2]?' title="'+F[2]+'"':"")+(J?"":' onclick="DP_jQuery_'+y+".datepicker._selectDay('#"+a.id+"',"+q.getMonth()+","+q.getFullYear()+', this);return false;"')+">"+(B&&!w?"&#xa0;":J?'<span class="ui-state-default">'+q.getDate()+
+"</span>":'<a class="ui-state-default'+(q.getTime()==b.getTime()?" ui-state-highlight":"")+(q.getTime()==I.getTime()?" ui-state-active":"")+(B?" ui-priority-secondary":"")+'" href="#">'+q.getDate()+"</a>")+"</td>";q.setDate(q.getDate()+1);q=this._daylightSavingAdjust(q)}x+=O+"</tr>"}g++;if(g>11){g=0;m++}x+="</tbody></table>"+(l?"</div>"+(i[0]>0&&D==i[1]-1?'<div class="ui-datepicker-row-break"></div>':""):"");M+=x}H+=M}H+=e+(d.browser.msie&&parseInt(d.browser.version,10)<7&&!a.inline?'<iframe src="javascript:false;" class="ui-datepicker-cover" frameborder="0"></iframe>':
+"");a._keyEvent=false;return H},_generateMonthYearHeader:function(a,b,c,e,f,h,i,g){var k=this._get(a,"changeMonth"),l=this._get(a,"changeYear"),u=this._get(a,"showMonthAfterYear"),j='<div class="ui-datepicker-title">',o="";if(h||!k)o+='<span class="ui-datepicker-month">'+i[b]+"</span>";else{i=e&&e.getFullYear()==c;var m=f&&f.getFullYear()==c;o+='<select class="ui-datepicker-month" onchange="DP_jQuery_'+y+".datepicker._selectMonthYear('#"+a.id+"', this, 'M');\" onclick=\"DP_jQuery_"+y+".datepicker._clickMonthYear('#"+
+a.id+"');\">";for(var n=0;n<12;n++)if((!i||n>=e.getMonth())&&(!m||n<=f.getMonth()))o+='<option value="'+n+'"'+(n==b?' selected="selected"':"")+">"+g[n]+"</option>";o+="</select>"}u||(j+=o+(h||!(k&&l)?"&#xa0;":""));if(h||!l)j+='<span class="ui-datepicker-year">'+c+"</span>";else{g=this._get(a,"yearRange").split(":");var r=(new Date).getFullYear();i=function(s){s=s.match(/c[+-].*/)?c+parseInt(s.substring(1),10):s.match(/[+-].*/)?r+parseInt(s,10):parseInt(s,10);return isNaN(s)?r:s};b=i(g[0]);g=Math.max(b,
+i(g[1]||""));b=e?Math.max(b,e.getFullYear()):b;g=f?Math.min(g,f.getFullYear()):g;for(j+='<select class="ui-datepicker-year" onchange="DP_jQuery_'+y+".datepicker._selectMonthYear('#"+a.id+"', this, 'Y');\" onclick=\"DP_jQuery_"+y+".datepicker._clickMonthYear('#"+a.id+"');\">";b<=g;b++)j+='<option value="'+b+'"'+(b==c?' selected="selected"':"")+">"+b+"</option>";j+="</select>"}j+=this._get(a,"yearSuffix");if(u)j+=(h||!(k&&l)?"&#xa0;":"")+o;j+="</div>";return j},_adjustInstDate:function(a,b,c){var e=
+a.drawYear+(c=="Y"?b:0),f=a.drawMonth+(c=="M"?b:0);b=Math.min(a.selectedDay,this._getDaysInMonth(e,f))+(c=="D"?b:0);e=this._restrictMinMax(a,this._daylightSavingAdjust(new Date(e,f,b)));a.selectedDay=e.getDate();a.drawMonth=a.selectedMonth=e.getMonth();a.drawYear=a.selectedYear=e.getFullYear();if(c=="M"||c=="Y")this._notifyChange(a)},_restrictMinMax:function(a,b){var c=this._getMinMaxDate(a,"min");a=this._getMinMaxDate(a,"max");b=c&&b<c?c:b;return b=a&&b>a?a:b},_notifyChange:function(a){var b=this._get(a,
+"onChangeMonthYear");if(b)b.apply(a.input?a.input[0]:null,[a.selectedYear,a.selectedMonth+1,a])},_getNumberOfMonths:function(a){a=this._get(a,"numberOfMonths");return a==null?[1,1]:typeof a=="number"?[1,a]:a},_getMinMaxDate:function(a,b){return this._determineDate(a,this._get(a,b+"Date"),null)},_getDaysInMonth:function(a,b){return 32-(new Date(a,b,32)).getDate()},_getFirstDayOfMonth:function(a,b){return(new Date(a,b,1)).getDay()},_canAdjustMonth:function(a,b,c,e){var f=this._getNumberOfMonths(a);
+c=this._daylightSavingAdjust(new Date(c,e+(b<0?b:f[0]*f[1]),1));b<0&&c.setDate(this._getDaysInMonth(c.getFullYear(),c.getMonth()));return this._isInRange(a,c)},_isInRange:function(a,b){var c=this._getMinMaxDate(a,"min");a=this._getMinMaxDate(a,"max");return(!c||b.getTime()>=c.getTime())&&(!a||b.getTime()<=a.getTime())},_getFormatConfig:function(a){var b=this._get(a,"shortYearCutoff");b=typeof b!="string"?b:(new Date).getFullYear()%100+parseInt(b,10);return{shortYearCutoff:b,dayNamesShort:this._get(a,
+"dayNamesShort"),dayNames:this._get(a,"dayNames"),monthNamesShort:this._get(a,"monthNamesShort"),monthNames:this._get(a,"monthNames")}},_formatDate:function(a,b,c,e){if(!b){a.currentDay=a.selectedDay;a.currentMonth=a.selectedMonth;a.currentYear=a.selectedYear}b=b?typeof b=="object"?b:this._daylightSavingAdjust(new Date(e,c,b)):this._daylightSavingAdjust(new Date(a.currentYear,a.currentMonth,a.currentDay));return this.formatDate(this._get(a,"dateFormat"),b,this._getFormatConfig(a))}});d.fn.datepicker=
+function(a){if(!d.datepicker.initialized){d(document).mousedown(d.datepicker._checkExternalClick).find("body").append(d.datepicker.dpDiv);d.datepicker.initialized=true}var b=Array.prototype.slice.call(arguments,1);if(typeof a=="string"&&(a=="isDisabled"||a=="getDate"||a=="widget"))return d.datepicker["_"+a+"Datepicker"].apply(d.datepicker,[this[0]].concat(b));if(a=="option"&&arguments.length==2&&typeof arguments[1]=="string")return d.datepicker["_"+a+"Datepicker"].apply(d.datepicker,[this[0]].concat(b));
+return this.each(function(){typeof a=="string"?d.datepicker["_"+a+"Datepicker"].apply(d.datepicker,[this].concat(b)):d.datepicker._attachDatepicker(this,a)})};d.datepicker=new K;d.datepicker.initialized=false;d.datepicker.uuid=(new Date).getTime();d.datepicker.version="1.9m2";window["DP_jQuery_"+y]=d})(jQuery);
+(function(c){c.widget("ui.dialog",{options:{autoOpen:true,buttons:{},closeOnEscape:true,closeText:"close",dialogClass:"",draggable:true,hide:null,height:"auto",maxHeight:false,maxWidth:false,minHeight:150,minWidth:150,modal:false,position:"center",resizable:true,show:null,stack:true,title:"",width:300,zIndex:1E3},_create:function(){this.originalTitle=this.element.attr("title");var a=this,b=a.options,d=b.title||a.originalTitle||"&#160;",e=c.ui.dialog.getTitleId(a.element),g=(a.uiDialog=c("<div></div>")).appendTo(document.body).hide().addClass("ui-dialog ui-widget ui-widget-content ui-corner-all "+
+b.dialogClass).css({zIndex:b.zIndex}).attr("tabIndex",-1).css("outline",0).keydown(function(i){if(b.closeOnEscape&&i.keyCode&&i.keyCode===c.ui.keyCode.ESCAPE){a.close(i);i.preventDefault()}}).attr({role:"dialog","aria-labelledby":e}).mousedown(function(i){a.moveToTop(false,i)});a.element.show().removeAttr("title").addClass("ui-dialog-content ui-widget-content").appendTo(g);var f=(a.uiDialogTitlebar=c("<div></div>")).addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix").prependTo(g),
+h=c('<a href="#"></a>').addClass("ui-dialog-titlebar-close ui-corner-all").attr("role","button").hover(function(){h.addClass("ui-state-hover")},function(){h.removeClass("ui-state-hover")}).focus(function(){h.addClass("ui-state-focus")}).blur(function(){h.removeClass("ui-state-focus")}).click(function(i){a.close(i);return false}).appendTo(f);(a.uiDialogTitlebarCloseText=c("<span></span>")).addClass("ui-icon ui-icon-closethick").text(b.closeText).appendTo(h);c("<span></span>").addClass("ui-dialog-title").attr("id",
+e).html(d).prependTo(f);if(c.isFunction(b.beforeclose)&&!c.isFunction(b.beforeClose))b.beforeClose=b.beforeclose;f.find("*").add(f).disableSelection();b.draggable&&c.fn.draggable&&a._makeDraggable();b.resizable&&c.fn.resizable&&a._makeResizable();a._createButtons(b.buttons);a._isOpen=false;c.fn.bgiframe&&g.bgiframe()},_init:function(){this.options.autoOpen&&this.open()},destroy:function(){var a=this;a.overlay&&a.overlay.destroy();a.uiDialog.hide();a.element.unbind(".dialog").removeData("dialog").removeClass("ui-dialog-content ui-widget-content").hide().appendTo("body");
+a.uiDialog.remove();a.originalTitle&&a.element.attr("title",a.originalTitle);return a},widget:function(){return this.uiDialog},close:function(a){var b=this,d;if(false!==b._trigger("beforeClose",a)){b.overlay&&b.overlay.destroy();b.uiDialog.unbind("keypress.ui-dialog");b._isOpen=false;if(b.options.hide)b.uiDialog.hide(b.options.hide,function(){b._trigger("close",a)});else{b.uiDialog.hide();b._trigger("close",a)}c.ui.dialog.overlay.resize();if(b.options.modal){d=0;c(".ui-dialog").each(function(){if(this!==
+b.uiDialog[0])d=Math.max(d,c(this).css("z-index"))});c.ui.dialog.maxZ=d}return b}},isOpen:function(){return this._isOpen},moveToTop:function(a,b){var d=this,e=d.options;if(e.modal&&!a||!e.stack&&!e.modal)return d._trigger("focus",b);if(e.zIndex>c.ui.dialog.maxZ)c.ui.dialog.maxZ=e.zIndex;if(d.overlay){c.ui.dialog.maxZ+=1;d.overlay.$el.css("z-index",c.ui.dialog.overlay.maxZ=c.ui.dialog.maxZ)}a={scrollTop:d.element.attr("scrollTop"),scrollLeft:d.element.attr("scrollLeft")};c.ui.dialog.maxZ+=1;d.uiDialog.css("z-index",
+c.ui.dialog.maxZ);d.element.attr(a);d._trigger("focus",b);return d},open:function(){if(!this._isOpen){var a=this,b=a.options,d=a.uiDialog;a.overlay=b.modal?new c.ui.dialog.overlay(a):null;d.next().length&&d.appendTo("body");a._size();a._position(b.position);d.show(b.show);a.moveToTop(true);b.modal&&d.bind("keypress.ui-dialog",function(e){if(e.keyCode===c.ui.keyCode.TAB){var g=c(":tabbable",this),f=g.filter(":first");g=g.filter(":last");if(e.target===g[0]&&!e.shiftKey){f.focus(1);return false}else if(e.target===
+f[0]&&e.shiftKey){g.focus(1);return false}}});c([]).add(d.find(".ui-dialog-content :tabbable:first")).add(d.find(".ui-dialog-buttonpane :tabbable:first")).add(d).filter(":first").focus();a._trigger("open");a._isOpen=true;return a}},_createButtons:function(a){var b=this,d=false,e=c("<div></div>").addClass("ui-dialog-buttonpane ui-widget-content ui-helper-clearfix");b.uiDialog.find(".ui-dialog-buttonpane").remove();typeof a==="object"&&a!==null&&c.each(a,function(){return!(d=true)});if(d){c.each(a,
+function(g,f){g=c('<button type="button"></button>').text(g).click(function(){f.apply(b.element[0],arguments)}).appendTo(e);c.fn.button&&g.button()});e.appendTo(b.uiDialog)}},_makeDraggable:function(){function a(f){return{position:f.position,offset:f.offset}}var b=this,d=b.options,e=c(document),g;b.uiDialog.draggable({cancel:".ui-dialog-content, .ui-dialog-titlebar-close",handle:".ui-dialog-titlebar",containment:"document",start:function(f,h){g=d.height==="auto"?"auto":c(this).height();c(this).height(c(this).height()).addClass("ui-dialog-dragging");
+b._trigger("dragStart",f,a(h))},drag:function(f,h){b._trigger("drag",f,a(h))},stop:function(f,h){d.position=[h.position.left-e.scrollLeft(),h.position.top-e.scrollTop()];c(this).removeClass("ui-dialog-dragging").height(g);b._trigger("dragStop",f,a(h));c.ui.dialog.overlay.resize()}})},_makeResizable:function(a){function b(f){return{originalPosition:f.originalPosition,originalSize:f.originalSize,position:f.position,size:f.size}}a=a===undefined?this.options.resizable:a;var d=this,e=d.options,g=d.uiDialog.css("position");
+a=typeof a==="string"?a:"n,e,s,w,se,sw,ne,nw";d.uiDialog.resizable({cancel:".ui-dialog-content",containment:"document",alsoResize:d.element,maxWidth:e.maxWidth,maxHeight:e.maxHeight,minWidth:e.minWidth,minHeight:d._minHeight(),handles:a,start:function(f,h){c(this).addClass("ui-dialog-resizing");d._trigger("resizeStart",f,b(h))},resize:function(f,h){d._trigger("resize",f,b(h))},stop:function(f,h){c(this).removeClass("ui-dialog-resizing");e.height=c(this).height();e.width=c(this).width();d._trigger("resizeStop",
+f,b(h));c.ui.dialog.overlay.resize()}}).css("position",g).find(".ui-resizable-se").addClass("ui-icon ui-icon-grip-diagonal-se")},_minHeight:function(){var a=this.options;return a.height==="auto"?a.minHeight:Math.min(a.minHeight,a.height)},_position:function(a){var b=[],d=[0,0];a=a||c.ui.dialog.prototype.options.position;if(typeof a==="string"||typeof a==="object"&&"0"in a){b=a.split?a.split(" "):[a[0],a[1]];if(b.length===1)b[1]=b[0];c.each(["left","top"],function(e,g){if(+b[e]===b[e]){d[e]=b[e];b[e]=
+g}})}else if(typeof a==="object"){if("left"in a){b[0]="left";d[0]=a.left}else if("right"in a){b[0]="right";d[0]=-a.right}if("top"in a){b[1]="top";d[1]=a.top}else if("bottom"in a){b[1]="bottom";d[1]=-a.bottom}}(a=this.uiDialog.is(":visible"))||this.uiDialog.show();this.uiDialog.css({top:0,left:0}).position({my:b.join(" "),at:b.join(" "),offset:d.join(" "),of:window,collision:"fit",using:function(e){var g=c(this).css(e).offset().top;g<0&&c(this).css("top",e.top-g)}});a||this.uiDialog.hide()},_setOption:function(a,
+b){var d=this,e=d.uiDialog,g=e.is(":data(resizable)"),f=false;switch(a){case "beforeclose":a="beforeClose";break;case "buttons":d._createButtons(b);break;case "closeText":d.uiDialogTitlebarCloseText.text(""+b);break;case "dialogClass":e.removeClass(d.options.dialogClass).addClass("ui-dialog ui-widget ui-widget-content ui-corner-all "+b);break;case "disabled":b?e.addClass("ui-dialog-disabled"):e.removeClass("ui-dialog-disabled");break;case "draggable":b?d._makeDraggable():e.draggable("destroy");break;
+case "height":f=true;break;case "maxHeight":g&&e.resizable("option","maxHeight",b);f=true;break;case "maxWidth":g&&e.resizable("option","maxWidth",b);f=true;break;case "minHeight":g&&e.resizable("option","minHeight",b);f=true;break;case "minWidth":g&&e.resizable("option","minWidth",b);f=true;break;case "position":d._position(b);break;case "resizable":g&&!b&&e.resizable("destroy");g&&typeof b==="string"&&e.resizable("option","handles",b);!g&&b!==false&&d._makeResizable(b);break;case "title":c(".ui-dialog-title",
+d.uiDialogTitlebar).html(""+(b||"&#160;"));break;case "width":f=true;break}c.Widget.prototype._setOption.apply(d,arguments);f&&d._size()},_size:function(){var a=this.options,b;this.element.css({width:"auto",minHeight:0,height:0});b=this.uiDialog.css({height:"auto",width:a.width}).height();this.element.css(a.height==="auto"?{minHeight:Math.max(a.minHeight-b,0),height:"auto"}:{minHeight:0,height:Math.max(a.height-b,0)}).show();this.uiDialog.is(":data(resizable)")&&this.uiDialog.resizable("option","minHeight",
+this._minHeight())}});c.extend(c.ui.dialog,{version:"1.9m2",uuid:0,maxZ:0,getTitleId:function(a){a=a.attr("id");if(!a){this.uuid+=1;a=this.uuid}return"ui-dialog-title-"+a},overlay:function(a){this.$el=c.ui.dialog.overlay.create(a)}});c.extend(c.ui.dialog.overlay,{instances:[],oldInstances:[],maxZ:0,events:c.map("focus,mousedown,mouseup,keydown,keypress,click".split(","),function(a){return a+".dialog-overlay"}).join(" "),create:function(a){if(this.instances.length===0){setTimeout(function(){c.ui.dialog.overlay.instances.length&&
+c(document).bind(c.ui.dialog.overlay.events,function(d){return c(d.target).zIndex()>=c.ui.dialog.overlay.maxZ})},1);c(document).bind("keydown.dialog-overlay",function(d){if(a.options.closeOnEscape&&d.keyCode&&d.keyCode===c.ui.keyCode.ESCAPE){a.close(d);d.preventDefault()}});c(window).bind("resize.dialog-overlay",c.ui.dialog.overlay.resize)}var b=(this.oldInstances.pop()||c("<div></div>").addClass("ui-widget-overlay")).appendTo(document.body).css({width:this.width(),height:this.height()});c.fn.bgiframe&&
+b.bgiframe();this.instances.push(b);return b},destroy:function(a){this.oldInstances.push(this.instances.splice(c.inArray(a,this.instances),1)[0]);this.instances.length===0&&c([document,window]).unbind(".dialog-overlay");a.remove();var b=0;c.each(this.instances,function(){b=Math.max(b,this.css("z-index"))});this.maxZ=b},height:function(){var a,b;if(c.browser.msie&&c.browser.version<7){a=Math.max(document.documentElement.scrollHeight,document.body.scrollHeight);b=Math.max(document.documentElement.offsetHeight,
+document.body.offsetHeight);return a<b?c(window).height()+"px":a+"px"}else return c(document).height()+"px"},width:function(){var a,b;if(c.browser.msie&&c.browser.version<7){a=Math.max(document.documentElement.scrollWidth,document.body.scrollWidth);b=Math.max(document.documentElement.offsetWidth,document.body.offsetWidth);return a<b?c(window).width()+"px":a+"px"}else return c(document).width()+"px"},resize:function(){var a=c([]);c.each(c.ui.dialog.overlay.instances,function(){a=a.add(this)});a.css({width:0,
+height:0}).css({width:c.ui.dialog.overlay.width(),height:c.ui.dialog.overlay.height()})}});c.extend(c.ui.dialog.overlay.prototype,{destroy:function(){c.ui.dialog.overlay.destroy(this.$el)}})})(jQuery);
+(function(c){c.widget("ui.menu",{_create:function(){var a=this;this.element.addClass("ui-menu ui-widget ui-widget-content ui-corner-all").attr({role:"listbox","aria-activedescendant":"ui-active-menuitem"}).bind("click.menu",function(b){if(a.options.disabled)return false;if(c(b.target).closest(".ui-menu-item a").length){b.preventDefault();a.select(b)}});this.refresh();if(!this.options.input)this.options.input=this.element.attr("tabIndex",0);this.options.input.bind("keydown.menu",function(b){if(!a.options.disabled)switch(b.keyCode){case c.ui.keyCode.PAGE_UP:a.previousPage();
+b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.PAGE_DOWN:a.nextPage();b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.UP:a.previous();b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.DOWN:a.next();b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.ENTER:a.select();b.preventDefault();b.stopImmediatePropagation();break}})},destroy:function(){c.Widget.prototype.destroy.apply(this,arguments);this.element.removeClass("ui-menu ui-widget ui-widget-content ui-corner-all").removeAttr("tabIndex").removeAttr("role").removeAttr("aria-activedescendant");
+this.element.children(".ui-menu-item").removeClass("ui-menu-item").removeAttr("role").children("a").removeClass("ui-corner-all").removeAttr("tabIndex").unbind(".menu")},refresh:function(){var a=this;this.element.children("li:not(.ui-menu-item):has(a)").addClass("ui-menu-item").attr("role","menuitem").children("a").addClass("ui-corner-all").attr("tabIndex",-1).bind("mouseenter.menu",function(b){a.options.disabled||a.activate(b,c(this).parent())}).bind("mouseleave.menu",function(){a.options.disabled||
+a.deactivate()})},activate:function(a,b){this.deactivate();if(this._hasScroll()){var d=b.offset().top-this.element.offset().top,e=this.element.attr("scrollTop"),f=this.element.height();if(d<0)this.element.attr("scrollTop",e+d);else d>f&&this.element.attr("scrollTop",e+d-f+b.height())}this.active=b.eq(0).children("a").addClass("ui-state-hover").attr("id","ui-active-menuitem").end();this._trigger("focus",a,{item:b})},deactivate:function(){if(this.active){this.active.children("a").removeClass("ui-state-hover").removeAttr("id");
+this._trigger("blur");this.active=null}},next:function(a){this._move("next",".ui-menu-item:first",a)},previous:function(a){this._move("prev",".ui-menu-item:last",a)},first:function(){return this.active&&!this.active.prevAll(".ui-menu-item").length},last:function(){return this.active&&!this.active.nextAll(".ui-menu-item").length},_move:function(a,b,d){if(this.active){a=this.active[a+"All"](".ui-menu-item").eq(0);a.length?this.activate(d,a):this.activate(d,this.element.children(b))}else this.activate(d,
+this.element.children(b))},nextPage:function(a){if(this._hasScroll())if(!this.active||this.last())this.activate(a,this.element.children(":first"));else{var b=this.active.offset().top,d=this.element.height(),e=this.element.children("li").filter(function(){var f=c(this).offset().top-b-d+c(this).height();return f<10&&f>-10});e.length||(e=this.element.children(":last"));this.activate(a,e)}else this.activate(a,this.element.children(!this.active||this.last()?":first":":last"))},previousPage:function(a){if(this._hasScroll())if(!this.active||
+this.first())this.activate(a,this.element.children(":last"));else{var b=this.active.offset().top,d=this.element.height();result=this.element.children("li").filter(function(){var e=c(this).offset().top-b+d-c(this).height();return e<10&&e>-10});result.length||(result=this.element.children(":first"));this.activate(a,result)}else this.activate(a,this.element.children(!this.active||this.first()?":last":":first"))},_hasScroll:function(){return this.element.height()<this.element.attr("scrollHeight")},select:function(a){this._trigger("select",
+a,{item:this.active})}})})(jQuery);
+(function(c){c.ui=c.ui||{};var m=/left|center|right/,n=/top|center|bottom/,p=c.fn.position,q=c.fn.offset;c.fn.position=function(a){if(!a||!a.of)return p.apply(this,arguments);a=c.extend({},a);var b=c(a.of),d=(a.collision||"flip").split(" "),e=a.offset?a.offset.split(" "):[0,0],g,h,i;if(a.of.nodeType===9){g=b.width();h=b.height();i={top:0,left:0}}else if(a.of.scrollTo&&a.of.document){g=b.width();h=b.height();i={top:b.scrollTop(),left:b.scrollLeft()}}else if(a.of.preventDefault){a.at="left top";g=h=
+0;i={top:a.of.pageY,left:a.of.pageX}}else{g=b.outerWidth();h=b.outerHeight();i=b.offset()}c.each(["my","at"],function(){var f=(a[this]||"").split(" ");if(f.length===1)f=m.test(f[0])?f.concat(["center"]):n.test(f[0])?["center"].concat(f):["center","center"];f[0]=m.test(f[0])?f[0]:"center";f[1]=n.test(f[1])?f[1]:"center";a[this]=f});if(d.length===1)d[1]=d[0];e[0]=parseInt(e[0],10)||0;if(e.length===1)e[1]=e[0];e[1]=parseInt(e[1],10)||0;if(a.at[0]==="right")i.left+=g;else if(a.at[0]==="center")i.left+=
+g/2;if(a.at[1]==="bottom")i.top+=h;else if(a.at[1]==="center")i.top+=h/2;i.left+=e[0];i.top+=e[1];return this.each(function(){var f=c(this),k=f.outerWidth(),l=f.outerHeight(),j=c.extend({},i);if(a.my[0]==="right")j.left-=k;else if(a.my[0]==="center")j.left-=k/2;if(a.my[1]==="bottom")j.top-=l;else if(a.my[1]==="center")j.top-=l/2;j.left=parseInt(j.left);j.top=parseInt(j.top);c.each(["left","top"],function(o,r){c.ui.position[d[o]]&&c.ui.position[d[o]][r](j,{targetWidth:g,targetHeight:h,elemWidth:k,
+elemHeight:l,offset:e,my:a.my,at:a.at})});c.fn.bgiframe&&f.bgiframe();f.offset(c.extend(j,{using:a.using}))})};c.ui.position={fit:{left:function(a,b){var d=c(window);b=a.left+b.elemWidth-d.width()-d.scrollLeft();a.left=b>0?a.left-b:Math.max(0,a.left)},top:function(a,b){var d=c(window);b=a.top+b.elemHeight-d.height()-d.scrollTop();a.top=b>0?a.top-b:Math.max(0,a.top)}},flip:{left:function(a,b){if(b.at[0]!=="center"){var d=c(window);d=a.left+b.elemWidth-d.width()-d.scrollLeft();var e=b.my[0]==="left"?
+-b.elemWidth:b.my[0]==="right"?b.elemWidth:0,g=-2*b.offset[0];a.left+=a.left<0?e+b.targetWidth+g:d>0?e-b.targetWidth+g:0}},top:function(a,b){if(b.at[1]!=="center"){var d=c(window);d=a.top+b.elemHeight-d.height()-d.scrollTop();var e=b.my[1]==="top"?-b.elemHeight:b.my[1]==="bottom"?b.elemHeight:0,g=b.at[1]==="top"?b.targetHeight:-b.targetHeight,h=-2*b.offset[1];a.top+=a.top<0?e+b.targetHeight+h:d>0?e+g+h:0}}}};if(!c.offset.setOffset){c.offset.setOffset=function(a,b){if(/static/.test(c.curCSS(a,"position")))a.style.position=
+"relative";var d=c(a),e=d.offset(),g=parseInt(c.curCSS(a,"top",true),10)||0,h=parseInt(c.curCSS(a,"left",true),10)||0;e={top:b.top-e.top+g,left:b.left-e.left+h};"using"in b?b.using.call(a,e):d.css(e)};c.fn.offset=function(a){var b=this[0];if(!b||!b.ownerDocument)return null;if(a)return this.each(function(){c.offset.setOffset(this,a)});return q.call(this)}}})(jQuery);
+(function(b){b.widget("ui.progressbar",{options:{value:0},_create:function(){this.element.addClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").attr({role:"progressbar","aria-valuemin":this._valueMin(),"aria-valuemax":this._valueMax(),"aria-valuenow":this._value()});this.valueDiv=b("<div class='ui-progressbar-value ui-widget-header ui-corner-left'></div>").appendTo(this.element);this._refreshValue()},destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow");
+this.valueDiv.remove();this._superApply("destroy",arguments)},value:function(a){if(a===undefined)return this._value();this._setOption("value",a);return this},_setOption:function(a,c){switch(a){case "value":this.options.value=c;this._refreshValue();this._trigger("change");break}this._superApply("_setOption",arguments)},_value:function(){var a=this.options.value;if(typeof a!=="number")a=0;if(a<this._valueMin())a=this._valueMin();if(a>this._valueMax())a=this._valueMax();return a},_valueMin:function(){return 0},
+_valueMax:function(){return 100},_refreshValue:function(){var a=this.value();this.valueDiv[a===this._valueMax()?"addClass":"removeClass"]("ui-corner-right").width(a+"%");this.element.attr("aria-valuenow",a)}});b.extend(b.ui.progressbar,{version:"1.9m2"})})(jQuery);
+(function(d){d.widget("ui.slider",d.ui.mouse,{widgetEventPrefix:"slide",options:{animate:false,distance:0,max:100,min:0,orientation:"horizontal",range:false,step:1,value:0,values:null},_create:function(){var a=this,b=this.options;this._mouseSliding=this._keySliding=false;this._animateOff=true;this._handleIndex=null;this._detectOrientation();this._mouseInit();this.element.addClass("ui-slider ui-slider-"+this.orientation+" ui-widget ui-widget-content ui-corner-all");b.disabled&&this.element.addClass("ui-slider-disabled ui-disabled");
+this.range=d([]);if(b.range){if(b.range===true){this.range=d("<div></div>");if(!b.values)b.values=[this._valueMin(),this._valueMin()];if(b.values.length&&b.values.length!==2)b.values=[b.values[0],b.values[0]]}else this.range=d("<div></div>");this.range.appendTo(this.element).addClass("ui-slider-range");if(b.range==="min"||b.range==="max")this.range.addClass("ui-slider-range-"+b.range);this.range.addClass("ui-widget-header")}d(".ui-slider-handle",this.element).length===0&&d("<a href='#'></a>").appendTo(this.element).addClass("ui-slider-handle");
+if(b.values&&b.values.length)for(;d(".ui-slider-handle",this.element).length<b.values.length;)d("<a href='#'></a>").appendTo(this.element).addClass("ui-slider-handle");this.handles=d(".ui-slider-handle",this.element).addClass("ui-state-default ui-corner-all");this.handle=this.handles.eq(0);this.handles.add(this.range).filter("a").click(function(c){c.preventDefault()}).hover(function(){b.disabled||d(this).addClass("ui-state-hover")},function(){d(this).removeClass("ui-state-hover")}).focus(function(){if(b.disabled)d(this).blur();
+else{d(".ui-slider .ui-state-focus").removeClass("ui-state-focus");d(this).addClass("ui-state-focus")}}).blur(function(){d(this).removeClass("ui-state-focus")});this.handles.each(function(c){d(this).data("index.ui-slider-handle",c)});this.handles.keydown(function(c){var e=true,f=d(this).data("index.ui-slider-handle"),g,h,i;if(!a.options.disabled){switch(c.keyCode){case d.ui.keyCode.HOME:case d.ui.keyCode.END:case d.ui.keyCode.PAGE_UP:case d.ui.keyCode.PAGE_DOWN:case d.ui.keyCode.UP:case d.ui.keyCode.RIGHT:case d.ui.keyCode.DOWN:case d.ui.keyCode.LEFT:e=
+false;if(!a._keySliding){a._keySliding=true;d(this).addClass("ui-state-active");g=a._start(c,f);if(g===false)return}break}i=a.options.step;g=a.options.values&&a.options.values.length?(h=a.values(f)):(h=a.value());switch(c.keyCode){case d.ui.keyCode.HOME:h=a._valueMin();break;case d.ui.keyCode.END:h=a._valueMax();break;case d.ui.keyCode.PAGE_UP:h=a._trimAlignValue(g+(a._valueMax()-a._valueMin())/5);break;case d.ui.keyCode.PAGE_DOWN:h=a._trimAlignValue(g-(a._valueMax()-a._valueMin())/5);break;case d.ui.keyCode.UP:case d.ui.keyCode.RIGHT:if(g===
+a._valueMax())return;h=a._trimAlignValue(g+i);break;case d.ui.keyCode.DOWN:case d.ui.keyCode.LEFT:if(g===a._valueMin())return;h=a._trimAlignValue(g-i);break}a._slide(c,f,h);return e}}).keyup(function(c){var e=d(this).data("index.ui-slider-handle");if(a._keySliding){a._keySliding=false;a._stop(c,e);a._change(c,e);d(this).removeClass("ui-state-active")}});this._refreshValue();this._animateOff=false},destroy:function(){this.handles.remove();this.range.remove();this.element.removeClass("ui-slider ui-slider-horizontal ui-slider-vertical ui-slider-disabled ui-widget ui-widget-content ui-corner-all").removeData("slider").unbind(".slider");
+this._mouseDestroy();return this},_mouseCapture:function(a){var b=this.options,c,e,f,g,h,i;if(b.disabled)return false;this.elementSize={width:this.element.outerWidth(),height:this.element.outerHeight()};this.elementOffset=this.element.offset();c={x:a.pageX,y:a.pageY};e=this._normValueFromMouse(c);f=this._valueMax()-this._valueMin()+1;h=this;this.handles.each(function(j){var k=Math.abs(e-h.values(j));if(f>k){f=k;g=d(this);i=j}});if(b.range===true&&this.values(1)===b.min){i+=1;g=d(this.handles[i])}if(this._start(a,
+i)===false)return false;this._mouseSliding=true;h._handleIndex=i;g.addClass("ui-state-active").focus();b=g.offset();this._clickOffset=!d(a.target).parents().andSelf().is(".ui-slider-handle")?{left:0,top:0}:{left:a.pageX-b.left-g.width()/2,top:a.pageY-b.top-g.height()/2-(parseInt(g.css("borderTopWidth"),10)||0)-(parseInt(g.css("borderBottomWidth"),10)||0)+(parseInt(g.css("marginTop"),10)||0)};e=this._normValueFromMouse(c);this._slide(a,i,e);return this._animateOff=true},_mouseStart:function(){return true},
+_mouseDrag:function(a){var b=this._normValueFromMouse({x:a.pageX,y:a.pageY});this._slide(a,this._handleIndex,b);return false},_mouseStop:function(a){this.handles.removeClass("ui-state-active");this._mouseSliding=false;this._stop(a,this._handleIndex);this._change(a,this._handleIndex);this._clickOffset=this._handleIndex=null;return this._animateOff=false},_detectOrientation:function(){this.orientation=this.options.orientation==="vertical"?"vertical":"horizontal"},_normValueFromMouse:function(a){var b;
+if(this.orientation==="horizontal"){b=this.elementSize.width;a=a.x-this.elementOffset.left-(this._clickOffset?this._clickOffset.left:0)}else{b=this.elementSize.height;a=a.y-this.elementOffset.top-(this._clickOffset?this._clickOffset.top:0)}b=a/b;if(b>1)b=1;if(b<0)b=0;if(this.orientation==="vertical")b=1-b;a=this._valueMax()-this._valueMin();return this._trimAlignValue(this._valueMin()+b*a)},_start:function(a,b){var c={handle:this.handles[b],value:this.value()};if(this.options.values&&this.options.values.length){c.value=
+this.values(b);c.values=this.values()}return this._trigger("start",a,c)},_slide:function(a,b,c){var e;if(this.options.values&&this.options.values.length){e=this.values(b?0:1);if(this.options.values.length===2&&this.options.range===true&&(b===0&&c>e||b===1&&c<e))c=e;if(c!==this.values(b)){e=this.values();e[b]=c;a=this._trigger("slide",a,{handle:this.handles[b],value:c,values:e});this.values(b?0:1);a!==false&&this.values(b,c,true)}}else if(c!==this.value()){a=this._trigger("slide",a,{handle:this.handles[b],
+value:c});a!==false&&this.value(c)}},_stop:function(a,b){var c={handle:this.handles[b],value:this.value()};if(this.options.values&&this.options.values.length){c.value=this.values(b);c.values=this.values()}this._trigger("stop",a,c)},_change:function(a,b){if(!this._keySliding&&!this._mouseSliding){var c={handle:this.handles[b],value:this.value()};if(this.options.values&&this.options.values.length){c.value=this.values(b);c.values=this.values()}this._trigger("change",a,c)}},value:function(a){if(arguments.length){this.options.value=
+this._trimAlignValue(a);this._refreshValue();this._change(null,0)}return this._value()},values:function(a,b){var c,e,f;if(arguments.length>1){this.options.values[a]=this._trimAlignValue(b);this._refreshValue();this._change(null,a)}if(arguments.length)if(d.isArray(arguments[0])){c=this.options.values;e=arguments[0];for(f=0;f<c.length;f+=1){c[f]=this._trimAlignValue(e[f]);this._change(null,f)}this._refreshValue()}else return this.options.values&&this.options.values.length?this._values(a):this.value();
+else return this._values()},_setOption:function(a,b){var c,e=0;if(d.isArray(this.options.values))e=this.options.values.length;this._superApply("_setOption",arguments);switch(a){case "disabled":if(b){this.handles.filter(".ui-state-focus").blur();this.handles.removeClass("ui-state-hover");this.handles.attr("disabled","disabled");this.element.addClass("ui-disabled")}else{this.handles.removeAttr("disabled");this.element.removeClass("ui-disabled")}break;case "orientation":this._detectOrientation();this.element.removeClass("ui-slider-horizontal ui-slider-vertical").addClass("ui-slider-"+
+this.orientation);this._refreshValue();break;case "value":this._animateOff=true;this._refreshValue();this._change(null,0);this._animateOff=false;break;case "values":this._animateOff=true;this._refreshValue();for(c=0;c<e;c+=1)this._change(null,c);this._animateOff=false;break}},_value:function(){var a=this.options.value;return a=this._trimAlignValue(a)},_values:function(a){var b,c;if(arguments.length){b=this.options.values[a];return b=this._trimAlignValue(b)}else{b=this.options.values.slice();for(c=
+0;c<b.length;c+=1)b[c]=this._trimAlignValue(b[c]);return b}},_trimAlignValue:function(a){if(a<this._valueMin())return this._valueMin();if(a>this._valueMax())return this._valueMax();var b=this.options.step>0?this.options.step:1,c=a%b;a=a-c;if(Math.abs(c)*2>=b)a+=c>0?b:-b;return parseFloat(a.toFixed(5))},_valueMin:function(){return this.options.min},_valueMax:function(){return this.options.max},_refreshValue:function(){var a=this.options.range,b=this.options,c=this,e=!this._animateOff?b.animate:false,
+f,g={},h,i,j,k;if(this.options.values&&this.options.values.length)this.handles.each(function(l){f=(c.values(l)-c._valueMin())/(c._valueMax()-c._valueMin())*100;g[c.orientation==="horizontal"?"left":"bottom"]=f+"%";d(this).stop(1,1)[e?"animate":"css"](g,b.animate);if(c.options.range===true)if(c.orientation==="horizontal"){if(l===0)c.range.stop(1,1)[e?"animate":"css"]({left:f+"%"},b.animate);if(l===1)c.range[e?"animate":"css"]({width:f-h+"%"},{queue:false,duration:b.animate})}else{if(l===0)c.range.stop(1,
+1)[e?"animate":"css"]({bottom:f+"%"},b.animate);if(l===1)c.range[e?"animate":"css"]({height:f-h+"%"},{queue:false,duration:b.animate})}h=f});else{i=this.value();j=this._valueMin();k=this._valueMax();f=k!==j?(i-j)/(k-j)*100:0;g[c.orientation==="horizontal"?"left":"bottom"]=f+"%";this.handle.stop(1,1)[e?"animate":"css"](g,b.animate);if(a==="min"&&this.orientation==="horizontal")this.range.stop(1,1)[e?"animate":"css"]({width:f+"%"},b.animate);if(a==="max"&&this.orientation==="horizontal")this.range[e?
+"animate":"css"]({width:100-f+"%"},{queue:false,duration:b.animate});if(a==="min"&&this.orientation==="vertical")this.range.stop(1,1)[e?"animate":"css"]({height:f+"%"},b.animate);if(a==="max"&&this.orientation==="vertical")this.range[e?"animate":"css"]({height:100-f+"%"},{queue:false,duration:b.animate})}}});d.extend(d.ui.slider,{version:"1.9m2"})})(jQuery);
+(function(d){function s(){return++u}function v(){return++w}var u=0,w=0;d.widget("ui.tabs",{options:{add:null,ajaxOptions:null,cache:false,cookie:null,collapsible:false,disable:null,disabled:[],enable:null,event:"click",fx:null,idPrefix:"ui-tabs-",load:null,panelTemplate:"<div></div>",remove:null,select:null,show:null,spinner:"<em>Loading&#8230;</em>",tabTemplate:'<li><a href="#{href}"><span>#{label}</span></a></li>'},_create:function(){this._tabify(true)},_setOption:function(c,e){if(c=="selected")this.options.collapsible&&
+e==this.options.selected||this.select(e);else{this.options[c]=e;this._tabify()}},_tabId:function(c){return c.title&&c.title.replace(/\s/g,"_").replace(/[^A-Za-z0-9\-_:\.]/g,"")||this.options.idPrefix+s()},_sanitizeSelector:function(c){return c.replace(/:/g,"\\:")},_cookie:function(){var c=this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+v());return d.cookie.apply(null,[c].concat(d.makeArray(arguments)))},_ui:function(c,e){return{tab:c,panel:e,index:this.anchors.index(c)}},_cleanup:function(){this.lis.filter(".ui-state-processing").removeClass("ui-state-processing").find("span:data(label.tabs)").each(function(){var c=
+d(this);c.html(c.data("label.tabs")).removeData("label.tabs")})},_tabify:function(c){function e(g,f){g.css({display:""});!d.support.opacity&&f.opacity&&g[0].style.removeAttribute("filter")}this.list=this.element.find("ol,ul").eq(0);this.lis=d("li:has(a[href])",this.list);this.anchors=this.lis.map(function(){return d("a",this)[0]});this.panels=d([]);var a=this,b=this.options,h=/^#.+/;this.anchors.each(function(g,f){var j=d(f).attr("href"),l=j.split("#")[0],p;if(l&&(l===location.toString().split("#")[0]||
+(p=d("base")[0])&&l===p.href)){j=f.hash;f.href=j}if(h.test(j))a.panels=a.panels.add(a._sanitizeSelector(j));else if(j!="#"){d.data(f,"href.tabs",j);d.data(f,"load.tabs",j.replace(/#.*$/,""));j=a._tabId(f);f.href="#"+j;f=d("#"+j);if(!f.length){f=d(b.panelTemplate).attr("id",j).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").insertAfter(a.panels[g-1]||a.list);f.data("destroy.tabs",true)}a.panels=a.panels.add(f)}else b.disabled.push(g)});if(c){this.element.addClass("ui-tabs ui-widget ui-widget-content ui-corner-all");
+this.list.addClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.lis.addClass("ui-state-default ui-corner-top");this.panels.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom");if(b.selected===undefined){location.hash&&this.anchors.each(function(g,f){if(f.hash==location.hash){b.selected=g;return false}});if(typeof b.selected!="number"&&b.cookie)b.selected=parseInt(a._cookie(),10);if(typeof b.selected!="number"&&this.lis.filter(".ui-tabs-selected").length)b.selected=
+this.lis.index(this.lis.filter(".ui-tabs-selected"));b.selected=b.selected||(this.lis.length?0:-1)}else if(b.selected===null)b.selected=-1;b.selected=b.selected>=0&&this.anchors[b.selected]||b.selected<0?b.selected:0;b.disabled=d.unique(b.disabled.concat(d.map(this.lis.filter(".ui-state-disabled"),function(g){return a.lis.index(g)}))).sort();d.inArray(b.selected,b.disabled)!=-1&&b.disabled.splice(d.inArray(b.selected,b.disabled),1);this.panels.addClass("ui-tabs-hide");this.lis.removeClass("ui-tabs-selected ui-state-active");
+if(b.selected>=0&&this.anchors.length){this.panels.eq(b.selected).removeClass("ui-tabs-hide");this.lis.eq(b.selected).addClass("ui-tabs-selected ui-state-active");a.element.queue("tabs",function(){a._trigger("show",null,a._ui(a.anchors[b.selected],a.panels[b.selected]))});this.load(b.selected)}d(window).bind("unload",function(){a.lis.add(a.anchors).unbind(".tabs");a.lis=a.anchors=a.panels=null})}else b.selected=this.lis.index(this.lis.filter(".ui-tabs-selected"));this.element[b.collapsible?"addClass":
+"removeClass"]("ui-tabs-collapsible");b.cookie&&this._cookie(b.selected,b.cookie);c=0;for(var i;i=this.lis[c];c++)d(i)[d.inArray(c,b.disabled)!=-1&&!d(i).hasClass("ui-tabs-selected")?"addClass":"removeClass"]("ui-state-disabled");b.cache===false&&this.anchors.removeData("cache.tabs");this.lis.add(this.anchors).unbind(".tabs");if(b.event!="mouseover"){var k=function(g,f){f.is(":not(.ui-state-disabled)")&&f.addClass("ui-state-"+g)},n=function(g,f){f.removeClass("ui-state-"+g)};this.lis.bind("mouseover.tabs",
+function(){k("hover",d(this))});this.lis.bind("mouseout.tabs",function(){n("hover",d(this))});this.anchors.bind("focus.tabs",function(){k("focus",d(this).closest("li"))});this.anchors.bind("blur.tabs",function(){n("focus",d(this).closest("li"))})}var m,o;if(b.fx)if(d.isArray(b.fx)){m=b.fx[0];o=b.fx[1]}else m=o=b.fx;var q=o?function(g,f){d(g).closest("li").addClass("ui-tabs-selected ui-state-active");f.hide().removeClass("ui-tabs-hide").animate(o,o.duration||"normal",function(){e(f,o);a._trigger("show",
+null,a._ui(g,f[0]))})}:function(g,f){d(g).closest("li").addClass("ui-tabs-selected ui-state-active");f.removeClass("ui-tabs-hide");a._trigger("show",null,a._ui(g,f[0]))},r=m?function(g,f){f.animate(m,m.duration||"normal",function(){a.lis.removeClass("ui-tabs-selected ui-state-active");f.addClass("ui-tabs-hide");e(f,m);a.element.dequeue("tabs")})}:function(g,f){a.lis.removeClass("ui-tabs-selected ui-state-active");f.addClass("ui-tabs-hide");a.element.dequeue("tabs")};this.anchors.bind(b.event+".tabs",
+function(){var g=this,f=d(this).closest("li"),j=a.panels.filter(":not(.ui-tabs-hide)"),l=d(a._sanitizeSelector(this.hash));if(f.hasClass("ui-tabs-selected")&&!b.collapsible||f.hasClass("ui-state-disabled")||f.hasClass("ui-state-processing")||a._trigger("select",null,a._ui(this,l[0]))===false){this.blur();return false}b.selected=a.anchors.index(this);a.abort();if(b.collapsible)if(f.hasClass("ui-tabs-selected")){b.selected=-1;b.cookie&&a._cookie(b.selected,b.cookie);a.element.queue("tabs",function(){r(g,
+j)}).dequeue("tabs");this.blur();return false}else if(!j.length){b.cookie&&a._cookie(b.selected,b.cookie);a.element.queue("tabs",function(){q(g,l)});a.load(a.anchors.index(this));this.blur();return false}b.cookie&&a._cookie(b.selected,b.cookie);if(l.length){j.length&&a.element.queue("tabs",function(){r(g,j)});a.element.queue("tabs",function(){q(g,l)});a.load(a.anchors.index(this))}else throw"jQuery UI Tabs: Mismatching fragment identifier.";d.browser.msie&&this.blur()});this.anchors.bind("click.tabs",
+function(){return false})},destroy:function(){var c=this.options;this.abort();this.element.unbind(".tabs").removeClass("ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible").removeData("tabs");this.list.removeClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.anchors.each(function(){var e=d.data(this,"href.tabs");if(e)this.href=e;var a=d(this).unbind(".tabs");d.each(["href","load","cache"],function(b,h){a.removeData(h+".tabs")})});this.lis.unbind(".tabs").add(this.panels).each(function(){d.data(this,
+"destroy.tabs")?d(this).remove():d(this).removeClass("ui-state-default ui-corner-top ui-tabs-selected ui-state-active ui-state-hover ui-state-focus ui-state-disabled ui-tabs-panel ui-widget-content ui-corner-bottom ui-tabs-hide")});c.cookie&&this._cookie(null,c.cookie);return this},add:function(c,e,a){if(a===undefined)a=this.anchors.length;var b=this,h=this.options;e=d(h.tabTemplate.replace(/#\{href\}/g,c).replace(/#\{label\}/g,e));c=!c.indexOf("#")?c.replace("#",""):this._tabId(d("a",e)[0]);e.addClass("ui-state-default ui-corner-top").data("destroy.tabs",
+true);var i=d("#"+c);i.length||(i=d(h.panelTemplate).attr("id",c).data("destroy.tabs",true));i.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom ui-tabs-hide");if(a>=this.lis.length){e.appendTo(this.list);i.appendTo(this.list[0].parentNode)}else{e.insertBefore(this.lis[a]);i.insertBefore(this.panels[a])}h.disabled=d.map(h.disabled,function(k){return k>=a?++k:k});this._tabify();if(this.anchors.length==1){h.selected=0;e.addClass("ui-tabs-selected ui-state-active");i.removeClass("ui-tabs-hide");
+this.element.queue("tabs",function(){b._trigger("show",null,b._ui(b.anchors[0],b.panels[0]))});this.load(0)}this._trigger("add",null,this._ui(this.anchors[a],this.panels[a]));return this},remove:function(c){var e=this.options,a=this.lis.eq(c).remove(),b=this.panels.eq(c).remove();if(a.hasClass("ui-tabs-selected")&&this.anchors.length>1)this.select(c+(c+1<this.anchors.length?1:-1));e.disabled=d.map(d.grep(e.disabled,function(h){return h!=c}),function(h){return h>=c?--h:h});this._tabify();this._trigger("remove",
+null,this._ui(a.find("a")[0],b[0]));return this},enable:function(c){var e=this.options;if(d.inArray(c,e.disabled)!=-1){this.lis.eq(c).removeClass("ui-state-disabled");e.disabled=d.grep(e.disabled,function(a){return a!=c});this._trigger("enable",null,this._ui(this.anchors[c],this.panels[c]));return this}},disable:function(c){var e=this.options;if(c!=e.selected){this.lis.eq(c).addClass("ui-state-disabled");e.disabled.push(c);e.disabled.sort();this._trigger("disable",null,this._ui(this.anchors[c],this.panels[c]))}return this},
+select:function(c){if(typeof c=="string")c=this.anchors.index(this.anchors.filter("[href$="+c+"]"));else if(c===null)c=-1;if(c==-1&&this.options.collapsible)c=this.options.selected;this.anchors.eq(c).trigger(this.options.event+".tabs");return this},load:function(c){var e=this,a=this.options,b=this.anchors.eq(c)[0],h=d.data(b,"load.tabs");this.abort();if(!h||this.element.queue("tabs").length!==0&&d.data(b,"cache.tabs"))this.element.dequeue("tabs");else{this.lis.eq(c).addClass("ui-state-processing");
+if(a.spinner){var i=d("span",b);i.data("label.tabs",i.html()).html(a.spinner)}this.xhr=d.ajax(d.extend({},a.ajaxOptions,{url:h,success:function(k,n){d(e._sanitizeSelector(b.hash)).html(k);e._cleanup();a.cache&&d.data(b,"cache.tabs",true);e._trigger("load",null,e._ui(e.anchors[c],e.panels[c]));try{a.ajaxOptions.success(k,n)}catch(m){}},error:function(k,n){e._cleanup();e._trigger("load",null,e._ui(e.anchors[c],e.panels[c]));try{a.ajaxOptions.error(k,n,c,b)}catch(m){}}}));e.element.dequeue("tabs");return this}},
+abort:function(){this.element.queue([]);this.panels.stop(false,true);this.element.queue("tabs",this.element.queue("tabs").splice(-2,2));if(this.xhr){this.xhr.abort();delete this.xhr}this._cleanup();return this},url:function(c,e){this.anchors.eq(c).removeData("cache.tabs").data("load.tabs",e);return this},length:function(){return this.anchors.length}});d.extend(d.ui.tabs,{version:"1.9m2"});d.extend(d.ui.tabs.prototype,{rotation:null,rotate:function(c,e){var a=this,b=this.options,h=a._rotate||(a._rotate=
+function(i){clearTimeout(a.rotation);a.rotation=setTimeout(function(){var k=b.selected;a.select(++k<a.anchors.length?k:0)},c);i&&i.stopPropagation()});e=a._unrotate||(a._unrotate=!e?function(i){i.clientX&&a.rotate(null)}:function(){t=b.selected;h()});if(c){this.element.bind("tabsshow",h);this.anchors.bind(b.event+".tabs",e);h()}else{clearTimeout(a.rotation);this.element.unbind("tabsshow",h);this.anchors.unbind(b.event+".tabs",e);delete this._rotate;delete this._unrotate}return this}})})(jQuery);
+(function(b){b(document.body).is("[role]")||b(document.body).attr("role","application");var f=0;b.widget("ui.tooltip",{options:{tooltipClass:"ui-widget-content",content:function(){return b(this).attr("title")},position:{my:"left center",at:"right center",offset:"15 0"}},_init:function(){var c=this;this.tooltip=b("<div></div>").attr("id","ui-tooltip-"+f++).attr("role","tooltip").attr("aria-hidden","true").addClass("ui-tooltip ui-widget ui-corner-all").addClass(this.options.tooltipClass).appendTo(document.body).hide();
+this.tooltipContent=b("<div></div>").addClass("ui-tooltip-content").appendTo(this.tooltip);this.opacity=this.tooltip.css("opacity");this.element.bind("focus.tooltip mouseenter.tooltip",function(a){c.open(a)}).bind("blur.tooltip mouseleave.tooltip",function(a){c.close(a)})},enable:function(){this.options.disabled=false},disable:function(){this.options.disabled=true},destroy:function(){this.tooltip.remove();b.Widget.prototype.destroy.apply(this,arguments)},widget:function(){return this.tooltip},open:function(c){var a=
+this.element;if(!(this.current&&this.current[0]==a[0])){var d=this;this.current=a;this.currentTitle=a.attr("title");var e=this.options.content.call(a[0],function(g){d.current==a&&d._show(c,a,g)});e&&d._show(c,a,e)}},_show:function(c,a,d){if(d){a.attr("title","");if(!this.options.disabled){this.tooltipContent.html(d);this.tooltip.css({top:0,left:0}).show().position(b.extend(this.options.position,{of:a})).hide();this.tooltip.attr("aria-hidden","false");a.attr("aria-describedby",this.tooltip.attr("id"));
+if(this.tooltip.is(":animated"))this.tooltip.stop().show().fadeTo("normal",this.opacity);else this.tooltip.is(":visible")?this.tooltip.fadeTo("normal",this.opacity):this.tooltip.fadeIn();this._trigger("open",c)}}},close:function(c){if(this.current){var a=this.current.attr("title",this.currentTitle);this.current=null;if(!this.options.disabled){a.removeAttr("aria-describedby");this.tooltip.attr("aria-hidden","true");this.tooltip.is(":animated")?this.tooltip.stop().fadeTo("normal",0,function(){b(this).hide().css("opacity",
+"")}):this.tooltip.stop().fadeOut();this._trigger("close",c)}}}})})(jQuery);
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
new file mode 100644
index 0000000000..7c24308023
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
@@ -0,0 +1,154 @@
+/*!
+ * jQuery JavaScript Library v1.4.2
+ * http://jquery.com/
+ *
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Sat Feb 13 22:33:48 2010 -0500
+ */
+(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
+e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
+j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
+"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
+true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
+Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
+(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
+a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
+"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
+function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
+c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
+L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
+"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
+a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
+d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
+a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
+!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
+true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
+var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
+parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
+false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
+s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
+applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
+else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
+a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
+w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
+cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
+i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
+" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
+this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
+e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
+c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
+a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
+function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
+k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
+C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
+null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
+e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
+f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
+if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
+d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
+"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
+a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
+isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
+{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
+if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
+e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
+"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
+d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
+!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
+toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
+u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
+function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
+if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
+e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
+t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
+g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
+for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
+1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
+CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
+relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
+l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
+h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
+CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
+g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
+text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
+setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
+h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
+m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
+"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
+h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
+!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
+h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
+q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
+if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
+(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
+function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
+gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
+c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
+{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
+"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
+d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
+a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
+1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
+a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
+c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
+wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
+prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
+this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
+return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
+""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
+this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
+u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
+1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
+return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
+""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
+c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
+c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
+function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
+Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
+"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
+a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
+a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
+"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
+serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
+function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
+global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
+e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
+false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
+false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
+c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
+d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
+g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
+1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
+"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
+if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
+this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
+"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
+animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
+j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
+this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
+"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
+c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
+this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
+this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
+e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
+c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
+function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
+this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
+k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
+f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
+a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
+c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
+d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
+f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
+"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
+e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
new file mode 100644
index 0000000000..4ab99764ce
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
@@ -0,0 +1,18 @@
+/*
+ * jquery.layout 1.3.0 - Release Candidate 29.3
+ *
+ * Copyright (c) 2010
+ * Fabrizio Balliano (http://www.fabrizioballiano.net)
+ * Kevin Dalman (http://allpro.net)
+ *
+ * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
+ * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
+ *
+ * Docs: http://layout.jquery-dev.net/documentation.html
+ * Tips: http://layout.jquery-dev.net/tips.html
+ * Help: http://groups.google.com/group/jquery-ui-layout
+ *
+ * $Date: 2010-07-13 08:00:00 (Wed, 14 July 2010) $
+ * $Rev: 30293 $
+ */
+(function($){$.fn.layout=function(opts){var lang={Pane:"Pane",Open:"Open",Close:"Close",Resize:"Resize",Slide:"Slide Open",Pin:"Pin",Unpin:"Un-Pin",selector:"selector",msgNoRoom:"Not enough room to show this pane.",errContainerMissing:"UI Layout Initialization Error\n\nThe specified layout-container does not exist.",errCenterPaneMissing:"UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element.",errContainerHeight:"UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!",errButton:"Error Adding Button \n\nInvalid "};var options={name:"",scrollToBookmarkOnLoad:true,resizeWithWindow:true,resizeWithWindowDelay:200,resizeWithWindowMaxDelay:0,onresizeall_start:null,onresizeall_end:null,onload:null,onunload:null,autoBindCustomButtons:false,zIndex:null,defaults:{applyDemoStyles:false,closable:true,resizable:true,slidable:true,initClosed:false,initHidden:false,contentSelector:".ui-layout-content",contentIgnoreSelector:".ui-layout-ignore",findNestedContent:false,paneClass:"ui-layout-pane",resizerClass:"ui-layout-resizer",togglerClass:"ui-layout-toggler",buttonClass:"ui-layout-button",minSize:0,maxSize:0,spacing_open:6,spacing_closed:6,togglerLength_open:50,togglerLength_closed:50,togglerAlign_open:"center",togglerAlign_closed:"center",togglerTip_open:lang.Close,togglerTip_closed:lang.Open,togglerContent_open:"",togglerContent_closed:"",resizerDblClickToggle:true,autoResize:true,autoReopen:true,resizerDragOpacity:1,maskIframesOnResize:true,resizeNestedLayout:true,resizeWhileDragging:false,resizeContentWhileDragging:false,noRoomToOpenTip:lang.msgNoRoom,resizerTip:lang.Resize,sliderTip:lang.Slide,sliderCursor:"pointer",slideTrigger_open:"click",slideTrigger_close:"mouseleave",hideTogglerOnSlide:false,preventQuickSlideClose:!!($.browser.webkit||$.browser.safari),showOverflowOnHover:false,enableCursorHotkey:true,customHotkeyModifier:"SHIFT",fxName:"slide",fxSpeed:null,fxSettings:{},fxOpacityFix:true,triggerEventsOnLoad:false,triggerEventsWhileDragging:true,onshow_start:null,onshow_end:null,onhide_start:null,onhide_end:null,onopen_start:null,onopen_end:null,onclose_start:null,onclose_end:null,onresize_start:null,onresize_end:null,onsizecontent_start:null,onsizecontent_end:null,onswap_start:null,onswap_end:null,ondrag_start:null,ondrag_end:null},north:{paneSelector:".ui-layout-north",size:"auto",resizerCursor:"n-resize",customHotkey:""},south:{paneSelector:".ui-layout-south",size:"auto",resizerCursor:"s-resize",customHotkey:""},east:{paneSelector:".ui-layout-east",size:200,resizerCursor:"e-resize",customHotkey:""},west:{paneSelector:".ui-layout-west",size:200,resizerCursor:"w-resize",customHotkey:""},center:{paneSelector:".ui-layout-center",minWidth:0,minHeight:0},useStateCookie:false,cookie:{name:"",autoSave:true,autoLoad:true,domain:"",path:"",expires:"",secure:false,keys:"north.size,south.size,east.size,west.size,north.isClosed,south.isClosed,east.isClosed,west.isClosed,north.isHidden,south.isHidden,east.isHidden,west.isHidden"}};var effects={slide:{all:{duration:"fast"},north:{direction:"up"},south:{direction:"down"},east:{direction:"right"},west:{direction:"left"}},drop:{all:{duration:"slow"},north:{direction:"up"},south:{direction:"down"},east:{direction:"right"},west:{direction:"left"}},scale:{all:{duration:"fast"}}};var state={id:"layout"+new Date().getTime(),initialized:false,container:{},north:{},south:{},east:{},west:{},center:{},cookie:{}};var 
_c={allPanes:"north,south,west,east,center",borderPanes:"north,south,west,east",altSide:{north:"south",south:"north",east:"west",west:"east"},hidden:{visibility:"hidden"},visible:{visibility:"visible"},zIndex:{pane_normal:1,resizer_normal:2,iframe_mask:2,pane_sliding:100,pane_animate:1000,resizer_drag:10000},resizers:{cssReq:{position:"absolute",padding:0,margin:0,fontSize:"1px",textAlign:"left",overflow:"hidden"},cssDemo:{background:"#DDD",border:"none"}},togglers:{cssReq:{position:"absolute",display:"block",padding:0,margin:0,overflow:"hidden",textAlign:"center",fontSize:"1px",cursor:"pointer",zIndex:1},cssDemo:{background:"#AAA"}},content:{cssReq:{position:"relative"},cssDemo:{overflow:"auto",padding:"10px"},cssDemoPane:{overflow:"hidden",padding:0}},panes:{cssReq:{position:"absolute",margin:0},cssDemo:{padding:"10px",background:"#FFF",border:"1px solid #BBB",overflow:"auto"}},north:{side:"Top",sizeType:"Height",dir:"horz",cssReq:{top:0,bottom:"auto",left:0,right:0,width:"auto"},pins:[]},south:{side:"Bottom",sizeType:"Height",dir:"horz",cssReq:{top:"auto",bottom:0,left:0,right:0,width:"auto"},pins:[]},east:{side:"Right",sizeType:"Width",dir:"vert",cssReq:{left:"auto",right:0,top:"auto",bottom:"auto",height:"auto"},pins:[]},west:{side:"Left",sizeType:"Width",dir:"vert",cssReq:{left:0,right:"auto",top:"auto",bottom:"auto",height:"auto"},pins:[]},center:{dir:"center",cssReq:{left:"auto",right:"auto",top:"auto",bottom:"auto",height:"auto",width:"auto"}}};var timer={data:{},set:function(s,fn,ms){timer.clear(s);timer.data[s]=setTimeout(fn,ms)},clear:function(s){var t=timer.data;if(t[s]){clearTimeout(t[s]);delete t[s]}}};var isStr=function(o){try{return typeof o=="string"||(typeof o=="object"&&o.constructor.toString().match(/string/i)!==null)}catch(e){return false}};var str=function(o){return isStr(o)?$.trim(o):o==undefined||o==null?"":o};var min=function(x,y){return Math.min(x,y)};var max=function(x,y){return Math.max(x,y)};var _transformData=function(d){var a,json={cookie:{},defaults:{fxSettings:{}},north:{fxSettings:{}},south:{fxSettings:{}},east:{fxSettings:{}},west:{fxSettings:{}},center:{fxSettings:{}}};d=d||{};if(d.effects||d.cookie||d.defaults||d.north||d.south||d.west||d.east||d.center){json=$.extend(true,json,d)}else{$.each(d,function(key,val){a=key.split("__");if(!a[1]||json[a[0]]){json[a[1]?a[0]:"defaults"][a[1]?a[1]:a[0]]=val}})}return json};var _queue=function(action,pane,param){var tried=[];$.each(_c.borderPanes.split(","),function(i,p){if(_c[p].isMoving){bindCallback(p);return false}});function bindCallback(p){var c=_c[p];if(!c.doCallback){c.doCallback=true;c.callback=action+","+pane+","+(param?1:0)}else{tried.push(p);var cbPane=c.callback.split(",")[1];if(cbPane!=pane&&!$.inArray(cbPane,tried)>=0){bindCallback(cbPane)}}}};var _dequeue=function(pane){var c=_c[pane];_c.isLayoutBusy=false;delete c.isMoving;if(!c.doCallback||!c.callback){return}c.doCallback=false;var cb=c.callback.split(","),param=(cb[2]>0?true:false);if(cb[0]=="open"){open(cb[1],param)}else{if(cb[0]=="close"){close(cb[1],param)}}if(!c.doCallback){c.callback=null}};var _execCallback=function(pane,v_fn){if(!v_fn){return}var fn;try{if(typeof v_fn=="function"){fn=v_fn}else{if(!isStr(v_fn)){return}else{if(v_fn.match(/,/)){var args=v_fn.split(","),fn=eval(args[0]);if(typeof fn=="function"&&args.length>1){return fn(args[1])}}else{fn=eval(v_fn)}}}if(typeof fn=="function"){if(pane&&$Ps[pane]){return fn(pane,$Ps[pane],$.extend({},state[pane]),options[pane],options.name)}else{return 
fn(Instance,$.extend({},state),options,options.name)}}}catch(ex){}};var _showInvisibly=function($E,force){if(!$E){return{}}if(!$E.jquery){$E=$($E)}var CSS={display:$E.css("display"),visibility:$E.css("visibility")};if(force||CSS.display=="none"){$E.css({display:"block",visibility:"hidden"});return CSS}else{return{}}};var _fixIframe=function(pane){if(state.browser.mozilla){return}var $P=$Ps[pane];if(state[pane].tagName=="IFRAME"){$P.css(_c.hidden).css(_c.visible)}else{$P.find("IFRAME").css(_c.hidden).css(_c.visible)}};var _cssNum=function($E,prop){if(!$E.jquery){$E=$($E)}var CSS=_showInvisibly($E);var val=parseInt($.curCSS($E[0],prop,true),10)||0;$E.css(CSS);return val};var _borderWidth=function(E,side){if(E.jquery){E=E[0]}var b="border"+side.substr(0,1).toUpperCase()+side.substr(1);return $.curCSS(E,b+"Style",true)=="none"?0:(parseInt($.curCSS(E,b+"Width",true),10)||0)};var cssW=function(el,outerWidth){var str=isStr(el),$E=str?$Ps[el]:$(el);if(isNaN(outerWidth)){outerWidth=str?getPaneSize(el):$E.outerWidth()}if(outerWidth<=0){return 0}if(!state.browser.boxModel){return outerWidth}var W=outerWidth-_borderWidth($E,"Left")-_borderWidth($E,"Right")-_cssNum($E,"paddingLeft")-_cssNum($E,"paddingRight");return W>0?W:0};var cssH=function(el,outerHeight){var str=isStr(el),$E=str?$Ps[el]:$(el);if(isNaN(outerHeight)){outerHeight=str?getPaneSize(el):$E.outerHeight()}if(outerHeight<=0){return 0}if(!state.browser.boxModel){return outerHeight}var H=outerHeight-_borderWidth($E,"Top")-_borderWidth($E,"Bottom")-_cssNum($E,"paddingTop")-_cssNum($E,"paddingBottom");return H>0?H:0};var cssSize=function(pane,outerSize){if(_c[pane].dir=="horz"){return cssH(pane,outerSize)}else{return cssW(pane,outerSize)}};var cssMinDims=function(pane){var dir=_c[pane].dir,d={minWidth:1001-cssW(pane,1000),minHeight:1001-cssH(pane,1000)};if(dir=="horz"){d.minSize=d.minHeight}if(dir=="vert"){d.minSize=d.minWidth}return d};var setOuterWidth=function(el,outerWidth,autoHide){var $E=el,w;if(isStr(el)){$E=$Ps[el]}else{if(!el.jquery){$E=$(el)}}w=cssW($E,outerWidth);$E.css({width:w});if(w>0){if(autoHide&&$E.data("autoHidden")&&$E.innerHeight()>0){$E.show().data("autoHidden",false);if(!state.browser.mozilla){$E.css(_c.hidden).css(_c.visible)}}}else{if(autoHide&&!$E.data("autoHidden")){$E.hide().data("autoHidden",true)}}};var setOuterHeight=function(el,outerHeight,autoHide){var $E=el,h;if(isStr(el)){$E=$Ps[el]}else{if(!el.jquery){$E=$(el)}}h=cssH($E,outerHeight);$E.css({height:h,visibility:"visible"});if(h>0&&$E.innerWidth()>0){if(autoHide&&$E.data("autoHidden")){$E.show().data("autoHidden",false);if(!state.browser.mozilla){$E.css(_c.hidden).css(_c.visible)}}}else{if(autoHide&&!$E.data("autoHidden")){$E.hide().data("autoHidden",true)}}};var setOuterSize=function(el,outerSize,autoHide){if(_c[pane].dir=="horz"){setOuterHeight(el,outerSize,autoHide)}else{setOuterWidth(el,outerSize,autoHide)}};var _parseSize=function(pane,size,dir){if(!dir){dir=_c[pane].dir}if(isStr(size)&&size.match(/%/)){size=parseInt(size)/100}if(size===0){return 0}else{if(size>=1){return parseInt(size,10)}else{if(size>0){var o=options,avail;if(dir=="horz"){avail=sC.innerHeight-($Ps.north?o.north.spacing_open:0)-($Ps.south?o.south.spacing_open:0)}else{if(dir=="vert"){avail=sC.innerWidth-($Ps.west?o.west.spacing_open:0)-($Ps.east?o.east.spacing_open:0)}}return Math.floor(avail*size)}else{if(pane=="center"){return 0}else{var 
$P=$Ps[pane],dim=(dir=="horz"?"height":"width"),vis=_showInvisibly($P),s=$P.css(dim);$P.css(dim,"auto");size=(dim=="height")?$P.outerHeight():$P.outerWidth();$P.css(dim,s).css(vis);return size}}}}};var getPaneSize=function(pane,inclSpace){var $P=$Ps[pane],o=options[pane],s=state[pane],oSp=(inclSpace?o.spacing_open:0),cSp=(inclSpace?o.spacing_closed:0);if(!$P||s.isHidden){return 0}else{if(s.isClosed||(s.isSliding&&inclSpace)){return cSp}else{if(_c[pane].dir=="horz"){return $P.outerHeight()+oSp}else{return $P.outerWidth()+oSp}}}};var setSizeLimits=function(pane,slide){var o=options[pane],s=state[pane],c=_c[pane],dir=c.dir,side=c.side.toLowerCase(),type=c.sizeType.toLowerCase(),isSliding=(slide!=undefined?slide:s.isSliding),$P=$Ps[pane],paneSpacing=o.spacing_open,altPane=_c.altSide[pane],altS=state[altPane],$altP=$Ps[altPane],altPaneSize=(!$altP||altS.isVisible===false||altS.isSliding?0:(dir=="horz"?$altP.outerHeight():$altP.outerWidth())),altPaneSpacing=((!$altP||altS.isHidden?0:options[altPane][altS.isClosed!==false?"spacing_closed":"spacing_open"])||0),containerSize=(dir=="horz"?sC.innerHeight:sC.innerWidth),minCenterDims=cssMinDims("center"),minCenterSize=dir=="horz"?max(options.center.minHeight,minCenterDims.minHeight):max(options.center.minWidth,minCenterDims.minWidth),limitSize=(containerSize-paneSpacing-(isSliding?0:(_parseSize("center",minCenterSize,dir)+altPaneSize+altPaneSpacing))),minSize=s.minSize=max(_parseSize(pane,o.minSize),cssMinDims(pane).minSize),maxSize=s.maxSize=min((o.maxSize?_parseSize(pane,o.maxSize):100000),limitSize),r=s.resizerPosition={},top=sC.insetTop,left=sC.insetLeft,W=sC.innerWidth,H=sC.innerHeight,rW=o.spacing_open;switch(pane){case"north":r.min=top+minSize;r.max=top+maxSize;break;case"west":r.min=left+minSize;r.max=left+maxSize;break;case"south":r.min=top+H-maxSize-rW;r.max=top+H-minSize-rW;break;case"east":r.min=left+W-maxSize-rW;r.max=left+W-minSize-rW;break}};var calcNewCenterPaneDims=function(){var d={top:getPaneSize("north",true),bottom:getPaneSize("south",true),left:getPaneSize("west",true),right:getPaneSize("east",true),width:0,height:0};with(d){width=sC.innerWidth-left-right;height=sC.innerHeight-bottom-top;top+=sC.insetTop;bottom+=sC.insetBottom;left+=sC.insetLeft;right+=sC.insetRight}return d};var getElemDims=function($E){var d={},x=d.css={},i={},b,p,off=$E.offset();d.offsetLeft=off.left;d.offsetTop=off.top;$.each("Left,Right,Top,Bottom".split(","),function(idx,e){b=x["border"+e]=_borderWidth($E,e);p=x["padding"+e]=_cssNum($E,"padding"+e);i[e]=b+p;d["inset"+e]=p});d.offsetWidth=$E.innerWidth(true);d.offsetHeight=$E.innerHeight(true);d.outerWidth=$E.outerWidth();d.outerHeight=$E.outerHeight();d.innerWidth=d.outerWidth-i.Left-i.Right;d.innerHeight=d.outerHeight-i.Top-i.Bottom;x.width=$E.width();x.height=$E.height();return d};var getElemCSS=function($E,list){var CSS={},style=$E[0].style,props=list.split(","),sides="Top,Bottom,Left,Right".split(","),attrs="Color,Style,Width".split(","),p,s,a,i,j,k;for(i=0;i<props.length;i++){p=props[i];if(p.match(/(border|padding|margin)$/)){for(j=0;j<4;j++){s=sides[j];if(p=="border"){for(k=0;k<3;k++){a=attrs[k];CSS[p+s+a]=style[p+s+a]}}else{CSS[p+s]=style[p+s]}}}else{CSS[p]=style[p]}}return CSS};var getHoverClasses=function(el,allStates){var $El=$(el),type=$El.data("layoutRole"),pane=$El.data("layoutEdge"),o=options[pane],root=o[type+"Class"],_pane="-"+pane,_open="-open",_closed="-closed",_slide="-sliding",_hover="-hover 
",_state=$El.hasClass(root+_closed)?_closed:_open,_alt=_state==_closed?_open:_closed,classes=(root+_hover)+(root+_pane+_hover)+(root+_state+_hover)+(root+_pane+_state+_hover);if(allStates){classes+=(root+_alt+_hover)+(root+_pane+_alt+_hover)}if(type=="resizer"&&$El.hasClass(root+_slide)){classes+=(root+_slide+_hover)+(root+_pane+_slide+_hover)}return $.trim(classes)};var addHover=function(evt,el){var e=el||this;$(e).addClass(getHoverClasses(e))};var removeHover=function(evt,el){var e=el||this;$(e).removeClass(getHoverClasses(e,true))};var onResizerEnter=function(evt){$("body").disableSelection();addHover(evt,this)};var onResizerLeave=function(evt,el){var e=el||this,pane=$(e).data("layoutEdge"),name=pane+"ResizerLeave";timer.clear(name);if(!el){removeHover(evt,this);timer.set(name,function(){onResizerLeave(evt,e)},200)}else{if(!state[pane].isResizing){$("body").enableSelection()}}};var _create=function(){initOptions();var o=options;if(false===_execCallback(null,o.onload)){return false}if(!getPane("center").length){alert(lang.errCenterPaneMissing);return null}if(o.useStateCookie&&o.cookie.autoLoad){loadCookie()}state.browser={mozilla:$.browser.mozilla,webkit:$.browser.webkit||$.browser.safari,msie:$.browser.msie,isIE6:$.browser.msie&&$.browser.version==6,boxModel:$.support.boxModel};initContainer();initPanes();initResizable();sizeContent();if(o.scrollToBookmarkOnLoad){with(self.location){if(hash){replace(hash)}}}if(o.autoBindCustomButtons){initButtons()}initHotkeys();if(o.resizeWithWindow&&!$Container.data("layoutRole")){$(window).bind("resize."+sID,windowResize)}$(window).bind("unload."+sID,unload);state.initialized=true};var windowResize=function(){var delay=Number(options.resizeWithWindowDelay)||100;if(delay>0){timer.clear("winResize");timer.set("winResize",function(){timer.clear("winResize");timer.clear("winResizeRepeater");resizeAll()},delay);if(!timer.data.winResizeRepeater){setWindowResizeRepeater()}}};var setWindowResizeRepeater=function(){var delay=Number(options.resizeWithWindowMaxDelay);if(delay>0){timer.set("winResizeRepeater",function(){setWindowResizeRepeater();resizeAll()},delay)}};var unload=function(){var o=options;state.cookie=getState();if(o.useStateCookie&&o.cookie.autoSave){saveCookie()}_execCallback(null,o.onunload)};var initContainer=function(){var $C=$Container,tag=sC.tagName=$C.attr("tagName"),fullPage=(tag=="BODY"),props="position,margin,padding,border",CSS={};sC.selector=$C.selector.split(".slice")[0];sC.ref=tag+"/"+sC.selector;$C.data("layoutContainer",sID).data("layoutName",options.name);if(!$C.data("layoutCSS")){if(fullPage){CSS=$.extend(getElemCSS($C,props),{height:$C.css("height"),overflow:$C.css("overflow"),overflowX:$C.css("overflowX"),overflowY:$C.css("overflowY")});var $H=$("html");$H.data("layoutCSS",{height:"auto",overflow:$H.css("overflow"),overflowX:$H.css("overflowX"),overflowY:$H.css("overflowY")})}else{CSS=getElemCSS($C,props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY")}$C.data("layoutCSS",CSS)}try{if(fullPage){$("html").css({height:"100%",overflow:"hidden",overflowX:"hidden",overflowY:"hidden"});$("body").css({position:"relative",height:"100%",overflow:"hidden",overflowX:"hidden",overflowY:"hidden",margin:0,padding:0,border:"none"})}else{var 
CSS={overflow:"hidden"},p=$C.css("position"),h=$C.css("height");if(!$C.data("layoutRole")){if(!p||!p.match(/fixed|absolute|relative/)){CSS.position="relative"}}$C.css(CSS);if($C.is(":visible")&&$C.innerHeight()<2){alert(lang.errContainerHeight.replace(/CONTAINER/,sC.ref))}}}catch(ex){}$.extend(state.container,getElemDims($C))};var initHotkeys=function(){$.each(_c.borderPanes.split(","),function(i,pane){var o=options[pane];if(o.enableCursorHotkey||o.customHotkey){$(document).bind("keydown."+sID,keyDown);return false}})};var initOptions=function(){opts=_transformData(opts);var newOpts={applyDefaultStyles:"applyDemoStyles"};renameOpts(opts.defaults);$.each(_c.allPanes.split(","),function(i,pane){renameOpts(opts[pane])});if(opts.effects){$.extend(effects,opts.effects);delete opts.effects}$.extend(options.cookie,opts.cookie);var globals="name,zIndex,scrollToBookmarkOnLoad,resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,onresizeall,onresizeall_start,onresizeall_end,onload,onunload,autoBindCustomButtons,useStateCookie";$.each(globals.split(","),function(i,key){if(opts[key]!==undefined){options[key]=opts[key]}else{if(opts.defaults[key]!==undefined){options[key]=opts.defaults[key];delete opts.defaults[key]}}});$.each("paneSelector,resizerCursor,customHotkey".split(","),function(i,key){delete opts.defaults[key]});$.extend(true,options.defaults,opts.defaults);_c.center=$.extend(true,{},_c.panes,_c.center);var z=options.zIndex;if(z===0||z>0){_c.zIndex.pane_normal=z;_c.zIndex.resizer_normal=z+1;_c.zIndex.iframe_mask=z+1}$.extend(options.center,opts.center);var o_Center=$.extend(true,{},options.defaults,opts.defaults,options.center);var optionsCenter=("paneClass,contentSelector,applyDemoStyles,triggerEventsOnLoad,showOverflowOnHover,onresize,onresize_start,onresize_end,resizeNestedLayout,resizeContentWhileDragging,onsizecontent,onsizecontent_start,onsizecontent_end").split(",");$.each(optionsCenter,function(i,key){options.center[key]=o_Center[key]});var o,defs=options.defaults;$.each(_c.borderPanes.split(","),function(i,pane){_c[pane]=$.extend(true,{},_c.panes,_c[pane]);o=options[pane]=$.extend(true,{},options.defaults,options[pane],opts.defaults,opts[pane]);if(!o.paneClass){o.paneClass="ui-layout-pane"}if(!o.resizerClass){o.resizerClass="ui-layout-resizer"}if(!o.togglerClass){o.togglerClass="ui-layout-toggler"}$.each(["_open","_close",""],function(i,n){var sName="fxName"+n,sSpeed="fxSpeed"+n,sSettings="fxSettings"+n;o[sName]=opts[pane][sName]||opts[pane].fxName||opts.defaults[sName]||opts.defaults.fxName||o[sName]||o.fxName||defs[sName]||defs.fxName||"none";var fxName=o[sName];if(fxName=="none"||!$.effects||!$.effects[fxName]||(!effects[fxName]&&!o[sSettings]&&!o.fxSettings)){fxName=o[sName]="none"}var fx=effects[fxName]||{},fx_all=fx.all||{},fx_pane=fx[pane]||{};o[sSettings]=$.extend({},fx_all,fx_pane,defs.fxSettings||{},defs[sSettings]||{},o.fxSettings,o[sSettings],opts.defaults.fxSettings,opts.defaults[sSettings]||{},opts[pane].fxSettings,opts[pane][sSettings]||{});o[sSpeed]=opts[pane][sSpeed]||opts[pane].fxSpeed||opts.defaults[sSpeed]||opts.defaults.fxSpeed||o[sSpeed]||o[sSettings].duration||o.fxSpeed||o.fxSettings.duration||defs.fxSpeed||defs.fxSettings.duration||fx_pane.duration||fx_all.duration||"normal"})});function renameOpts(O){for(var key in newOpts){if(O[key]!=undefined){O[newOpts[key]]=O[key];delete O[key]}}}};var getPane=function(pane){var sel=options[pane].paneSelector;if(sel.substr(0,1)==="#"){return $Container.find(sel).eq(0)}else{var 
$P=$Container.children(sel).eq(0);return $P.length?$P:$Container.children("form:first").children(sel).eq(0)}};var initPanes=function(){$.each(_c.allPanes.split(","),function(idx,pane){var o=options[pane],s=state[pane],c=_c[pane],fx=s.fx,dir=c.dir,spacing=o.spacing_open||0,isCenter=(pane=="center"),CSS={},$P,$C,size,minSize,maxSize;$Cs[pane]=false;$P=$Ps[pane]=getPane(pane);if(!$P.length){$Ps[pane]=false;return true}if(!$P.data("layoutCSS")){var props="position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";$P.data("layoutCSS",getElemCSS($P,props))}$P.data("layoutName",options.name).data("layoutRole","pane").data("layoutEdge",pane).css(c.cssReq).css("zIndex",_c.zIndex.pane_normal).css(o.applyDemoStyles?c.cssDemo:{}).addClass(o.paneClass+" "+o.paneClass+"-"+pane).bind("mouseenter."+sID,addHover).bind("mouseleave."+sID,removeHover);initContent(pane,false);if(!isCenter){size=s.size=_parseSize(pane,o.size);minSize=_parseSize(pane,o.minSize)||1;maxSize=_parseSize(pane,o.maxSize)||100000;if(size>0){size=max(min(size,maxSize),minSize)}}s.tagName=$P.attr("tagName");s.edge=pane;s.noRoom=false;s.isVisible=true;if(!isCenter){s.isClosed=false;s.isSliding=false;s.isResizing=false;s.isHidden=false}switch(pane){case"north":CSS.top=sC.insetTop;CSS.left=sC.insetLeft;CSS.right=sC.insetRight;break;case"south":CSS.bottom=sC.insetBottom;CSS.left=sC.insetLeft;CSS.right=sC.insetRight;break;case"west":CSS.left=sC.insetLeft;break;case"east":CSS.right=sC.insetRight;break;case"center":}if(dir=="horz"){CSS.height=max(1,cssH(pane,size))}else{if(dir=="vert"){CSS.width=max(1,cssW(pane,size))}}$P.css(CSS);if(dir!="horz"){sizeMidPanes(pane,true)}$P.css({visibility:"visible",display:"block"});if(o.initClosed&&o.closable){close(pane,true,true)}else{if(o.initHidden||o.initClosed){hide(pane)}}if(o.showOverflowOnHover){$P.hover(allowOverflow,resetOverflow)}});initHandles();$.each(_c.borderPanes.split(","),function(i,pane){if($Ps[pane]&&state[pane].isVisible){setSizeLimits(pane);makePaneFit(pane)}});sizeMidPanes("center");$.each(_c.allPanes.split(","),function(i,pane){var o=options[pane];if($Ps[pane]&&o.triggerEventsOnLoad&&state[pane].isVisible){_execCallback(pane,o.onresize_end||o.onresize)}});if($Container.innerHeight()<2){alert(lang.errContainerHeight.replace(/CONTAINER/,sC.ref))}};var initHandles=function(panes){if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(i,pane){var $P=$Ps[pane];$Rs[pane]=false;$Ts[pane]=false;if(!$P){return}var o=options[pane],s=state[pane],c=_c[pane],rClass=o.resizerClass,tClass=o.togglerClass,side=c.side.toLowerCase(),spacing=(s.isVisible?o.spacing_open:o.spacing_closed),_pane="-"+pane,_state=(s.isVisible?"-open":"-closed"),$R=$Rs[pane]=$("<div></div>"),$T=(o.closable?$Ts[pane]=$("<div></div>"):false);if(s.isVisible&&o.resizable){}else{if(!s.isVisible&&o.slidable){$R.attr("title",o.sliderTip).css("cursor",o.sliderCursor)}}$R.attr("id",(o.paneSelector.substr(0,1)=="#"?o.paneSelector.substr(1)+"-resizer":"")).data("layoutRole","resizer").data("layoutEdge",pane).css(_c.resizers.cssReq).css("zIndex",_c.zIndex.resizer_normal).css(o.applyDemoStyles?_c.resizers.cssDemo:{}).addClass(rClass+" "+rClass+_pane).appendTo($Container);if($T){$T.attr("id",(o.paneSelector.substr(0,1)=="#"?o.paneSelector.substr(1)+"-toggler":"")).data("layoutRole","toggler").data("layoutEdge",pane).css(_c.togglers.cssReq).css(o.applyDemoStyles?_c.togglers.cssDemo:{}).addClass(tClass+" 
"+tClass+_pane).appendTo($R).click(function(evt){toggle(pane);evt.stopPropagation()}).hover(addHover,removeHover);if(o.togglerContent_open){$("<span>"+o.togglerContent_open+"</span>").data("layoutRole","togglerContent").data("layoutEdge",pane).addClass("content content-open").css("display","none").appendTo($T).hover(addHover,removeHover)}if(o.togglerContent_closed){$("<span>"+o.togglerContent_closed+"</span>").data("layoutRole","togglerContent").data("layoutEdge",pane).addClass("content content-closed").css("display","none").appendTo($T).hover(addHover,removeHover)}}if(s.isVisible){setAsOpen(pane)}else{setAsClosed(pane);bindStartSlidingEvent(pane,true)}});sizeHandles("all")};var initContent=function(pane,resize){var o=options[pane],sel=o.contentSelector,$P=$Ps[pane],$C;if(sel){$C=$Cs[pane]=(o.findNestedContent)?$P.find(sel).eq(0):$P.children(sel).eq(0)}if($C&&$C.length){$C.css(_c.content.cssReq);if(o.applyDemoStyles){$C.css(_c.content.cssDemo);$P.css(_c.content.cssDemoPane)}state[pane].content={};if(resize!==false){sizeContent(pane)}}else{$Cs[pane]=false}};var initButtons=function(){var pre="ui-layout-button-",name;$.each("toggle,open,close,pin,toggle-slide,open-slide".split(","),function(i,action){$.each(_c.borderPanes.split(","),function(ii,pane){$("."+pre+action+"-"+pane).each(function(){name=$(this).data("layoutName")||$(this).attr("layoutName");if(name==undefined||name==options.name){if(action.substr("-slide")>0){bindButton(this,action.split("-")[0],pane,true)}else{bindButton(this,action,pane)}}})})})};var initResizable=function(panes){var draggingAvailable=(typeof $.fn.draggable=="function"),$Frames,side;if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(idx,pane){var o=options[pane],s=state[pane],c=_c[pane],side=(c.dir=="horz"?"top":"left"),r,live;if(!draggingAvailable||!$Ps[pane]||!o.resizable){o.resizable=false;return true}var $P=$Ps[pane],$R=$Rs[pane],base=o.resizerClass,resizerClass=base+"-drag",resizerPaneClass=base+"-"+pane+"-drag",helperClass=base+"-dragging",helperPaneClass=base+"-"+pane+"-dragging",helperLimitClass=base+"-dragging-limit",helperClassesSet=false;if(!s.isClosed){$R.attr("title",o.resizerTip).css("cursor",o.resizerCursor)}$R.hover(onResizerEnter,onResizerLeave);$R.draggable({containment:$Container[0],axis:(c.dir=="horz"?"y":"x"),delay:0,distance:1,helper:"clone",opacity:o.resizerDragOpacity,addClasses:false,zIndex:_c.zIndex.resizer_drag,start:function(e,ui){o=options[pane];s=state[pane];live=o.resizeWhileDragging;if(false===_execCallback(pane,o.ondrag_start)){return false}_c.isLayoutBusy=true;s.isResizing=true;timer.clear(pane+"_closeSlider");setSizeLimits(pane);r=s.resizerPosition;$R.addClass(resizerClass+" "+resizerPaneClass);helperClassesSet=false;$Frames=$(o.maskIframesOnResize===true?"iframe":o.maskIframesOnResize).filter(":visible");var id,i=0;$Frames.each(function(){id="ui-layout-mask-"+(++i);$(this).data("layoutMaskID",id);$('<div id="'+id+'" class="ui-layout-mask ui-layout-mask-'+pane+'"/>').css({background:"#fff",opacity:"0.001",zIndex:_c.zIndex.iframe_mask,position:"absolute",width:this.offsetWidth+"px",height:this.offsetHeight+"px"}).css($(this).position()).appendTo(this.parentNode)});$("body").disableSelection()},drag:function(e,ui){if(!helperClassesSet){ui.helper.addClass(helperClass+" "+helperPaneClass).children().css("visibility","hidden");helperClassesSet=true;if(s.isSliding){$Ps[pane].css("zIndex",_c.zIndex.pane_sliding)}}var 
limit=0;if(ui.position[side]<r.min){ui.position[side]=r.min;limit=-1}else{if(ui.position[side]>r.max){ui.position[side]=r.max;limit=1}}if(limit){ui.helper.addClass(helperLimitClass);window.defaultStatus="Panel has reached its "+((limit>0&&pane.match(/north|west/))||(limit<0&&pane.match(/south|east/))?"maximum":"minimum")+" size"}else{ui.helper.removeClass(helperLimitClass);window.defaultStatus=""}if(live){resizePanes(e,ui,pane)}},stop:function(e,ui){$("body").enableSelection();window.defaultStatus="";$R.removeClass(resizerClass+" "+resizerPaneClass+" "+helperLimitClass);s.isResizing=false;_c.isLayoutBusy=false;resizePanes(e,ui,pane,true)}});var resizePanes=function(e,ui,pane,resizingDone){var dragPos=ui.position,c=_c[pane],resizerPos,newSize,i=0;switch(pane){case"north":resizerPos=dragPos.top;break;case"west":resizerPos=dragPos.left;break;case"south":resizerPos=sC.offsetHeight-dragPos.top-o.spacing_open;break;case"east":resizerPos=sC.offsetWidth-dragPos.left-o.spacing_open;break}if(resizingDone){$("div.ui-layout-mask").each(function(){this.parentNode.removeChild(this)});if(false===_execCallback(pane,o.ondrag_end||o.ondrag)){return false}}else{$Frames.each(function(){$("#"+$(this).data("layoutMaskID")).css($(this).position()).css({width:this.offsetWidth+"px",height:this.offsetHeight+"px"})})}newSize=resizerPos-sC["inset"+c.side];manualSizePane(pane,newSize)}})};var destroy=function(){$(window).unbind("."+sID);$(document).unbind("."+sID);window[sID]=null;var fullPage=(sC.tagName=="BODY"),_open="-open",_sliding="-sliding",_closed="-closed",$P,root,pRoot,pClasses;$.each(_c.allPanes.split(","),function(i,pane){$P=$Ps[pane];if(!$P){return true}if(pane!="center"){if($Ts[pane]){$Ts[pane].remove()}$Rs[pane].remove()}root=options[pane].paneClass;pRoot=root+"-"+pane;pClasses=[root,root+_open,root+_closed,root+_sliding,pRoot,pRoot+_open,pRoot+_closed,pRoot+_sliding];$.merge(pClasses,getHoverClasses($P,true));$P.removeClass(pClasses.join(" ")).removeData("layoutRole").removeData("layoutEdge").unbind("."+sID).unbind("mouseenter").unbind("mouseleave");if(!$P.data("layoutContainer")){$P.css($P.data("layoutCSS"))}});$Container.removeData("layoutContainer");if(!$Container.data("layoutEdge")){$Container.css($Container.data("layoutCSS"))}if(fullPage){$("html").css($("html").data("layoutCSS"))}unload();var n=options.name;if(n&&window[n]){window[n]=null}};var hide=function(pane,noAnimation){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane];if(!$P||s.isHidden){return}if(state.initialized&&false===_execCallback(pane,o.onhide_start)){return}s.isSliding=false;if($R){$R.hide()}if(!state.initialized||s.isClosed){s.isClosed=true;s.isHidden=true;s.isVisible=false;$P.hide();sizeMidPanes(_c[pane].dir=="horz"?"all":"center");if(state.initialized||o.triggerEventsOnLoad){_execCallback(pane,o.onhide_end||o.onhide)}}else{s.isHiding=true;close(pane,false,noAnimation)}};var show=function(pane,openPane,noAnimation,noAlert){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane];if(!$P||!s.isHidden){return}if(false===_execCallback(pane,o.onshow_start)){return}s.isSliding=false;s.isShowing=true;if(openPane===false){close(pane,true)}else{open(pane,false,noAnimation,noAlert)}};var toggle=function(pane,slide){if(!isStr(pane)){pane.stopImmediatePropagation();pane=$(this).data("layoutEdge")}var s=state[str(pane)];if(s.isHidden){show(pane)}else{if(s.isClosed){open(pane,!!slide)}else{close(pane)}}};var _closePane=function(pane,setHandles){var $P=$Ps[pane],s=state[pane];$P.hide();s.isClosed=true;s.isVisible=false};var 
close=function(pane,force,noAnimation,skipCallback){if(!state.initialized){_closePane(pane);return}var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],doFX=!noAnimation&&!s.isClosed&&(o.fxName_close!="none"),isShowing=s.isShowing,isHiding=s.isHiding,wasSliding=s.isSliding;delete s.isShowing;delete s.isHiding;if(!$P||!o.closable){return}else{if(!force&&s.isClosed&&!isShowing){return}}if(_c.isLayoutBusy){_queue("close",pane,force);return}if(!isShowing&&false===_execCallback(pane,o.onclose_start)){return}_c[pane].isMoving=true;_c.isLayoutBusy=true;s.isClosed=true;s.isVisible=false;if(isHiding){s.isHidden=true}else{if(isShowing){s.isHidden=false}}if(s.isSliding){bindStopSlidingEvents(pane,false)}else{sizeMidPanes(_c[pane].dir=="horz"?"all":"center",false)}setAsClosed(pane);if(doFX){lockPaneForFX(pane,true);$P.hide(o.fxName_close,o.fxSettings_close,o.fxSpeed_close,function(){lockPaneForFX(pane,false);close_2()})}else{$P.hide();close_2()}function close_2(){if(s.isClosed){bindStartSlidingEvent(pane,true);var altPane=_c.altSide[pane];if(state[altPane].noRoom){setSizeLimits(altPane);makePaneFit(altPane)}if(!skipCallback&&(state.initialized||o.triggerEventsOnLoad)){if(!isShowing&&!wasSliding){_execCallback(pane,o.onclose_end||o.onclose)}if(isShowing){_execCallback(pane,o.onshow_end||o.onshow)}if(isHiding){_execCallback(pane,o.onhide_end||o.onhide)}}}_dequeue(pane)}};var setAsClosed=function(pane){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,rClass=o.resizerClass,tClass=o.togglerClass,_pane="-"+pane,_open="-open",_sliding="-sliding",_closed="-closed";$R.css(side,sC[inset]).removeClass(rClass+_open+" "+rClass+_pane+_open).removeClass(rClass+_sliding+" "+rClass+_pane+_sliding).addClass(rClass+_closed+" "+rClass+_pane+_closed).unbind("dblclick."+sID);if(o.resizable&&typeof $.fn.draggable=="function"){$R.draggable("disable").removeClass("ui-state-disabled").css("cursor","default").attr("title","")}if($T){$T.removeClass(tClass+_open+" "+tClass+_pane+_open).addClass(tClass+_closed+" "+tClass+_pane+_closed).attr("title",o.togglerTip_closed);$T.children(".content-open").hide();$T.children(".content-closed").css("display","block")}syncPinBtns(pane,false);if(state.initialized){sizeHandles("all")}};var open=function(pane,slide,noAnimation,noAlert){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],doFX=!noAnimation&&s.isClosed&&(o.fxName_open!="none"),isShowing=s.isShowing;delete s.isShowing;if(!$P||(!o.resizable&&!o.closable)){return}else{if(s.isVisible&&!s.isSliding){return}}if(s.isHidden&&!isShowing){show(pane,true);return}if(_c.isLayoutBusy){_queue("open",pane,slide);return}if(false===_execCallback(pane,o.onopen_start)){return}setSizeLimits(pane,slide);if(s.minSize>s.maxSize){syncPinBtns(pane,false);if(!noAlert&&o.noRoomToOpenTip){alert(o.noRoomToOpenTip)}return}_c[pane].isMoving=true;_c.isLayoutBusy=true;if(slide){bindStopSlidingEvents(pane,true)}else{if(s.isSliding){bindStopSlidingEvents(pane,false)}else{if(o.slidable){bindStartSlidingEvent(pane,false)}}}s.noRoom=false;makePaneFit(pane);s.isVisible=true;s.isClosed=false;if(isShowing){s.isHidden=false}if(doFX){lockPaneForFX(pane,true);$P.show(o.fxName_open,o.fxSettings_open,o.fxSpeed_open,function(){lockPaneForFX(pane,false);open_2()})}else{$P.show();open_2()}function open_2(){if(s.isVisible){_fixIframe(pane);if(!s.isSliding){sizeMidPanes(_c[pane].dir=="vert"?"center":"all",false)}setAsOpen(pane)}_dequeue(pane)}};var 
setAsOpen=function(pane,skipCallback){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,rClass=o.resizerClass,tClass=o.togglerClass,_pane="-"+pane,_open="-open",_closed="-closed",_sliding="-sliding";$R.css(side,sC[inset]+getPaneSize(pane)).removeClass(rClass+_closed+" "+rClass+_pane+_closed).addClass(rClass+_open+" "+rClass+_pane+_open);if(s.isSliding){$R.addClass(rClass+_sliding+" "+rClass+_pane+_sliding)}else{$R.removeClass(rClass+_sliding+" "+rClass+_pane+_sliding)}if(o.resizerDblClickToggle){$R.bind("dblclick",toggle)}removeHover(0,$R);if(o.resizable&&typeof $.fn.draggable=="function"){$R.draggable("enable").css("cursor",o.resizerCursor).attr("title",o.resizerTip)}else{if(!s.isSliding){$R.css("cursor","default")}}if($T){$T.removeClass(tClass+_closed+" "+tClass+_pane+_closed).addClass(tClass+_open+" "+tClass+_pane+_open).attr("title",o.togglerTip_open);removeHover(0,$T);$T.children(".content-closed").hide();$T.children(".content-open").css("display","block")}syncPinBtns(pane,!s.isSliding);$.extend(s,getElemDims($P));if(state.initialized){sizeHandles("all");sizeContent(pane,true)}if(!skipCallback&&(state.initialized||o.triggerEventsOnLoad)&&$P.is(":visible")){_execCallback(pane,o.onopen_end||o.onopen);if(s.isShowing){_execCallback(pane,o.onshow_end||o.onshow)}if(state.initialized){_execCallback(pane,o.onresize_end||o.onresize)}}};var slideOpen=function(evt_or_pane){var type=typeof evt_or_pane,pane=(type=="string"?evt_or_pane:$(this).data("layoutEdge"));if(type=="object"){evt_or_pane.stopImmediatePropagation()}if(state[pane].isClosed){open(pane,true)}else{bindStopSlidingEvents(pane,true)}};var slideClose=function(evt_or_pane){var $E=(isStr(evt_or_pane)?$Ps[evt_or_pane]:$(this)),pane=$E.data("layoutEdge"),o=options[pane],s=state[pane],$P=$Ps[pane];if(s.isClosed||s.isResizing){return}else{if(o.slideTrigger_close=="click"){close_NOW()}else{if(o.preventQuickSlideClose&&_c.isLayoutBusy){return}else{timer.set(pane+"_closeSlider",close_NOW,300)}}}function close_NOW(e){if(s.isClosed){bindStopSlidingEvents(pane,false)}else{close(pane)}}};var slideToggle=function(pane){toggle(pane,true)};var lockPaneForFX=function(pane,doLock){var $P=$Ps[pane];if(doLock){$P.css({zIndex:_c.zIndex.pane_animate});if(pane=="south"){$P.css({top:sC.insetTop+sC.innerHeight-$P.outerHeight()})}else{if(pane=="east"){$P.css({left:sC.insetLeft+sC.innerWidth-$P.outerWidth()})}}}else{if(pane=="south"){$P.css({top:"auto"})}else{if(pane=="east"){$P.css({left:"auto"})}}var o=options[pane];if(state.browser.msie&&o.fxOpacityFix&&o.fxName_open!="slide"&&$P.css("filter")&&$P.css("opacity")==1){$P[0].style.removeAttribute("filter")}}};var bindStartSlidingEvent=function(pane,enable){var o=options[pane],z=_c.zIndex,$P=$Ps[pane],$R=$Rs[pane],trigger=o.slideTrigger_open;if(!$R||!o.slidable){return}if(trigger.match(/mouseover/)){trigger=o.slideTrigger_open="mouseenter"}else{if(!trigger.match(/click|dblclick|mouseenter/)){trigger=o.slideTrigger_open="click"}}$R.css("zIndex",!enable?z.pane_sliding:z.resizer_normal);$P.css("zIndex",!enable?z.pane_sliding:z.pane_normal);$R[enable?"bind":"unbind"](trigger+"."+sID,slideOpen).css("cursor",enable?o.sliderCursor:"default").attr("title",enable?o.sliderTip:"")};var bindStopSlidingEvents=function(pane,enable){var 
o=options[pane],s=state[pane],trigger=o.slideTrigger_close,action=(enable?"bind":"unbind"),$P=$Ps[pane],$R=$Rs[pane];s.isSliding=enable;timer.clear(pane+"_closeSlider");if(enable){bindStartSlidingEvent(pane,false)}if(!trigger.match(/click|mouseleave/)){trigger=o.slideTrigger_close="mouseleave"}$R[action](trigger,slideClose);if(trigger=="mouseleave"){$P[action]("mouseleave."+sID,slideClose);$R[action]("mouseenter."+sID,cancelMouseOut);$P[action]("mouseenter."+sID,cancelMouseOut)}if(!enable){timer.clear(pane+"_closeSlider")}else{if(trigger=="click"&&!o.resizable){$R.css("cursor",enable?o.sliderCursor:"default");$R.attr("title",enable?o.togglerTip_open:"")}}function cancelMouseOut(evt){timer.clear(pane+"_closeSlider");evt.stopPropagation()}};var makePaneFit=function(pane,isOpening,skipCallback,force){var o=options[pane],s=state[pane],c=_c[pane],$P=$Ps[pane],$R=$Rs[pane],isSidePane=c.dir=="vert",hasRoom=false;if(pane=="center"||(isSidePane&&s.noVerticalRoom)){hasRoom=s.minHeight<=s.maxHeight&&(isSidePane||s.minWidth<=s.maxWidth);if(hasRoom&&s.noRoom){$P.show();if($R){$R.show()}s.isVisible=true;s.noRoom=false;if(isSidePane){s.noVerticalRoom=false}_fixIframe(pane)}else{if(!hasRoom&&!s.noRoom){$P.hide();if($R){$R.hide()}s.isVisible=false;s.noRoom=true}}}if(pane=="center"){}else{if(s.minSize<=s.maxSize){hasRoom=true;if(s.size>s.maxSize){sizePane(pane,s.maxSize,skipCallback,force)}else{if(s.size<s.minSize){sizePane(pane,s.minSize,skipCallback,force)}else{if($R&&$P.is(":visible")){var side=c.side.toLowerCase(),pos=s.size+sC["inset"+c.side];if(_cssNum($R,side)!=pos){$R.css(side,pos)}}}}if(s.noRoom){if(s.wasOpen&&o.closable){if(o.autoReopen){open(pane,false,true,true)}else{s.noRoom=false}}else{show(pane,s.wasOpen,true,true)}}}else{if(!s.noRoom){s.noRoom=true;s.wasOpen=!s.isClosed&&!s.isSliding;if(o.closable){close(pane,true,true)}else{hide(pane,true)}}}}};var manualSizePane=function(pane,size,skipCallback){var o=options[pane],forceResize=o.resizeWhileDragging&&!_c.isLayoutBusy;o.autoResize=false;sizePane(pane,size,skipCallback,forceResize)};var sizePane=function(pane,size,skipCallback,force){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,skipResizeWhileDragging=_c.isLayoutBusy&&!o.triggerEventsWhileDragging,oldSize;setSizeLimits(pane);oldSize=s.size;size=_parseSize(pane,size);size=max(size,_parseSize(pane,o.minSize));size=min(size,s.maxSize);if(size<s.minSize){makePaneFit(pane,false,skipCallback);return}if(!force&&size==oldSize){return}if(!skipCallback&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_start)}$P.css(_c[pane].sizeType.toLowerCase(),max(1,cssSize(pane,size)));s.size=size;$.extend(s,getElemDims($P));if($R&&$P.is(":visible")){$R.css(side,size+sC[inset])}sizeContent(pane);if(!skipCallback&&!skipResizeWhileDragging&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_end||o.onresize);if(o.resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}}if(!skipCallback){if(!s.isSliding){sizeMidPanes(_c[pane].dir=="horz"?"all":"center",skipResizeWhileDragging,force)}sizeHandles("all")}var altPane=_c.altSide[pane];if(size<oldSize&&state[altPane].noRoom){setSizeLimits(altPane);makePaneFit(altPane,false,skipCallback)}};var sizeMidPanes=function(panes,skipCallback,force){if(!panes||panes=="all"){panes="east,west,center"}$.each(panes.split(","),function(i,pane){if(!$Ps[pane]){return}var 
o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],isCenter=(pane=="center"),hasRoom=true,CSS={},d=calcNewCenterPaneDims();$.extend(s,getElemDims($P));if(pane=="center"){if(!force&&s.isVisible&&d.width==s.outerWidth&&d.height==s.outerHeight){return true}$.extend(s,cssMinDims(pane),{maxWidth:d.width,maxHeight:d.height});CSS=d;CSS.width=cssW(pane,d.width);CSS.height=cssH(pane,d.height);hasRoom=CSS.width>0&&CSS.height>0;if(!hasRoom&&!state.initialized&&o.minWidth>0){var reqPx=o.minWidth-s.outerWidth,minE=options.east.minSize||0,minW=options.west.minSize||0,sizeE=state.east.size,sizeW=state.west.size,newE=sizeE,newW=sizeW;if(reqPx>0&&state.east.isVisible&&sizeE>minE){newE=max(sizeE-minE,sizeE-reqPx);reqPx-=sizeE-newE}if(reqPx>0&&state.west.isVisible&&sizeW>minW){newW=max(sizeW-minW,sizeW-reqPx);reqPx-=sizeW-newW}if(reqPx==0){if(sizeE!=minE){sizePane("east",newE,true)}if(sizeW!=minW){sizePane("west",newW,true)}sizeMidPanes("center",skipCallback,force);return}}}else{$.extend(s,getElemDims($P),cssMinDims(pane));if(!force&&!s.noVerticalRoom&&d.height==s.outerHeight){return true}CSS.top=d.top;CSS.bottom=d.bottom;CSS.height=cssH(pane,d.height);s.maxHeight=max(0,CSS.height);hasRoom=(s.maxHeight>0);if(!hasRoom){s.noVerticalRoom=true}}if(hasRoom){if(!skipCallback&&state.initialized){_execCallback(pane,o.onresize_start)}$P.css(CSS);$.extend(s,getElemDims($P));if(s.noRoom){makePaneFit(pane)}if(state.initialized){sizeContent(pane)}}else{if(!s.noRoom&&s.isVisible){makePaneFit(pane)}}if(pane=="center"){var b=state.browser;var fix=b.isIE6||(b.msie&&!b.boxModel);if($Ps.north&&(fix||state.north.tagName=="IFRAME")){$Ps.north.css("width",cssW($Ps.north,sC.innerWidth))}if($Ps.south&&(fix||state.south.tagName=="IFRAME")){$Ps.south.css("width",cssW($Ps.south,sC.innerWidth))}}if(!skipCallback&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_end||o.onresize);if(o.resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}}})};var resizeAll=function(){var oldW=sC.innerWidth,oldH=sC.innerHeight;$.extend(state.container,getElemDims($Container));if(!sC.outerHeight){return}if(false===_execCallback(null,options.onresizeall_start)){return false}var shrunkH=(sC.innerHeight<oldH),shrunkW=(sC.innerWidth<oldW),$P,o,s,dir;$.each(["south","north","east","west"],function(i,pane){if(!$Ps[pane]){return}s=state[pane];o=options[pane];dir=_c[pane].dir;if(o.autoResize&&s.size!=o.size){sizePane(pane,o.size,true,true)}else{setSizeLimits(pane);makePaneFit(pane,false,true,true)}});sizeMidPanes("all",true,true);sizeHandles("all");o=options;$.each(_c.allPanes.split(","),function(i,pane){$P=$Ps[pane];if(!$P){return}if(state[pane].isVisible){_execCallback(pane,o[pane].onresize_end||o[pane].onresize)}if(o[pane].resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}});_execCallback(null,o.onresizeall_end||o.onresizeall)};var sizeContent=function(panes,remeasure){if(!panes||panes=="all"){panes=_c.allPanes}$.each(panes.split(","),function(idx,pane){var $P=$Ps[pane],$C=$Cs[pane],o=options[pane],s=state[pane],m=s.content;if(!$P||!$C||!$P.is(":visible")){return true}if(false===_execCallback(null,o.onsizecontent_start)){return}if(!_c.isLayoutBusy||m.top==undefined||remeasure||o.resizeContentWhileDragging){_measure();if(m.hiddenFooters>0&&$P.css("overflow")=="hidden"){$P.css("overflow","visible");_measure();$P.css("overflow","hidden")}}var 
newH=s.innerHeight-(m.spaceAbove-s.css.paddingTop)-(m.spaceBelow-s.css.paddingBottom);if(!$C.is(":visible")||m.height!=newH){setOuterHeight($C,newH,true);m.height=newH}if(state.initialized){_execCallback(pane,o.onsizecontent_end||o.onsizecontent);if(o.resizeNestedLayout&&$C.data("layoutContainer")){$C.layout().resizeAll()}}function _below($E){return max(s.css.paddingBottom,(parseInt($E.css("marginBottom"))||0))}function _measure(){var ignore=options[pane].contentIgnoreSelector,$Fs=$C.nextAll().not(ignore||":lt(0)"),$Fs_vis=$Fs.filter(":visible"),$F=$Fs_vis.filter(":last");m={top:$C[0].offsetTop,height:$C.outerHeight(),numFooters:$Fs.length,hiddenFooters:$Fs.length-$Fs_vis.length,spaceBelow:0};m.spaceAbove=m.top;m.bottom=m.top+m.height;if($F.length){m.spaceBelow=($F[0].offsetTop+$F.outerHeight())-m.bottom+_below($F)}else{m.spaceBelow=_below($C)}}})};var sizeHandles=function(panes){if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(i,pane){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],$TC;if(!$P||!$R){return}var dir=_c[pane].dir,_state=(s.isClosed?"_closed":"_open"),spacing=o["spacing"+_state],togAlign=o["togglerAlign"+_state],togLen=o["togglerLength"+_state],paneLen,offset,CSS={};if(spacing==0){$R.hide();return}else{if(!s.noRoom&&!s.isHidden){$R.show()}}if(dir=="horz"){paneLen=$P.outerWidth();s.resizerLength=paneLen;$R.css({width:max(1,cssW($R,paneLen)),height:max(0,cssH($R,spacing)),left:_cssNum($P,"left")})}else{paneLen=$P.outerHeight();s.resizerLength=paneLen;$R.css({height:max(1,cssH($R,paneLen)),width:max(0,cssW($R,spacing)),top:sC.insetTop+getPaneSize("north",true)})}removeHover(o,$R);if($T){if(togLen==0||(s.isSliding&&o.hideTogglerOnSlide)){$T.hide();return}else{$T.show()}if(!(togLen>0)||togLen=="100%"||togLen>paneLen){togLen=paneLen;offset=0}else{if(isStr(togAlign)){switch(togAlign){case"top":case"left":offset=0;break;case"bottom":case"right":offset=paneLen-togLen;break;case"middle":case"center":default:offset=Math.floor((paneLen-togLen)/2)}}else{var x=parseInt(togAlign);if(togAlign>=0){offset=x}else{offset=paneLen-togLen+x}}}if(dir=="horz"){var width=cssW($T,togLen);$T.css({width:max(0,width),height:max(1,cssH($T,spacing)),left:offset,top:0});$T.children(".content").each(function(){$TC=$(this);$TC.css("marginLeft",Math.floor((width-$TC.outerWidth())/2))})}else{var height=cssH($T,togLen);$T.css({height:max(0,height),width:max(1,cssW($T,spacing)),top:offset,left:0});$T.children(".content").each(function(){$TC=$(this);$TC.css("marginTop",Math.floor((height-$TC.outerHeight())/2))})}removeHover(0,$T)}if(!state.initialized&&o.initHidden){$R.hide();if($T){$T.hide()}}})};var swapPanes=function(pane1,pane2){state[pane1].edge=pane2;state[pane2].edge=pane1;var cancelled=false;if(false===_execCallback(pane1,options[pane1].onswap_start)){cancelled=true}if(!cancelled&&false===_execCallback(pane2,options[pane2].onswap_start)){cancelled=true}if(cancelled){state[pane1].edge=pane1;state[pane2].edge=pane2;return}var 
oPane1=copy(pane1),oPane2=copy(pane2),sizes={};sizes[pane1]=oPane1?oPane1.state.size:0;sizes[pane2]=oPane2?oPane2.state.size:0;$Ps[pane1]=false;$Ps[pane2]=false;state[pane1]={};state[pane2]={};if($Ts[pane1]){$Ts[pane1].remove()}if($Ts[pane2]){$Ts[pane2].remove()}if($Rs[pane1]){$Rs[pane1].remove()}if($Rs[pane2]){$Rs[pane2].remove()}$Rs[pane1]=$Rs[pane2]=$Ts[pane1]=$Ts[pane2]=false;move(oPane1,pane2);move(oPane2,pane1);oPane1=oPane2=sizes=null;if($Ps[pane1]){$Ps[pane1].css(_c.visible)}if($Ps[pane2]){$Ps[pane2].css(_c.visible)}resizeAll();_execCallback(pane1,options[pane1].onswap_end||options[pane1].onswap);_execCallback(pane2,options[pane2].onswap_end||options[pane2].onswap);return;function copy(n){var $P=$Ps[n],$C=$Cs[n];return !$P?false:{pane:n,P:$P?$P[0]:false,C:$C?$C[0]:false,state:$.extend({},state[n]),options:$.extend({},options[n])}}function move(oPane,pane){if(!oPane){return}var P=oPane.P,C=oPane.C,oldPane=oPane.pane,c=_c[pane],side=c.side.toLowerCase(),inset="inset"+c.side,s=$.extend({},state[pane]),o=options[pane],fx={resizerCursor:o.resizerCursor},re,size,pos;$.each("fxName,fxSpeed,fxSettings".split(","),function(i,k){fx[k]=o[k];fx[k+"_open"]=o[k+"_open"];fx[k+"_close"]=o[k+"_close"]});$Ps[pane]=$(P).data("layoutEdge",pane).css(_c.hidden).css(c.cssReq);$Cs[pane]=C?$(C):false;options[pane]=$.extend({},oPane.options,fx);state[pane]=$.extend({},oPane.state);re=new RegExp(o.paneClass+"-"+oldPane,"g");P.className=P.className.replace(re,o.paneClass+"-"+pane);initHandles(pane);initResizable(pane);if(c.dir!=_c[oldPane].dir){size=sizes[pane]||0;setSizeLimits(pane);size=max(size,state[pane].minSize);manualSizePane(pane,size,true)}else{$Rs[pane].css(side,sC[inset]+(state[pane].isVisible?getPaneSize(pane):0))}if(oPane.state.isVisible&&!s.isVisible){setAsOpen(pane,true)}else{setAsClosed(pane,true);bindStartSlidingEvent(pane,true)}oPane=null}};function keyDown(evt){if(!evt){return true}var code=evt.keyCode;if(code<33){return true}var PANE={38:"north",40:"south",37:"west",39:"east"},ALT=evt.altKey,SHIFT=evt.shiftKey,CTRL=evt.ctrlKey,CURSOR=(CTRL&&code>=37&&code<=40),o,k,m,pane;if(CURSOR&&options[PANE[code]].enableCursorHotkey){pane=PANE[code]}else{if(CTRL||SHIFT){$.each(_c.borderPanes.split(","),function(i,p){o=options[p];k=o.customHotkey;m=o.customHotkeyModifier;if((SHIFT&&m=="SHIFT")||(CTRL&&m=="CTRL")||(CTRL&&SHIFT)){if(k&&code==(isNaN(k)||k<=9?k.toUpperCase().charCodeAt(0):k)){pane=p;return false}}})}}if(!pane||!$Ps[pane]||!options[pane].closable||state[pane].isHidden){return true}toggle(pane);evt.stopPropagation();evt.returnValue=false;return false}function allowOverflow(el){if(this&&this.tagName){el=this}var $P;if(isStr(el)){$P=$Ps[el]}else{if($(el).data("layoutRole")){$P=$(el)}else{$(el).parents().each(function(){if($(this).data("layoutRole")){$P=$(this);return false}})}}if(!$P||!$P.length){return}var pane=$P.data("layoutEdge"),s=state[pane];if(s.cssSaved){resetOverflow(pane)}if(s.isSliding||s.isResizing||s.isClosed){s.cssSaved=false;return}var newCSS={zIndex:(_c.zIndex.pane_normal+2)},curCSS={},of=$P.css("overflow"),ofX=$P.css("overflowX"),ofY=$P.css("overflowY");if(of!="visible"){curCSS.overflow=of;newCSS.overflow="visible"}if(ofX&&!ofX.match(/visible|auto/)){curCSS.overflowX=ofX;newCSS.overflowX="visible"}if(ofY&&!ofY.match(/visible|auto/)){curCSS.overflowY=ofX;newCSS.overflowY="visible"}s.cssSaved=curCSS;$P.css(newCSS);$.each(_c.allPanes.split(","),function(i,p){if(p!=pane){resetOverflow(p)}})}function resetOverflow(el){if(this&&this.tagName){el=this}var 
$P;if(isStr(el)){$P=$Ps[el]}else{if($(el).data("layoutRole")){$P=$(el)}else{$(el).parents().each(function(){if($(this).data("layoutRole")){$P=$(this);return false}})}}if(!$P||!$P.length){return}var pane=$P.data("layoutEdge"),s=state[pane],CSS=s.cssSaved||{};if(!s.isSliding&&!s.isResizing){$P.css("zIndex",_c.zIndex.pane_normal)}$P.css(CSS);s.cssSaved=false}function getBtn(selector,pane,action){var $E=$(selector);if(!$E.length){alert(lang.errButton+lang.selector+": "+selector)}else{if(_c.borderPanes.indexOf(pane)==-1){alert(lang.errButton+lang.Pane.toLowerCase()+": "+pane)}else{var btn=options[pane].buttonClass+"-"+action;$E.addClass(btn+" "+btn+"-"+pane).data("layoutName",options.name);return $E}}return false}function bindButton(selector,action,pane){switch(action.toLowerCase()){case"toggle":addToggleBtn(selector,pane);break;case"open":addOpenBtn(selector,pane);break;case"close":addCloseBtn(selector,pane);break;case"pin":addPinBtn(selector,pane);break;case"toggle-slide":addToggleBtn(selector,pane,true);break;case"open-slide":addOpenBtn(selector,pane,true);break}}function addToggleBtn(selector,pane,slide){var $E=getBtn(selector,pane,"toggle");if($E){$E.click(function(evt){toggle(pane,!!slide);evt.stopPropagation()})}}function addOpenBtn(selector,pane,slide){var $E=getBtn(selector,pane,"open");if($E){$E.attr("title",lang.Open).click(function(evt){open(pane,!!slide);evt.stopPropagation()})}}function addCloseBtn(selector,pane){var $E=getBtn(selector,pane,"close");if($E){$E.attr("title",lang.Close).click(function(evt){close(pane);evt.stopPropagation()})}}function addPinBtn(selector,pane){var $E=getBtn(selector,pane,"pin");if($E){var s=state[pane];$E.click(function(evt){setPinState($(this),pane,(s.isSliding||s.isClosed));if(s.isSliding||s.isClosed){open(pane)}else{close(pane)}evt.stopPropagation()});setPinState($E,pane,(!s.isClosed&&!s.isSliding));_c[pane].pins.push(selector)}}function syncPinBtns(pane,doPin){$.each(_c[pane].pins,function(i,selector){setPinState($(selector),pane,doPin)})}function setPinState($Pin,pane,doPin){var updown=$Pin.attr("pin");if(updown&&doPin==(updown=="down")){return}var pin=options[pane].buttonClass+"-pin",side=pin+"-"+pane,UP=pin+"-up "+side+"-up",DN=pin+"-down "+side+"-down";$Pin.attr("pin",doPin?"down":"up").attr("title",doPin?lang.Unpin:lang.Pin).removeClass(doPin?UP:DN).addClass(doPin?DN:UP)}function isCookiesEnabled(){return(navigator.cookieEnabled!=0)}function getCookie(opts){var o=$.extend({},options.cookie,opts||{}),name=o.name||options.name||"Layout",c=document.cookie,cs=c?c.split(";"):[],pair;for(var i=0,n=cs.length;i<n;i++){pair=$.trim(cs[i]).split("=");if(pair[0]==name){return decodeJSON(decodeURIComponent(pair[1]))}}return""}function saveCookie(keys,opts){var o=$.extend({},options.cookie,opts||{}),name=o.name||options.name||"Layout",params="",date="",clear=false;if(o.expires.toUTCString){date=o.expires}else{if(typeof o.expires=="number"){date=new Date();if(o.expires>0){date.setDate(date.getDate()+o.expires)}else{date.setYear(1970);clear=true}}}if(date){params+=";expires="+date.toUTCString()}if(o.path){params+=";path="+o.path}if(o.domain){params+=";domain="+o.domain}if(o.secure){params+=";secure"}if(clear){state.cookie={};document.cookie=name+"="+params}else{state.cookie=getState(keys||o.keys);document.cookie=name+"="+encodeURIComponent(encodeJSON(state.cookie))+params}return $.extend({},state.cookie)}function deleteCookie(){saveCookie("",{expires:-1})}function loadCookie(opts){var o=getCookie(opts);if(o){state.cookie=$.extend({},o);loadState(o)}return 
o}function loadState(opts){$.extend(true,options,opts)}function getState(keys){var data={},alt={isClosed:"initClosed",isHidden:"initHidden"},pair,pane,key,val;if(!keys){keys=options.cookie.keys}if($.isArray(keys)){keys=keys.join(",")}keys=keys.replace(/__/g,".").split(",");for(var i=0,n=keys.length;i<n;i++){pair=keys[i].split(".");pane=pair[0];key=pair[1];if(_c.allPanes.indexOf(pane)<0){continue}val=state[pane][key];if(val==undefined){continue}if(key=="isClosed"&&state[pane]["isSliding"]){val=true}(data[pane]||(data[pane]={}))[alt[key]?alt[key]:key]=val}return data}function encodeJSON(JSON){return parse(JSON);function parse(h){var D=[],i=0,k,v,t;for(k in h){v=h[k];t=typeof v;if(t=="string"){v='"'+v+'"'}else{if(t=="object"){v=parse(v)}}D[i++]='"'+k+'":'+v}return"{"+D.join(",")+"}"}}function decodeJSON(str){try{return window["eval"]("("+str+")")||{}}catch(e){return{}}}var $Container=$(this).eq(0);if(!$Container.length){return null}if($Container.data("layoutContainer")){return $.extend({},window[$Container.data("layoutContainer")])}var $Ps={},$Cs={},$Rs={},$Ts={},sC=state.container,sID=state.id;_create();var Instance={options:options,state:state,container:$Container,panes:$Ps,contents:$Cs,resizers:$Rs,togglers:$Ts,toggle:toggle,hide:hide,show:show,open:open,close:close,slideOpen:slideOpen,slideClose:slideClose,slideToggle:slideToggle,initContent:initContent,sizeContent:sizeContent,sizePane:manualSizePane,swapPanes:swapPanes,resizeAll:resizeAll,destroy:destroy,setSizeLimits:setSizeLimits,bindButton:bindButton,addToggleBtn:addToggleBtn,addOpenBtn:addOpenBtn,addCloseBtn:addCloseBtn,addPinBtn:addPinBtn,allowOverflow:allowOverflow,resetOverflow:resetOverflow,encodeJSON:encodeJSON,decodeJSON:decodeJSON,getState:getState,getCookie:getCookie,saveCookie:saveCookie,deleteCookie:deleteCookie,loadCookie:loadCookie,loadState:loadState,cssWidth:cssW,cssHeight:cssH};window[sID]=Instance;return Instance}})(jQuery);
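For orientation, the minified block above ends by assembling an Instance object whose public methods (toggle, open, close, sizePane, resizeAll, and so on) are handed back to the caller. The sketch below is a hypothetical usage example, not part of the diff; it assumes the plugin is invoked through the usual jQuery UI Layout entry point $.fn.layout (presumably registered earlier in the file, outside this excerpt), and the pane selectors are placeholders.

// Hypothetical scaladoc-style page: a resizable west navigation pane plus a center content pane.
// Assumes jQuery and the layout plugin above are already loaded.
$(document).ready(function () {
    var layout = $("body").layout({
        west: {
            size: 250,       // initial width; parsed by _parseSize/cssW above
            minSize: 150,
            closable: true,
            slidable: true
        },
        center: {
            paneSelector: "#content"  // placeholder selector for the center pane
        },
        resizeWithWindow: true        // _create() binds a delayed resizeAll() to window resize
    });

    // The returned Instance exposes the pane operations defined above.
    layout.close("west");             // collapse the navigation pane
    layout.open("west");              // reopen it
    layout.sizePane("west", 300);     // manualSizePane: resize, then re-fit the other panes
    layout.resizeAll();               // recompute all pane dimensions
});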
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
new file mode 100644
index 0000000000..886cd5c7c3
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
new file mode 100644
index 0000000000..0a483cacf5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
new file mode 100644
index 0000000000..c3eef6ea51
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
new file mode 100644
index 0000000000..5ab05cc6ac
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
new file mode 100644
index 0000000000..4625f9df74
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
new file mode 100644
index 0000000000..3764f82ccb
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
new file mode 100644
index 0000000000..4417f5b438
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
@@ -0,0 +1,71 @@
+// © 2010 EPFL/LAMP
+// code by Gilles Dubochet
+
+function Scheduler() {
+ var scheduler = this;
+ var resolution = 0;
+ this.timeout = undefined;
+ this.queues = new Array(0); // an array of work-package queues, one per entry in the labels table.
+ this.labels = new Array(0); // an array of labels, ordered by priority. This list should stay short.
+ this.label = function(name, priority) {
+ this.name = name;
+ this.priority = priority;
+ }
+ this.work = function(fn, self, args) {
+ this.fn = fn;
+ this.self = self;
+ this.args = args;
+ }
+ this.addLabel = function(name, priority) {
+ var idx = 0;
+ while (idx < scheduler.queues.length && scheduler.labels[idx].priority <= priority) { idx = idx + 1; }
+ scheduler.labels.splice(idx, 0, new scheduler.label(name, priority));
+ scheduler.queues.splice(idx, 0, new Array(0));
+ }
+ this.clearLabel = function(name) {
+ var idx = 0;
+ while (idx < scheduler.queues.length && scheduler.labels[idx].name != name) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.labels[idx].name == name) {
+ scheduler.labels.splice(idx, 1);
+ scheduler.queues.splice(idx, 1);
+ }
+ }
+ this.nextWork = function() {
+ var fn = undefined;
+ var idx = 0;
+ while (idx < scheduler.queues.length && scheduler.queues[idx].length == 0) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.queues[idx].length > 0) {
+ fn = scheduler.queues[idx].shift();
+ }
+ return fn;
+ }
+ this.add = function(labelName, fn, self, args) {
+ var doWork = function() {
+ scheduler.timeout = setTimeout(function() {
+ var work = scheduler.nextWork();
+ if (work != undefined) {
+ if (work.args == undefined) { work.args = new Array(0); }
+ work.fn.apply(work.self, work.args);
+ doWork();
+ }
+ else {
+ scheduler.timeout = undefined;
+ }
+ }, resolution);
+ }
+ var idx = 0;
+ while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
+ scheduler.queues[idx].push(new scheduler.work(fn, self, args));
+ if (scheduler.timeout == undefined) doWork();
+ }
+ else throw("queue for add is non-existent")
+ }
+ this.clear = function(labelName) {
+ var idx = 0;
+ while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
+ scheduler.queues[idx] = new Array();
+ }
+ }
+};
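A brief, hypothetical usage sketch of the Scheduler added above (not part of the diff): labels are kept sorted by priority, each label owns a queue of work packages, and queued packages run asynchronously via setTimeout, draining the lowest-numbered (highest-priority) label first. The label names below are made up for illustration.

// Hypothetical driver for the Scheduler defined in scheduler.js.
var sched = new Scheduler();
sched.addLabel("init", 1);    // a lower priority number sorts earlier, so it runs first
sched.addLabel("filter", 5);

// add(label, fn, self, args): fn is applied with the given receiver and argument array.
sched.add("filter", function (what) { console.log("filtering " + what); }, window, ["members"]);
sched.add("init", function () { console.log("initialising"); }, window);

// Packages execute one per setTimeout tick, emptying the "init" queue before "filter".
// clear(label) drops any packages that have not run yet.
sched.clear("filter");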
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
new file mode 100644
index 0000000000..de78793fe6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -0,0 +1,496 @@
+/* Reset */
+
+html, body, div, span, object, iframe,
+h1, h2, h3, h4, h5, h6, p, blockquote, pre,
+a, abbr, acronym, address, code,
+del, dfn, em, img, q, dl, dt, dd, ol, ul, li,
+fieldset, form, label, legend, input,
+table, caption, tbody, tfoot, thead, tr, th, td {
+ margin: 0;
+ padding: 0;
+ border: 0;
+ font-weight: inherit;
+ font-style: inherit;
+ font-size: 100%;
+ font-family: inherit;
+ vertical-align: baseline;
+}
+
+body {
+ line-height: 1;
+}
+
+table { border-collapse: separate; border-spacing: 0; }
+caption, th, td { text-align: left; font-weight: normal; }
+table, td, th { vertical-align: middle; }
+
+blockquote:before, blockquote:after, q:before, q:after { content: ""; }
+blockquote, q { quotes: none; }
+
+a img { border: none; }
+
+input { border-width: 0px; }
+
+/* Page */
+
+body {
+ font-family: sans-serif;
+ font-size: 10pt;
+}
+
+a {
+ text-decoration: underline;
+ color: #69481D; /* brown */
+}
+
+a:hover {
+ text-decoration: none;
+}
+
+#owner {
+ padding: 4px;
+ font-size: 12pt;
+ display: block;
+}
+
+#owner a {
+ color: black;
+}
+
+.value #owner {
+ background-color: #7996AC;
+}
+
+.type #owner {
+ background-color: #799F5A;
+}
+
+#definition {
+ display: block;
+ padding: 6px;
+}
+
+#definition > img {
+ float: left;
+}
+
+#definition > h1 {
+ padding: 12px 0 12px 6px;
+ color: white;
+ font-size: 24pt;
+ display: inline-block;
+}
+
+#signature > span.kind {
+ font-size: 11pt;
+ display: inline;
+ float: left;
+ text-align: left;
+ width: auto;
+ padding-left: 6px;
+}
+
+#signature > span.symbol {
+ font-size: 11pt;
+ text-align: left;
+}
+
+.value #definition {
+ background-color: #2C475C;
+}
+
+.type #definition {
+ background-color: #385E1A;
+}
+
+#comment {
+ padding-right: 8px;
+ padding-left: 8px;
+}
+
+#template .values > h3 {
+ color: white;
+ padding: 4px;
+ background-color: #7996AC;
+ font-size: 12pt;
+ font-weight: bold;
+}
+
+#template .types > h3 {
+ padding: 4px;
+ color: white;
+ font-weight: bold;
+ font-size: 12pt;
+ background-color: #799F5A;
+}
+
+#constructors > h3 {
+ padding: 4px;
+ color: white;
+ font-weight: bold;
+ font-size: 12pt;
+ background-color: #333;
+}
+
+#template > div.parent > h3 {
+ color: white;
+ padding: 4px;
+ margin-top: 10px;
+ background-color: #47166D;
+ font-size: 12pt;
+ font-weight: bold;
+}
+
+#template > div.parent > h3 * {
+ color: white;
+}
+
+/* Member cells */
+
+div.members > ol {
+ background-color: white;
+ list-style: none
+}
+
+div.members > ol > li {
+ display: block;
+}
+
+/* Member signatures */
+
+#tooltip {
+ background: #EFD5B5;
+ border: 1px solid gray;
+ color: black;
+ display: none;
+ padding: 5px;
+ position: absolute;
+}
+
+.signature {
+ padding: 3px;
+ font-family: monospace;
+ font-size: 10pt;
+ clear: both;
+ display: block;
+ background-color: #E5E5E5;
+}
+
+.signature .kind {
+ text-align: right;
+ float: left;
+ display: inline-block;
+ width: 8em;
+}
+
+.signature > a > .symbol > .name {
+ text-decoration: underline;
+}
+
+.signature > a:hover > .symbol > .name {
+ text-decoration: none;
+}
+
+.signature > a {
+ text-decoration: none;
+}
+
+.signature .symbol {
+ display: inline;
+ padding-left: 0.7em;
+}
+
+.signature .name {
+ display: inline-block;
+ font-weight: bold;
+}
+
+.signature .symbol .params .implicit {
+ font-style: italic;
+}
+
+.signature .symbol .name.deprecated {
+ text-decoration: line-through;
+}
+
+.signature .symbol .params .default {
+ font-style: italic;
+}
+
+#template .values .signature .name {
+ color: blue;
+}
+
+#template .types .signature .name {
+ color: green;
+}
+
+/* Comment text formatting */
+
+.cmt {}
+
+.cmt p {
+ margin-bottom: 0.4em;
+ margin-top: 0.4em;
+}
+
+.cmt h3 {
+ margin-bottom: 1em;
+ margin-top: 1em;
+ display: block;
+ text-align: left;
+ font-weight: bold;
+ font-size: x-large;
+}
+
+.cmt h4 {
+ margin-bottom: 0.6em;
+ margin-top: 0.6em;
+ display: block;
+ text-align: left;
+ font-weight: bold;
+ font-size: large;
+}
+
+.cmt h5 {
+ margin-bottom: 0.4em;
+ margin-top: 0.4em;
+ display: block;
+ text-align: left;
+ font-weight: bold;
+}
+
+.cmt h6 {
+ margin-bottom: 0.4em;
+ margin-top: 0.4em;
+ display: block;
+ text-align: left;
+ font-style: italic;
+}
+
+.cmt pre {
+ padding: 0.4em;
+ border-color: #ddd;
+ border-style: solid;
+ border-width: 1px;
+ margin-left: 0;
+ margin-bottom: 0.4em;
+ margin-right: 0;
+ margin-top: 0.4em;
+ background-color: #eee;
+ display: block;
+ font-family: monospace;
+}
+
+.cmt ul {
+ display: block;
+ list-style: circle;
+ padding-left:20px;
+}
+
+.cmt ol {
+ display: block;
+ padding-left:20px;
+}
+
+.cmt ol.decimal {
+ list-style: decimal;
+}
+
+.cmt ol.lowerAlpha {
+ list-style: lower-alpha;
+}
+
+.cmt ol.upperAlpha {
+ list-style: upper-alpha;
+}
+
+.cmt ol.lowerRoman {
+ list-style: lower-roman;
+}
+
+.cmt ol.upperRoman {
+ list-style: upper-roman;
+}
+
+.cmt li {
+ display:list-item;
+}
+
+.cmt code {
+ font-family: monospace;
+}
+
+.cmt a {
+ font-weight: bold;
+}
+
+/* Comments structured layout */
+
+p.comment {
+ display: block;
+ margin-left: 8.7em;
+}
+
+p.shortcomment {
+ display: block;
+ margin-left: 8.7em;
+ cursor: pointer;
+}
+
+div.fullcomment {
+ margin: 10px 0 10px 0;
+}
+
+#template div.fullcomment {
+ display:none;
+ margin: 6px 0 6px 8.7em;
+}
+
+div.fullcomment .block {
+ padding: 2px 0 2px 0;
+ border-top: 1px solid gray;
+ border-bottom: 1px solid gray;
+}
+
+div.fullcomment div.block ol li p,
+div.fullcomment div.block ol li {
+ display:inline
+}
+
+div.fullcomment .block + .block {
+ border-top: none;
+}
+
+div.fullcomment .block > h5 {
+ font-style: italic;
+ font-weight: normal;
+ display: inline-block;
+}
+
+div.fullcomment .comment {
+ margin: 6px 0 6px 0;
+}
+
+div.fullcomment dl.paramcmts > dt {
+ display: block;
+ float: left;
+ font-weight: bold;
+ margin: 2px 4px 2px 0;
+}
+
+div.fullcomment dl.paramcmts > dd {
+ display: block;
+ padding-left: 80px;
+ border-top: 1px dashed gray;
+ border-bottom: 1px dashed gray;
+}
+
+div.fullcomment dl.paramcmts > dt:first-child + dd {
+ border-top: none;
+}
+
+div.fullcomment dl.paramcmts > dd:last-child {
+ border-bottom: none;
+}
+
+div.fullcomment dl.paramcmts > dd + dt + dd {
+ border-top: none;
+}
+
+/* Members filter tool */
+
+#textfilter {
+ position: relative;
+ display: block;
+ height: 20px;
+ margin-bottom: 5px;
+}
+
+#textfilter > .pre {
+ display: block;
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_left.png");
+}
+
+#textfilter > .input {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 20px;
+ left: 20px;
+}
+
+#textfilter > .input > input {
+ height: 16px;
+ padding: 2px;
+ font-weight: bold;
+ color: #993300;
+ background-color: white;
+ width: 100%;
+}
+
+#textfilter > .post {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_right.png");
+}
+
+#mbrsel {
+ padding: 4px;
+ background-color: #B78E99; /* grayish pink */
+ display: block;
+ margin-bottom: 10px;
+}
+
+#mbrsel > div > span.filtertype {
+ padding: 4px;
+ float: left;
+ display: inline-block;
+ color: white;
+ width: 4.5em;
+}
+
+#mbrsel > div > ol {
+ display: inline-block;
+ background-color: white;
+}
+
+#mbrsel > div > ol#linearization {
+ display: inline;
+}
+
+#mbrsel > div > ol > li {
+ padding: 4px 8px 4px 8px;
+ background-color: white;
+ display: inline-block;
+ cursor: pointer;
+}
+
+#mbrsel > div > ol > li.in {
+ background-color: white;
+ color: #4C4C4C;
+}
+
+#mbrsel > div > ol > li.out {
+ color: #80384E;
+ background-color: #9E6A77;
+}
+
+#mbrsel .hideall {
+ color: #4C4C4C;
+ font-weight: bold;
+}
+
+#mbrsel .showall {
+ color: #4C4C4C;
+ font-weight: bold;
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
new file mode 100644
index 0000000000..e32c7f135e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -0,0 +1,270 @@
+// © 2009–2010 EPFL/LAMP
+// code by Gilles Dubochet with contributions by Pedro Furlanetto
+
+$(document).ready(function(){
+ var prefilters = $("#ancestors > ol > li").filter(function(){
+ var name = $(this).attr("name");
+ return name == "scala.Any" || name == "scala.AnyRef";
+ });
+ prefilters.removeClass("in");
+ prefilters.addClass("out");
+ filter();
+
+ var input = $("#textfilter > input");
+ input.bind("keyup", function(event) {
+ if (event.keyCode == 27) { // escape
+ input.attr("value", "");
+ }
+ filter();
+ });
+ input.focus(function(event) { input.select(); });
+ $("#textfilter > .post").click(function(){
+ $("#textfilter > input").attr("value", "");
+ filter();
+ });
+
+ $("#ancestors > ol > li").click(function(){
+ if ($(this).hasClass("in")) {
+ $(this).removeClass("in");
+ $(this).addClass("out");
+ }
+ else if ($(this).hasClass("out")) {
+ $(this).removeClass("out");
+ $(this).addClass("in");
+ };
+ filter();
+ });
+ $("#ancestors > ol > li.hideall").click(function() {
+ $("#ancestors > ol > li.in").removeClass("in").addClass("out");
+ filter();
+ })
+ $("#ancestors > ol > li.showall").click(function() {
+ var filtered =
+ $("#ancestors > ol > li.out").filter(function() {
+ var name = $(this).attr("name");
+ return !(name == "scala.Any" || name == "scala.AnyRef");
+ });
+ filtered.removeClass("out").addClass("in");
+ filter();
+ });
+ $("#visbl > ol > li.public").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#visbl > ol > li.all").removeClass("in").addClass("out");
+ filter();
+ };
+ })
+ $("#visbl > ol > li.all").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#visbl > ol > li.public").removeClass("in").addClass("out");
+ filter();
+ };
+ });
+ $("#impl > ol > li.concrete").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("li[data-isabs='false']").show();
+ } else {
+ $(this).removeClass("in").addClass("out");
+ $("li[data-isabs='false']").hide();
+ }
+ });
+ $("#impl > ol > li.abstract").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("li[data-isabs='true']").show();
+ } else {
+ $(this).removeClass("in").addClass("out");
+ $("li[data-isabs='true']").hide();
+ }
+ });
+ $("#order > ol > li.alpha").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#order > ol > li.inherit").removeClass("in").addClass("out");
+ orderAlpha();
+ };
+ })
+ $("#order > ol > li.inherit").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#order > ol > li.alpha").removeClass("in").addClass("out");
+ orderInherit();
+ };
+ });
+ initInherit();
+ //http://flowplayer.org/tools/tooltip.html
+ $(".extype").tooltip({
+ tip: "#tooltip",
+ position:"top center",
+ onBeforeShow: function(ev) {
+ $(this.getTip()).text(this.getTrigger().attr("name"));
+ }
+ });
+ $(".defval").tooltip({
+ tip: "#tooltip",
+ position:"top center",
+ onBeforeShow: function(ev) {
+ $(this.getTip()).html(this.getTrigger().attr("name"))
+ }
+ });
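+ // Clicking a member signature (or its short comment) toggles the associated
+ // full comment with a slide animation.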
+ var docAllSigs = $("#template .signature");
+ function commentShowFct(fullComment){
+ var vis = $(":visible", fullComment);
+ if (vis.length > 0) {
+ fullComment.slideUp(100);
+ }
+ else {
+ fullComment.slideDown(100);
+ }
+ };
+ var docShowSigs = docAllSigs.filter(function(){
+ return $("+ div.fullcomment", $(this)).length > 0;
+ });
+ docShowSigs.css("cursor", "pointer");
+ docShowSigs.click(function(){
+ commentShowFct($("+ div.fullcomment", $(this)));
+ });
+ function commentToggleFct(shortComment){
+ var vis = $("~ div.fullcomment:visible", shortComment);
+ if (vis.length > 0) {
+ shortComment.slideDown(100);
+ vis.slideUp(100);
+ }
+ else {
+ var hid = $("~ div.fullcomment:hidden", shortComment);
+ hid.slideDown(100);
+ shortComment.slideUp(100);
+ }
+ };
+ var docToggleSigs = docAllSigs.filter(function(){
+ return $("+ p.shortcomment", $(this)).length > 0;
+ });
+ docToggleSigs.css("cursor", "pointer");
+ docToggleSigs.click(function(){
+ commentToggleFct($("+ p.shortcomment", $(this)));
+ });
+ $("p.shortcomment").click(function(){
+ commentToggleFct($(this));
+ });
+});
+
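+// orderAlpha() shows the flat member lists and the ancestor filter bar;
+// orderInherit() shows the per-parent groups built by initInherit() instead.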
+function orderAlpha() {
+ $("#template > div.parent").hide();
+ $("#ancestors").show();
+ filter();
+};
+
+function orderInherit() {
+ $("#template > div.parent").show();
+ $("#ancestors").hide();
+ filter();
+};
+
+/** Prepares the DOM for inheritance-based display. To do so it will:
+ * - hide all statically-generated parent headings;
+ * - copy all members from the value and type members lists (flat members) to corresponding lists nested below the
+ * parent headings (inheritance-grouped members);
+ * - initialise a control variable used by the filter method to decide whether filtering happens on flat members
+ * or on inheritance-grouped members. */
+function initInherit() {
+ // parents is a map from fully-qualified names to the DOM node of parent headings.
+ var parents = new Object();
+ $("#template > div.parent").each(function(){
+ parents[$(this).attr("name")] = $(this);
+ });
+ //
+ $("#types > ol > li").each(function(){
+ var qualName = $(this).attr("name");
+ var owner = qualName.slice(0, qualName.indexOf("#"));
+ var name = qualName.slice(qualName.indexOf("#") + 1);
+ var parent = parents[owner];
+ if (parent != undefined) {
+ var types = $("> .types > ol", parent);
+ if (types.length == 0) {
+ parent.append("<div class='types members'><h3>Type Members</h3><ol></ol></div>");
+ types = $("> .types > ol", parent);
+ }
+ types.append($(this).clone());
+ }
+ });
+ $("#values > ol > li").each(function(){
+ var qualName = $(this).attr("name");
+ var owner = qualName.slice(0, qualName.indexOf("#"));
+ var name = qualName.slice(qualName.indexOf("#") + 1);
+ var parent = parents[owner];
+ if (parent != undefined) {
+ var values = $("> .values > ol", parent);
+ if (values.length == 0) {
+ parent.append("<div class='values members'><h3>Value Members</h3><ol></ol></div>");
+ values = $("> .values > ol", parent);
+ }
+ values.append($(this).clone());
+ }
+ });
+ $("#template > div.parent").each(function(){
+ if ($("> div.members", this).length == 0) { $(this).remove(); };
+ });
+ $("#template > div.parent").each(function(){
+ $(this).hide();
+ });
+};
+
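+// Shows or hides each member according to three criteria: whether its owner is
+// selected in the ancestor/linearization filter, whether its visibility ("prt"
+// for protected) passes the visibility filter, and whether its name or comment
+// text matches the query typed in the text filter.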
+function filter() {
+ var query = $("#textfilter input").attr("value").toLowerCase();
+ var queryRegExp = new RegExp(query, "i");
+ var inheritHides = null
+ if ($("#order > ol > li.inherit").hasClass("in")) {
+ inheritHides = $("#linearization > li:gt(0)");
+ }
+ else {
+ inheritHides = $("#linearization > li.out");
+ }
+ var outOwners =
+ inheritHides.map(function(){
+ var r = $(this).attr("name");
+ return r
+ }).get();
+ var prtVisbl = $("#visbl > ol > li.all").hasClass("in");
+ $(".members > ol > li").each(function(){
+ var vis1 = $(this).attr("visbl");
+ var qualName1 = $(this).attr("name");
+ //var name1 = qualName1.slice(qualName1.indexOf("#") + 1);
+ var showByOwned = true;
+ if ($(this).parents(".parent").length == 0) {
+ // owner filtering must not happen in "inherited from" member lists
+ var owner1 = qualName1.slice(0, qualName1.indexOf("#"));
+ for (var out in outOwners) {
+ if (outOwners[out] == owner1) {
+ showByOwned = false;
+ };
+ };
+ };
+ var showByVis = true;
+ if (vis1 == "prt") {
+ showByVis = prtVisbl;
+ };
+ var showByName = true;
+ if (query != "") {
+ var content = $(this).attr("name") + $("> .fullcomment .cmt", this).text();
+ showByName = queryRegExp.test(content);
+ };
+ if (showByOwned && showByVis && showByName) {
+ $(this).show();
+ }
+ else {
+ $(this).hide();
+ };
+ });
+ $(".members").each(function(){
+ $(this).show();
+ if ($(" > ol > li:visible", this).length == 0) { $(this).hide(); }
+ });
+ return false
+};
+
+function windowTitle()
+{
+ parent.document.title=document.title;
+};
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
new file mode 100644
index 0000000000..0af34eca4c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
@@ -0,0 +1,14 @@
+/*
+ * tools.tooltip 1.1.3 - Tooltips done right.
+ *
+ * Copyright (c) 2009 Tero Piirainen
+ * http://flowplayer.org/tools/tooltip.html
+ *
+ * Dual licensed under MIT and GPL 2+ licenses
+ * http://www.opensource.org/licenses
+ *
+ * Launch : November 2008
+ * Date: ${date}
+ * Revision: ${revision}
+ */
+(function(c){var d=[];c.tools=c.tools||{};c.tools.tooltip={version:"1.1.3",conf:{effect:"toggle",fadeOutSpeed:"fast",tip:null,predelay:0,delay:30,opacity:1,lazy:undefined,position:["top","center"],offset:[0,0],cancelDefault:true,relative:false,oneInstance:true,events:{def:"mouseover,mouseout",input:"focus,blur",widget:"focus mouseover,blur mouseout",tooltip:"mouseover,mouseout"},api:false},addEffect:function(e,g,f){b[e]=[g,f]}};var b={toggle:[function(e){var f=this.getConf(),g=this.getTip(),h=f.opacity;if(h<1){g.css({opacity:h})}g.show();e.call()},function(e){this.getTip().hide();e.call()}],fade:[function(e){this.getTip().fadeIn(this.getConf().fadeInSpeed,e)},function(e){this.getTip().fadeOut(this.getConf().fadeOutSpeed,e)}]};function a(f,g){var p=this,k=c(this);f.data("tooltip",p);var l=f.next();if(g.tip){l=c(g.tip);if(l.length>1){l=f.nextAll(g.tip).eq(0);if(!l.length){l=f.parent().nextAll(g.tip).eq(0)}}}function o(u){var t=g.relative?f.position().top:f.offset().top,s=g.relative?f.position().left:f.offset().left,v=g.position[0];t-=l.outerHeight()-g.offset[0];s+=f.outerWidth()+g.offset[1];var q=l.outerHeight()+f.outerHeight();if(v=="center"){t+=q/2}if(v=="bottom"){t+=q}v=g.position[1];var r=l.outerWidth()+f.outerWidth();if(v=="center"){s-=r/2}if(v=="left"){s-=r}return{top:t,left:s}}var i=f.is(":input"),e=i&&f.is(":checkbox, :radio, select, :button"),h=f.attr("type"),n=g.events[h]||g.events[i?(e?"widget":"input"):"def"];n=n.split(/,\s*/);if(n.length!=2){throw"Tooltip: bad events configuration for "+h}f.bind(n[0],function(r){if(g.oneInstance){c.each(d,function(){this.hide()})}var q=l.data("trigger");if(q&&q[0]!=this){l.hide().stop(true,true)}r.target=this;p.show(r);n=g.events.tooltip.split(/,\s*/);l.bind(n[0],function(){p.show(r)});if(n[1]){l.bind(n[1],function(){p.hide(r)})}});f.bind(n[1],function(q){p.hide(q)});if(!c.browser.msie&&!i&&!g.predelay){f.mousemove(function(){if(!p.isShown()){f.triggerHandler("mouseover")}})}if(g.opacity<1){l.css("opacity",g.opacity)}var m=0,j=f.attr("title");if(j&&g.cancelDefault){f.removeAttr("title");f.data("title",j)}c.extend(p,{show:function(r){if(r){f=c(r.target)}clearTimeout(l.data("timer"));if(l.is(":animated")||l.is(":visible")){return p}function q(){l.data("trigger",f);var t=o(r);if(g.tip&&j){l.html(f.data("title"))}r=r||c.Event();r.type="onBeforeShow";k.trigger(r,[t]);if(r.isDefaultPrevented()){return p}t=o(r);l.css({position:"absolute",top:t.top,left:t.left});var s=b[g.effect];if(!s){throw'Nonexistent effect "'+g.effect+'"'}s[0].call(p,function(){r.type="onShow";k.trigger(r)})}if(g.predelay){clearTimeout(m);m=setTimeout(q,g.predelay)}else{q()}return p},hide:function(r){clearTimeout(l.data("timer"));clearTimeout(m);if(!l.is(":visible")){return}function q(){r=r||c.Event();r.type="onBeforeHide";k.trigger(r);if(r.isDefaultPrevented()){return}b[g.effect][1].call(p,function(){r.type="onHide";k.trigger(r)})}if(g.delay&&r){l.data("timer",setTimeout(q,g.delay))}else{q()}return p},isShown:function(){return l.is(":visible, :animated")},getConf:function(){return g},getTip:function(){return l},getTrigger:function(){return f},bind:function(q,r){k.bind(q,r);return p},onHide:function(q){return this.bind("onHide",q)},onBeforeShow:function(q){return this.bind("onBeforeShow",q)},onShow:function(q){return this.bind("onShow",q)},onBeforeHide:function(q){return this.bind("onBeforeHide",q)},unbind:function(q){k.unbind(q);return p}});c.each(g,function(q,r){if(c.isFunction(r)){p.bind(q,r)}})}c.prototype.tooltip=function(e){var f=this.eq(typeof 
e=="number"?e:0).data("tooltip");if(f){return f}var g=c.extend(true,{},c.tools.tooltip.conf);if(c.isFunction(e)){e={onBeforeShow:e}}else{if(typeof e=="string"){e={tip:e}}}e=c.extend(true,g,e);if(typeof e.position=="string"){e.position=e.position.split(/,?\s/)}if(e.lazy!==false&&(e.lazy===true||this.length>20)){this.one("mouseover",function(h){f=new a(c(this),e);f.show(h);d.push(f)})}else{this.each(function(){f=new a(c(this),e);d.push(f)})}return e.api?f:this}})(jQuery); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
new file mode 100644
index 0000000000..b3871a01c4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
new file mode 100644
index 0000000000..a44871309e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
new file mode 100644
index 0000000000..b56fd1d3cd
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
Binary-equivalent content not shown: type_tags.ai is an Adobe Illustrator CS3 source file for the type-tag icon artwork, stored as a ~6000-line PDF stream (XMP metadata, an embedded JPEG thumbnail, colour swatch definitions and compressed drawing data) with layers named Class, Trait, Object, Package and Base.
+Q
+ endstream endobj 1010 0 obj <</Subtype/Form/Length 247/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1005 0 R/Resources<</XObject<</Fm0 1009 0 R>>/ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[321.996 217.271 334.002 205.274]>>stream
+/CS0 cs 0.29 0.486 0.133 scn
+1 i
+/GS0 gs
+q 1 0 0 1 327.999 212.2715 cm
+0 0 m
+0.55 0.003 1.007 -0.454 1.003 -0.994 c
+1.008 -1.537 0.543 -2.002 0 -1.997 c
+-0.543 -2.002 -1.008 -1.537 -1.003 -0.994 c
+-1.007 -0.454 -0.55 0.003 0 0 c
+f
+Q
+q
+/Fm0 Do
+Q
+ endstream endobj 1020 0 obj <</Subtype/Form/Length 262/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1015 0 R/Resources<</XObject<</Fm0 1019 0 R>>/ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[295.671 461.774 351.669 405.776]>>stream
+/CS0 cs 0.196 0.322 0.616 scn
+1 i
+/GS0 gs
+q 1 0 0 1 323.6699 445.7744 cm
+0 0 m
+6.607 0.011 12.01 -5.392 11.999 -11.999 c
+12.01 -18.606 6.607 -24.009 0 -23.998 c
+-6.615 -24.007 -12.009 -18.609 -11.999 -11.999 c
+-12.009 -5.389 -6.615 0.009 0 0 c
+f
+Q
+q
+/Fm0 Do
+Q
+ endstream endobj 1028 0 obj <</Subtype/Form/Length 241/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1023 0 R/Resources<</XObject<</Fm0 1027 0 R>>/ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[309.171 492.275 321.168 480.278]>>stream
+/CS0 cs 0.196 0.322 0.616 scn
+1 i
+/GS0 gs
+q 1 0 0 1 315.165 487.2754 cm
+0 0 m
+0.548 0.003 1.005 -0.451 1.003 -1.003 c
+1.007 -1.542 0.55 -2 0 -1.997 c
+-0.546 -2 -0.997 -1.549 -0.994 -1.003 c
+-0.995 -0.445 -0.544 0.003 0 0 c
+f
+Q
+q
+/Fm0 Do
+Q
+ endstream endobj 1038 0 obj <</Subtype/Form/Length 262/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1033 0 R/Resources<</XObject<</Fm0 1037 0 R>>/ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[156.837 462.278 212.835 406.271]>>stream
+/CS0 cs 0.196 0.322 0.616 scn
+1 i
+/GS0 gs
+q 1 0 0 1 184.8359 446.2783 cm
+0 0 m
+6.607 0.011 12.01 -5.392 11.999 -11.999 c
+12.008 -18.617 6.606 -24.018 0 -24.007 c
+-6.606 -24.018 -12.008 -18.617 -11.999 -11.999 c
+-12.01 -5.392 -6.607 0.011 0 0 c
+f
+Q
+q
+/Fm0 Do
+Q
+ endstream endobj 1045 0 obj <</Subtype/Form/Length 240/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1040 0 R/Resources<</XObject<</Fm0 1044 0 R>>/ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[183.873 501.023 195.879 489.026]>>stream
+/CS0 cs 0.196 0.322 0.616 scn
+1 i
+/GS0 gs
+q 1 0 0 1 189.876 496.0234 cm
+0 0 m
+0.55 0.003 1.007 -0.455 1.003 -0.994 c
+1.005 -1.546 0.548 -2 0 -1.997 c
+-0.548 -2 -1.005 -1.546 -1.003 -0.994 c
+-1.007 -0.455 -0.55 0.003 0 0 c
+f
+Q
+q
+/Fm0 Do
+Q
+ endstream endobj 1040 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1044 0 obj <</Subtype/Form/Length 482/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1041 0 R/Resources<</XObject<</Fm0 1043 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[183.873 501.023 195.879 489.026]>>stream
+q
+189.876 496.023 m
+189.876 501.023 l
+193.188 501.023 195.879 498.341 195.879 495.029 c
+195.879 491.708 193.188 489.026 189.876 489.026 c
+186.564 489.026 183.873 491.708 183.873 495.029 c
+183.873 498.341 186.564 501.023 189.876 501.023 c
+189.876 496.023 l
+189.326 496.026 188.869 495.569 188.873 495.029 c
+188.871 494.478 189.328 494.023 189.876 494.026 c
+190.424 494.023 190.881 494.478 190.879 495.029 c
+190.883 495.569 190.426 496.026 189.876 496.023 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 1041 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1043 0 obj <</Subtype/Form/Length 11006/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1042 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[183.873 501.023 195.879 489.026]>>stream
+/CS0 cs 0.2 0.325 0.624 scn
+1 i
+/GS0 gs
+q 1 0 0 1 189.876 496.0234 cm
+0 0 m
+0 5 l
+3.312 5 6.003 2.318 6.003 -0.994 c
+6.003 -4.315 3.312 -6.997 0 -6.997 c
+-3.312 -6.997 -6.003 -4.315 -6.003 -0.994 c
+-6.003 2.318 -3.312 5 0 5 c
+0 0 l
+-0.55 0.003 -1.007 -0.455 -1.003 -0.994 c
+-1.005 -1.546 -0.548 -2 0 -1.997 c
+0.548 -2 1.005 -1.546 1.003 -0.994 c
+1.007 -0.455 0.55 0.003 0 0 c
+f
+Q
+q 1 0 0 1 189.876 496.9482 cm
+0 0 m
+-0.013 -0.041 -0.073 -0.074 -0.083 -0.116 c
+-0.111 -0.248 -0.02 -0.426 0 -0.56 c
+0 -0.925 l
+-0.55 -0.922 -1.007 -1.379 -1.003 -1.919 c
+-1.005 -2.471 -0.548 -2.925 0 -2.922 c
+0.548 -2.925 1.005 -2.471 1.003 -1.919 c
+1.007 -1.379 0.55 -0.922 0 -0.925 c
+0 -0.56 l
+0.034 -0.557 0.079 -0.553 0.113 -0.55 c
+0.142 -0.55 0.184 -0.537 0.21 -0.549 c
+1.046 -1.473 l
+1.442 -2.154 1.79 -2.107 1.805 -2.105 c
+2.057 -2.065 3.182 -0.618 1.901 0.191 c
+1.598 0.383 1.274 0.41 1.132 0.395 c
+0 0 l
+0 4.075 l
+3.312 4.075 6.003 1.393 6.003 -1.919 c
+6.003 -5.24 3.312 -7.922 0 -7.922 c
+-3.312 -7.922 -6.003 -5.24 -6.003 -1.919 c
+-6.003 1.393 -3.312 4.075 0 4.075 c
+0 0 l
+f
+Q
+0.196 0.318 0.612 scn
+q 1 0 0 1 189.876 497.0903 cm
+0 0 m
+-0.03 -0.092 -0.164 -0.17 -0.185 -0.265 c
+-0.222 -0.433 -0.125 -0.678 -0.188 -0.838 c
+-0.188 -0.839 -0.237 -0.941 -0.403 -1.05 c
+-1.156 -1.54 -1.044 -2.156 -0.992 -2.333 c
+-0.807 -2.959 -0.146 -3.264 0.451 -2.999 c
+0.651 -2.909 0.79 -2.772 0.872 -2.69 c
+1.143 -2.422 1.548 -2.621 1.836 -2.412 c
+2.433 -1.979 2.576 -1.57 2.629 -1.416 c
+2.85 -0.785 2.461 0.134 1.628 0.371 c
+0.853 0.591 0.002 0.007 0 0 c
+0 3.933 l
+3.312 3.933 6.003 1.251 6.003 -2.061 c
+6.003 -5.382 3.312 -8.064 0 -8.064 c
+-3.312 -8.064 -6.003 -5.382 -6.003 -2.061 c
+-6.003 1.251 -3.312 3.933 0 3.933 c
+0 0 l
+f
+Q
+0.192 0.31 0.596 scn
+q 1 0 0 1 189.876 497.231 cm
+0 0 m
+-0.294 -0.832 -1.296 -1.347 -1.079 -2.407 c
+-0.939 -3.088 -0.171 -3.557 0.648 -3.165 c
+2.592 -2.234 2.592 -2.234 2.763 -1.674 c
+3.159 -0.375 2.125 0.263 1.731 0.384 c
+0.831 0.661 0.003 0.008 0 0 c
+0 3.792 l
+3.312 3.792 6.003 1.11 6.003 -2.202 c
+6.003 -5.522 3.312 -8.205 0 -8.205 c
+-3.312 -8.205 -6.003 -5.522 -6.003 -2.202 c
+-6.003 1.11 -3.312 3.792 0 3.792 c
+0 0 l
+f
+Q
+0.188 0.302 0.58 scn
+q 1 0 0 1 189.876 497.3701 cm
+0 0 m
+-0.353 -0.867 -1.383 -1.429 -1.146 -2.56 c
+-1.024 -3.139 -0.35 -3.806 0.712 -3.399 c
+2.444 -2.735 2.625 -2.666 2.946 -1.778 c
+2.952 -1.763 3.406 -0.235 2.053 0.316 c
+0.838 0.812 0.004 0.01 0 0 c
+0 3.653 l
+3.312 3.653 6.003 0.971 6.003 -2.341 c
+6.003 -5.662 3.312 -8.344 0 -8.344 c
+-3.312 -8.344 -6.003 -5.662 -6.003 -2.341 c
+-6.003 0.971 -3.312 3.653 0 3.653 c
+0 0 l
+f
+Q
+0.18 0.294 0.569 scn
+q 1 0 0 1 189.876 497.5073 cm
+0 0 m
+-0.193 -0.417 -0.585 -0.692 -0.795 -1.098 c
+-1.093 -1.708 l
+-1.262 -2.107 -1.291 -2.435 -1.188 -2.804 c
+-1.126 -3.032 -0.727 -4.136 0.984 -3.565 c
+4.73 -2.315 2.784 0.034 2.453 0.247 c
+1.442 0.896 0.101 0.218 0 0 c
+0 3.516 l
+3.312 3.516 6.003 0.834 6.003 -2.478 c
+6.003 -5.799 3.312 -8.481 0 -8.481 c
+-3.312 -8.481 -6.003 -5.799 -6.003 -2.478 c
+-6.003 0.834 -3.312 3.516 0 3.516 c
+0 0 l
+f
+Q
+0.176 0.286 0.553 scn
+q 1 0 0 1 189.876 497.6602 cm
+0 0 m
+-0.013 -0.025 -0.053 -0.04 -0.076 -0.058 c
+-0.365 -0.276 -0.692 -0.523 -1.173 -1.803 c
+-1.244 -1.989 -1.457 -2.557 -1.185 -3.151 c
+-0.782 -4.034 0.179 -4.205 1.672 -3.658 c
+3.872 -2.853 3.987 -0.377 2.341 0.401 c
+1.366 0.863 0.123 0.247 0 0 c
+0 3.363 l
+3.312 3.363 6.003 0.681 6.003 -2.631 c
+6.003 -5.952 3.312 -8.634 0 -8.634 c
+-3.312 -8.634 -6.003 -5.952 -6.003 -2.631 c
+-6.003 0.681 -3.312 3.363 0 3.363 c
+0 0 l
+f
+Q
+0.173 0.278 0.541 scn
+q 1 0 0 1 189.876 497.8516 cm
+0 0 m
+-0.034 -0.067 -0.142 -0.105 -0.203 -0.15 c
+-0.741 -0.551 -1.014 -1.287 -1.254 -1.937 c
+-1.386 -2.294 -1.492 -2.833 -1.246 -3.37 c
+-0.614 -4.746 1.248 -4.148 1.804 -3.932 c
+4.133 -3.027 4.261 -0.305 2.51 0.419 c
+1.108 0.999 0.006 0.012 0 0 c
+0 3.172 l
+3.312 3.172 6.003 0.49 6.003 -2.822 c
+6.003 -6.143 3.312 -8.825 0 -8.825 c
+-3.312 -8.825 -6.003 -6.143 -6.003 -2.822 c
+-6.003 0.49 -3.312 3.172 0 3.172 c
+0 0 l
+f
+Q
+0.169 0.275 0.525 scn
+q 1 0 0 1 189.876 498.0396 cm
+0 0 m
+-0.037 -0.07 -0.152 -0.104 -0.217 -0.148 c
+-0.223 -0.151 -0.766 -0.542 -1.153 -1.542 c
+-1.498 -2.429 -1.549 -2.937 -1.35 -3.481 c
+-1.145 -4.045 -0.491 -4.904 1.578 -4.323 c
+4.082 -3.621 4.629 -0.761 2.993 0.316 c
+1.701 1.166 0.079 0.148 0 0 c
+0 2.984 l
+3.312 2.984 6.003 0.302 6.003 -3.01 c
+6.003 -6.331 3.312 -9.013 0 -9.013 c
+-3.312 -9.013 -6.003 -6.331 -6.003 -3.01 c
+-6.003 0.302 -3.312 2.984 0 2.984 c
+0 0 l
+f
+Q
+0.165 0.267 0.51 scn
+q 1 0 0 1 189.876 498.2236 cm
+0 0 m
+-0.175 -0.317 -0.542 -0.437 -0.748 -0.722 c
+-1.027 -1.109 -1.128 -1.336 -1.241 -1.614 c
+-1.322 -1.817 -1.715 -2.863 -1.448 -3.592 c
+-0.849 -5.223 1.105 -4.776 1.689 -4.601 c
+4.425 -3.778 5.003 -0.758 3.22 0.385 c
+1.946 1.2 0.234 0.423 0 0 c
+0 2.8 l
+3.312 2.8 6.003 0.118 6.003 -3.194 c
+6.003 -6.515 3.312 -9.197 0 -9.197 c
+-3.312 -9.197 -6.003 -6.515 -6.003 -3.194 c
+-6.003 0.118 -3.312 2.8 0 2.8 c
+0 0 l
+f
+Q
+0.161 0.259 0.498 scn
+q 1 0 0 1 189.876 498.4546 cm
+0 0 m
+-0.06 -0.132 -0.265 -0.21 -0.386 -0.291 c
+-0.759 -0.542 -1.229 -1.473 -1.327 -1.735 c
+-1.444 -2.049 -1.803 -3.137 -1.475 -3.94 c
+-0.715 -5.801 1.956 -4.866 1.983 -4.856 c
+5.297 -3.576 5.172 -0.368 3.116 0.573 c
+1.411 1.354 0.007 0.017 0 0 c
+0 2.569 l
+3.312 2.569 6.003 -0.113 6.003 -3.425 c
+6.003 -6.746 3.312 -9.428 0 -9.428 c
+-3.312 -9.428 -6.003 -6.746 -6.003 -3.425 c
+-6.003 -0.113 -3.312 2.569 0 2.569 c
+0 0 l
+f
+Q
+0.153 0.251 0.482 scn
+q 1 0 0 1 189.876 498.7373 cm
+0 0 m
+-0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c
+-0.737 -0.595 -1.131 -1.172 -1.412 -1.908 c
+-1.719 -2.716 -1.736 -3.696 -1.576 -4.141 c
+-0.861 -6.127 1.881 -5.307 1.908 -5.298 c
+5.872 -3.968 5.348 -0.494 3.424 0.518 c
+1.628 1.463 0.058 0.121 0 0 c
+0 2.286 l
+3.312 2.286 6.003 -0.396 6.003 -3.708 c
+6.003 -7.029 3.312 -9.711 0 -9.711 c
+-3.312 -9.711 -6.003 -7.029 -6.003 -3.708 c
+-6.003 -0.396 -3.312 2.286 0 2.286 c
+0 0 l
+f
+Q
+0.149 0.243 0.467 scn
+q 1 0 0 1 189.876 499.0234 cm
+0 0 m
+-0.045 -0.106 -0.21 -0.167 -0.302 -0.236 c
+-0.488 -0.374 -1.13 -0.939 -1.627 -2.442 c
+-1.764 -2.855 -1.88 -3.934 -1.545 -4.673 c
+-1.028 -5.816 0.793 -6.212 2.513 -5.554 c
+6.321 -4.099 5.738 -0.283 3.153 0.723 c
+1.353 1.423 0.007 0.017 0 0 c
+0 2 l
+3.312 2 6.003 -0.682 6.003 -3.994 c
+6.003 -7.315 3.312 -9.997 0 -9.997 c
+-3.312 -9.997 -6.003 -7.315 -6.003 -3.994 c
+-6.003 -0.682 -3.312 2 0 2 c
+0 0 l
+f
+Q
+0.145 0.235 0.455 scn
+q 1 0 0 1 189.876 499.4067 cm
+0 0 m
+-0.163 -0.362 -0.542 -0.515 -0.779 -0.805 c
+-0.948 -1.011 -1.049 -1.26 -1.205 -1.475 c
+-1.361 -1.69 -1.461 -1.951 -1.723 -2.734 c
+-2.048 -3.705 -1.823 -4.543 -1.66 -4.957 c
+-1.17 -6.199 0.623 -6.718 2.422 -6.139 c
+7.03 -4.656 5.827 -0.75 3.286 0.539 c
+1.422 1.485 0.008 0.018 0 0 c
+0 1.617 l
+3.312 1.617 6.003 -1.065 6.003 -4.377 c
+6.003 -7.698 3.312 -10.38 0 -10.38 c
+-3.312 -10.38 -6.003 -7.698 -6.003 -4.377 c
+-6.003 -1.065 -3.312 1.617 0 1.617 c
+0 0 l
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 189.876 499.8311 cm
+0 0 m
+-0.128 -0.296 -0.442 -0.404 -0.638 -0.631 c
+-0.788 -0.804 -0.893 -1.009 -1.031 -1.191 c
+-1.148 -1.346 -1.62 -2.354 -1.623 -2.361 c
+-2.171 -3.896 -2.053 -4.61 -1.842 -5.154 c
+-0.963 -7.425 1.653 -7.025 2.586 -6.68 c
+3.893 -6.196 6.611 -5.189 5.553 -2.521 c
+5.843 -3.224 6.003 -3.994 6.003 -4.802 c
+6.003 -8.123 3.312 -10.805 0 -10.805 c
+-3.312 -10.805 -6.003 -8.123 -6.003 -4.802 c
+-6.003 -1.49 -3.312 1.192 0 1.192 c
+0 0 l
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 189.876 500.2959 cm
+0 0 m
+-0.037 -0.078 -0.154 -0.129 -0.22 -0.184 c
+-1.238 -1.037 -1.832 -2.884 -1.837 -2.903 c
+-2.426 -4.762 -2.011 -5.635 -1.875 -5.921 c
+-0.599 -8.601 3.356 -7.148 3.396 -7.133 c
+4.442 -6.725 6.193 -6.042 5.899 -4.15 c
+5.967 -4.512 6.003 -4.885 6.003 -5.267 c
+6.003 -8.587 3.312 -11.27 0 -11.27 c
+-3.312 -11.27 -6.003 -8.587 -6.003 -5.267 c
+-6.003 -1.955 -3.312 0.728 0 0.728 c
+0 0 l
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 189.876 500.7388 cm
+0 0 m
+-0.038 -0.067 -0.155 -0.091 -0.221 -0.129 c
+-1.151 -0.674 -1.646 -2.172 -2.007 -3.267 c
+-2.012 -3.284 -2.546 -5.066 -2.073 -6.279 c
+-1.012 -9 2.932 -7.99 3.099 -7.945 c
+4.318 -7.622 5.989 -7.18 6.001 -5.577 c
+6.002 -5.621 6.003 -5.665 6.003 -5.709 c
+6.003 -9.03 3.312 -11.712 0 -11.712 c
+-3.312 -11.712 -6.003 -9.03 -6.003 -5.709 c
+-6.003 -2.397 -3.312 0.285 0 0.285 c
+0 0 l
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 189.876 501.0112 cm
+0 0 m
+-0.043 -0.052 -0.154 -0.029 -0.221 -0.042 c
+-0.696 -0.132 -1.348 -0.689 -1.732 -1.731 c
+-2.576 -4.014 -2.459 -5.548 -2.314 -6.26 c
+-1.78 -8.88 1.72 -8.614 1.755 -8.611 c
+4.215 -8.371 5.7 -8.227 5.951 -6.778 c
+5.561 -9.721 3.043 -11.985 0 -11.985 c
+-3.312 -11.985 -6.003 -9.303 -6.003 -5.982 c
+-6.003 -2.67 -3.312 0.012 0 0.012 c
+0 0 l
+f
+Q
+0.122 0.2 0.384 scn
+q 1 0 0 1 188.9707 500.9468 cm
+0 0 m
+-1.737 -0.589 -1.75 -4.504 -1.75 -4.544 c
+-1.745 -7.052 -0.74 -7.832 0.016 -8.2 c
+1.799 -9.068 6.088 -9.359 6.659 -7.635 c
+5.92 -10.116 3.622 -11.92 0.905 -11.92 c
+-2.407 -11.92 -5.098 -9.238 -5.098 -5.917 c
+-5.098 -2.856 -2.799 -0.333 0.165 0.031 c
+0.115 0.022 0.049 0.013 0 0 c
+f
+Q
+0.118 0.192 0.369 scn
+q 1 0 0 1 187.6411 500.5234 cm
+0 0 m
+-1.064 -0.939 -0.813 -4.868 -0.54 -5.601 c
+0.43 -8.206 2.406 -8.584 3.21 -8.625 c
+4.273 -8.681 5.3 -9.068 6.38 -8.967 c
+6.693 -8.938 7.267 -8.802 7.587 -8.217 c
+6.594 -10.165 4.569 -11.497 2.235 -11.497 c
+-1.077 -11.497 -3.768 -8.815 -3.768 -5.494 c
+-3.768 -2.81 -2 -0.54 0.432 0.225 c
+0.372 0.201 0.292 0.168 0.231 0.144 c
+0.162 0.102 0.062 0.054 0 0 c
+f
+Q
+0.204 0.333 0.639 scn
+q 1 0 0 1 191.4565 495.208 cm
+0 0 m
+-0.097 0.069 -0.097 0.069 -0.519 0.587 c
+-0.662 0.762 -0.835 0.91 -0.974 1.089 c
+-1.125 1.285 -1.232 1.593 y
+-1.227 1.612 -0.03 2.438 0.591 1.363 c
+1.026 0.61 0.244 -0.13 0.233 -0.131 c
+0.153 -0.143 0.065 -0.046 0 0 c
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 192.4463 500.4146 cm
+0 0 m
+-1.295 0.463 -2.255 -0.325 -2.57 -0.583 c
+-2.57 0.609 l
+-1.402 0.609 -0.312 0.275 0.611 -0.302 c
+0.521 -0.251 0.401 -0.185 0.312 -0.135 c
+0.218 -0.094 0.096 -0.034 0 0 c
+f
+Q
+0.208 0.337 0.655 scn
+q 1 0 0 1 191.4961 495.46 cm
+0 0 m
+-0.335 0.354 l
+-0.472 0.524 -0.626 0.679 -0.757 0.854 c
+-0.976 1.148 -1.021 1.268 -1.02 1.273 c
+-1.015 1.287 -0.029 1.7 0.33 0.953 c
+0.59 0.409 0.174 -0.12 0.167 -0.121 c
+0.106 -0.131 0.048 -0.04 0 0 c
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 191.6431 500.7461 cm
+0 0 m
+-0.651 0.121 -1.163 -0.01 -1.767 -0.45 c
+-1.767 0.277 l
+-1.038 0.277 -0.339 0.147 0.307 -0.09 c
+0.224 -0.065 0.112 -0.032 0.029 -0.006 c
+0.02 -0.004 0.009 -0.001 0 0 c
+f
+Q
+0.216 0.345 0.667 scn
+q 1 0 0 1 191.5 495.7261 cm
+0 0 m
+-0.004 0.004 -0.533 0.573 -0.71 0.862 c
+-0.568 0.875 -0.482 0.883 -0.264 0.809 c
+-0.18 0.781 -0.083 0.699 -0.025 0.631 c
+0.033 0.563 0.091 0.45 0.104 0.362 c
+0.135 0.141 0.099 0.019 0.074 -0.062 c
+0.052 -0.043 0.021 -0.021 0 0 c
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 190.7813 500.9458 cm
+0 0 m
+-0.314 -0.005 -0.487 -0.009 -0.905 -0.207 c
+-0.905 0.078 l
+-0.519 0.078 -0.142 0.041 0.225 -0.028 c
+0.157 -0.02 0.067 -0.003 0 0 c
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 189.876 501.0112 cm
+0 0 m
+0 0.012 l
+0.072 0.012 0.144 0.011 0.215 0.008 c
+0.15 0.006 0.046 -0.044 0 0 c
+f
+Q
+ endstream endobj 1042 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1033 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1037 0 obj <</Subtype/Form/Length 482/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1034 0 R/Resources<</XObject<</Fm0 1036 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[156.837 462.278 212.835 406.271]>>stream
+q
+184.836 446.278 m
+184.836 462.278 l
+200.298 462.278 212.835 449.741 212.835 434.279 c
+212.835 418.809 200.298 406.271 184.836 406.271 c
+169.374 406.271 156.837 418.809 156.837 434.279 c
+156.837 449.741 169.374 462.278 184.836 462.278 c
+184.836 446.278 l
+178.229 446.289 172.826 440.887 172.837 434.279 c
+172.828 427.661 178.229 422.261 184.836 422.271 c
+191.442 422.261 196.844 427.661 196.835 434.279 c
+196.846 440.887 191.443 446.289 184.836 446.278 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 1034 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1036 0 obj <</Subtype/Form/Length 13533/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1035 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[156.837 462.278 212.835 406.271]>>stream
+/CS0 cs 0.208 0.337 0.655 scn
+1 i
+/GS0 gs
+q 1 0 0 1 184.8359 446.2783 cm
+0 0 m
+0 16 l
+15.462 16 27.999 3.463 27.999 -11.999 c
+27.999 -27.47 15.462 -40.007 0 -40.007 c
+-15.462 -40.007 -27.999 -27.47 -27.999 -11.999 c
+-27.999 3.463 -15.462 16 0 16 c
+0 0 l
+-6.607 0.011 -12.01 -5.392 -11.999 -11.999 c
+-12.008 -18.617 -6.606 -24.018 0 -24.007 c
+6.606 -24.018 12.008 -18.617 11.999 -11.999 c
+12.01 -5.392 6.607 0.011 0 0 c
+f
+Q
+q 1 0 0 1 184.8359 451.4419 cm
+0 0 m
+0 -0.468 l
+0 -5.164 l
+-6.607 -5.153 -12.01 -10.555 -11.999 -17.163 c
+-12.008 -23.781 -6.606 -29.181 0 -29.17 c
+6.606 -29.181 12.008 -23.781 11.999 -17.163 c
+12.01 -10.555 6.607 -5.153 0 -5.164 c
+0 -0.468 l
+0.316 -0.694 0.738 -0.997 1.055 -1.223 c
+3.817 -3.661 7.459 -4.869 10 -7.617 c
+12.018 -9.8 13.458 -12.461 14.279 -15.528 c
+15.076 -18.507 16.901 -19.346 16.917 -19.348 c
+18.874 -19.542 24.735 -10.485 17.857 -2.241 c
+10.879 6.124 0.769 1.958 0 0 c
+0 10.836 l
+15.462 10.836 27.999 -1.701 27.999 -17.163 c
+27.999 -32.633 15.462 -45.17 0 -45.17 c
+-15.462 -45.17 -27.999 -32.633 -27.999 -17.163 c
+-27.999 -1.701 -15.462 10.836 0 10.836 c
+0 0 l
+f
+Q
+0.204 0.333 0.639 scn
+q 1 0 0 1 184.8359 453.2891 cm
+0 0 m
+-0.296 -0.712 -1.487 -1.168 -1.735 -1.898 c
+-1.987 -2.638 -2.003 -3.873 -1.53 -4.494 c
+-1.227 -4.893 -0.45 -4.945 0 -5.167 c
+0 -7.011 l
+-6.607 -7 -12.01 -12.402 -11.999 -19.01 c
+-12.008 -25.628 -6.606 -31.028 0 -31.018 c
+6.606 -31.028 12.008 -25.628 11.999 -19.01 c
+12.01 -12.402 6.607 -7 0 -7.011 c
+0 -5.167 l
+0.338 -5.201 0.788 -5.245 1.126 -5.278 c
+2.249 -5.476 12.144 -7.557 13.761 -19.538 c
+13.765 -19.565 14.171 -22.516 14.171 -22.516 c
+14.636 -23.09 15.724 -23.507 16.459 -23.43 c
+20.584 -22.993 26.416 -9.568 15.896 -1.312 c
+7.943 4.929 0.035 0.084 0 0 c
+0 8.989 l
+15.462 8.989 27.999 -3.548 27.999 -19.01 c
+27.999 -34.48 15.462 -47.018 0 -47.018 c
+-15.462 -47.018 -27.999 -34.48 -27.999 -19.01 c
+-27.999 -3.548 -15.462 8.989 0 8.989 c
+0 0 l
+f
+Q
+0.2 0.325 0.624 scn
+q 1 0 0 1 184.8359 454.4082 cm
+0 0 m
+-0.627 -1.109 -1.866 -1.525 -2.708 -2.391 c
+-4.764 -4.503 -4.447 -6.209 -4.44 -6.223 c
+-4.355 -6.386 -4.355 -6.386 0 -7.408 c
+0 -8.13 l
+-6.607 -8.119 -12.01 -13.521 -11.999 -20.129 c
+-12.008 -26.747 -6.606 -32.147 0 -32.137 c
+6.606 -32.147 12.008 -26.747 11.999 -20.129 c
+12.01 -13.521 6.607 -8.119 0 -8.13 c
+0 -7.408 l
+0.312 -7.428 0.727 -7.455 1.039 -7.475 c
+5.587 -8.118 13.156 -12.018 12.674 -22.551 c
+12.559 -25.065 12.662 -26.483 12.98 -26.764 c
+14.309 -27.938 23.357 -23.699 22.629 -14.042 c
+21.269 4.004 1.142 2.019 0 0 c
+0 7.87 l
+15.462 7.87 27.999 -4.667 27.999 -20.129 c
+27.999 -35.6 15.462 -48.137 0 -48.137 c
+-15.462 -48.137 -27.999 -35.6 -27.999 -20.129 c
+-27.999 -4.667 -15.462 7.87 0 7.87 c
+0 0 l
+f
+Q
+0.196 0.318 0.612 scn
+q 1 0 0 1 184.8359 455.3335 cm
+0 0 m
+-0.223 -0.377 -0.896 -0.494 -1.279 -0.706 c
+-3.984 -2.198 -4.352 -2.882 -7.218 -8.204 c
+-10.977 -15.407 l
+-12.034 -17.649 -12.409 -19.973 -12.123 -22.512 c
+-11.368 -29.209 -4.441 -35.048 3.701 -32.84 c
+16.505 -28.457 l
+19.639 -26.39 21.523 -23.894 22.614 -20.364 c
+24.61 -13.907 21.812 -4.74 13.674 -0.575 c
+6.26 3.219 0.029 0.049 0 0 c
+0 6.945 l
+15.462 6.945 27.999 -5.592 27.999 -21.054 c
+27.999 -36.525 15.462 -49.062 0 -49.062 c
+-15.462 -49.062 -27.999 -36.525 -27.999 -21.054 c
+-27.999 -5.592 -15.462 6.945 0 6.945 c
+0 0 l
+f
+Q
+0.192 0.31 0.596 scn
+q 1 0 0 1 184.8359 456.1333 cm
+0 0 m
+-0.174 -0.267 -0.682 -0.3 -0.974 -0.428 c
+-3.27 -1.438 -6.363 -4.313 -7.593 -6.58 c
+-13.39 -17.263 -12.999 -20.654 -12.686 -23.38 c
+-12.044 -28.948 -6.307 -36.34 3.975 -34.525 c
+32.478 -29.493 24.483 -7.887 15.417 -1.844 c
+7.621 3.352 0.038 0.059 0 0 c
+0 6.145 l
+15.462 6.145 27.999 -6.392 27.999 -21.854 c
+27.999 -37.325 15.462 -49.862 0 -49.862 c
+-15.462 -49.862 -27.999 -37.325 -27.999 -21.854 c
+-27.999 -6.392 -15.462 6.145 0 6.145 c
+0 0 l
+f
+Q
+0.188 0.302 0.58 scn
+q 1 0 0 1 184.8359 456.834 cm
+0 0 m
+-0.26 -0.393 -1.01 -0.429 -1.443 -0.612 c
+-4.281 -1.817 -7.531 -4.969 -9.346 -8.278 c
+-13.498 -15.848 -13.757 -21.086 -13.243 -24.147 c
+-12.335 -29.562 -7.257 -38.122 6.017 -35.862 c
+29.657 -31.837 27.572 -10.232 15.691 -2.188 c
+7.725 3.206 0.039 0.058 0 0 c
+0 5.444 l
+15.462 5.444 27.999 -7.093 27.999 -22.555 c
+27.999 -38.025 15.462 -50.563 0 -50.563 c
+-15.462 -50.563 -27.999 -38.025 -27.999 -22.555 c
+-27.999 -7.093 -15.462 5.444 0 5.444 c
+0 0 l
+f
+Q
+0.18 0.294 0.569 scn
+q 1 0 0 1 184.8359 457.5 cm
+0 0 m
+-0.27 -0.397 -1.042 -0.411 -1.488 -0.586 c
+-3.111 -1.225 -7.25 -3.37 -10.633 -9.471 c
+-11.685 -11.368 -15.021 -18.085 -13.796 -24.879 c
+-12.453 -32.328 -5.461 -39.37 6.714 -37.227 c
+28.951 -33.313 28.976 -11.259 15.609 -2.301 c
+7.856 2.895 0.038 0.056 0 0 c
+0 4.778 l
+15.462 4.778 27.999 -7.759 27.999 -23.221 c
+27.999 -38.691 15.462 -51.229 0 -51.229 c
+-15.462 -51.229 -27.999 -38.691 -27.999 -23.221 c
+-27.999 -7.759 -15.462 4.778 0 4.778 c
+0 0 l
+f
+Q
+0.176 0.286 0.553 scn
+q 1 0 0 1 184.8359 458.1108 cm
+0 0 m
+-0.285 -0.403 -1.085 -0.384 -1.55 -0.549 c
+-2.14 -0.758 -7.426 -2.783 -11.14 -9.4 c
+-12.536 -11.888 -15.643 -18.441 -14.343 -25.555 c
+-13.275 -31.4 -7.567 -40.72 7.05 -38.576 c
+28.069 -35.492 30.907 -13.131 16.17 -2.838 c
+7.979 2.883 0.04 0.057 0 0 c
+0 4.167 l
+15.462 4.167 27.999 -8.37 27.999 -23.832 c
+27.999 -39.302 15.462 -51.839 0 -51.839 c
+-15.462 -51.839 -27.999 -39.302 -27.999 -23.832 c
+-27.999 -8.37 -15.462 4.167 0 4.167 c
+0 0 l
+f
+Q
+0.173 0.278 0.541 scn
+q 1 0 0 1 184.8359 458.6836 cm
+0 0 m
+-0.294 -0.407 -1.113 -0.365 -1.59 -0.521 c
+-3.037 -0.996 -8.057 -3.068 -11.887 -9.807 c
+-12.95 -11.676 -16.305 -18.381 -14.886 -26.192 c
+-13.691 -32.767 -6.813 -41.832 7.241 -39.858 c
+28.692 -36.845 31.476 -13.851 16.374 -3.144 c
+8.08 2.736 0.041 0.056 0 0 c
+0 3.595 l
+15.462 3.595 27.999 -8.942 27.999 -24.404 c
+27.999 -39.875 15.462 -52.412 0 -52.412 c
+-15.462 -52.412 -27.999 -39.875 -27.999 -24.404 c
+-27.999 -8.942 -15.462 3.595 0 3.595 c
+0 0 l
+f
+Q
+0.169 0.275 0.525 scn
+q 1 0 0 1 184.8359 459.2207 cm
+0 0 m
+-0.327 -0.44 -1.224 -0.37 -1.749 -0.528 c
+-5.52 -1.667 -9.766 -5.26 -12.073 -9.267 c
+-15.394 -15.036 -16.522 -20.933 -15.426 -26.792 c
+-13.856 -35.181 -5.227 -43.019 7.675 -41.021 c
+29.387 -37.659 31.678 -13.959 16.092 -3.122 c
+8.188 2.374 0.041 0.052 0 0 c
+0 3.058 l
+15.462 3.058 27.999 -9.479 27.999 -24.941 c
+27.999 -40.412 15.462 -52.949 0 -52.949 c
+-15.462 -52.949 -27.999 -40.412 -27.999 -24.941 c
+-27.999 -9.479 -15.462 3.058 0 3.058 c
+0 0 l
+f
+Q
+0.165 0.267 0.51 scn
+q 1 0 0 1 184.8359 459.7354 cm
+0 0 m
+-0.315 -0.413 -1.169 -0.321 -1.671 -0.458 c
+-5.628 -1.543 -10.186 -5.222 -12.509 -9.206 c
+-13.794 -11.411 -17.706 -18.119 -15.958 -27.37 c
+-14.312 -36.089 -5.369 -44.235 7.962 -42.157 c
+29.829 -38.748 32.261 -15.07 16.713 -3.752 c
+8.241 2.415 0.041 0.054 0 0 c
+0 2.543 l
+15.462 2.543 27.999 -9.994 27.999 -25.456 c
+27.999 -40.927 15.462 -53.464 0 -53.464 c
+-15.462 -53.464 -27.999 -40.927 -27.999 -25.456 c
+-27.999 -9.994 -15.462 2.543 0 2.543 c
+0 0 l
+f
+Q
+0.161 0.259 0.498 scn
+q 1 0 0 1 184.8359 460.208 cm
+0 0 m
+-0.326 -0.417 -1.197 -0.297 -1.71 -0.424 c
+-5.005 -1.241 -10.022 -4.174 -13.317 -9.752 c
+-16.642 -15.38 -17.707 -21.488 -16.484 -27.905 c
+-14.771 -36.893 -5.522 -45.319 8.241 -43.229 c
+29.819 -39.954 32.248 -15.425 16.845 -4.05 c
+8.507 2.107 0.042 0.053 0 0 c
+0 2.07 l
+15.462 2.07 27.999 -10.467 27.999 -25.929 c
+27.999 -41.399 15.462 -53.937 0 -53.937 c
+-15.462 -53.937 -27.999 -41.399 -27.999 -25.929 c
+-27.999 -10.467 -15.462 2.07 0 2.07 c
+0 0 l
+f
+Q
+0.153 0.251 0.482 scn
+q 1 0 0 1 184.8359 460.6479 cm
+0 0 m
+-0.165 -0.201 -0.596 -0.119 -0.852 -0.169 c
+-6.63 -1.321 -11.086 -5.48 -13.33 -8.99 c
+-17.823 -16.018 -17.959 -22.68 -17.283 -27.032 c
+-15.528 -38.313 -5.353 -45.642 6.913 -44.456 c
+29.058 -42.316 33.217 -18.568 18.588 -5.674 c
+9.722 2.142 0.051 0.062 0 0 c
+0 1.63 l
+15.462 1.63 27.999 -10.907 27.999 -26.369 c
+27.999 -41.839 15.462 -54.376 0 -54.376 c
+-15.462 -54.376 -27.999 -41.839 -27.999 -26.369 c
+-27.999 -10.907 -15.462 1.63 0 1.63 c
+0 0 l
+f
+Q
+0.149 0.243 0.467 scn
+q 1 0 0 1 184.8359 461.0591 cm
+0 0 m
+-0.345 -0.419 -1.243 -0.245 -1.775 -0.35 c
+-5.333 -1.052 -10.598 -4.013 -13.752 -8.857 c
+-18.474 -16.108 -18.606 -22.979 -17.885 -27.466 c
+-16.272 -37.507 -7.1 -46.929 7.31 -45.507 c
+29.58 -43.31 33.524 -19.12 18.666 -5.999 c
+9.679 1.938 0.05 0.061 0 0 c
+0 1.219 l
+15.462 1.219 27.999 -11.318 27.999 -26.78 c
+27.999 -42.25 15.462 -54.788 0 -54.788 c
+-15.462 -54.788 -27.999 -42.25 -27.999 -26.78 c
+-27.999 -11.318 -15.462 1.219 0 1.219 c
+0 0 l
+f
+Q
+0.145 0.235 0.455 scn
+q 1 0 0 1 184.8359 461.4141 cm
+0 0 m
+-0.359 -0.424 -1.279 -0.213 -1.827 -0.305 c
+-2.571 -0.429 -9.239 -1.713 -14.035 -8.521 c
+-19.337 -16.049 -19.04 -23.602 -18.666 -26.5 c
+-16.79 -41.041 -4.557 -47.127 6.015 -46.629 c
+29.242 -45.535 34.043 -19.97 18.705 -6.311 c
+9.693 1.714 0.05 0.059 0 0 c
+0 0.864 l
+15.462 0.864 27.999 -11.673 27.999 -27.135 c
+27.999 -42.605 15.462 -55.143 0 -55.143 c
+-15.462 -55.143 -27.999 -42.605 -27.999 -27.135 c
+-27.999 -11.673 -15.462 0.864 0 0.864 c
+0 0 l
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 184.8359 461.7397 cm
+0 0 m
+-0.366 -0.422 -1.29 -0.183 -1.842 -0.262 c
+-5.616 -0.798 -11.203 -3.577 -14.553 -8.414 c
+-20.526 -17.037 -19.484 -25.015 -19.142 -27.636 c
+-17.325 -41.551 -4.721 -48.305 6.215 -47.597 c
+22.827 -46.52 31.839 -32.415 25.896 -16.796 c
+27.251 -20.083 27.999 -23.685 27.999 -27.46 c
+27.999 -42.931 15.462 -55.468 0 -55.468 c
+-15.462 -55.468 -27.999 -42.931 -27.999 -27.46 c
+-27.999 -11.999 -15.462 0.539 0 0.539 c
+0 0 l
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 184.8359 461.9951 cm
+0 0 m
+-0.38 -0.425 -1.322 -0.147 -1.889 -0.211 c
+-3.74 -0.417 -10.183 -1.633 -15.334 -8.604 c
+-20.12 -15.08 -20.496 -23.225 -19.964 -27.016 c
+-18.071 -40.504 -7.311 -49.146 6.811 -48.521 c
+13.567 -48.222 30.459 -42.962 27.513 -22.495 c
+27.832 -24.187 27.999 -25.932 27.999 -27.716 c
+27.999 -43.187 15.462 -55.724 0 -55.724 c
+-15.462 -55.724 -27.999 -43.187 -27.999 -27.716 c
+-27.999 -12.254 -15.462 0.283 0 0.283 c
+0 0 l
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 184.8359 462.186 cm
+0 0 m
+-0.389 -0.421 -1.333 -0.109 -1.905 -0.156 c
+-5.862 -0.48 -11.762 -2.986 -15.367 -7.721 c
+-21.456 -15.72 -21.121 -23.999 -20.694 -27.186 c
+-18.877 -40.772 -7.134 -50.361 6.621 -49.493 c
+16.365 -48.877 27.809 -42.692 27.992 -27.284 c
+27.997 -27.491 27.999 -27.699 27.999 -27.907 c
+27.999 -43.377 15.462 -55.915 0 -55.915 c
+-15.462 -55.915 -27.999 -43.377 -27.999 -27.907 c
+-27.999 -12.445 -15.462 0.092 0 0.092 c
+0 0 l
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 184.8359 462.2749 cm
+0 0 m
+-0.403 -0.423 -1.362 -0.067 -1.945 -0.096 c
+-5.653 -0.278 -11.171 -1.795 -16.407 -7.987 c
+-19.42 -11.549 -22.258 -18.906 -21.583 -25.522 c
+-19.025 -50.599 4.157 -50.427 5.143 -50.408 c
+17.394 -50.165 25.848 -43.174 27.755 -31.708 c
+25.94 -45.423 14.204 -56.003 0 -56.003 c
+-15.462 -56.003 -27.999 -43.466 -27.999 -27.996 c
+-27.999 -12.534 -15.462 0.003 0 0.003 c
+0 0 l
+f
+Q
+0.122 0.2 0.384 scn
+q 1 0 0 1 180.605 461.958 cm
+0 0 m
+-22.531 -4.551 -23.529 -35.032 -6.329 -46.266 c
+6.848 -54.872 25.64 -52.177 31.068 -35.689 c
+27.624 -47.255 16.911 -55.687 4.231 -55.687 c
+-11.231 -55.687 -23.768 -43.149 -23.768 -27.679 c
+-23.768 -13.386 -13.055 -1.592 0.778 0.109 c
+0.544 0.077 0.232 0.04 0 0 c
+f
+Q
+0.118 0.192 0.369 scn
+q 1 0 0 1 172.812 459.498 cm
+0 0 m
+-16.566 -9.064 -17.348 -40.201 9.316 -48.722 c
+16.64 -51.062 30.628 -50.199 36.986 -37.919 c
+32.357 -47.005 22.916 -53.227 12.024 -53.227 c
+-3.438 -53.227 -15.975 -40.689 -15.975 -25.219 c
+-15.975 -12.683 -7.734 -2.069 3.625 1.499 c
+3.1 1.309 2.399 1.057 1.873 0.867 c
+1.31 0.61 0.543 0.297 0 0 c
+f
+Q
+0.216 0.345 0.667 scn
+q 1 0 0 1 200.7622 436.103 cm
+0 0 m
+-1.706 2.422 -2.871 5.192 -4.806 7.466 c
+-5.581 8.375 -6.334 9.141 -7.046 9.74 c
+-7.103 9.788 -12.699 14.577 -12.706 14.929 c
+-12.708 15.035 -10.925 16.753 -10.74 16.825 c
+-10.058 17.086 -7.544 17.231 -6.875 17.166 c
+-5.111 16.992 -2.438 16.241 0.275 13.649 c
+3.79 10.293 4.269 6.382 4.332 5.263 c
+4.608 0.362 1.816 -1.552 1.125 -1.426 c
+0.589 -1.328 0.314 -0.445 0 0 c
+f
+Q
+0.22 0.353 0.682 scn
+q 1 0 0 1 200.8965 438.5967 cm
+0 0 m
+-1.97 2.883 -3.056 4.472 -4.87 6.595 c
+-5.072 6.832 -5.375 7.116 -5.591 7.34 c
+-5.844 7.601 -6.16 7.969 -6.419 8.224 c
+-6.913 8.711 -7.551 9.382 -8.074 9.839 c
+-9.724 11.281 -9.908 11.547 -9.911 11.595 c
+-9.914 11.655 -8.389 13.369 -8.295 13.411 c
+-7.711 13.674 -6.801 13.346 -6.164 13.276 c
+-2.962 12.927 -1.156 11.212 -0.476 10.566 c
+2.531 7.709 2.783 5.143 2.904 3.909 c
+2.938 3.565 2.929 0.875 2.709 0.41 c
+2.675 0.337 0.707 -0.875 0.645 -0.861 c
+0.33 -0.793 0.182 -0.267 0 0 c
+f
+Q
+0.224 0.361 0.694 scn
+q 1 0 0 1 199.9814 442.126 cm
+0 0 m
+-0.737 0.235 -1.076 1.45 -1.576 2.04 c
+-3.148 3.895 -3.148 3.895 -3.897 4.678 c
+-4.212 5.008 -4.84 5.354 -4.922 5.803 c
+-4.014 7.981 l
+-3.953 8.007 -1.427 7.15 0.33 5.083 c
+1.631 3.552 2.397 0.755 2.281 0.574 c
+1.906 -0.01 0.699 -0.197 0.037 0.011 c
+0.026 0.014 0.011 -0.003 0 0 c
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 196.8853 459.5508 cm
+0 0 m
+-5.275 2.417 -9.403 2.407 -12.049 2.189 c
+-12.049 2.728 l
+-6.604 2.728 -1.522 1.173 2.777 -1.517 c
+2.232 -1.205 1.506 -0.789 0.961 -0.477 c
+0.673 -0.334 0.292 -0.134 0 0 c
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 193.0991 461.0352 cm
+0 0 m
+-3.078 0.794 -4.478 1.111 -8.263 0.96 c
+-8.263 1.243 l
+-4.866 1.243 -1.61 0.638 1.402 -0.47 c
+0.981 -0.329 0.425 -0.126 0 0 c
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 189.0669 461.958 cm
+0 0 m
+-2.557 0.263 -2.657 0.273 -4.231 0.228 c
+-4.231 0.32 l
+-2.431 0.32 -0.671 0.15 1.035 -0.174 c
+0.724 -0.122 0.312 -0.042 0 0 c
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 184.8359 462.2749 cm
+0 0 m
+0.335 0.003 0.669 -0.002 1.001 -0.014 c
+0.701 -0.01 0.211 -0.214 0 0 c
+f
+Q
+ endstream endobj 1035 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1023 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1027 0 obj <</Subtype/Form/Length 477/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1024 0 R/Resources<</XObject<</Fm0 1026 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[309.171 492.275 321.168 480.278]>>stream
+q
+315.165 487.275 m
+315.165 492.275 l
+318.477 492.275 321.168 489.593 321.168 486.272 c
+321.168 482.96 318.477 480.278 315.165 480.278 c
+311.853 480.278 309.171 482.96 309.171 486.272 c
+309.171 489.593 311.853 492.275 315.165 492.275 c
+315.165 487.275 l
+314.621 487.278 314.17 486.83 314.171 486.272 c
+314.168 485.727 314.619 485.276 315.165 485.278 c
+315.715 485.275 316.172 485.733 316.168 486.272 c
+316.17 486.824 315.713 487.279 315.165 487.275 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 1024 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1026 0 obj <</Subtype/Form/Length 11079/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1025 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[309.171 492.275 321.168 480.278]>>stream
+/CS0 cs 0.2 0.325 0.624 scn
+1 i
+/GS0 gs
+q 1 0 0 1 315.165 487.2754 cm
+0 0 m
+0 5 l
+3.312 5 6.003 2.318 6.003 -1.003 c
+6.003 -4.315 3.312 -6.997 0 -6.997 c
+-3.312 -6.997 -5.994 -4.315 -5.994 -1.003 c
+-5.994 2.318 -3.312 5 0 5 c
+0 0 l
+-0.544 0.003 -0.995 -0.445 -0.994 -1.003 c
+-0.997 -1.549 -0.546 -2 0 -1.997 c
+0.55 -2 1.007 -1.542 1.003 -1.003 c
+1.005 -0.451 0.548 0.003 0 0 c
+f
+Q
+q 1 0 0 1 315.165 488.1997 cm
+0 0 m
+-0.013 -0.041 -0.073 -0.074 -0.082 -0.115 c
+-0.11 -0.248 -0.02 -0.425 0 -0.559 c
+0 -0.924 l
+-0.544 -0.921 -0.995 -1.37 -0.994 -1.927 c
+-0.997 -2.473 -0.546 -2.924 0 -2.921 c
+0.55 -2.924 1.007 -2.467 1.003 -1.927 c
+1.005 -1.375 0.548 -0.921 0 -0.924 c
+0 -0.559 l
+0.034 -0.556 0.079 -0.552 0.113 -0.549 c
+0.142 -0.549 0.183 -0.536 0.209 -0.548 c
+1.045 -1.475 l
+1.44 -2.16 1.79 -2.114 1.805 -2.112 c
+2.058 -2.072 3.187 -0.623 1.901 0.191 c
+1.597 0.384 1.274 0.411 1.13 0.396 c
+0 0 l
+0 4.076 l
+3.312 4.076 6.003 1.394 6.003 -1.927 c
+6.003 -5.239 3.312 -7.921 0 -7.921 c
+-3.312 -7.921 -5.994 -5.239 -5.994 -1.927 c
+-5.994 1.394 -3.312 4.076 0 4.076 c
+0 0 l
+f
+Q
+0.196 0.318 0.612 scn
+q 1 0 0 1 315.165 488.3418 cm
+0 0 m
+-0.03 -0.092 -0.163 -0.17 -0.184 -0.265 c
+-0.221 -0.432 -0.125 -0.677 -0.186 -0.837 c
+-0.186 -0.838 -0.235 -0.941 -0.399 -1.048 c
+-1.15 -1.539 -1.036 -2.16 -0.983 -2.339 c
+-0.8 -2.96 -0.143 -3.262 0.452 -2.998 c
+0.652 -2.908 0.791 -2.771 0.873 -2.69 c
+1.144 -2.423 1.548 -2.625 1.836 -2.417 c
+2.431 -1.985 2.564 -1.604 2.628 -1.42 c
+2.85 -0.787 2.46 0.134 1.627 0.371 c
+0.853 0.592 0.002 0.008 0 0 c
+0 3.934 l
+3.312 3.934 6.003 1.251 6.003 -2.069 c
+6.003 -5.381 3.312 -8.063 0 -8.063 c
+-3.312 -8.063 -5.994 -5.381 -5.994 -2.069 c
+-5.994 1.251 -3.312 3.934 0 3.934 c
+0 0 l
+f
+Q
+0.192 0.31 0.596 scn
+q 1 0 0 1 315.165 488.4824 cm
+0 0 m
+-0.294 -0.832 -1.287 -1.354 -1.07 -2.414 c
+-0.931 -3.09 -0.167 -3.555 0.649 -3.164 c
+1.049 -2.972 1.516 -2.957 1.889 -2.695 c
+2.243 -2.445 2.625 -2.13 2.762 -1.679 c
+3.159 -0.375 2.125 0.264 1.73 0.385 c
+0.831 0.662 0.003 0.008 0 0 c
+0 3.793 l
+3.312 3.793 6.003 1.111 6.003 -2.21 c
+6.003 -5.522 3.312 -8.204 0 -8.204 c
+-3.312 -8.204 -5.994 -5.522 -5.994 -2.21 c
+-5.994 1.111 -3.312 3.793 0 3.793 c
+0 0 l
+f
+Q
+0.188 0.302 0.58 scn
+q 1 0 0 1 315.165 488.6216 cm
+0 0 m
+-0.352 -0.867 -1.375 -1.438 -1.138 -2.566 c
+-1.017 -3.142 -0.345 -3.804 0.713 -3.398 c
+2.483 -2.719 2.628 -2.663 2.945 -1.783 c
+2.951 -1.768 3.406 -0.235 2.053 0.317 c
+0.863 0.802 0.004 0.01 0 0 c
+0 3.654 l
+3.312 3.654 6.003 0.972 6.003 -2.349 c
+6.003 -5.661 3.312 -8.343 0 -8.343 c
+-3.312 -8.343 -5.994 -5.661 -5.994 -2.349 c
+-5.994 0.972 -3.312 3.654 0 3.654 c
+0 0 l
+f
+Q
+0.18 0.294 0.569 scn
+q 1 0 0 1 315.165 488.7588 cm
+0 0 m
+-0.192 -0.416 -0.582 -0.691 -0.789 -1.097 c
+-0.793 -1.105 -1.082 -1.703 -1.083 -1.706 c
+-1.253 -2.111 -1.282 -2.441 -1.181 -2.81 c
+-1.118 -3.036 -0.72 -4.135 0.985 -3.564 c
+5.022 -2.213 2.486 0.225 2.452 0.247 c
+1.442 0.897 0.101 0.219 0 0 c
+0 3.517 l
+3.312 3.517 6.003 0.834 6.003 -2.486 c
+6.003 -5.798 3.312 -8.48 0 -8.48 c
+-3.312 -8.48 -5.994 -5.798 -5.994 -2.486 c
+-5.994 0.834 -3.312 3.517 0 3.517 c
+0 0 l
+f
+Q
+0.176 0.286 0.553 scn
+q 1 0 0 1 315.165 488.9116 cm
+0 0 m
+-0.013 -0.025 -0.053 -0.04 -0.076 -0.057 c
+-0.432 -0.327 -0.719 -0.611 -1.164 -1.801 c
+-1.234 -1.99 -1.448 -2.564 -1.178 -3.156 c
+-0.778 -4.031 0.18 -4.2 1.671 -3.658 c
+3.876 -2.856 3.991 -0.38 2.341 0.402 c
+1.366 0.864 0.123 0.248 0 0 c
+0 3.364 l
+3.312 3.364 6.003 0.682 6.003 -2.639 c
+6.003 -5.951 3.312 -8.633 0 -8.633 c
+-3.312 -8.633 -5.994 -5.951 -5.994 -2.639 c
+-5.994 0.682 -3.312 3.364 0 3.364 c
+0 0 l
+f
+Q
+0.173 0.278 0.541 scn
+q 1 0 0 1 315.165 489.1035 cm
+0 0 m
+-0.034 -0.068 -0.142 -0.105 -0.202 -0.15 c
+-0.734 -0.546 -0.993 -1.253 -1.244 -1.936 c
+-1.353 -2.232 -1.496 -2.812 -1.238 -3.374 c
+-0.612 -4.739 1.248 -4.146 1.803 -3.932 c
+4.138 -3.031 4.265 -0.308 2.51 0.419 c
+1.108 1 0.006 0.012 0 0 c
+0 3.172 l
+3.312 3.172 6.003 0.49 6.003 -2.831 c
+6.003 -6.143 3.312 -8.825 0 -8.825 c
+-3.312 -8.825 -5.994 -6.143 -5.994 -2.831 c
+-5.994 0.49 -3.312 3.172 0 3.172 c
+0 0 l
+f
+Q
+0.169 0.275 0.525 scn
+q 1 0 0 1 315.165 489.291 cm
+0 0 m
+-0.037 -0.069 -0.152 -0.103 -0.217 -0.147 c
+-0.48 -0.327 -0.918 -0.951 -1.084 -1.383 c
+-1.402 -2.209 -1.592 -2.802 -1.342 -3.486 c
+-1.138 -4.046 -0.487 -4.899 1.578 -4.322 c
+4.081 -3.623 4.628 -0.763 2.992 0.316 c
+1.701 1.167 0.079 0.149 0 0 c
+0 2.984 l
+3.312 2.984 6.003 0.302 6.003 -3.019 c
+6.003 -6.331 3.312 -9.013 0 -9.013 c
+-3.312 -9.013 -5.994 -6.331 -5.994 -3.019 c
+-5.994 0.302 -3.312 2.984 0 2.984 c
+0 0 l
+f
+Q
+0.165 0.267 0.51 scn
+q 1 0 0 1 315.165 489.4751 cm
+0 0 m
+-0.175 -0.316 -0.541 -0.436 -0.745 -0.721 c
+-1.04 -1.133 -1.134 -1.367 -1.233 -1.614 c
+-1.283 -1.739 -1.712 -2.854 -1.439 -3.598 c
+-0.844 -5.219 1.105 -4.774 1.689 -4.6 c
+4.424 -3.78 5.002 -0.76 3.22 0.385 c
+1.946 1.202 0.234 0.424 0 0 c
+0 2.8 l
+3.312 2.8 6.003 0.118 6.003 -3.203 c
+6.003 -6.515 3.312 -9.197 0 -9.197 c
+-3.312 -9.197 -5.994 -6.515 -5.994 -3.203 c
+-5.994 0.118 -3.312 2.8 0 2.8 c
+0 0 l
+f
+Q
+0.161 0.259 0.498 scn
+q 1 0 0 1 315.165 489.7065 cm
+0 0 m
+-0.06 -0.132 -0.265 -0.21 -0.385 -0.291 c
+-0.751 -0.537 -1.207 -1.436 -1.319 -1.735 c
+-1.402 -1.96 -1.802 -3.124 -1.467 -3.945 c
+-0.712 -5.795 1.956 -4.866 1.982 -4.855 c
+5.299 -3.58 5.174 -0.371 3.116 0.573 c
+1.411 1.355 0.007 0.017 0 0 c
+0 2.569 l
+3.312 2.569 6.003 -0.113 6.003 -3.434 c
+6.003 -6.746 3.312 -9.428 0 -9.428 c
+-3.312 -9.428 -5.994 -6.746 -5.994 -3.434 c
+-5.994 -0.113 -3.312 2.569 0 2.569 c
+0 0 l
+f
+Q
+0.153 0.251 0.482 scn
+q 1 0 0 1 315.165 489.9888 cm
+0 0 m
+-0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c
+-0.739 -0.597 -1.12 -1.159 -1.404 -1.909 c
+-1.678 -2.633 -1.751 -3.637 -1.568 -4.146 c
+-0.856 -6.124 1.88 -5.306 1.908 -5.297 c
+5.872 -3.969 5.347 -0.495 3.422 0.519 c
+1.628 1.464 0.058 0.122 0 0 c
+0 2.287 l
+3.312 2.287 6.003 -0.396 6.003 -3.716 c
+6.003 -7.028 3.312 -9.71 0 -9.71 c
+-3.312 -9.71 -5.994 -7.028 -5.994 -3.716 c
+-5.994 -0.396 -3.312 2.287 0 2.287 c
+0 0 l
+f
+Q
+0.149 0.243 0.467 scn
+q 1 0 0 1 315.165 490.2749 cm
+0 0 m
+-0.045 -0.106 -0.209 -0.167 -0.302 -0.235 c
+-0.485 -0.372 -1.122 -0.935 -1.618 -2.443 c
+-1.723 -2.761 -1.897 -3.881 -1.538 -4.677 c
+-1.024 -5.812 0.792 -6.206 2.512 -5.554 c
+6.336 -4.105 5.75 -0.288 3.153 0.723 c
+1.353 1.423 0.007 0.017 0 0 c
+0 2 l
+3.312 2 6.003 -0.682 6.003 -4.002 c
+6.003 -7.314 3.312 -9.997 0 -9.997 c
+-3.312 -9.997 -5.994 -7.314 -5.994 -4.002 c
+-5.994 -0.682 -3.312 2 0 2 c
+0 0 l
+f
+Q
+0.145 0.235 0.455 scn
+q 1 0 0 1 315.165 490.6582 cm
+0 0 m
+-0.163 -0.361 -0.541 -0.515 -0.777 -0.805 c
+-0.945 -1.011 -1.046 -1.259 -1.201 -1.474 c
+-1.269 -1.568 -1.409 -1.763 -1.714 -2.734 c
+-2.048 -3.798 -1.784 -4.665 -1.597 -5.087 c
+-1.005 -6.421 1.188 -6.695 2.68 -6.041 c
+8.251 -3.594 4.333 0.165 2.965 0.677 c
+1.252 1.319 0.007 0.016 0 0 c
+0 1.617 l
+3.312 1.617 6.003 -1.065 6.003 -4.386 c
+6.003 -7.698 3.312 -10.38 0 -10.38 c
+-3.312 -10.38 -5.994 -7.698 -5.994 -4.386 c
+-5.994 -1.065 -3.312 1.617 0 1.617 c
+0 0 l
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 315.165 491.083 cm
+0 0 m
+-0.128 -0.296 -0.441 -0.404 -0.637 -0.631 c
+-0.787 -0.804 -0.891 -1.009 -1.028 -1.191 c
+-1.149 -1.351 -1.614 -2.354 -1.616 -2.362 c
+-2.165 -3.906 -2.034 -4.643 -1.834 -5.161 c
+-0.959 -7.42 1.653 -7.023 2.585 -6.679 c
+3.892 -6.198 6.61 -5.196 5.552 -2.522 c
+5.843 -3.227 6.003 -4 6.003 -4.811 c
+6.003 -8.123 3.312 -10.805 0 -10.805 c
+-3.312 -10.805 -5.994 -8.123 -5.994 -4.811 c
+-5.994 -1.49 -3.312 1.192 0 1.192 c
+0 0 l
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 315.165 491.5479 cm
+0 0 m
+-0.037 -0.078 -0.154 -0.129 -0.22 -0.185 c
+-1.232 -1.033 -1.806 -2.828 -1.83 -2.904 c
+-2.22 -4.142 -2.232 -5.159 -1.867 -5.927 c
+-0.58 -8.633 3.354 -7.149 3.394 -7.134 c
+4.44 -6.729 6.193 -6.052 5.898 -4.154 c
+5.967 -4.518 6.003 -4.892 6.003 -5.275 c
+6.003 -8.587 3.312 -11.27 0 -11.27 c
+-3.312 -11.27 -5.994 -8.587 -5.994 -5.275 c
+-5.994 -1.955 -3.312 0.728 0 0.728 c
+0 0 l
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 315.165 491.9907 cm
+0 0 m
+-0.038 -0.067 -0.155 -0.091 -0.221 -0.13 c
+-1.146 -0.672 -1.618 -2.109 -1.997 -3.263 c
+-2.003 -3.281 -2.538 -5.073 -2.065 -6.285 c
+-1.01 -8.991 2.93 -7.989 3.097 -7.945 c
+4.317 -7.624 5.989 -7.184 6.001 -5.584 c
+6.002 -5.628 6.003 -5.673 6.003 -5.718 c
+6.003 -9.03 3.312 -11.712 0 -11.712 c
+-3.312 -11.712 -5.994 -9.03 -5.994 -5.718 c
+-5.994 -2.397 -3.312 0.285 0 0.285 c
+0 0 l
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 315.165 492.2632 cm
+0 0 m
+-0.043 -0.052 -0.154 -0.029 -0.221 -0.042 c
+-0.695 -0.132 -1.346 -0.69 -1.729 -1.732 c
+-2.601 -4.102 -2.422 -5.693 -2.305 -6.268 c
+-1.773 -8.88 1.72 -8.614 1.755 -8.61 c
+4.215 -8.37 5.7 -8.226 5.951 -6.783 c
+5.562 -9.72 3.043 -11.985 0 -11.985 c
+-3.312 -11.985 -5.994 -9.303 -5.994 -5.991 c
+-5.994 -2.67 -3.312 0.012 0 0.012 c
+0 0 l
+f
+Q
+0.122 0.2 0.384 scn
+q 1 0 0 1 314.2603 492.1987 cm
+0 0 m
+-1.727 -0.587 -1.739 -4.385 -1.738 -4.546 c
+-1.734 -6.483 -1.193 -7.61 0.017 -8.2 c
+1.798 -9.069 6.085 -9.361 6.66 -7.637 c
+5.921 -10.115 3.622 -11.92 0.905 -11.92 c
+-2.407 -11.92 -5.089 -9.238 -5.089 -5.926 c
+-5.089 -2.857 -2.798 -0.333 0.165 0.032 c
+0.115 0.022 0.048 0.013 0 0 c
+f
+Q
+0.118 0.192 0.369 scn
+q 1 0 0 1 312.9341 491.7764 cm
+0 0 m
+-1.086 -0.961 -0.817 -4.853 -0.535 -5.61 c
+0.431 -8.208 2.403 -8.585 3.207 -8.626 c
+4.27 -8.681 5.298 -9.068 6.378 -8.967 c
+6.691 -8.938 7.264 -8.802 7.584 -8.218 c
+6.592 -10.165 4.566 -11.498 2.231 -11.498 c
+-1.081 -11.498 -3.763 -8.816 -3.763 -5.504 c
+-3.763 -2.812 -2 -0.54 0.432 0.225 c
+0.372 0.2 0.292 0.168 0.231 0.144 c
+0.161 0.102 0.062 0.054 0 0 c
+f
+Q
+0.204 0.333 0.639 scn
+q 1 0 0 1 316.7451 486.4531 cm
+0 0 m
+-0.091 0.065 -0.091 0.065 -0.52 0.593 c
+-0.662 0.769 -0.836 0.916 -0.974 1.096 c
+-1.233 1.432 -1.232 1.599 -1.232 1.6 c
+-1.226 1.62 -0.028 2.446 0.591 1.368 c
+1.026 0.611 0.245 -0.132 0.233 -0.134 c
+0.153 -0.145 0.065 -0.047 0 0 c
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 317.7354 491.6665 cm
+0 0 m
+-1.294 0.462 -2.254 -0.325 -2.57 -0.583 c
+-2.57 0.609 l
+-1.403 0.609 -0.313 0.276 0.609 -0.301 c
+0.52 -0.251 0.4 -0.185 0.31 -0.134 c
+0.217 -0.094 0.095 -0.034 0 0 c
+f
+Q
+0.208 0.337 0.655 scn
+q 1 0 0 1 316.7852 486.708 cm
+0 0 m
+-0.336 0.357 l
+-0.473 0.528 -0.628 0.683 -0.758 0.858 c
+-0.977 1.152 -1.021 1.271 -1.02 1.277 c
+-1.015 1.292 -0.028 1.706 0.328 0.955 c
+0.588 0.409 0.173 -0.121 0.167 -0.122 c
+0.106 -0.133 0.047 -0.04 0 0 c
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 316.9321 491.998 cm
+0 0 m
+-0.649 0.12 -1.161 -0.01 -1.767 -0.45 c
+-1.767 0.277 l
+-1.039 0.277 -0.34 0.147 0.306 -0.09 c
+0.223 -0.065 0.111 -0.031 0.028 -0.006 c
+0.02 -0.004 0.008 -0.001 0 0 c
+f
+Q
+0.216 0.345 0.667 scn
+q 1 0 0 1 316.7891 486.9756 cm
+0 0 m
+-0.004 0.004 -0.536 0.578 -0.712 0.865 c
+-0.569 0.878 -0.483 0.886 -0.265 0.812 c
+-0.18 0.784 -0.084 0.701 -0.026 0.633 c
+0.032 0.564 0.089 0.451 0.102 0.362 c
+0.133 0.142 0.096 0.015 0.073 -0.061 c
+0.051 -0.042 0.021 -0.02 0 0 c
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 316.0703 492.1978 cm
+0 0 m
+-0.314 -0.005 -0.486 -0.009 -0.905 -0.207 c
+-0.905 0.078 l
+-0.519 0.078 -0.142 0.041 0.224 -0.028 c
+0.157 -0.02 0.067 -0.003 0 0 c
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 315.165 492.2632 cm
+0 0 m
+0 0.012 l
+0.072 0.012 0.144 0.011 0.215 0.008 c
+0.15 0.006 0.046 -0.044 0 0 c
+f
+Q
+ endstream endobj 1025 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1015 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1019 0 obj <</Subtype/Form/Length 473/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1016 0 R/Resources<</XObject<</Fm0 1018 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[295.671 461.774 351.669 405.776]>>stream
+q
+323.67 445.774 m
+323.67 461.774 l
+339.132 461.774 351.669 449.237 351.669 433.775 c
+351.669 418.313 339.132 405.776 323.67 405.776 c
+308.199 405.776 295.671 418.313 295.671 433.775 c
+295.671 449.237 308.199 461.774 323.67 461.774 c
+323.67 445.774 l
+317.055 445.784 311.661 440.386 311.671 433.775 c
+311.661 427.165 317.055 421.767 323.67 421.776 c
+330.277 421.766 335.68 427.168 335.669 433.775 c
+335.68 440.383 330.277 445.785 323.67 445.774 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 1016 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1018 0 obj <</Subtype/Form/Length 13391/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1017 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[295.671 461.774 351.669 405.776]>>stream
+/CS0 cs 0.208 0.337 0.655 scn
+1 i
+/GS0 gs
+q 1 0 0 1 323.6699 445.7744 cm
+0 0 m
+0 16 l
+15.462 16 27.999 3.463 27.999 -11.999 c
+27.999 -27.461 15.462 -39.998 0 -39.998 c
+-15.471 -39.998 -27.999 -27.461 -27.999 -11.999 c
+-27.999 3.463 -15.471 16 0 16 c
+0 0 l
+-6.615 0.009 -12.009 -5.389 -11.999 -11.999 c
+-12.009 -18.609 -6.615 -24.007 0 -23.998 c
+6.607 -24.009 12.01 -18.606 11.999 -11.999 c
+12.01 -5.392 6.607 0.011 0 0 c
+f
+Q
+q 1 0 0 1 323.6699 450.936 cm
+0 0 m
+0 -0.46 l
+0 -5.162 l
+-6.615 -5.152 -12.009 -10.55 -11.999 -17.161 c
+-12.009 -23.771 -6.615 -29.169 0 -29.16 c
+6.607 -29.17 12.01 -23.768 11.999 -17.161 c
+12.01 -10.553 6.607 -5.151 0 -5.162 c
+0 -0.46 l
+0.316 -0.687 0.738 -0.99 1.054 -1.216 c
+3.814 -3.66 7.459 -4.866 10 -7.615 c
+12.018 -9.799 13.458 -12.46 14.279 -15.526 c
+15.091 -18.561 16.901 -19.341 16.918 -19.343 c
+18.873 -19.537 24.733 -10.481 17.857 -2.239 c
+10.881 6.124 0.77 1.958 0 0 c
+0 10.838 l
+15.462 10.838 27.999 -1.699 27.999 -17.161 c
+27.999 -32.623 15.462 -45.16 0 -45.16 c
+-15.471 -45.16 -27.999 -32.623 -27.999 -17.161 c
+-27.999 -1.699 -15.471 10.838 0 10.838 c
+0 0 l
+f
+Q
+0.204 0.333 0.639 scn
+q 1 0 0 1 323.6699 452.7832 cm
+0 0 m
+-0.297 -0.712 -1.488 -1.167 -1.738 -1.898 c
+-1.989 -2.637 -2.005 -3.871 -1.531 -4.492 c
+-1.227 -4.891 -0.45 -4.943 0 -5.165 c
+0 -7.009 l
+-6.615 -7 -12.009 -12.397 -11.999 -19.008 c
+-12.009 -25.618 -6.615 -31.016 0 -31.007 c
+6.607 -31.018 12.01 -25.615 11.999 -19.008 c
+12.01 -12.4 6.607 -6.998 0 -7.009 c
+0 -5.165 l
+0.338 -5.198 0.788 -5.242 1.126 -5.275 c
+2.249 -5.474 12.142 -7.557 13.761 -19.535 c
+14.172 -22.508 l
+14.637 -23.083 15.725 -23.499 16.46 -23.421 c
+20.584 -22.986 26.414 -9.565 15.896 -1.31 c
+7.945 4.929 0.035 0.084 0 0 c
+0 8.991 l
+15.462 8.991 27.999 -3.546 27.999 -19.008 c
+27.999 -34.47 15.462 -47.007 0 -47.007 c
+-15.471 -47.007 -27.999 -34.47 -27.999 -19.008 c
+-27.999 -3.546 -15.471 8.991 0 8.991 c
+0 0 l
+f
+Q
+0.2 0.325 0.624 scn
+q 1 0 0 1 323.6699 453.9038 cm
+0 0 m
+-0.627 -1.11 -1.868 -1.524 -2.71 -2.39 c
+-4.768 -4.502 -4.451 -6.209 -4.444 -6.223 c
+-4.359 -6.387 -4.359 -6.387 0 -7.407 c
+0 -8.129 l
+-6.615 -8.12 -12.009 -13.518 -11.999 -20.128 c
+-12.009 -26.739 -6.615 -32.137 0 -32.127 c
+6.607 -32.138 12.01 -26.736 11.999 -20.128 c
+12.01 -13.521 6.607 -8.119 0 -8.129 c
+0 -7.407 l
+0.312 -7.427 0.727 -7.454 1.039 -7.474 c
+5.586 -8.118 13.154 -12.018 12.674 -22.547 c
+12.56 -25.06 12.663 -26.477 12.982 -26.758 c
+14.311 -27.928 23.356 -23.682 22.629 -14.041 c
+21.27 3.998 1.142 2.018 0 0 c
+0 7.871 l
+15.462 7.871 27.999 -4.667 27.999 -20.128 c
+27.999 -35.59 15.462 -48.127 0 -48.127 c
+-15.471 -48.127 -27.999 -35.59 -27.999 -20.128 c
+-27.999 -4.667 -15.471 7.871 0 7.871 c
+0 0 l
+f
+Q
+0.196 0.318 0.612 scn
+q 1 0 0 1 323.6699 454.8291 cm
+0 0 m
+-0.223 -0.378 -0.896 -0.494 -1.28 -0.706 c
+-3.988 -2.198 -4.356 -2.882 -7.222 -8.202 c
+-10.979 -15.406 l
+-12.035 -17.648 -12.409 -19.972 -12.123 -22.51 c
+-11.368 -29.204 -4.441 -35.039 3.701 -32.831 c
+16.504 -28.45 l
+19.64 -26.383 21.524 -23.889 22.614 -20.364 c
+24.61 -13.907 21.812 -4.74 13.674 -0.575 c
+6.261 3.219 0.029 0.049 0 0 c
+0 6.945 l
+15.462 6.945 27.999 -5.592 27.999 -21.054 c
+27.999 -36.516 15.462 -49.053 0 -49.053 c
+-15.471 -49.053 -27.999 -36.516 -27.999 -21.054 c
+-27.999 -5.592 -15.471 6.945 0 6.945 c
+0 0 l
+f
+Q
+0.192 0.31 0.596 scn
+q 1 0 0 1 323.6699 455.6289 cm
+0 0 m
+-11.795 -5.181 -18.994 -27.783 -4.636 -33.729 c
+5.806 -38.053 30.469 -28.935 22.345 -10.09 c
+19.107 -2.58 10.176 3.509 0 0 c
+0 6.146 l
+15.462 6.146 27.999 -6.392 27.999 -21.854 c
+27.999 -37.315 15.462 -49.853 0 -49.853 c
+-15.471 -49.853 -27.999 -37.315 -27.999 -21.854 c
+-27.999 -6.392 -15.471 6.146 0 6.146 c
+0 0 l
+f
+Q
+0.188 0.302 0.58 scn
+q 1 0 0 1 323.6699 456.3296 cm
+0 0 m
+-0.26 -0.393 -1.011 -0.429 -1.444 -0.612 c
+-4.284 -1.815 -7.534 -4.967 -9.349 -8.277 c
+-13.499 -15.843 -13.758 -21.083 -13.244 -24.145 c
+-12.335 -29.557 -7.256 -38.113 6.018 -35.852 c
+29.65 -31.827 27.567 -10.229 15.691 -2.187 c
+7.726 3.206 0.039 0.058 0 0 c
+0 5.445 l
+15.462 5.445 27.999 -7.092 27.999 -22.554 c
+27.999 -38.016 15.462 -50.553 0 -50.553 c
+-15.471 -50.553 -27.999 -38.016 -27.999 -22.554 c
+-27.999 -7.092 -15.471 5.445 0 5.445 c
+0 0 l
+f
+Q
+0.18 0.294 0.569 scn
+q 1 0 0 1 323.6699 456.9956 cm
+0 0 m
+-0.271 -0.397 -1.043 -0.41 -1.49 -0.586 c
+-3.112 -1.224 -7.251 -3.368 -10.636 -9.471 c
+-11.688 -11.366 -15.022 -18.08 -13.796 -24.877 c
+-12.453 -32.323 -5.461 -39.361 6.714 -37.217 c
+28.943 -33.303 28.97 -11.254 15.609 -2.3 c
+7.857 2.895 0.038 0.056 0 0 c
+0 4.779 l
+15.462 4.779 27.999 -7.758 27.999 -23.22 c
+27.999 -38.682 15.462 -51.219 0 -51.219 c
+-15.471 -51.219 -27.999 -38.682 -27.999 -23.22 c
+-27.999 -7.758 -15.471 4.779 0 4.779 c
+0 0 l
+f
+Q
+0.176 0.286 0.553 scn
+q 1 0 0 1 323.6699 457.6064 cm
+0 0 m
+-0.285 -0.403 -1.086 -0.384 -1.551 -0.549 c
+-2.515 -0.89 -7.505 -2.918 -11.143 -9.4 c
+-12.539 -11.886 -15.644 -18.437 -14.343 -25.553 c
+-13.275 -31.396 -7.567 -40.711 7.05 -38.566 c
+28.064 -35.482 30.902 -13.127 16.17 -2.838 c
+7.979 2.883 0.04 0.057 0 0 c
+0 4.168 l
+15.462 4.168 27.999 -8.369 27.999 -23.831 c
+27.999 -39.293 15.462 -51.83 0 -51.83 c
+-15.471 -51.83 -27.999 -39.293 -27.999 -23.831 c
+-27.999 -8.369 -15.471 4.168 0 4.168 c
+0 0 l
+f
+Q
+0.173 0.278 0.541 scn
+q 1 0 0 1 323.6699 458.1792 cm
+0 0 m
+-0.295 -0.407 -1.114 -0.365 -1.591 -0.521 c
+-3.039 -0.995 -8.059 -3.066 -11.891 -9.807 c
+-12.952 -11.675 -16.307 -18.377 -14.887 -26.189 c
+-13.692 -32.762 -6.813 -41.823 7.243 -39.848 c
+28.687 -36.834 31.471 -13.847 16.374 -3.144 c
+8.08 2.737 0.041 0.056 0 0 c
+0 3.595 l
+15.462 3.595 27.999 -8.942 27.999 -24.404 c
+27.999 -39.866 15.462 -52.403 0 -52.403 c
+-15.471 -52.403 -27.999 -39.866 -27.999 -24.404 c
+-27.999 -8.942 -15.471 3.595 0 3.595 c
+0 0 l
+f
+Q
+0.169 0.275 0.525 scn
+q 1 0 0 1 323.6699 458.7163 cm
+0 0 m
+-0.327 -0.44 -1.225 -0.369 -1.749 -0.527 c
+-5.521 -1.665 -9.768 -5.259 -12.076 -9.267 c
+-15.396 -15.033 -16.523 -20.929 -15.426 -26.791 c
+-13.856 -35.175 -5.227 -43.009 7.675 -41.011 c
+29.382 -37.65 31.673 -13.956 16.092 -3.122 c
+8.188 2.374 0.041 0.052 0 0 c
+0 3.058 l
+15.462 3.058 27.999 -9.479 27.999 -24.941 c
+27.999 -40.403 15.462 -52.94 0 -52.94 c
+-15.471 -52.94 -27.999 -40.403 -27.999 -24.941 c
+-27.999 -9.479 -15.471 3.058 0 3.058 c
+0 0 l
+f
+Q
+0.165 0.267 0.51 scn
+q 1 0 0 1 323.6699 459.2314 cm
+0 0 m
+-0.315 -0.414 -1.17 -0.321 -1.672 -0.458 c
+-5.63 -1.542 -10.189 -5.222 -12.512 -9.206 c
+-13.797 -11.409 -17.707 -18.115 -15.958 -27.369 c
+-14.312 -36.085 -5.369 -44.227 7.962 -42.147 c
+29.823 -38.738 32.256 -15.066 16.713 -3.752 c
+8.241 2.415 0.041 0.054 0 0 c
+0 2.543 l
+15.462 2.543 27.999 -9.994 27.999 -25.456 c
+27.999 -40.918 15.462 -53.455 0 -53.455 c
+-15.471 -53.455 -27.999 -40.918 -27.999 -25.456 c
+-27.999 -9.994 -15.471 2.543 0 2.543 c
+0 0 l
+f
+Q
+0.161 0.259 0.498 scn
+q 1 0 0 1 323.6699 459.7041 cm
+0 0 m
+-0.326 -0.417 -1.198 -0.297 -1.711 -0.424 c
+-5.006 -1.24 -10.024 -4.173 -13.32 -9.752 c
+-16.644 -15.378 -17.708 -21.484 -16.484 -27.903 c
+-14.771 -36.889 -5.522 -45.311 8.242 -43.22 c
+29.813 -39.944 32.242 -15.421 16.845 -4.05 c
+8.507 2.107 0.042 0.053 0 0 c
+0 2.07 l
+15.462 2.07 27.999 -10.467 27.999 -25.929 c
+27.999 -41.391 15.462 -53.928 0 -53.928 c
+-15.471 -53.928 -27.999 -41.391 -27.999 -25.929 c
+-27.999 -10.467 -15.471 2.07 0 2.07 c
+0 0 l
+f
+Q
+0.153 0.251 0.482 scn
+q 1 0 0 1 323.6699 460.144 cm
+0 0 m
+-0.165 -0.201 -0.596 -0.119 -0.852 -0.169 c
+-6.632 -1.32 -11.089 -5.48 -13.333 -8.99 c
+-17.824 -16.015 -17.96 -22.678 -17.283 -27.031 c
+-15.529 -38.309 -5.353 -45.633 6.914 -44.447 c
+29.053 -42.307 33.213 -18.564 18.588 -5.674 c
+9.722 2.142 0.051 0.062 0 0 c
+0 1.63 l
+15.462 1.63 27.999 -10.907 27.999 -26.369 c
+27.999 -41.831 15.462 -54.368 0 -54.368 c
+-15.471 -54.368 -27.999 -41.831 -27.999 -26.369 c
+-27.999 -10.907 -15.471 1.63 0 1.63 c
+0 0 l
+f
+Q
+0.149 0.243 0.467 scn
+q 1 0 0 1 323.6699 460.5547 cm
+0 0 m
+-0.345 -0.419 -1.243 -0.245 -1.776 -0.35 c
+-5.454 -1.074 -10.584 -3.985 -13.756 -8.856 c
+-18.476 -16.104 -18.606 -22.976 -17.885 -27.465 c
+-16.272 -37.503 -7.101 -46.92 7.31 -45.498 c
+29.575 -43.3 33.52 -19.115 18.666 -5.998 c
+9.679 1.938 0.05 0.061 0 0 c
+0 1.22 l
+15.462 1.22 27.999 -11.317 27.999 -26.779 c
+27.999 -42.241 15.462 -54.778 0 -54.778 c
+-15.471 -54.778 -27.999 -42.241 -27.999 -26.779 c
+-27.999 -11.317 -15.471 1.22 0 1.22 c
+0 0 l
+f
+Q
+0.145 0.235 0.455 scn
+q 1 0 0 1 323.6699 460.9102 cm
+0 0 m
+-0.359 -0.424 -1.28 -0.213 -1.828 -0.305 c
+-2.573 -0.429 -9.242 -1.712 -14.038 -8.521 c
+-19.338 -16.045 -19.04 -23.601 -18.666 -26.5 c
+-16.79 -41.035 -4.557 -47.119 6.015 -46.621 c
+29.237 -45.525 34.039 -19.966 18.705 -6.311 c
+9.693 1.714 0.05 0.059 0 0 c
+0 0.864 l
+15.462 0.864 27.999 -11.673 27.999 -27.135 c
+27.999 -42.597 15.462 -55.134 0 -55.134 c
+-15.471 -55.134 -27.999 -42.597 -27.999 -27.135 c
+-27.999 -11.673 -15.471 0.864 0 0.864 c
+0 0 l
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 323.6699 461.2358 cm
+0 0 m
+-0.366 -0.422 -1.291 -0.183 -1.844 -0.262 c
+-5.618 -0.797 -11.206 -3.577 -14.557 -8.414 c
+-20.527 -17.033 -19.484 -25.013 -19.142 -27.635 c
+-17.325 -41.544 -4.721 -48.297 6.215 -47.587 c
+22.825 -46.511 31.838 -32.41 25.896 -16.796 c
+27.251 -20.083 27.999 -23.685 27.999 -27.46 c
+27.999 -42.922 15.462 -55.459 0 -55.459 c
+-15.471 -55.459 -27.999 -42.922 -27.999 -27.46 c
+-27.999 -11.999 -15.471 0.539 0 0.539 c
+0 0 l
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 323.6699 461.4912 cm
+0 0 m
+-0.38 -0.425 -1.323 -0.147 -1.89 -0.211 c
+-3.742 -0.417 -10.186 -1.632 -15.337 -8.604 c
+-20.121 -15.077 -20.496 -23.224 -19.964 -27.016 c
+-18.071 -40.5 -7.311 -49.138 6.811 -48.512 c
+13.567 -48.212 30.458 -42.954 27.513 -22.495 c
+27.832 -24.187 27.999 -25.932 27.999 -27.716 c
+27.999 -43.178 15.462 -55.715 0 -55.715 c
+-15.471 -55.715 -27.999 -43.178 -27.999 -27.716 c
+-27.999 -12.254 -15.471 0.283 0 0.283 c
+0 0 l
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 323.6699 461.6821 cm
+0 0 m
+-0.389 -0.422 -1.334 -0.109 -1.906 -0.156 c
+-5.864 -0.48 -11.765 -2.986 -15.37 -7.721 c
+-21.457 -15.717 -21.121 -23.997 -20.694 -27.186 c
+-18.848 -40.99 -7.359 -50.367 6.621 -49.484 c
+16.365 -48.868 27.809 -42.685 27.992 -27.284 c
+27.997 -27.491 27.999 -27.699 27.999 -27.907 c
+27.999 -43.369 15.462 -55.906 0 -55.906 c
+-15.471 -55.906 -27.999 -43.369 -27.999 -27.907 c
+-27.999 -12.445 -15.471 0.092 0 0.092 c
+0 0 l
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 323.6699 461.771 cm
+0 0 m
+-0.403 -0.423 -1.362 -0.067 -1.946 -0.096 c
+-5.655 -0.278 -11.174 -1.795 -16.41 -7.986 c
+-19.422 -11.547 -22.258 -18.903 -21.583 -25.522 c
+-19.025 -50.59 4.157 -50.418 5.143 -50.399 c
+17.394 -50.156 25.847 -43.167 27.756 -31.704 c
+25.941 -45.413 14.205 -55.995 0 -55.995 c
+-15.471 -55.995 -27.999 -43.458 -27.999 -27.996 c
+-27.999 -12.534 -15.471 0.003 0 0.003 c
+0 0 l
+f
+Q
+0.122 0.2 0.384 scn
+q 1 0 0 1 319.437 461.4541 cm
+0 0 m
+-22.531 -4.549 -23.531 -35.025 -6.331 -46.258 c
+6.847 -54.864 25.642 -52.17 31.071 -35.682 c
+27.627 -47.245 16.914 -55.678 4.233 -55.678 c
+-11.238 -55.678 -23.766 -43.141 -23.766 -27.679 c
+-23.766 -13.386 -13.062 -1.593 0.777 0.109 c
+0.544 0.077 0.232 0.04 0 0 c
+f
+Q
+0.118 0.192 0.369 scn
+q 1 0 0 1 311.6421 458.9941 cm
+0 0 m
+-16.565 -9.064 -17.346 -40.196 9.317 -48.713 c
+16.643 -51.053 30.634 -50.189 36.991 -37.91 c
+32.363 -46.995 22.921 -53.218 12.028 -53.218 c
+-3.443 -53.218 -15.971 -40.681 -15.971 -25.219 c
+-15.971 -12.684 -7.737 -2.07 3.624 1.498 c
+3.099 1.309 2.397 1.056 1.872 0.866 c
+1.309 0.609 0.542 0.297 0 0 c
+f
+Q
+0.216 0.345 0.667 scn
+q 1 0 0 1 339.5962 435.5991 cm
+0 0 m
+-1.706 2.422 -2.871 5.192 -4.806 7.466 c
+-5.581 8.375 -6.334 9.141 -7.046 9.74 c
+-7.103 9.788 -12.699 14.577 -12.705 14.929 c
+-12.707 15.035 -10.925 16.753 -10.74 16.825 c
+-10.058 17.086 -7.544 17.231 -6.875 17.166 c
+-5.111 16.992 -2.438 16.241 0.275 13.649 c
+3.79 10.293 4.269 6.382 4.332 5.263 c
+4.608 0.362 1.816 -1.553 1.125 -1.426 c
+0.589 -1.328 0.314 -0.445 0 0 c
+f
+Q
+0.22 0.353 0.682 scn
+q 1 0 0 1 339.7305 438.0928 cm
+0 0 m
+-1.97 2.883 -3.055 4.471 -4.87 6.595 c
+-5.072 6.832 -5.375 7.116 -5.591 7.34 c
+-5.844 7.601 -6.16 7.969 -6.419 8.224 c
+-6.913 8.711 -7.551 9.382 -8.074 9.839 c
+-9.724 11.281 -9.908 11.547 -9.911 11.595 c
+-9.914 11.657 -8.495 13.252 -8.295 13.411 c
+-8.132 13.541 -7.808 13.456 -7.601 13.433 c
+-5.32 13.184 -2.962 12.927 -0.476 10.566 c
+2.531 7.709 2.783 5.143 2.904 3.909 c
+2.938 3.565 2.929 0.875 2.709 0.41 c
+2.675 0.337 0.707 -0.875 0.645 -0.861 c
+0.33 -0.793 0.182 -0.267 0 0 c
+f
+Q
+0.224 0.361 0.694 scn
+q 1 0 0 1 338.8154 441.6221 cm
+0 0 m
+-0.737 0.235 -1.076 1.45 -1.576 2.04 c
+-3.148 3.894 -3.148 3.894 -3.897 4.678 c
+-4.212 5.008 -4.84 5.354 -4.922 5.803 c
+-4.014 7.981 l
+-3.953 8.007 -1.427 7.15 0.33 5.083 c
+1.631 3.552 2.397 0.755 2.281 0.574 c
+1.906 -0.01 0.699 -0.197 0.037 0.011 c
+0.026 0.014 0.011 -0.003 0 0 c
+f
+Q
+0.141 0.227 0.439 scn
+q 1 0 0 1 335.7192 459.0469 cm
+0 0 m
+-5.275 2.417 -9.403 2.407 -12.049 2.189 c
+-12.049 2.728 l
+-6.604 2.728 -1.522 1.173 2.777 -1.517 c
+2.232 -1.205 1.506 -0.789 0.961 -0.477 c
+0.673 -0.334 0.292 -0.134 0 0 c
+f
+Q
+0.137 0.22 0.427 scn
+q 1 0 0 1 331.9331 460.5313 cm
+0 0 m
+-3.078 0.794 -4.478 1.111 -8.263 0.96 c
+-8.263 1.243 l
+-4.866 1.243 -1.61 0.638 1.402 -0.47 c
+0.981 -0.329 0.425 -0.126 0 0 c
+f
+Q
+0.133 0.216 0.412 scn
+q 1 0 0 1 327.9009 461.4541 cm
+0 0 m
+-1.314 0.178 -2.48 0.278 -4.231 0.228 c
+-4.231 0.32 l
+-2.431 0.32 -0.671 0.15 1.035 -0.174 c
+0.724 -0.122 0.312 -0.042 0 0 c
+f
+Q
+0.125 0.208 0.396 scn
+q 1 0 0 1 323.6699 461.771 cm
+0 0 m
+0.335 0.003 0.669 -0.002 1.001 -0.014 c
+0.701 -0.01 0.211 -0.214 0 0 c
+f
+Q
+ endstream endobj 1017 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1005 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1009 0 obj <</Subtype/Form/Length 478/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1006 0 R/Resources<</XObject<</Fm0 1008 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[321.996 217.271 334.002 205.274]>>stream
+q
+327.999 212.271 m
+327.999 217.271 l
+331.311 217.271 334.002 214.59 334.002 211.277 c
+334.002 207.966 331.311 205.274 327.999 205.274 c
+324.687 205.274 321.996 207.966 321.996 211.277 c
+321.996 214.59 324.687 217.271 327.999 217.271 c
+327.999 212.271 l
+327.449 212.274 326.992 211.817 326.996 211.277 c
+326.991 210.734 327.456 210.27 327.999 210.274 c
+328.542 210.27 329.007 210.734 329.002 211.277 c
+329.006 211.817 328.549 212.274 327.999 212.271 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 1006 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1008 0 obj <</Subtype/Form/Length 11068/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1007 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[321.996 217.271 334.002 205.274]>>stream
+/CS0 cs 0.294 0.494 0.133 scn
+1 i
+/GS0 gs
+q 1 0 0 1 327.999 212.2715 cm
+0 0 m
+0 5 l
+3.312 5 6.003 2.318 6.003 -0.994 c
+6.003 -4.306 3.312 -6.997 0 -6.997 c
+-3.312 -6.997 -6.003 -4.306 -6.003 -0.994 c
+-6.003 2.318 -3.312 5 0 5 c
+0 0 l
+-0.55 0.003 -1.007 -0.454 -1.003 -0.994 c
+-1.008 -1.537 -0.543 -2.002 0 -1.997 c
+0.543 -2.002 1.008 -1.537 1.003 -0.994 c
+1.007 -0.454 0.55 0.003 0 0 c
+f
+Q
+q 1 0 0 1 327.999 213.1963 cm
+0 0 m
+-0.013 -0.041 -0.073 -0.074 -0.083 -0.115 c
+-0.111 -0.248 -0.02 -0.426 0 -0.56 c
+0 -0.925 l
+-0.55 -0.922 -1.007 -1.379 -1.003 -1.919 c
+-1.008 -2.462 -0.543 -2.927 0 -2.922 c
+0.543 -2.927 1.008 -2.462 1.003 -1.919 c
+1.007 -1.379 0.55 -0.922 0 -0.925 c
+0 -0.56 l
+0.034 -0.557 0.079 -0.553 0.113 -0.55 c
+0.142 -0.55 0.184 -0.536 0.21 -0.549 c
+1.046 -1.473 l
+1.441 -2.153 1.79 -2.106 1.805 -2.104 c
+2.057 -2.064 3.185 -0.619 1.901 0.191 c
+1.598 0.383 1.275 0.409 1.132 0.396 c
+0 0 l
+0 4.075 l
+3.312 4.075 6.003 1.394 6.003 -1.919 c
+6.003 -5.23 3.312 -7.922 0 -7.922 c
+-3.312 -7.922 -6.003 -5.23 -6.003 -1.919 c
+-6.003 1.394 -3.312 4.075 0 4.075 c
+0 0 l
+f
+Q
+0.286 0.482 0.133 scn
+q 1 0 0 1 327.999 213.3379 cm
+0 0 m
+-0.03 -0.092 -0.164 -0.17 -0.185 -0.265 c
+-0.222 -0.433 -0.125 -0.678 -0.188 -0.838 c
+-0.188 -0.839 -0.237 -0.941 -0.404 -1.049 c
+-1.156 -1.538 -1.044 -2.153 -0.992 -2.33 c
+-0.81 -2.948 -0.137 -3.26 0.449 -2.997 c
+0.649 -2.907 0.789 -2.769 0.872 -2.687 c
+1.143 -2.418 1.548 -2.618 1.836 -2.409 c
+2.434 -1.976 2.571 -1.584 2.629 -1.416 c
+2.851 -0.784 2.461 0.135 1.628 0.371 c
+0.853 0.591 0.002 0.008 0 0 c
+0 3.934 l
+3.312 3.934 6.003 1.252 6.003 -2.061 c
+6.003 -5.372 3.312 -8.063 0 -8.063 c
+-3.312 -8.063 -6.003 -5.372 -6.003 -2.061 c
+-6.003 1.252 -3.312 3.934 0 3.934 c
+0 0 l
+f
+Q
+0.278 0.471 0.129 scn
+q 1 0 0 1 327.999 213.4785 cm
+0 0 m
+-0.294 -0.83 -1.296 -1.345 -1.079 -2.404 c
+-0.955 -3.01 -0.239 -3.591 0.647 -3.163 c
+1.047 -2.97 1.515 -2.951 1.888 -2.688 c
+2.104 -2.536 2.607 -2.182 2.763 -1.673 c
+3.16 -0.374 2.125 0.264 1.731 0.385 c
+0.831 0.661 0.003 0.009 0 0 c
+0 3.793 l
+3.312 3.793 6.003 1.111 6.003 -2.201 c
+6.003 -5.513 3.312 -8.204 0 -8.204 c
+-3.312 -8.204 -6.003 -5.513 -6.003 -2.201 c
+-6.003 1.111 -3.312 3.793 0 3.793 c
+0 0 l
+f
+Q
+0.275 0.459 0.125 scn
+q 1 0 0 1 327.999 213.6182 cm
+0 0 m
+-0.352 -0.866 -1.383 -1.428 -1.146 -2.558 c
+-1.025 -3.14 -0.35 -3.809 0.711 -3.398 c
+2.484 -2.712 2.629 -2.655 2.946 -1.777 c
+2.952 -1.763 3.406 -0.234 2.053 0.316 c
+0.838 0.812 0.004 0.01 0 0 c
+0 3.653 l
+3.312 3.653 6.003 0.972 6.003 -2.341 c
+6.003 -5.652 3.312 -8.344 0 -8.344 c
+-3.312 -8.344 -6.003 -5.652 -6.003 -2.341 c
+-6.003 0.972 -3.312 3.653 0 3.653 c
+0 0 l
+f
+Q
+0.267 0.447 0.122 scn
+q 1 0 0 1 327.999 213.7549 cm
+0 0 m
+-0.193 -0.417 -0.585 -0.691 -0.795 -1.098 c
+-1.093 -1.707 l
+-1.262 -2.105 -1.291 -2.433 -1.189 -2.801 c
+-1.126 -3.029 -0.725 -4.141 0.983 -3.563 c
+5.011 -2.2 2.486 0.226 2.453 0.247 c
+1.442 0.896 0.101 0.219 0 0 c
+0 3.517 l
+3.312 3.517 6.003 0.835 6.003 -2.478 c
+6.003 -5.789 3.312 -8.48 0 -8.48 c
+-3.312 -8.48 -6.003 -5.789 -6.003 -2.478 c
+-6.003 0.835 -3.312 3.517 0 3.517 c
+0 0 l
+f
+Q
+0.259 0.439 0.118 scn
+q 1 0 0 1 327.999 213.9082 cm
+0 0 m
+-0.013 -0.025 -0.053 -0.04 -0.076 -0.058 c
+-0.364 -0.275 -0.691 -0.521 -1.173 -1.803 c
+-1.243 -1.988 -1.457 -2.555 -1.186 -3.148 c
+-0.781 -4.033 0.18 -4.204 1.671 -3.654 c
+3.863 -2.846 3.98 -0.373 2.341 0.401 c
+1.366 0.862 0.123 0.247 0 0 c
+0 3.363 l
+3.312 3.363 6.003 0.682 6.003 -2.631 c
+6.003 -5.942 3.312 -8.634 0 -8.634 c
+-3.312 -8.634 -6.003 -5.942 -6.003 -2.631 c
+-6.003 0.682 -3.312 3.363 0 3.363 c
+0 0 l
+f
+Q
+0.255 0.427 0.118 scn
+q 1 0 0 1 327.999 214.0996 cm
+0 0 m
+-0.034 -0.067 -0.142 -0.105 -0.203 -0.15 c
+-0.702 -0.521 -0.962 -1.182 -1.171 -1.711 c
+-1.281 -1.991 -1.54 -2.648 -1.288 -3.269 c
+-0.891 -4.246 0.088 -4.488 1.621 -3.988 c
+4.051 -3.195 4.189 -0.578 2.798 0.287 c
+1.588 1.039 0.134 0.266 0 0 c
+0 3.172 l
+3.312 3.172 6.003 0.49 6.003 -2.822 c
+6.003 -6.134 3.312 -8.825 0 -8.825 c
+-3.312 -8.825 -6.003 -6.134 -6.003 -2.822 c
+-6.003 0.49 -3.312 3.172 0 3.172 c
+0 0 l
+f
+Q
+0.247 0.416 0.114 scn
+q 1 0 0 1 327.999 214.2871 cm
+0 0 m
+-0.037 -0.069 -0.152 -0.104 -0.217 -0.147 c
+-0.454 -0.309 -0.887 -0.883 -1.091 -1.383 c
+-1.28 -1.846 -1.632 -2.707 -1.384 -3.387 c
+-0.994 -4.454 0.002 -4.769 1.578 -4.319 c
+4.069 -3.61 4.619 -0.754 2.993 0.316 c
+1.701 1.166 0.079 0.148 0 0 c
+0 2.984 l
+3.312 2.984 6.003 0.303 6.003 -3.01 c
+6.003 -6.321 3.312 -9.013 0 -9.013 c
+-3.312 -9.013 -6.003 -6.321 -6.003 -3.01 c
+-6.003 0.303 -3.312 2.984 0 2.984 c
+0 0 l
+f
+Q
+0.239 0.404 0.11 scn
+q 1 0 0 1 327.999 214.4717 cm
+0 0 m
+-0.176 -0.317 -0.542 -0.437 -0.748 -0.722 c
+-1.049 -1.139 -1.146 -1.381 -1.241 -1.614 c
+-1.291 -1.738 -1.721 -2.847 -1.448 -3.589 c
+-0.846 -5.228 1.105 -4.775 1.689 -4.598 c
+4.413 -3.769 4.993 -0.751 3.22 0.385 c
+1.946 1.2 0.234 0.423 0 0 c
+0 2.8 l
+3.312 2.8 6.003 0.118 6.003 -3.194 c
+6.003 -6.506 3.312 -9.197 0 -9.197 c
+-3.312 -9.197 -6.003 -6.506 -6.003 -3.194 c
+-6.003 0.118 -3.312 2.8 0 2.8 c
+0 0 l
+f
+Q
+0.235 0.392 0.106 scn
+q 1 0 0 1 327.999 214.7031 cm
+0 0 m
+-0.06 -0.133 -0.265 -0.211 -0.386 -0.291 c
+-0.759 -0.541 -1.229 -1.474 -1.327 -1.735 c
+-1.444 -2.049 -1.803 -3.136 -1.475 -3.938 c
+-0.713 -5.804 1.956 -4.863 1.982 -4.853 c
+5.283 -3.568 5.162 -0.364 3.116 0.573 c
+1.411 1.354 0.007 0.017 0 0 c
+0 2.568 l
+3.312 2.568 6.003 -0.113 6.003 -3.426 c
+6.003 -6.737 3.312 -9.429 0 -9.429 c
+-3.312 -9.429 -6.003 -6.737 -6.003 -3.426 c
+-6.003 -0.113 -3.312 2.568 0 2.568 c
+0 0 l
+f
+Q
+0.227 0.38 0.106 scn
+q 1 0 0 1 327.999 214.9854 cm
+0 0 m
+-0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c
+-0.736 -0.594 -1.131 -1.171 -1.412 -1.908 c
+-1.719 -2.715 -1.736 -3.694 -1.577 -4.139 c
+-0.858 -6.132 1.881 -5.304 1.908 -5.295 c
+5.598 -4.044 5.76 -0.555 3.075 0.691 c
+1.838 1.266 0.163 0.34 0 0 c
+0 2.286 l
+3.312 2.286 6.003 -0.396 6.003 -3.708 c
+6.003 -7.02 3.312 -9.711 0 -9.711 c
+-3.312 -9.711 -6.003 -7.02 -6.003 -3.708 c
+-6.003 -0.396 -3.312 2.286 0 2.286 c
+0 0 l
+f
+Q
+0.22 0.369 0.102 scn
+q 1 0 0 1 327.999 215.2715 cm
+0 0 m
+-0.045 -0.106 -0.21 -0.167 -0.302 -0.236 c
+-0.487 -0.373 -1.13 -0.938 -1.627 -2.442 c
+-1.764 -2.854 -1.88 -3.932 -1.545 -4.67 c
+-1.027 -5.814 0.793 -6.21 2.513 -5.55 c
+6.314 -4.092 5.733 -0.28 3.153 0.723 c
+1.353 1.422 0.007 0.017 0 0 c
+0 2 l
+3.312 2 6.003 -0.682 6.003 -3.994 c
+6.003 -7.306 3.312 -9.997 0 -9.997 c
+-3.312 -9.997 -6.003 -7.306 -6.003 -3.994 c
+-6.003 -0.682 -3.312 2 0 2 c
+0 0 l
+f
+Q
+0.212 0.361 0.098 scn
+q 1 0 0 1 327.999 215.6543 cm
+0 0 m
+-0.163 -0.361 -0.542 -0.515 -0.779 -0.805 c
+-0.948 -1.011 -1.05 -1.26 -1.205 -1.475 c
+-1.369 -1.701 -1.472 -1.983 -1.723 -2.733 c
+-2.048 -3.703 -1.823 -4.541 -1.66 -4.953 c
+-1.229 -6.046 0.416 -6.786 2.422 -6.135 c
+7.014 -4.645 5.816 -0.744 3.286 0.54 c
+1.422 1.485 0.008 0.019 0 0 c
+0 1.617 l
+3.312 1.617 6.003 -1.064 6.003 -4.377 c
+6.003 -7.688 3.312 -10.38 0 -10.38 c
+-3.312 -10.38 -6.003 -7.688 -6.003 -4.377 c
+-6.003 -1.064 -3.312 1.617 0 1.617 c
+0 0 l
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 327.999 216.0791 cm
+0 0 m
+-0.128 -0.296 -0.442 -0.404 -0.638 -0.631 c
+-0.788 -0.804 -0.893 -1.01 -1.031 -1.191 c
+-1.148 -1.346 -1.62 -2.353 -1.623 -2.36 c
+-2.172 -3.895 -2.053 -4.608 -1.843 -5.151 c
+-0.961 -7.428 1.653 -7.023 2.586 -6.676 c
+3.891 -6.189 6.606 -5.178 5.553 -2.521 c
+5.843 -3.224 6.003 -3.994 6.003 -4.802 c
+6.003 -8.113 3.312 -10.805 0 -10.805 c
+-3.312 -10.805 -6.003 -8.113 -6.003 -4.802 c
+-6.003 -1.489 -3.312 1.192 0 1.192 c
+0 0 l
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 327.999 216.5439 cm
+0 0 m
+-0.037 -0.078 -0.154 -0.129 -0.22 -0.185 c
+-1.238 -1.037 -1.832 -2.884 -1.837 -2.902 c
+-2.426 -4.76 -2.011 -5.632 -1.875 -5.918 c
+-0.597 -8.6 3.355 -7.144 3.396 -7.129 c
+4.441 -6.72 6.192 -6.035 5.899 -4.15 c
+5.967 -4.512 6.003 -4.885 6.003 -5.267 c
+6.003 -8.578 3.312 -11.27 0 -11.27 c
+-3.312 -11.27 -6.003 -8.578 -6.003 -5.267 c
+-6.003 -1.954 -3.312 0.728 0 0.728 c
+0 0 l
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 327.999 216.9863 cm
+0 0 m
+-0.038 -0.066 -0.155 -0.09 -0.221 -0.129 c
+-1.15 -0.674 -1.646 -2.172 -2.007 -3.267 c
+-2.013 -3.283 -2.546 -5.064 -2.073 -6.276 c
+-1.009 -9.004 3.058 -7.952 3.099 -7.941 c
+4.318 -7.615 5.989 -7.169 6.001 -5.576 c
+6.002 -5.62 6.003 -5.664 6.003 -5.709 c
+6.003 -9.021 3.312 -11.712 0 -11.712 c
+-3.312 -11.712 -6.003 -9.021 -6.003 -5.709 c
+-6.003 -2.396 -3.312 0.285 0 0.285 c
+0 0 l
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 327.999 217.2598 cm
+0 0 m
+-0.043 -0.053 -0.154 -0.029 -0.221 -0.042 c
+-0.696 -0.133 -1.348 -0.689 -1.732 -1.73 c
+-2.577 -4.014 -2.459 -5.548 -2.314 -6.259 c
+-1.864 -8.468 0.843 -8.703 1.755 -8.611 c
+4.299 -8.355 5.7 -8.214 5.951 -6.775 c
+5.562 -9.713 3.043 -11.985 0 -11.985 c
+-3.312 -11.985 -6.003 -9.294 -6.003 -5.982 c
+-6.003 -2.67 -3.312 0.012 0 0.012 c
+0 0 l
+f
+Q
+0.18 0.302 0.082 scn
+q 1 0 0 1 327.0938 217.1953 cm
+0 0 m
+-1.738 -0.59 -1.75 -4.505 -1.75 -4.545 c
+-1.745 -7.049 -0.739 -7.83 0.017 -8.199 c
+1.798 -9.07 6.085 -9.361 6.66 -7.631 c
+5.921 -10.109 3.622 -11.921 0.905 -11.921 c
+-2.407 -11.921 -5.098 -9.229 -5.098 -5.918 c
+-5.098 -2.856 -2.799 -0.334 0.165 0.031 c
+0.115 0.021 0.049 0.013 0 0 c
+f
+Q
+0.173 0.29 0.078 scn
+q 1 0 0 1 325.7642 216.7715 cm
+0 0 m
+-1.064 -0.938 -0.813 -4.867 -0.541 -5.6 c
+0.429 -8.205 2.405 -8.584 3.209 -8.627 c
+4.272 -8.682 5.299 -9.067 6.379 -8.965 c
+6.692 -8.936 7.266 -8.798 7.587 -8.212 c
+6.594 -10.16 4.569 -11.497 2.235 -11.497 c
+-1.077 -11.497 -3.768 -8.806 -3.768 -5.494 c
+-3.768 -2.81 -2.001 -0.54 0.432 0.225 c
+0.372 0.2 0.292 0.168 0.231 0.144 c
+0.161 0.102 0.061 0.054 0 0 c
+f
+Q
+0.302 0.506 0.137 scn
+q 1 0 0 1 329.5791 211.4561 cm
+0 0 m
+-0.095 0.068 -0.095 0.068 -0.519 0.587 c
+-0.661 0.762 -0.834 0.909 -0.973 1.089 c
+-1.125 1.286 -1.231 1.594 y
+-1.226 1.612 -0.029 2.438 0.592 1.362 c
+1.027 0.609 0.245 -0.131 0.233 -0.133 c
+0.153 -0.144 0.065 -0.047 0 0 c
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 330.5688 216.6631 cm
+0 0 m
+-1.295 0.462 -2.254 -0.325 -2.57 -0.584 c
+-2.57 0.608 l
+-1.402 0.608 -0.311 0.274 0.612 -0.302 c
+0.522 -0.252 0.402 -0.186 0.312 -0.136 c
+0.219 -0.095 0.096 -0.034 0 0 c
+f
+Q
+0.306 0.518 0.141 scn
+q 1 0 0 1 329.6191 211.708 cm
+0 0 m
+-0.335 0.354 l
+-0.472 0.524 -0.626 0.68 -0.757 0.854 c
+-0.976 1.148 -1.021 1.268 -1.019 1.272 c
+-1.014 1.287 -0.028 1.7 0.33 0.952 c
+0.591 0.409 0.174 -0.12 0.167 -0.121 c
+0.106 -0.131 0.048 -0.039 0 0 c
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 329.7661 216.9941 cm
+0 0 m
+-0.649 0.12 -1.161 -0.01 -1.767 -0.45 c
+-1.767 0.277 l
+-1.038 0.277 -0.339 0.147 0.307 -0.091 c
+0.224 -0.065 0.112 -0.031 0.029 -0.007 c
+0.02 -0.005 0.009 -0.002 0 0 c
+f
+Q
+0.314 0.525 0.145 scn
+q 1 0 0 1 329.623 211.9746 cm
+0 0 m
+-0.004 0.004 -0.533 0.572 -0.71 0.861 c
+-0.568 0.874 -0.482 0.883 -0.264 0.809 c
+-0.18 0.78 -0.083 0.699 -0.025 0.631 c
+0.033 0.563 0.091 0.45 0.104 0.361 c
+0.135 0.141 0.099 0.019 0.074 -0.063 c
+0.052 -0.044 0.021 -0.021 0 0 c
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 328.9043 217.1943 cm
+0 0 m
+-0.314 -0.006 -0.487 -0.009 -0.905 -0.208 c
+-0.905 0.077 l
+-0.519 0.077 -0.142 0.041 0.225 -0.029 c
+0.157 -0.021 0.068 -0.004 0 0 c
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 327.999 217.2598 cm
+0 0 m
+0 0.012 l
+0.072 0.012 0.144 0.011 0.215 0.008 c
+0.15 0.006 0.046 -0.045 0 0 c
+f
+Q
+ endstream endobj 1007 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 998 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1002 0 obj <</Subtype/Form/Length 475/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 999 0 R/Resources<</XObject<</Fm0 1001 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[306.003 319.277 362.001 263.279]>>stream
+q
+334.002 303.277 m
+334.002 319.277 l
+349.464 319.277 362.001 306.74 362.001 291.278 c
+362.001 275.808 349.464 263.279 334.002 263.279 c
+318.54 263.279 306.003 275.808 306.003 291.278 c
+306.003 306.74 318.54 319.277 334.002 319.277 c
+334.002 303.277 l
+327.395 303.288 321.992 297.886 322.003 291.278 c
+321.994 284.663 327.392 279.27 334.002 279.279 c
+340.612 279.27 346.01 284.663 346.001 291.278 c
+346.012 297.886 340.609 303.288 334.002 303.277 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 999 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1001 0 obj <</Subtype/Form/Length 13469/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 1000 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[306.003 319.277 362.001 263.279]>>stream
+/CS0 cs 0.306 0.518 0.141 scn
+1 i
+/GS0 gs
+q 1 0 0 1 334.002 303.2773 cm
+0 0 m
+0 16 l
+15.462 16 27.999 3.463 27.999 -11.999 c
+27.999 -27.47 15.462 -39.998 0 -39.998 c
+-15.462 -39.998 -27.999 -27.47 -27.999 -11.999 c
+-27.999 3.463 -15.462 16 0 16 c
+0 0 l
+-6.607 0.011 -12.01 -5.392 -11.999 -11.999 c
+-12.008 -18.614 -6.61 -24.008 0 -23.998 c
+6.61 -24.008 12.008 -18.614 11.999 -11.999 c
+12.01 -5.392 6.607 0.011 0 0 c
+f
+Q
+q 1 0 0 1 334.002 308.4409 cm
+0 0 m
+0 -0.468 l
+0 -5.164 l
+-6.607 -5.153 -12.01 -10.555 -11.999 -17.163 c
+-12.008 -23.778 -6.61 -29.171 0 -29.162 c
+6.61 -29.171 12.008 -23.778 11.999 -17.163 c
+12.01 -10.555 6.607 -5.153 0 -5.164 c
+0 -0.468 l
+0.316 -0.694 0.738 -0.996 1.055 -1.223 c
+3.817 -3.661 7.459 -4.869 10 -7.617 c
+12.018 -9.8 13.458 -12.461 14.279 -15.528 c
+15.076 -18.506 16.901 -19.345 16.917 -19.347 c
+18.874 -19.542 24.734 -10.485 17.857 -2.241 c
+10.879 6.124 0.769 1.958 0 0 c
+0 10.836 l
+15.462 10.836 27.999 -1.701 27.999 -17.163 c
+27.999 -32.633 15.462 -45.162 0 -45.162 c
+-15.462 -45.162 -27.999 -32.633 -27.999 -17.163 c
+-27.999 -1.701 -15.462 10.836 0 10.836 c
+0 0 l
+f
+Q
+0.302 0.506 0.137 scn
+q 1 0 0 1 334.002 310.2881 cm
+0 0 m
+-0.296 -0.712 -1.487 -1.168 -1.735 -1.898 c
+-1.987 -2.638 -2.003 -3.873 -1.53 -4.494 c
+-1.227 -4.893 -0.45 -4.945 0 -5.167 c
+0 -7.011 l
+-6.607 -7 -12.01 -12.402 -11.999 -19.01 c
+-12.008 -25.625 -6.61 -31.019 0 -31.009 c
+6.61 -31.019 12.008 -25.625 11.999 -19.01 c
+12.01 -12.402 6.607 -7 0 -7.011 c
+0 -5.167 l
+0.338 -5.201 0.788 -5.245 1.126 -5.278 c
+2.249 -5.476 12.144 -7.557 13.761 -19.537 c
+14.171 -22.514 l
+14.636 -23.089 15.724 -23.505 16.459 -23.428 c
+20.584 -22.992 26.416 -9.568 15.896 -1.312 c
+7.943 4.929 0.035 0.084 0 0 c
+0 8.989 l
+15.462 8.989 27.999 -3.548 27.999 -19.01 c
+27.999 -34.48 15.462 -47.009 0 -47.009 c
+-15.462 -47.009 -27.999 -34.48 -27.999 -19.01 c
+-27.999 -3.548 -15.462 8.989 0 8.989 c
+0 0 l
+f
+Q
+0.294 0.494 0.133 scn
+q 1 0 0 1 334.002 311.4072 cm
+0 0 m
+-0.627 -1.109 -1.866 -1.525 -2.708 -2.391 c
+-4.764 -4.503 -4.447 -6.209 -4.44 -6.223 c
+-4.355 -6.386 -4.355 -6.386 0 -7.408 c
+0 -8.13 l
+-6.607 -8.119 -12.01 -13.521 -11.999 -20.129 c
+-12.008 -26.744 -6.61 -32.138 0 -32.128 c
+6.61 -32.138 12.008 -26.744 11.999 -20.129 c
+12.01 -13.521 6.607 -8.119 0 -8.13 c
+0 -7.408 l
+0.312 -7.428 0.727 -7.455 1.039 -7.475 c
+5.587 -8.118 13.155 -12.018 12.674 -22.55 c
+12.559 -25.063 12.663 -26.479 12.981 -26.762 c
+14.31 -27.933 23.356 -23.69 22.629 -14.042 c
+21.27 4.006 1.142 2.02 0 0 c
+0 7.87 l
+15.462 7.87 27.999 -4.667 27.999 -20.129 c
+27.999 -35.6 15.462 -48.128 0 -48.128 c
+-15.462 -48.128 -27.999 -35.6 -27.999 -20.129 c
+-27.999 -4.667 -15.462 7.87 0 7.87 c
+0 0 l
+f
+Q
+0.286 0.482 0.133 scn
+q 1 0 0 1 334.002 312.3325 cm
+0 0 m
+-0.223 -0.377 -0.896 -0.494 -1.279 -0.706 c
+-3.984 -2.198 -4.352 -2.882 -7.218 -8.204 c
+-10.978 -15.407 l
+-12.034 -17.649 -12.409 -19.973 -12.123 -22.511 c
+-11.368 -29.203 -4.44 -35.038 3.702 -32.832 c
+16.504 -28.455 l
+19.639 -26.388 21.523 -23.893 22.614 -20.364 c
+24.61 -13.908 21.812 -4.74 13.674 -0.575 c
+6.26 3.219 0.029 0.049 0 0 c
+0 6.945 l
+15.462 6.945 27.999 -5.592 27.999 -21.054 c
+27.999 -36.525 15.462 -49.053 0 -49.053 c
+-15.462 -49.053 -27.999 -36.525 -27.999 -21.054 c
+-27.999 -5.592 -15.462 6.945 0 6.945 c
+0 0 l
+f
+Q
+0.278 0.471 0.129 scn
+q 1 0 0 1 334.002 313.1323 cm
+0 0 m
+-0.174 -0.267 -0.682 -0.3 -0.974 -0.428 c
+-3.27 -1.438 -6.363 -4.313 -7.593 -6.58 c
+-13.39 -17.263 -13 -20.654 -12.686 -23.379 c
+-12.044 -28.943 -6.306 -36.331 3.976 -34.516 c
+34.376 -29.152 23.202 -7.033 15.417 -1.844 c
+7.621 3.352 0.038 0.059 0 0 c
+0 6.145 l
+15.462 6.145 27.999 -6.392 27.999 -21.854 c
+27.999 -37.325 15.462 -49.853 0 -49.853 c
+-15.462 -49.853 -27.999 -37.325 -27.999 -21.854 c
+-27.999 -6.392 -15.462 6.145 0 6.145 c
+0 0 l
+f
+Q
+0.275 0.459 0.125 scn
+q 1 0 0 1 334.002 313.833 cm
+0 0 m
+-0.26 -0.393 -1.01 -0.429 -1.443 -0.612 c
+-4.281 -1.817 -7.531 -4.969 -9.346 -8.278 c
+-13.499 -15.849 -13.757 -21.087 -13.243 -24.146 c
+-12.334 -29.559 -7.254 -38.113 6.021 -35.853 c
+29.652 -31.827 27.567 -10.229 15.691 -2.188 c
+7.725 3.206 0.039 0.058 0 0 c
+0 5.444 l
+15.462 5.444 27.999 -7.093 27.999 -22.555 c
+27.999 -38.025 15.462 -50.554 0 -50.554 c
+-15.462 -50.554 -27.999 -38.025 -27.999 -22.555 c
+-27.999 -7.093 -15.462 5.444 0 5.444 c
+0 0 l
+f
+Q
+0.267 0.447 0.122 scn
+q 1 0 0 1 334.002 314.499 cm
+0 0 m
+-0.27 -0.397 -1.042 -0.411 -1.488 -0.586 c
+-3.111 -1.225 -7.25 -3.37 -10.633 -9.471 c
+-11.685 -11.368 -15.021 -18.085 -13.796 -24.878 c
+-12.453 -32.322 -5.461 -39.359 6.715 -37.218 c
+28.949 -33.308 28.975 -11.258 15.609 -2.301 c
+7.856 2.895 0.038 0.056 0 0 c
+0 4.778 l
+15.462 4.778 27.999 -7.759 27.999 -23.221 c
+27.999 -38.691 15.462 -51.22 0 -51.22 c
+-15.462 -51.22 -27.999 -38.691 -27.999 -23.221 c
+-27.999 -7.759 -15.462 4.778 0 4.778 c
+0 0 l
+f
+Q
+0.259 0.439 0.118 scn
+q 1 0 0 1 334.002 315.1099 cm
+0 0 m
+-0.285 -0.403 -1.085 -0.384 -1.55 -0.549 c
+-2.14 -0.758 -7.426 -2.783 -11.14 -9.4 c
+-12.536 -11.888 -15.643 -18.441 -14.343 -25.554 c
+-13.275 -31.396 -7.567 -40.71 7.05 -38.567 c
+28.067 -35.485 30.905 -13.13 16.17 -2.838 c
+7.979 2.883 0.04 0.057 0 0 c
+0 4.167 l
+15.462 4.167 27.999 -8.37 27.999 -23.832 c
+27.999 -39.302 15.462 -51.831 0 -51.831 c
+-15.462 -51.831 -27.999 -39.302 -27.999 -23.832 c
+-27.999 -8.37 -15.462 4.167 0 4.167 c
+0 0 l
+f
+Q
+0.255 0.427 0.118 scn
+q 1 0 0 1 334.002 315.6826 cm
+0 0 m
+-0.294 -0.407 -1.113 -0.365 -1.59 -0.521 c
+-3.037 -0.996 -8.057 -3.068 -11.887 -9.807 c
+-12.95 -11.677 -16.306 -18.383 -14.886 -26.191 c
+-13.691 -32.763 -6.811 -41.823 7.247 -39.848 c
+28.69 -36.835 31.472 -13.848 16.374 -3.144 c
+8.08 2.736 0.041 0.056 0 0 c
+0 3.595 l
+15.462 3.595 27.999 -8.942 27.999 -24.404 c
+27.999 -39.875 15.462 -52.403 0 -52.403 c
+-15.462 -52.403 -27.999 -39.875 -27.999 -24.404 c
+-27.999 -8.942 -15.462 3.595 0 3.595 c
+0 0 l
+f
+Q
+0.247 0.416 0.114 scn
+q 1 0 0 1 334.002 316.2197 cm
+0 0 m
+-0.327 -0.44 -1.224 -0.37 -1.749 -0.528 c
+-5.52 -1.667 -9.766 -5.26 -12.073 -9.267 c
+-15.394 -15.036 -16.522 -20.933 -15.426 -26.792 c
+-13.857 -35.175 -5.228 -43.007 7.675 -41.012 c
+29.388 -37.654 31.678 -13.959 16.092 -3.122 c
+8.188 2.374 0.041 0.052 0 0 c
+0 3.058 l
+15.462 3.058 27.999 -9.479 27.999 -24.941 c
+27.999 -40.412 15.462 -52.94 0 -52.94 c
+-15.462 -52.94 -27.999 -40.412 -27.999 -24.941 c
+-27.999 -9.479 -15.462 3.058 0 3.058 c
+0 0 l
+f
+Q
+0.239 0.404 0.11 scn
+q 1 0 0 1 334.002 316.7344 cm
+0 0 m
+-0.315 -0.413 -1.169 -0.321 -1.671 -0.458 c
+-5.628 -1.543 -10.186 -5.222 -12.509 -9.206 c
+-13.794 -11.411 -17.706 -18.119 -15.958 -27.369 c
+-14.312 -36.083 -5.369 -44.225 7.962 -42.147 c
+29.829 -38.742 32.261 -15.07 16.713 -3.752 c
+8.241 2.415 0.041 0.054 0 0 c
+0 2.543 l
+15.462 2.543 27.999 -9.994 27.999 -25.456 c
+27.999 -40.927 15.462 -53.455 0 -53.455 c
+-15.462 -53.455 -27.999 -40.927 -27.999 -25.456 c
+-27.999 -9.994 -15.462 2.543 0 2.543 c
+0 0 l
+f
+Q
+0.235 0.392 0.106 scn
+q 1 0 0 1 334.002 317.207 cm
+0 0 m
+-0.326 -0.417 -1.197 -0.297 -1.71 -0.424 c
+-5.005 -1.241 -10.022 -4.174 -13.317 -9.752 c
+-16.642 -15.38 -17.708 -21.487 -16.484 -27.904 c
+-14.771 -36.888 -5.523 -45.309 8.242 -43.221 c
+29.817 -39.947 32.246 -15.423 16.845 -4.05 c
+8.507 2.107 0.042 0.053 0 0 c
+0 2.07 l
+15.462 2.07 27.999 -10.467 27.999 -25.929 c
+27.999 -41.399 15.462 -53.928 0 -53.928 c
+-15.462 -53.928 -27.999 -41.399 -27.999 -25.929 c
+-27.999 -10.467 -15.462 2.07 0 2.07 c
+0 0 l
+f
+Q
+0.227 0.38 0.106 scn
+q 1 0 0 1 334.002 317.647 cm
+0 0 m
+-0.165 -0.201 -0.596 -0.119 -0.852 -0.169 c
+-6.63 -1.321 -11.086 -5.48 -13.33 -8.99 c
+-17.824 -16.019 -17.96 -22.681 -17.283 -27.032 c
+-15.528 -38.307 -5.35 -45.631 6.918 -44.447 c
+29.057 -42.308 33.214 -18.565 18.588 -5.674 c
+9.722 2.142 0.051 0.062 0 0 c
+0 1.63 l
+15.462 1.63 27.999 -10.907 27.999 -26.369 c
+27.999 -41.839 15.462 -54.368 0 -54.368 c
+-15.462 -54.368 -27.999 -41.839 -27.999 -26.369 c
+-27.999 -10.907 -15.462 1.63 0 1.63 c
+0 0 l
+f
+Q
+0.22 0.369 0.102 scn
+q 1 0 0 1 334.002 318.0581 cm
+0 0 m
+-0.345 -0.419 -1.243 -0.245 -1.775 -0.35 c
+-5.333 -1.052 -10.598 -4.013 -13.752 -8.857 c
+-18.474 -16.108 -18.606 -22.979 -17.885 -27.466 c
+-16.272 -37.501 -7.101 -46.918 7.31 -45.498 c
+29.578 -43.303 33.522 -19.118 18.666 -5.999 c
+9.679 1.938 0.05 0.061 0 0 c
+0 1.219 l
+15.462 1.219 27.999 -11.318 27.999 -26.78 c
+27.999 -42.25 15.462 -54.779 0 -54.779 c
+-15.462 -54.779 -27.999 -42.25 -27.999 -26.78 c
+-27.999 -11.318 -15.462 1.219 0 1.219 c
+0 0 l
+f
+Q
+0.212 0.361 0.098 scn
+q 1 0 0 1 334.002 318.4131 cm
+0 0 m
+-0.359 -0.424 -1.279 -0.213 -1.827 -0.305 c
+-2.571 -0.429 -9.239 -1.713 -14.035 -8.521 c
+-19.337 -16.049 -19.04 -23.602 -18.666 -26.5 c
+-16.791 -41.034 -4.557 -47.118 6.016 -46.62 c
+29.239 -45.526 34.04 -19.967 18.705 -6.311 c
+9.693 1.714 0.05 0.059 0 0 c
+0 0.864 l
+15.462 0.864 27.999 -11.673 27.999 -27.135 c
+27.999 -42.605 15.462 -55.134 0 -55.134 c
+-15.462 -55.134 -27.999 -42.605 -27.999 -27.135 c
+-27.999 -11.673 -15.462 0.864 0 0.864 c
+0 0 l
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 334.002 318.7388 cm
+0 0 m
+-0.366 -0.422 -1.29 -0.183 -1.842 -0.262 c
+-5.616 -0.798 -11.203 -3.577 -14.553 -8.414 c
+-20.526 -17.037 -19.484 -25.014 -19.142 -27.636 c
+-17.325 -41.544 -4.721 -48.295 6.216 -47.587 c
+22.826 -46.511 31.838 -32.411 25.896 -16.796 c
+27.251 -20.083 27.999 -23.685 27.999 -27.46 c
+27.999 -42.931 15.462 -55.459 0 -55.459 c
+-15.462 -55.459 -27.999 -42.931 -27.999 -27.46 c
+-27.999 -11.999 -15.462 0.539 0 0.539 c
+0 0 l
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 334.002 318.9941 cm
+0 0 m
+-0.38 -0.425 -1.322 -0.147 -1.889 -0.211 c
+-3.74 -0.417 -10.183 -1.633 -15.334 -8.604 c
+-20.121 -15.081 -20.497 -23.226 -19.964 -27.017 c
+-18.07 -40.5 -7.309 -49.138 6.814 -48.512 c
+13.57 -48.212 30.458 -42.954 27.513 -22.495 c
+27.832 -24.187 27.999 -25.932 27.999 -27.716 c
+27.999 -43.187 15.462 -55.715 0 -55.715 c
+-15.462 -55.715 -27.999 -43.187 -27.999 -27.716 c
+-27.999 -12.254 -15.462 0.283 0 0.283 c
+0 0 l
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 334.002 319.1851 cm
+0 0 m
+-0.389 -0.421 -1.333 -0.109 -1.905 -0.156 c
+-5.862 -0.48 -11.762 -2.986 -15.367 -7.721 c
+-21.456 -15.721 -21.121 -23.999 -20.694 -27.186 c
+-18.848 -40.988 -7.36 -50.366 6.622 -49.484 c
+16.365 -48.869 27.809 -42.686 27.992 -27.284 c
+27.997 -27.491 27.999 -27.699 27.999 -27.907 c
+27.999 -43.377 15.462 -55.906 0 -55.906 c
+-15.462 -55.906 -27.999 -43.377 -27.999 -27.907 c
+-27.999 -12.445 -15.462 0.092 0 0.092 c
+0 0 l
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 334.002 319.2739 cm
+0 0 m
+-0.403 -0.423 -1.362 -0.067 -1.945 -0.096 c
+-5.653 -0.278 -11.171 -1.795 -16.407 -7.987 c
+-19.42 -11.549 -22.258 -18.906 -21.583 -25.522 c
+-19.025 -50.59 4.157 -50.418 5.143 -50.399 c
+17.395 -50.155 25.849 -43.167 27.755 -31.707 c
+25.94 -45.421 14.205 -55.995 0 -55.995 c
+-15.462 -55.995 -27.999 -43.466 -27.999 -27.996 c
+-27.999 -12.534 -15.462 0.003 0 0.003 c
+0 0 l
+f
+Q
+0.18 0.302 0.082 scn
+q 1 0 0 1 329.771 318.957 cm
+0 0 m
+-22.534 -4.552 -23.533 -35.028 -6.33 -46.26 c
+6.848 -54.863 25.642 -52.17 31.069 -35.688 c
+27.625 -47.252 16.911 -55.678 4.231 -55.678 c
+-11.231 -55.678 -23.768 -43.149 -23.768 -27.679 c
+-23.768 -13.386 -13.055 -1.592 0.778 0.109 c
+0.544 0.077 0.232 0.04 0 0 c
+f
+Q
+0.173 0.29 0.078 scn
+q 1 0 0 1 321.978 316.4971 cm
+0 0 m
+-16.565 -9.063 -17.347 -40.195 9.314 -48.713 c
+16.64 -51.053 30.632 -50.191 36.987 -37.914 c
+32.359 -46.999 22.917 -53.218 12.024 -53.218 c
+-3.438 -53.218 -15.975 -40.689 -15.975 -25.219 c
+-15.975 -12.683 -7.734 -2.069 3.625 1.499 c
+3.1 1.309 2.399 1.057 1.873 0.867 c
+1.31 0.61 0.543 0.297 0 0 c
+f
+Q
+0.314 0.525 0.145 scn
+q 1 0 0 1 349.9282 293.1025 cm
+0 0 m
+-1.706 2.422 -2.871 5.191 -4.806 7.466 c
+-5.58 8.375 -6.333 9.14 -7.046 9.739 c
+-7.103 9.787 -12.7 14.578 -12.706 14.928 c
+-12.708 15.034 -10.925 16.753 -10.74 16.824 c
+-10.058 17.085 -7.544 17.231 -6.875 17.165 c
+-5.111 16.991 -2.438 16.24 0.275 13.649 c
+3.79 10.292 4.269 6.381 4.332 5.263 c
+4.608 0.361 1.816 -1.553 1.125 -1.426 c
+0.589 -1.328 0.314 -0.446 0 0 c
+f
+Q
+0.322 0.537 0.145 scn
+q 1 0 0 1 350.0625 295.5957 cm
+0 0 m
+-1.97 2.883 -3.056 4.472 -4.87 6.595 c
+-5.072 6.832 -5.375 7.116 -5.591 7.34 c
+-5.844 7.601 -6.16 7.969 -6.419 8.224 c
+-6.913 8.711 -7.551 9.382 -8.074 9.839 c
+-9.724 11.281 -9.908 11.547 -9.911 11.595 c
+-9.914 11.655 -8.389 13.369 -8.295 13.411 c
+-7.711 13.674 -6.801 13.346 -6.164 13.276 c
+-2.962 12.927 -1.156 11.212 -0.476 10.566 c
+2.531 7.709 2.783 5.143 2.904 3.909 c
+2.938 3.565 2.929 0.875 2.709 0.41 c
+2.675 0.337 0.707 -0.874 0.645 -0.861 c
+0.33 -0.793 0.182 -0.267 0 0 c
+f
+Q
+0.325 0.549 0.149 scn
+q 1 0 0 1 349.1475 299.125 cm
+0 0 m
+-0.737 0.235 -1.076 1.45 -1.576 2.04 c
+-3.148 3.894 -3.148 3.894 -3.897 4.678 c
+-4.212 5.008 -4.84 5.354 -4.922 5.803 c
+-4.014 7.981 l
+-3.953 8.007 -1.427 7.15 0.33 5.083 c
+1.631 3.552 2.397 0.755 2.281 0.574 c
+1.906 -0.01 0.699 -0.197 0.037 0.011 c
+0.026 0.014 0.011 -0.003 0 0 c
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 346.0513 316.5498 cm
+0 0 m
+-5.275 2.417 -9.403 2.407 -12.049 2.189 c
+-12.049 2.728 l
+-6.604 2.728 -1.522 1.173 2.777 -1.517 c
+2.232 -1.205 1.506 -0.789 0.961 -0.477 c
+0.673 -0.334 0.292 -0.134 0 0 c
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 342.2651 318.0342 cm
+0 0 m
+-3.078 0.794 -4.478 1.111 -8.263 0.96 c
+-8.263 1.243 l
+-4.866 1.243 -1.61 0.638 1.402 -0.47 c
+0.981 -0.329 0.425 -0.126 0 0 c
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 338.2329 318.957 cm
+0 0 m
+-2.557 0.263 -2.657 0.273 -4.231 0.228 c
+-4.231 0.32 l
+-2.431 0.32 -0.671 0.15 1.035 -0.174 c
+0.724 -0.122 0.312 -0.042 0 0 c
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 334.002 319.2739 cm
+0 0 m
+0.335 0.003 0.669 -0.002 1.001 -0.014 c
+0.701 -0.01 0.211 -0.214 0 0 c
+f
+Q
+ endstream endobj 1000 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 988 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 992 0 obj <</Subtype/Form/Length 481/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 989 0 R/Resources<</XObject<</Fm0 991 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[180.624 223.274 192.621 211.277]>>stream
+q
+186.627 218.274 m
+186.627 223.274 l
+189.939 223.274 192.621 220.593 192.621 217.271 c
+192.621 213.959 189.939 211.277 186.627 211.277 c
+183.315 211.277 180.624 213.959 180.624 217.271 c
+180.624 220.593 183.315 223.274 186.627 223.274 c
+186.627 218.274 l
+186.078 218.277 185.622 217.823 185.624 217.271 c
+185.62 216.731 186.077 216.274 186.627 216.277 c
+187.173 216.275 187.624 216.726 187.621 217.271 c
+187.622 217.829 187.171 218.277 186.627 218.274 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 989 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 991 0 obj <</Subtype/Form/Length 11020/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 990 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[180.624 223.274 192.621 211.277]>>stream
+/CS0 cs 0.294 0.494 0.133 scn
+1 i
+/GS0 gs
+q 1 0 0 1 186.627 218.2744 cm
+0 0 m
+0 5 l
+3.312 5 5.994 2.318 5.994 -1.003 c
+5.994 -4.315 3.312 -6.997 0 -6.997 c
+-3.312 -6.997 -6.003 -4.315 -6.003 -1.003 c
+-6.003 2.318 -3.312 5 0 5 c
+0 0 l
+-0.549 0.003 -1.005 -0.451 -1.003 -1.003 c
+-1.007 -1.543 -0.55 -2 0 -1.997 c
+0.546 -1.999 0.997 -1.549 0.994 -1.003 c
+0.995 -0.445 0.544 0.003 0 0 c
+f
+Q
+q 1 0 0 1 186.627 219.1992 cm
+0 0 m
+-0.013 -0.041 -0.073 -0.074 -0.083 -0.115 c
+-0.111 -0.248 -0.02 -0.426 0 -0.561 c
+0 -0.925 l
+-0.549 -0.922 -1.005 -1.376 -1.003 -1.928 c
+-1.007 -2.468 -0.55 -2.925 0 -2.922 c
+0.546 -2.924 0.997 -2.474 0.994 -1.928 c
+0.995 -1.37 0.544 -0.922 0 -0.925 c
+0 -0.561 l
+0.034 -0.558 0.078 -0.553 0.112 -0.55 c
+0.141 -0.55 0.182 -0.536 0.208 -0.549 c
+1.037 -1.473 l
+1.432 -2.162 1.781 -2.116 1.796 -2.113 c
+2.048 -2.073 3.175 -0.62 1.896 0.192 c
+1.594 0.385 1.27 0.411 1.126 0.396 c
+0 0 l
+0 4.075 l
+3.312 4.075 5.994 1.394 5.994 -1.928 c
+5.994 -5.24 3.312 -7.922 0 -7.922 c
+-3.312 -7.922 -6.003 -5.24 -6.003 -1.928 c
+-6.003 1.394 -3.312 4.075 0 4.075 c
+0 0 l
+f
+Q
+0.286 0.482 0.133 scn
+q 1 0 0 1 186.627 219.3418 cm
+0 0 m
+-0.03 -0.093 -0.164 -0.171 -0.185 -0.266 c
+-0.222 -0.434 -0.125 -0.678 -0.187 -0.838 c
+-0.188 -0.839 -0.237 -0.941 -0.403 -1.05 c
+-1.157 -1.54 -1.045 -2.159 -0.993 -2.338 c
+-0.812 -2.951 -0.139 -3.261 0.448 -2.999 c
+0.646 -2.911 0.784 -2.775 0.866 -2.694 c
+1.137 -2.427 1.542 -2.629 1.829 -2.42 c
+2.42 -1.988 2.555 -1.604 2.619 -1.418 c
+2.84 -0.784 2.454 0.136 1.624 0.372 c
+0.851 0.592 0.002 0.007 0 0 c
+0 3.933 l
+3.312 3.933 5.994 1.251 5.994 -2.07 c
+5.994 -5.383 3.312 -8.064 0 -8.064 c
+-3.312 -8.064 -6.003 -5.383 -6.003 -2.07 c
+-6.003 1.251 -3.312 3.933 0 3.933 c
+0 0 l
+f
+Q
+0.278 0.471 0.129 scn
+q 1 0 0 1 186.627 219.4824 cm
+0 0 m
+-0.295 -0.834 -1.295 -1.352 -1.079 -2.413 c
+-0.941 -3.092 -0.175 -3.558 0.645 -3.166 c
+2.581 -2.241 2.581 -2.241 2.752 -1.679 c
+3.15 -0.374 2.119 0.265 1.727 0.386 c
+0.83 0.662 0.003 0.008 0 0 c
+0 3.792 l
+3.312 3.792 5.994 1.11 5.994 -2.211 c
+5.994 -5.523 3.312 -8.205 0 -8.205 c
+-3.312 -8.205 -6.003 -5.523 -6.003 -2.211 c
+-6.003 1.11 -3.312 3.792 0 3.792 c
+0 0 l
+f
+Q
+0.275 0.459 0.125 scn
+q 1 0 0 1 186.627 219.6211 cm
+0 0 m
+-0.353 -0.868 -1.382 -1.434 -1.146 -2.564 c
+-1.026 -3.142 -0.354 -3.806 0.709 -3.4 c
+2.435 -2.741 2.615 -2.673 2.848 -2.025 c
+3.232 -0.958 2.919 -0.038 2.048 0.318 c
+0.863 0.804 0.004 0.01 0 0 c
+0 3.653 l
+3.312 3.653 5.994 0.972 5.994 -2.35 c
+5.994 -5.662 3.312 -8.344 0 -8.344 c
+-3.312 -8.344 -6.003 -5.662 -6.003 -2.35 c
+-6.003 0.972 -3.312 3.653 0 3.653 c
+0 0 l
+f
+Q
+0.267 0.447 0.122 scn
+q 1 0 0 1 186.627 219.7588 cm
+0 0 m
+-0.193 -0.418 -0.584 -0.692 -0.794 -1.099 c
+-1.091 -1.709 l
+-1.261 -2.111 -1.291 -2.44 -1.189 -2.809 c
+-1.127 -3.035 -0.731 -4.134 0.979 -3.567 c
+4.729 -2.327 2.779 0.033 2.448 0.247 c
+1.441 0.897 0.102 0.218 0 0 c
+0 3.516 l
+3.312 3.516 5.994 0.834 5.994 -2.487 c
+5.994 -5.8 3.312 -8.481 0 -8.481 c
+-3.312 -8.481 -6.003 -5.8 -6.003 -2.487 c
+-6.003 0.834 -3.312 3.516 0 3.516 c
+0 0 l
+f
+Q
+0.259 0.439 0.118 scn
+q 1 0 0 1 186.627 219.9111 cm
+0 0 m
+-0.013 -0.025 -0.053 -0.04 -0.076 -0.058 c
+-0.436 -0.329 -0.724 -0.613 -1.172 -1.804 c
+-1.294 -2.128 -1.428 -2.622 -1.186 -3.154 c
+-0.786 -4.034 0.174 -4.205 1.666 -3.662 c
+3.819 -2.879 3.945 -0.361 2.337 0.402 c
+1.364 0.864 0.123 0.248 0 0 c
+0 3.363 l
+3.312 3.363 5.994 0.682 5.994 -2.64 c
+5.994 -5.952 3.312 -8.634 0 -8.634 c
+-3.312 -8.634 -6.003 -5.952 -6.003 -2.64 c
+-6.003 0.682 -3.312 3.363 0 3.363 c
+0 0 l
+f
+Q
+0.255 0.427 0.118 scn
+q 1 0 0 1 186.627 220.1025 cm
+0 0 m
+-0.034 -0.067 -0.142 -0.105 -0.203 -0.15 c
+-0.738 -0.548 -1 -1.255 -1.252 -1.938 c
+-1.385 -2.296 -1.491 -2.836 -1.247 -3.372 c
+-0.62 -4.745 1.243 -4.15 1.798 -3.936 c
+4.073 -3.057 4.215 -0.289 2.506 0.421 c
+1.109 1.002 0.006 0.013 0 0 c
+0 3.172 l
+3.312 3.172 5.994 0.49 5.994 -2.831 c
+5.994 -6.144 3.312 -8.825 0 -8.825 c
+-3.312 -8.825 -6.003 -6.144 -6.003 -2.831 c
+-6.003 0.49 -3.312 3.172 0 3.172 c
+0 0 l
+f
+Q
+0.247 0.416 0.114 scn
+q 1 0 0 1 186.627 220.291 cm
+0 0 m
+-0.037 -0.07 -0.152 -0.104 -0.217 -0.148 c
+-0.425 -0.29 -0.869 -0.842 -1.09 -1.384 c
+-1.279 -1.849 -1.632 -2.713 -1.384 -3.395 c
+-1 -4.452 -0.005 -4.766 1.573 -4.327 c
+4.077 -3.63 4.625 -0.767 2.988 0.316 c
+1.701 1.168 0.079 0.148 0 0 c
+0 2.983 l
+3.312 2.983 5.994 0.302 5.994 -3.02 c
+5.994 -6.332 3.312 -9.014 0 -9.014 c
+-3.312 -9.014 -6.003 -6.332 -6.003 -3.02 c
+-6.003 0.302 -3.312 2.983 0 2.983 c
+0 0 l
+f
+Q
+0.239 0.404 0.11 scn
+q 1 0 0 1 186.627 220.4746 cm
+0 0 m
+-0.175 -0.316 -0.542 -0.436 -0.748 -0.721 c
+-1.047 -1.138 -1.145 -1.38 -1.239 -1.615 c
+-1.289 -1.739 -1.721 -2.852 -1.448 -3.597 c
+-0.854 -5.222 1.1 -4.778 1.685 -4.604 c
+4.42 -3.787 4.999 -0.764 3.215 0.386 c
+1.946 1.203 0.235 0.424 0 0 c
+0 2.8 l
+3.312 2.8 5.994 0.118 5.994 -3.203 c
+5.994 -6.516 3.312 -9.197 0 -9.197 c
+-3.312 -9.197 -6.003 -6.516 -6.003 -3.203 c
+-6.003 0.118 -3.312 2.8 0 2.8 c
+0 0 l
+f
+Q
+0.235 0.392 0.106 scn
+q 1 0 0 1 186.627 220.7061 cm
+0 0 m
+-0.06 -0.132 -0.265 -0.211 -0.386 -0.291 c
+-0.737 -0.526 -1.203 -1.41 -1.325 -1.736 c
+-1.409 -1.96 -1.811 -3.121 -1.476 -3.944 c
+-0.72 -5.801 1.951 -4.87 1.978 -4.859 c
+5.294 -3.584 5.17 -0.372 3.113 0.574 c
+1.411 1.356 0.007 0.017 0 0 c
+0 2.568 l
+3.312 2.568 5.994 -0.113 5.994 -3.435 c
+5.994 -6.747 3.312 -9.429 0 -9.429 c
+-3.312 -9.429 -6.003 -6.747 -6.003 -3.435 c
+-6.003 -0.113 -3.312 2.568 0 2.568 c
+0 0 l
+f
+Q
+0.227 0.38 0.106 scn
+q 1 0 0 1 186.627 220.9883 cm
+0 0 m
+-0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c
+-0.735 -0.593 -1.129 -1.17 -1.41 -1.909 c
+-1.685 -2.632 -1.76 -3.635 -1.577 -4.146 c
+-0.866 -6.126 1.876 -5.311 1.903 -5.301 c
+5.874 -3.976 5.345 -0.496 3.416 0.521 c
+1.627 1.465 0.058 0.121 0 0 c
+0 2.286 l
+3.312 2.286 5.994 -0.396 5.994 -3.717 c
+5.994 -7.029 3.312 -9.711 0 -9.711 c
+-3.312 -9.711 -6.003 -7.029 -6.003 -3.717 c
+-6.003 -0.396 -3.312 2.286 0 2.286 c
+0 0 l
+f
+Q
+0.22 0.369 0.102 scn
+q 1 0 0 1 186.627 221.2744 cm
+0 0 m
+-0.045 -0.106 -0.21 -0.167 -0.303 -0.236 c
+-0.487 -0.373 -1.127 -0.938 -1.625 -2.443 c
+-1.73 -2.761 -1.906 -3.878 -1.546 -4.676 c
+-1.031 -5.818 0.788 -6.214 2.508 -5.559 c
+6.319 -4.105 5.737 -0.286 3.15 0.724 c
+1.354 1.425 0.007 0.017 0 0 c
+0 2 l
+3.312 2 5.994 -0.682 5.994 -4.003 c
+5.994 -7.315 3.312 -9.997 0 -9.997 c
+-3.312 -9.997 -6.003 -7.315 -6.003 -4.003 c
+-6.003 -0.682 -3.312 2 0 2 c
+0 0 l
+f
+Q
+0.212 0.361 0.098 scn
+q 1 0 0 1 186.627 221.6582 cm
+0 0 m
+-0.163 -0.362 -0.542 -0.515 -0.779 -0.805 c
+-0.947 -1.012 -1.049 -1.261 -1.205 -1.476 c
+-1.367 -1.7 -1.47 -1.983 -1.721 -2.735 c
+-2.06 -3.745 -1.792 -4.628 -1.661 -4.961 c
+-1.172 -6.201 0.619 -6.721 2.417 -6.144 c
+7.025 -4.662 5.824 -0.754 3.284 0.539 c
+1.422 1.486 0.008 0.018 0 0 c
+0 1.616 l
+3.312 1.616 5.994 -1.065 5.994 -4.387 c
+5.994 -7.699 3.312 -10.381 0 -10.381 c
+-3.312 -10.381 -6.003 -7.699 -6.003 -4.387 c
+-6.003 -1.065 -3.312 1.616 0 1.616 c
+0 0 l
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 186.627 222.082 cm
+0 0 m
+-0.128 -0.296 -0.442 -0.404 -0.638 -0.631 c
+-0.788 -0.804 -0.893 -1.01 -1.031 -1.191 c
+-1.147 -1.346 -1.619 -2.354 -1.622 -2.361 c
+-2.173 -3.904 -2.042 -4.642 -1.843 -5.159 c
+-0.967 -7.426 1.647 -7.027 2.581 -6.683 c
+3.886 -6.201 6.602 -5.198 5.542 -2.518 c
+5.833 -3.224 5.994 -3.998 5.994 -4.811 c
+5.994 -8.123 3.312 -10.805 0 -10.805 c
+-3.312 -10.805 -6.003 -8.123 -6.003 -4.811 c
+-6.003 -1.489 -3.312 1.192 0 1.192 c
+0 0 l
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 186.627 222.5469 cm
+0 0 m
+-0.037 -0.078 -0.154 -0.129 -0.22 -0.185 c
+-1.236 -1.035 -1.83 -2.885 -1.836 -2.903 c
+-2.227 -4.14 -2.24 -5.156 -1.875 -5.925 c
+-0.602 -8.604 3.351 -7.152 3.39 -7.137 c
+4.435 -6.729 6.183 -6.049 5.89 -4.151 c
+5.958 -4.516 5.994 -4.891 5.994 -5.275 c
+5.994 -8.588 3.312 -11.27 0 -11.27 c
+-3.312 -11.27 -6.003 -8.588 -6.003 -5.275 c
+-6.003 -1.954 -3.312 0.728 0 0.728 c
+0 0 l
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 186.627 222.9893 cm
+0 0 m
+-0.038 -0.066 -0.155 -0.09 -0.221 -0.129 c
+-1.149 -0.673 -1.644 -2.171 -2.005 -3.266 c
+-2.01 -3.282 -2.546 -5.07 -2.073 -6.283 c
+-1.016 -9.001 3.053 -7.959 3.094 -7.948 c
+4.312 -7.626 5.98 -7.185 5.993 -5.583 c
+5.994 -5.628 5.994 -5.673 5.994 -5.718 c
+5.994 -9.03 3.312 -11.712 0 -11.712 c
+-3.312 -11.712 -6.003 -9.03 -6.003 -5.718 c
+-6.003 -2.396 -3.312 0.285 0 0.285 c
+0 0 l
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 186.627 223.2627 cm
+0 0 m
+-0.043 -0.052 -0.154 -0.029 -0.221 -0.042 c
+-0.696 -0.133 -1.347 -0.689 -1.732 -1.731 c
+-2.576 -4.018 -2.459 -5.555 -2.314 -6.268 c
+-1.868 -8.458 0.839 -8.7 1.752 -8.612 c
+4.209 -8.376 5.692 -8.233 5.942 -6.786 c
+5.553 -9.723 3.042 -11.985 0 -11.985 c
+-3.312 -11.985 -6.003 -9.304 -6.003 -5.991 c
+-6.003 -2.67 -3.312 0.012 0 0.012 c
+0 0 l
+f
+Q
+0.18 0.302 0.082 scn
+q 1 0 0 1 185.7217 223.1973 cm
+0 0 m
+-1.735 -0.588 -1.748 -4.507 -1.748 -4.547 c
+-1.744 -6.481 -1.201 -7.607 0.015 -8.199 c
+1.797 -9.066 6.081 -9.359 6.651 -7.642 c
+5.914 -10.117 3.621 -11.92 0.905 -11.92 c
+-2.407 -11.92 -5.098 -9.238 -5.098 -5.926 c
+-5.098 -2.855 -2.799 -0.333 0.165 0.032 c
+0.115 0.022 0.049 0.014 0 0 c
+f
+Q
+0.173 0.29 0.078 scn
+q 1 0 0 1 184.3926 222.7744 cm
+0 0 m
+-1.065 -0.939 -0.813 -4.875 -0.541 -5.608 c
+0.425 -8.204 2.403 -8.583 3.208 -8.626 c
+4.27 -8.682 5.294 -9.071 6.373 -8.972 c
+6.625 -8.948 7.249 -8.828 7.579 -8.222 c
+6.588 -10.166 4.567 -11.497 2.234 -11.497 c
+-1.078 -11.497 -3.769 -8.815 -3.769 -5.503 c
+-3.769 -2.812 -2.001 -0.54 0.432 0.225 c
+0.372 0.2 0.292 0.168 0.231 0.144 c
+0.161 0.103 0.062 0.054 0 0 c
+f
+Q
+0.302 0.506 0.137 scn
+q 1 0 0 1 188.1982 217.4531 cm
+0 0 m
+-0.089 0.064 -0.089 0.064 -0.518 0.595 c
+-0.66 0.77 -0.832 0.916 -0.969 1.096 c
+-1.153 1.336 -1.228 1.588 -1.225 1.6 c
+-1.219 1.619 -0.023 2.449 0.592 1.369 c
+1.023 0.611 0.244 -0.132 0.233 -0.134 c
+0.153 -0.145 0.065 -0.047 0 0 c
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 189.1953 222.666 cm
+0 0 m
+-1.292 0.462 -2.253 -0.325 -2.568 -0.584 c
+-2.568 0.608 l
+-1.402 0.608 -0.314 0.276 0.606 -0.3 c
+0.517 -0.25 0.397 -0.184 0.307 -0.133 c
+0.215 -0.093 0.095 -0.034 0 0 c
+f
+Q
+0.306 0.518 0.141 scn
+q 1 0 0 1 188.2393 217.709 cm
+0 0 m
+-0.336 0.357 l
+-0.471 0.528 -0.626 0.683 -0.755 0.857 c
+-0.971 1.148 -1.017 1.271 -1.015 1.275 c
+-1.01 1.29 -0.025 1.71 0.328 0.955 c
+0.583 0.408 0.172 -0.12 0.166 -0.121 c
+0.105 -0.132 0.047 -0.039 0 0 c
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 188.3931 222.9971 cm
+0 0 m
+-0.649 0.121 -1.161 -0.01 -1.766 -0.45 c
+-1.766 0.277 l
+-1.038 0.277 -0.341 0.147 0.305 -0.09 c
+0.221 -0.064 0.11 -0.031 0.027 -0.006 c
+0.019 -0.004 0.008 -0.001 0 0 c
+f
+Q
+0.314 0.525 0.145 scn
+q 1 0 0 1 188.2437 217.9775 cm
+0 0 m
+-0.004 0.005 -0.532 0.572 -0.709 0.863 c
+-0.562 0.878 -0.481 0.886 -0.263 0.812 c
+-0.178 0.783 -0.083 0.7 -0.026 0.632 c
+0.032 0.563 0.087 0.449 0.1 0.36 c
+0.13 0.142 0.09 0.006 0.071 -0.06 c
+0.049 -0.041 0.02 -0.02 0 0 c
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 187.5317 223.1973 cm
+0 0 m
+-0.313 -0.006 -0.486 -0.009 -0.905 -0.208 c
+-0.905 0.077 l
+-0.519 0.077 -0.142 0.041 0.224 -0.029 c
+0.157 -0.021 0.068 -0.004 0 0 c
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 186.627 223.2627 cm
+0 0 m
+0 0.012 l
+0.072 0.012 0.144 0.011 0.215 0.008 c
+0.15 0.006 0.046 -0.045 0 0 c
+f
+Q
+ endstream endobj 990 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 979 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 985 0 obj <</Subtype/Form/Length 450/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 980 0 R/Resources<</XObject<</Fm0 984 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[155.001 324.272 210.999 268.274]>>stream
+q
+183 308.272 m
+183 324.272 l
+198.462 324.272 210.999 311.735 210.999 296.273 c
+210.999 280.812 198.462 268.274 183 268.274 c
+167.538 268.274 155.001 280.812 155.001 296.273 c
+155.001 311.735 167.538 324.272 183 324.272 c
+183 308.272 l
+176.393 308.283 170.99 302.881 171.001 296.273 c
+170.99 289.666 176.393 284.264 183 284.274 c
+189.607 284.264 195.01 289.666 194.999 296.273 c
+195.01 302.881 189.607 308.283 183 308.272 c
+W n
+q
+/GS0 gs
+/Fm0 Do
+Q
+Q
+ endstream endobj 980 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 984 0 obj <</Subtype/Form/Length 13394/Matrix[1.0 0.0 0.0 1.0 0.0 0.0]/Group 981 0 R/Resources<</ColorSpace<</CS0 982 0 R>>/ExtGState<</GS0 978 0 R>>>>/BBox[155.001 324.272 210.999 268.274]>>stream
+/CS0 cs 0.306 0.518 0.141 scn
+1 i
+/GS0 gs
+q 1 0 0 1 183 308.2725 cm
+0 0 m
+0 16 l
+15.462 16 27.999 3.463 27.999 -11.999 c
+27.999 -27.461 15.462 -39.998 0 -39.998 c
+-15.462 -39.998 -27.999 -27.461 -27.999 -11.999 c
+-27.999 3.463 -15.462 16 0 16 c
+0 0 l
+-6.607 0.011 -12.01 -5.392 -11.999 -11.999 c
+-12.01 -18.606 -6.607 -24.009 0 -23.998 c
+6.607 -24.009 12.01 -18.606 11.999 -11.999 c
+12.01 -5.392 6.607 0.011 0 0 c
+f
+Q
+q 1 0 0 1 183 313.436 cm
+0 0 m
+0 -0.468 l
+0 -5.164 l
+-6.607 -5.153 -12.01 -10.555 -11.999 -17.163 c
+-12.01 -23.77 -6.607 -29.172 0 -29.162 c
+6.607 -29.172 12.01 -23.77 11.999 -17.163 c
+12.01 -10.555 6.607 -5.153 0 -5.164 c
+0 -0.468 l
+0.316 -0.694 0.738 -0.997 1.055 -1.223 c
+3.817 -3.661 7.459 -4.869 10 -7.617 c
+12.018 -9.8 13.458 -12.461 14.279 -15.528 c
+15.091 -18.562 16.901 -19.343 16.918 -19.345 c
+18.873 -19.539 24.733 -10.483 17.857 -2.241 c
+10.879 6.124 0.769 1.958 0 0 c
+0 10.836 l
+15.462 10.836 27.999 -1.701 27.999 -17.163 c
+27.999 -32.625 15.462 -45.162 0 -45.162 c
+-15.462 -45.162 -27.999 -32.625 -27.999 -17.163 c
+-27.999 -1.701 -15.462 10.836 0 10.836 c
+0 0 l
+f
+Q
+0.302 0.506 0.137 scn
+q 1 0 0 1 183 315.2832 cm
+0 0 m
+-0.296 -0.712 -1.487 -1.168 -1.735 -1.898 c
+-1.987 -2.638 -2.003 -3.873 -1.53 -4.494 c
+-1.227 -4.893 -0.45 -4.945 0 -5.167 c
+0 -7.011 l
+-6.607 -7 -12.01 -12.402 -11.999 -19.01 c
+-12.01 -25.617 -6.607 -31.02 0 -31.009 c
+6.607 -31.02 12.01 -25.617 11.999 -19.01 c
+12.01 -12.402 6.607 -7 0 -7.011 c
+0 -5.167 l
+0.338 -5.201 0.788 -5.245 1.126 -5.278 c
+2.249 -5.476 12.142 -7.556 13.761 -19.537 c
+14.172 -22.51 l
+14.637 -23.085 15.725 -23.501 16.46 -23.424 c
+20.584 -22.987 26.414 -9.567 15.896 -1.312 c
+7.943 4.929 0.035 0.084 0 0 c
+0 8.989 l
+15.462 8.989 27.999 -3.548 27.999 -19.01 c
+27.999 -34.472 15.462 -47.009 0 -47.009 c
+-15.462 -47.009 -27.999 -34.472 -27.999 -19.01 c
+-27.999 -3.548 -15.462 8.989 0 8.989 c
+0 0 l
+f
+Q
+0.294 0.494 0.133 scn
+q 1 0 0 1 183 316.4023 cm
+0 0 m
+-0.627 -1.109 -1.866 -1.525 -2.708 -2.391 c
+-4.764 -4.503 -4.447 -6.209 -4.44 -6.223 c
+-4.355 -6.386 -4.355 -6.386 0 -7.408 c
+0 -8.13 l
+-6.607 -8.119 -12.01 -13.521 -11.999 -20.129 c
+-12.01 -26.736 -6.607 -32.139 0 -32.128 c
+6.607 -32.139 12.01 -26.736 11.999 -20.129 c
+12.01 -13.521 6.607 -8.119 0 -8.13 c
+0 -7.408 l
+0.312 -7.428 0.727 -7.455 1.039 -7.475 c
+5.586 -8.118 13.155 -12.017 12.674 -22.548 c
+12.56 -25.061 12.663 -26.477 12.982 -26.758 c
+14.311 -27.929 23.356 -23.684 22.629 -14.042 c
+21.269 4.004 1.142 2.019 0 0 c
+0 7.87 l
+15.462 7.87 27.999 -4.667 27.999 -20.129 c
+27.999 -35.591 15.462 -48.128 0 -48.128 c
+-15.462 -48.128 -27.999 -35.591 -27.999 -20.129 c
+-27.999 -4.667 -15.462 7.87 0 7.87 c
+0 0 l
+f
+Q
+0.286 0.482 0.133 scn
+q 1 0 0 1 183 317.3276 cm
+0 0 m
+-0.223 -0.377 -0.896 -0.494 -1.279 -0.706 c
+-3.983 -2.198 -4.352 -2.882 -7.218 -8.204 c
+-10.977 -15.407 l
+-12.034 -17.649 -12.409 -19.973 -12.123 -22.51 c
+-11.368 -29.204 -4.441 -35.04 3.701 -32.832 c
+16.504 -28.451 l
+19.64 -26.383 21.524 -23.889 22.614 -20.364 c
+24.61 -13.908 21.812 -4.74 13.674 -0.575 c
+6.26 3.219 0.029 0.049 0 0 c
+0 6.945 l
+15.462 6.945 27.999 -5.592 27.999 -21.054 c
+27.999 -36.516 15.462 -49.053 0 -49.053 c
+-15.462 -49.053 -27.999 -36.516 -27.999 -21.054 c
+-27.999 -5.592 -15.462 6.945 0 6.945 c
+0 0 l
+f
+Q
+0.278 0.471 0.129 scn
+q 1 0 0 1 183 318.1274 cm
+0 0 m
+-0.174 -0.267 -0.682 -0.3 -0.974 -0.428 c
+-3.27 -1.438 -6.363 -4.313 -7.593 -6.58 c
+-13.39 -17.262 -13 -20.653 -12.686 -23.377 c
+-12.045 -28.943 -6.307 -36.332 3.975 -34.516 c
+34.372 -29.149 23.201 -7.033 15.417 -1.844 c
+7.621 3.352 0.038 0.059 0 0 c
+0 6.145 l
+15.462 6.145 27.999 -6.392 27.999 -21.854 c
+27.999 -37.316 15.462 -49.853 0 -49.853 c
+-15.462 -49.853 -27.999 -37.316 -27.999 -21.854 c
+-27.999 -6.392 -15.462 6.145 0 6.145 c
+0 0 l
+f
+Q
+0.275 0.459 0.125 scn
+q 1 0 0 1 183 318.8281 cm
+0 0 m
+-0.26 -0.393 -1.01 -0.429 -1.443 -0.612 c
+-4.281 -1.816 -7.531 -4.969 -9.346 -8.278 c
+-13.498 -15.848 -13.757 -21.085 -13.244 -24.146 c
+-12.335 -29.558 -7.256 -38.113 6.018 -35.853 c
+29.65 -31.827 27.567 -10.229 15.691 -2.188 c
+7.725 3.206 0.039 0.058 0 0 c
+0 5.444 l
+15.462 5.444 27.999 -7.093 27.999 -22.555 c
+27.999 -38.017 15.462 -50.554 0 -50.554 c
+-15.462 -50.554 -27.999 -38.017 -27.999 -22.555 c
+-27.999 -7.093 -15.462 5.444 0 5.444 c
+0 0 l
+f
+Q
+0.267 0.447 0.122 scn
+q 1 0 0 1 183 319.4941 cm
+0 0 m
+-0.27 -0.397 -1.042 -0.411 -1.488 -0.586 c
+-3.111 -1.225 -7.249 -3.37 -10.633 -9.471 c
+-11.685 -11.368 -15.021 -18.084 -13.796 -24.877 c
+-12.453 -32.323 -5.461 -39.362 6.714 -37.218 c
+28.943 -33.304 28.97 -11.255 15.609 -2.301 c
+7.856 2.895 0.038 0.056 0 0 c
+0 4.778 l
+15.462 4.778 27.999 -7.759 27.999 -23.221 c
+27.999 -38.683 15.462 -51.22 0 -51.22 c
+-15.462 -51.22 -27.999 -38.683 -27.999 -23.221 c
+-27.999 -7.759 -15.462 4.778 0 4.778 c
+0 0 l
+f
+Q
+0.259 0.439 0.118 scn
+q 1 0 0 1 183 320.105 cm
+0 0 m
+-0.285 -0.403 -1.085 -0.384 -1.55 -0.549 c
+-2.14 -0.758 -7.426 -2.783 -11.14 -9.4 c
+-12.536 -11.888 -15.643 -18.441 -14.343 -25.552 c
+-13.349 -30.994 -7.597 -40.716 7.05 -38.567 c
+28.064 -35.482 30.902 -13.127 16.17 -2.838 c
+7.979 2.883 0.04 0.057 0 0 c
+0 4.167 l
+15.462 4.167 27.999 -8.37 27.999 -23.832 c
+27.999 -39.293 15.462 -51.831 0 -51.831 c
+-15.462 -51.831 -27.999 -39.293 -27.999 -23.832 c
+-27.999 -8.37 -15.462 4.167 0 4.167 c
+0 0 l
+f
+Q
+0.255 0.427 0.118 scn
+q 1 0 0 1 183 320.6777 cm
+0 0 m
+-0.294 -0.407 -1.113 -0.365 -1.59 -0.521 c
+-3.037 -0.996 -8.057 -3.068 -11.887 -9.807 c
+-12.95 -11.676 -16.306 -18.381 -14.886 -26.189 c
+-13.692 -32.763 -6.813 -41.824 7.243 -39.849 c
+28.687 -36.835 31.471 -13.847 16.374 -3.144 c
+8.08 2.736 0.041 0.056 0 0 c
+0 3.595 l
+15.462 3.595 27.999 -8.942 27.999 -24.404 c
+27.999 -39.866 15.462 -52.403 0 -52.403 c
+-15.462 -52.403 -27.999 -39.866 -27.999 -24.404 c
+-27.999 -8.942 -15.462 3.595 0 3.595 c
+0 0 l
+f
+Q
+0.247 0.416 0.114 scn
+q 1 0 0 1 183 321.2148 cm
+0 0 m
+-0.327 -0.44 -1.224 -0.37 -1.749 -0.528 c
+-5.52 -1.667 -9.765 -5.26 -12.073 -9.267 c
+-15.394 -15.036 -16.522 -20.932 -15.426 -26.791 c
+-13.856 -35.176 -5.227 -43.01 7.675 -41.012 c
+29.382 -37.65 31.673 -13.956 16.092 -3.122 c
+8.188 2.374 0.041 0.052 0 0 c
+0 3.058 l
+15.462 3.058 27.999 -9.479 27.999 -24.941 c
+27.999 -40.403 15.462 -52.94 0 -52.94 c
+-15.462 -52.94 -27.999 -40.403 -27.999 -24.941 c
+-27.999 -9.479 -15.462 3.058 0 3.058 c
+0 0 l
+f
+Q
+0.239 0.404 0.11 scn
+q 1 0 0 1 183 321.7295 cm
+0 0 m
+-0.315 -0.413 -1.169 -0.321 -1.671 -0.458 c
+-5.628 -1.543 -10.186 -5.222 -12.509 -9.206 c
+-13.794 -11.411 -17.706 -18.119 -15.958 -27.368 c
+-14.312 -36.085 -5.369 -44.227 7.962 -42.147 c
+29.823 -38.738 32.256 -15.066 16.713 -3.752 c
+8.241 2.415 0.041 0.054 0 0 c
+0 2.543 l
+15.462 2.543 27.999 -9.994 27.999 -25.456 c
+27.999 -40.918 15.462 -53.455 0 -53.455 c
+-15.462 -53.455 -27.999 -40.918 -27.999 -25.456 c
+-27.999 -9.994 -15.462 2.543 0 2.543 c
+0 0 l
+f
+Q
+0.235 0.392 0.106 scn
+q 1 0 0 1 183 322.2021 cm
+0 0 m
+-0.326 -0.417 -1.197 -0.297 -1.71 -0.424 c
+-5.005 -1.241 -10.021 -4.174 -13.317 -9.752 c
+-16.642 -15.38 -17.708 -21.487 -16.484 -27.902 c
+-14.771 -36.889 -5.522 -45.311 8.242 -43.22 c
+29.813 -39.944 32.242 -15.421 16.845 -4.05 c
+8.507 2.107 0.042 0.053 0 0 c
+0 2.07 l
+15.462 2.07 27.999 -10.467 27.999 -25.929 c
+27.999 -41.391 15.462 -53.928 0 -53.928 c
+-15.462 -53.928 -27.999 -41.391 -27.999 -25.929 c
+-27.999 -10.467 -15.462 2.07 0 2.07 c
+0 0 l
+f
+Q
+0.227 0.38 0.106 scn
+q 1 0 0 1 183 322.6421 cm
+0 0 m
+-0.165 -0.201 -0.596 -0.119 -0.851 -0.169 c
+-6.63 -1.321 -11.086 -5.48 -13.33 -8.99 c
+-17.823 -16.018 -17.96 -22.68 -17.283 -27.031 c
+-15.529 -38.308 -5.353 -45.633 6.914 -44.447 c
+29.053 -42.307 33.213 -18.564 18.588 -5.674 c
+9.722 2.142 0.051 0.062 0 0 c
+0 1.63 l
+15.462 1.63 27.999 -10.907 27.999 -26.369 c
+27.999 -41.831 15.462 -54.368 0 -54.368 c
+-15.462 -54.368 -27.999 -41.831 -27.999 -26.369 c
+-27.999 -10.907 -15.462 1.63 0 1.63 c
+0 0 l
+f
+Q
+0.22 0.369 0.102 scn
+q 1 0 0 1 183 323.0532 cm
+0 0 m
+-0.345 -0.419 -1.243 -0.245 -1.775 -0.35 c
+-5.333 -1.052 -10.598 -4.013 -13.752 -8.857 c
+-18.474 -16.108 -18.606 -22.979 -17.885 -27.465 c
+-16.272 -37.503 -7.101 -46.92 7.31 -45.499 c
+29.575 -43.3 33.52 -19.116 18.666 -5.999 c
+9.679 1.938 0.05 0.061 0 0 c
+0 1.219 l
+15.462 1.219 27.999 -11.318 27.999 -26.78 c
+27.999 -42.242 15.462 -54.779 0 -54.779 c
+-15.462 -54.779 -27.999 -42.242 -27.999 -26.78 c
+-27.999 -11.318 -15.462 1.219 0 1.219 c
+0 0 l
+f
+Q
+0.212 0.361 0.098 scn
+q 1 0 0 1 183 323.4082 cm
+0 0 m
+-0.359 -0.424 -1.279 -0.213 -1.827 -0.305 c
+-2.571 -0.429 -9.239 -1.713 -14.035 -8.521 c
+-19.337 -16.049 -19.04 -23.602 -18.666 -26.5 c
+-16.791 -41.035 -4.557 -47.119 6.015 -46.62 c
+29.237 -45.525 34.039 -19.966 18.705 -6.311 c
+9.693 1.714 0.05 0.059 0 0 c
+0 0.864 l
+15.462 0.864 27.999 -11.673 27.999 -27.135 c
+27.999 -42.597 15.462 -55.134 0 -55.134 c
+-15.462 -55.134 -27.999 -42.597 -27.999 -27.135 c
+-27.999 -11.673 -15.462 0.864 0 0.864 c
+0 0 l
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 183 323.7339 cm
+0 0 m
+-0.366 -0.422 -1.29 -0.183 -1.842 -0.262 c
+-5.616 -0.798 -11.203 -3.577 -14.553 -8.414 c
+-20.526 -17.037 -19.484 -25.015 -19.142 -27.636 c
+-17.325 -41.545 -4.721 -48.296 6.215 -47.587 c
+22.825 -46.511 31.838 -32.41 25.896 -16.796 c
+27.251 -20.083 27.999 -23.685 27.999 -27.46 c
+27.999 -42.922 15.462 -55.459 0 -55.459 c
+-15.462 -55.459 -27.999 -42.922 -27.999 -27.46 c
+-27.999 -11.999 -15.462 0.539 0 0.539 c
+0 0 l
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 183 323.9893 cm
+0 0 m
+-0.38 -0.425 -1.322 -0.147 -1.889 -0.211 c
+-3.74 -0.417 -10.183 -1.633 -15.334 -8.604 c
+-20.12 -15.081 -20.496 -23.225 -19.964 -27.016 c
+-18.071 -40.5 -7.311 -49.139 6.811 -48.512 c
+13.567 -48.212 30.458 -42.954 27.513 -22.495 c
+27.832 -24.187 27.999 -25.932 27.999 -27.716 c
+27.999 -43.178 15.462 -55.715 0 -55.715 c
+-15.462 -55.715 -27.999 -43.178 -27.999 -27.716 c
+-27.999 -12.254 -15.462 0.283 0 0.283 c
+0 0 l
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 183 324.1802 cm
+0 0 m
+-0.389 -0.421 -1.333 -0.109 -1.905 -0.156 c
+-5.862 -0.48 -11.762 -2.986 -15.367 -7.721 c
+-21.456 -15.72 -21.121 -23.999 -20.694 -27.186 c
+-18.877 -40.767 -7.134 -50.353 6.621 -49.484 c
+16.365 -48.869 27.809 -42.685 27.992 -27.284 c
+27.997 -27.491 27.999 -27.699 27.999 -27.907 c
+27.999 -43.369 15.462 -55.906 0 -55.906 c
+-15.462 -55.906 -27.999 -43.369 -27.999 -27.907 c
+-27.999 -12.445 -15.462 0.092 0 0.092 c
+0 0 l
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 183 324.269 cm
+0 0 m
+-0.403 -0.423 -1.362 -0.067 -1.945 -0.096 c
+-5.653 -0.278 -11.171 -1.795 -16.407 -7.987 c
+-19.42 -11.549 -22.258 -18.906 -21.583 -25.522 c
+-19.025 -50.59 4.157 -50.418 5.143 -50.399 c
+17.394 -50.156 25.847 -43.167 27.756 -31.704 c
+25.941 -45.414 14.205 -55.995 0 -55.995 c
+-15.462 -55.995 -27.999 -43.458 -27.999 -27.996 c
+-27.999 -12.534 -15.462 0.003 0 0.003 c
+0 0 l
+f
+Q
+0.18 0.302 0.082 scn
+q 1 0 0 1 178.769 323.9521 cm
+0 0 m
+-22.529 -4.551 -23.528 -35.026 -6.329 -46.258 c
+6.848 -54.862 25.641 -52.169 31.069 -35.683 c
+27.625 -47.245 16.912 -55.678 4.231 -55.678 c
+-11.231 -55.678 -23.768 -43.141 -23.768 -27.679 c
+-23.768 -13.386 -13.055 -1.592 0.778 0.109 c
+0.544 0.077 0.232 0.04 0 0 c
+f
+Q
+0.173 0.29 0.078 scn
+q 1 0 0 1 170.9761 321.4922 cm
+0 0 m
+-16.563 -9.063 -17.344 -40.194 9.316 -48.713 c
+16.64 -51.054 30.629 -50.189 36.987 -37.91 c
+32.359 -46.995 22.917 -53.218 12.024 -53.218 c
+-3.438 -53.218 -15.975 -40.681 -15.975 -25.219 c
+-15.975 -12.683 -7.734 -2.069 3.625 1.499 c
+3.1 1.309 2.399 1.057 1.873 0.867 c
+1.31 0.61 0.543 0.297 0 0 c
+f
+Q
+0.314 0.525 0.145 scn
+q 1 0 0 1 198.9263 298.0972 cm
+0 0 m
+-1.706 2.422 -2.871 5.192 -4.806 7.466 c
+-5.58 8.375 -6.333 9.14 -7.046 9.74 c
+-7.103 9.788 -12.7 14.579 -12.706 14.929 c
+-12.708 15.035 -10.925 16.753 -10.74 16.825 c
+-10.058 17.086 -7.544 17.231 -6.875 17.166 c
+-5.111 16.992 -2.438 16.241 0.275 13.649 c
+3.79 10.293 4.269 6.382 4.332 5.263 c
+4.608 0.362 1.816 -1.553 1.125 -1.426 c
+0.589 -1.328 0.314 -0.445 0 0 c
+f
+Q
+0.322 0.537 0.145 scn
+q 1 0 0 1 199.0605 300.5908 cm
+0 0 m
+-1.97 2.883 -3.055 4.471 -4.87 6.595 c
+-5.072 6.832 -5.375 7.116 -5.591 7.34 c
+-5.844 7.601 -6.16 7.969 -6.419 8.224 c
+-6.913 8.711 -7.551 9.382 -8.074 9.839 c
+-9.724 11.281 -9.908 11.547 -9.911 11.595 c
+-9.914 11.655 -8.389 13.369 -8.295 13.411 c
+-7.711 13.674 -6.801 13.346 -6.164 13.276 c
+-2.962 12.927 -1.156 11.212 -0.476 10.566 c
+2.531 7.709 2.783 5.143 2.904 3.909 c
+2.938 3.565 2.929 0.875 2.709 0.41 c
+2.675 0.337 0.707 -0.875 0.645 -0.861 c
+0.33 -0.793 0.182 -0.267 0 0 c
+f
+Q
+0.325 0.549 0.149 scn
+q 1 0 0 1 198.1455 304.1201 cm
+0 0 m
+-0.737 0.235 -1.076 1.45 -1.576 2.04 c
+-3.148 3.894 -3.148 3.894 -3.897 4.678 c
+-4.212 5.008 -4.84 5.354 -4.922 5.803 c
+-4.014 7.981 l
+-3.953 8.007 -1.427 7.15 0.33 5.083 c
+1.631 3.552 2.397 0.755 2.281 0.574 c
+1.906 -0.01 0.699 -0.197 0.037 0.011 c
+0.026 0.014 0.011 -0.003 0 0 c
+f
+Q
+0.208 0.349 0.094 scn
+q 1 0 0 1 195.0493 321.5449 cm
+0 0 m
+-5.275 2.417 -9.403 2.407 -12.049 2.189 c
+-12.049 2.728 l
+-6.604 2.728 -1.522 1.173 2.777 -1.517 c
+2.232 -1.205 1.506 -0.789 0.961 -0.477 c
+0.673 -0.334 0.292 -0.134 0 0 c
+f
+Q
+0.2 0.337 0.09 scn
+q 1 0 0 1 191.2632 323.0293 cm
+0 0 m
+-3.078 0.794 -4.478 1.111 -8.263 0.96 c
+-8.263 1.243 l
+-4.866 1.243 -1.61 0.638 1.402 -0.47 c
+0.981 -0.329 0.425 -0.126 0 0 c
+f
+Q
+0.192 0.325 0.09 scn
+q 1 0 0 1 187.231 323.9521 cm
+0 0 m
+-2.557 0.263 -2.657 0.273 -4.231 0.228 c
+-4.231 0.32 l
+-2.431 0.32 -0.671 0.15 1.035 -0.174 c
+0.724 -0.122 0.312 -0.042 0 0 c
+f
+Q
+0.188 0.314 0.086 scn
+q 1 0 0 1 183 324.269 cm
+0 0 m
+0.335 0.003 0.669 -0.002 1.001 -0.014 c
+0.701 -0.01 0.211 -0.214 0 0 c
+f
+Q
+ endstream endobj 981 0 obj <</I true/K false/S/Transparency/Type/Group>> endobj 1051 0 obj [/Indexed/DeviceRGB 255 1052 0 R] endobj 1052 0 obj <</Length 428/Filter[/ASCII85Decode/FlateDecode]>>stream
+8;X]O>EqN@%''O_@%e@?J;%+8(9e>X=MR6S?i^YgA3=].HDXF.R$lIL@"pJ+EP(%0
+b]6ajmNZn*!='OQZeQ^Y*,=]?C.B+\Ulg9dhD*"iC[;*=3`oP1[!S^)?1)IZ4dup`
+E1r!/,*0[*9.aFIR2&b-C#s<Xl5FH@[<=!#6V)uDBXnIr.F>oRZ7Dl%MLY\.?d>Mn
+6%Q2oYfNRF$$+ON<+]RUJmC0I<jlL.oXisZ;SYU[/7#<&37rclQKqeJe#,UF7Rgb1
+VNWFKf>nDZ4OTs0S!saG>GGKUlQ*Q?45:CI&4J'_2j<etJICj7e7nPMb=O6S7UOH<
+PO7r\I.Hu&e0d&E<.')fERr/l+*W,)q^D*ai5<uuLX.7g/>$XKrcYp0n+Xl_nU*O(
+l[$6Nn+Z_Nq0]s7hs]`XX1nZ8&94a\~> endstream endobj 962 0 obj <</Private 963 0 R/LastModified(D:20100413200510+02'00')>> endobj 963 0 obj <</RoundtripVersion 13/CreatorVersion 13/ContainerVersion 11/AIMetaData 964 0 R/AIPrivateData1 965 0 R/AIPrivateData2 966 0 R/AIPrivateData3 967 0 R/AIPrivateData4 968 0 R/NumBlock 4/RoundtripStreamType 1>> endobj 964 0 obj <</Length 974>>stream
+%!PS-Adobe-3.0 %%Creator: Adobe Illustrator(R) 13.0 %%AI8_CreatorVersion: 13.0.2 %%For: (Gilles Dubochet) () %%Title: (type_tags.ai) %%CreationDate: 4/13/10 8:05 PM %%BoundingBox: -227 -99 143 234 %%HiResBoundingBox: -226.5 -98.5 142.5908 233.748 %%DocumentProcessColors: Cyan Magenta Yellow Black %AI5_FileFormat 9.0 %AI12_BuildNumber: 434 %AI3_ColorUsage: Color %AI7_ImageSettings: 0 %%RGBProcessColor: 0 0 0 ([Registration]) %AI3_Cropmarks: -59.5 -62.5 -46.5 -49.5 %AI3_TemplateBox: 40.5 29.5 40.5 29.5 %AI3_TileBox: -239.5552 -349.6377 319.4453 433.3623 %AI3_DocumentPreview: None %AI5_ArtSize: 841.8898 595.2756 %AI5_RulerUnits: 6 %AI9_ColorModel: 1 %AI5_ArtFlags: 0 0 0 1 0 0 1 0 0 %AI5_TargetResolution: 800 %AI5_NumLayers: 5 %AI9_OpenToView: -285.3354 49.2085 4.4426 1589 965 18 0 0 470 360 0 0 1 1 1 0 1 %AI5_OpenViewLayers: 77777 %%PageOrigin:-399 227 %AI7_GridSettings: 72 8 72 8 1 0 0.8 0.8 0.8 0.9 0.9 0.9 %AI9_Flatten: 1 %AI12_CMSettings: 00.MS %%EndComments endstream endobj 965 0 obj <</Length 7512>>stream
+%%BoundingBox: -227 -63 143 234 %%HiResBoundingBox: -226.5 -62.001 142.5908 233.748 %AI7_Thumbnail: 128 104 8 %%BeginData: 7364 Hex Bytes
+[7364 hex bytes of embedded 128x104 thumbnail image data omitted]
+%%EndData endstream endobj 966 0 obj <</Length 65536>>stream
+%AI12_CompressedData [compressed binary Adobe Illustrator private-data stream omitted; content is not representable as text]
+¿¢Žº KÍ|Ìvô–LÎOGøµ ¿šS£ÈQ?ŒÁ¿;áXNNè+‹ôÌãÞ2ïÉä¢þ¯çËsX¦& ›’ ׸ÅYˆœ «¹oÏ8™J€Öº~j¢| m'òWç
+ZЫëÃo*R·÷Ùþëi›7ÆØYÿワÌpmcL©é0•‰(aj‡q‚á^ÀºšpÙ Ö¾6`KcºèæÓ§ê{ÌZ0cU™ÀL­A 3÷R¿˜ebÖcÖŸU³m+UÌ~öÎ1§ê¢Æœ×¦skB9ÌcR 0¯}xÂ|±¢ó“® hb,8‚š¶‹Dre,Ú
+ÿ`±Ýˆ%K~«†XJsRcdåÃ2ø¡‡eË–7_ãXaŒ±RÔ¨Ç*O«Nâ+¬ž-º°¦­ÓÄZ×_%ÖÙ›RØ×0¼Xz¦T7=lÔp›°I«VÇfƒ[Ì"l Kl­±e°m {ÄöeK;NÛGìbrd”Xv|Uª‰¢R“Pi”ºe§©4&£6¥ùŒ”¶â8¨tÚ‹[®tÿrJ_I«S»ž2¢‡”±S÷¨L.ku%9È»”Ù~v£,ô2eeyVt)k›êNÙ¼´Ê® ÀéÅ7Jå°Ž •“-À¼fÊU
+¸ñªíáöÒx‡{.q#*i"x¶¨ãéeu‰óa=^÷ZbxWyéàƒÍúˆÏ~Wüw8ªà‡Åt«Áfc—F»žU5Íö qy•AM bÿÖÄVqפ=­¼¦ØYï4 )¬ùª¦'š±måÒüÌÝßšѶh®f}[«9·ÍZóÜÚѺZ}‹6X ö´‰üޥ͖ë3m¥‰jÛ+ÝA;¸.‹Ú…§kÔn Ù¡ö²ŽEüÓF˜βðwt¿'  ÿú.ºìèŠ@*껺F×맺YîPÐm6î’È+õÚÄ|ª·&½·Pˆè£ƒ³EŸ>g}Ųžé;¹d[?Úcyý*Ò
+™÷ߦԙ\™ÊÓ³ÊÔíWœ¦éÈ™2m'ë¶Y¹k¬Íf<b4{æ¸9^:¶ÍùÍbon¹{Nó¨×.š×–ÚÒ|ýù²YŒ‰ï¢Å£™l,±å`±ä[º¶¥U©-ã|3cÙäv«2ç Y͵ÚÄꛩÜÖä53°–§õ«™XÁBt[•ÁĦÓÅC6gSµ¶EÝcÒ–»TT¶ÖѶMªN·mðÚñ¬® Àíö’Þj×õK{vl+Ú›{¯Ã>±'÷ö]¦ÒràÇUÜákŽÈ>µq䓃®£c°¤À&ó:Ž•Ö©'ê;§Ç¥9“îQÓYñ&rξ_sþ~|.e»lSà.ë%it…C^+×5*]ƒúâšwÕg×Ù©>»kÃÅío{•ît2‰»›Ö²Þ=5ÿØÜG“Áë1QÏ>ÊxÈ„µîiÔêCÏd£ÛxŽþ®Ækè¹<^¿~F‚ý,=Èw¼­ˆcíöïy÷–b¹í e2_>™pù¾"Á’o•ýúUå˜Ãï˜ç+þ„¦uðWSË4W~÷…Lê[ H%dV4èΗ‡Àª&xPß샮dš „ölýBpÖÎiƒ—±­²éTîP,ù»
+U–ÓlhìëéCÇÅ×(lŽv“áˆa¦—ÆËixØRçÇœÛ1¥ó§H˜˜ô#¥¼!~ÁìG«ƒ&j6'×ÑHéÔ–…JtLšˆèé0öǬå´5÷:µ±ºÇb³íúÇýuÜ9þ^ÅS³îO¼ýÓ^ĵãeBZÿ&üÕë6‘?ØN‰A"­JNS£OZJg2î(D’õË%—\LÒ-BݹÎo¹t&re‹…è·—qâ0*ÖSVMd•Jø úT³‡ÅS+öKjÛ¿g2h]úÉÒ|Ý$'åË9ôá´Û줳úÀ’îãÓbúhÔŸ26>™!*§u¦³LE2[Ãæ'kjä#Ù¸^ÿ›m~O“Ù_ú”Ó_¥\d®2æêe?·lÍ#y]ùûœ Zy`Œø¼\]
+:ß©[ ^¨kã†Â²õý[ÔÛ±V1²J$‹æÊQüM±’Ñû³,Å=©ïRÛ­*•¶¡Q¶É`™ØåoåÕT>¦ÖÚŠc9VU²ÑÞµ2¼ .U¬¿¸V}ä^Y-ûu¸¯.Œ}Mg­ØjQ篷ÖòY£µm©™«[çšf=m­MꃲþÐÀ”=cÃ׈†•€®Üø¹,&Mô¥j&¾óæW=VmžÁuË=Û[Åe¤ØZhR¿m}¢îRàíøÏ¢ÙþòáÊöiIu<™þo§lÔ;?ëâ´k©½]¢ßœtûßß6^-¾ËBô«a
+쿶)}áÛ¾8¾óΟÁ÷l0‰ôôá)ÖK⫯^orˆô±¾E£ÀûÁR`ÒoVë…þ¾±ñ\cjPƾƒ•×ÐZšÍÔ0«³{‡ÓÎÒ8Ò»²×Qrg_ß—éXôÆÑX«=þŠUêãk¶Rk•Ê¤5kU&'ä6õ•.->­ŸíßÓ}>;žyL?¿³êÔvšm[ßú¹+gsÏ+á¯Ä|“ ÖÎÌqº(·;×Åzsþ8ÎìO¹®þ
+üèEÜÇvv¬<aNËü³ô[§¡Á›>ëñ…ÿÿÜÍUAGUý#PÜ-76ÀÏÛ8Œw·£#§ÝÅ
+ð
+,ËðŠcìwˆS"wn²yµsoÛi›VIë̯½Ö™MóÄÆ[ÙR#èðX2RµS&¿˜oDöÛ£Šx“û1Ôš}Uìp0 ˆÇ ‚Sòíi{ª;H§Hb­÷˜é¥Ùšlö¾ªHXÇ! ‰¿N‹¾ (¨‰·M”YúlŒ!úS¨ÊSÍ“¦Ôˆæè:²`évlÆH$erÑŸ0 §g¿¥šÉ{‡¼rÂMc¹{úLìºp 6š»Ð37– †ˆÔÐRwy¾<¿Óxƒø5 kþ˜µê„gÒ¬†2Ä0o,§—xE=-Æ'˜}`¿wUà7³¯Tg½ÞID­ZÎ2¬`¾å”¹MXwO,ê©¢'
+>™.éƒrâ÷ƒ®"Áìy¿H®Î+”–æïŠC¯Êf˜QçÍprM—WÑÎ}ånHz@Ž9Á[¼`H_†³m¼>ÊÖUXGR
+1#Âo†WSNŽ ;gË0hˆE\;àì5’ê±%¹»9`)÷„÷–ž}ø~ÍÙiX¿6€M4½O™Ó0 bµ »£6FÂ'zK{ñVÕ›4ˆ|>c&C)·½…cù1ì¨æb Olr6 œ«o†?½¾èÊàŠµ&Vo%S¬Ù'ƒfÜ©ÁÔ)‡^=HÚÓÉB ~Š€%nÛæ“@ÔnÒ%£÷g@ááþ†j‰žÁ¯ (“è‡è«§^pTáÛ0úõA>KP â!3éu›‰šcøuHÁ&! “asø€¸£JÀF%…âよÝ;¹ÿ"ˆšÀÞTP—PÀX
+ôdºgM|>—ç'Zõ+x`ÛîYEÚv¶ vq³ 0¯·ÙŸç‰Eõ\·®*ß@mN“”¾ít(¿¥!QSH³!f…¡Qv'^&‰ßö0¹úõXƒ‹-þô[ƒÐ$2òzk³=윙Ñh5¶¾é3 ÀJ¦ýÿɲðîþ€ñcÖG)Ugdµ²6¹4v“ÖU$ çŠK'Ž-²ËÞB2†0cì9¨dùR§
+:
+Ôš§'«#bF…&B­s†*™ °,ýŸ«ý³:ê_ÏóI0 ™‘ #~"Ýõ.€9žrØiW‘¶Šé{<#…Zc©ÚQ•X4i §I¡Ÿ¥(D–*Üü”à„4k1¾ò’ƒ¾AE¦vב÷o}ôì‡Ljë·Ï;-äÕJ Œ¥»-Ñþµ‹Ç«¥z‹$ô³Û f±ï56ïú:©Ø
+ùɱ‘ù>Ç×ÌKrÙÃÆHz8¼­¬¾Ì %ŸX@µ¯,ž @OË ±v„1æÅÝ` ̶ÜDJÚ_Œ·X|¸?™Ò”JlÖ=XªÆhk|¼!hxcÁ|W ÌÇ%Æ…]Ž×Ó³_0/á“#ÖÄühÐ „uWÇ.¹zäXty¾¢IöWPXc
+Ü¿!÷ ±c¿·lܸ— L&÷:ÙEiþµ9<q 1qÍëO¹—ÕȽ?`%“úÊ&Hî/‡1Òí!Ê°òY` ,îM†KDO0/OîJXj33Š¹oQ ’Ü%i× X|æÃq¿"¬`ìFOíܺœFžgºsi"LKÐvØ ·yGb[Ž€]Œ£x”\‰òÕùH];§ŒÏ‚µc1ë.%ZuôÅíã*¸ÈL~bÉEIlÊv÷O8Ú±C¦®'ƒ`³önº¥MìôËÌßW˜Ê¤NxT—
+>sùFQÉ ÷¶6…Øè¢êÔж닱Pà)Ó¾x»cñïÔ´²ÑóÔ%Ú3ùƒ¯@õ+½gk©ob ýæÇ·t±“ÙWjÖ~`5ð-ÀŒ· `^Xê µ…­`Å @Aé5Éapiå#uDŽ[0É?Œš3/à\]ã¾ÉáÇbÏ´Lô30Å@k¨fR·÷ëy¯ì“BÖÁ€¸ï¯°Ë%sb³µ-ÀzYy"r7<øíC£ÛÁy-½¾êÞ Ú5 Ìf“ãc“/3aóiÉäܳŽ7ˆ±=±ñ©|6‡é¸ŠÊÕ š0–„yXEè|"<í|¦!ŠñZ#v|\ªsr.žc{¢žN:YJ¶¢wíÐCï/ÞÐjúMd§µJÂÕv\&~ ÂÀæa«’º\´àÏôÈ ­Ó“h -ï æÕññ£ê»¼ƒ% Ô¡•…´>l1ßÃE÷Ò«ñ!\Ý¡ÔÄúuàjÞTÏ| ºNUš:Ÿ±—ü^¦ÈùHÏô%5It
+<P4:¿ã†ã,–hö¶@{©èeQ „ðíkOddTd2Þ*b,ÛrŒ05ÜwŸ!Õ[ÿ:žjĽþÚß5JlÁ 6Ñ&p#CSIoÞ™wõ XaVt Em°¶³i@Ìkí%—pu•ÍL¸çÒÊ)­æq è,=ˆe-ó#µgó'¶‡»aÅ>þuª€‰ðåg@ì>¬D껕K<ßµÏXHÃùÒŒ˜mý&ö™è™Hùòú‡‘¿”Þ–n§ÞO/(k,퉅?·!&†î/ŒN!2¾Ù)‘;”·^Àìã¼@)iíù|ÈJ©jŽjÝL¦ì ˆ×”· më€äoX=a2Ixðƒ›ÈŽ7øƒüÙTô€6ý”¥"ŸU&´ä2PÈt­tɘí2s¾›˜Ád&lþÆï
+
+ÒÛ­±ñéi?_
+ãÿÌOòî zÂô²”%U—ÈkÔº+çÖUº{ÙÝtså3¥ &¼÷òµn&X™ÞRÓ'Tl)ˆÜ‰#ûZX)¼B’ž™i~ã-ýÇ;Ѳ@vxþŸ½˜åÖ…¼ °º]¡'%›xf)5ÄÚü§“öe2³ Å.}¸ý°qZm!Žö“¾î x={¿üáorUÞz\’$ ¨ –þKjÔeE]¼R’¼Òˆ‘aènHµs!¾ûéÎÔ
+Û¬š+èÝ~Á _ÿ]wëBÙ‚dzZdJ8ȳã˼±¼n'»ñj#u÷ g ËR=XB­uH®8°‹·ç“Öjþ¿r»ÂRSå(}¬yLQ“¨ª«›~ÌJðÊÜHÞ·cYF¥ÐàÀÞ¼9_,Óýuw¿F˜º0%»[ÿƒ¥!ÿqnæà Þk]rëCBTÝHxÃèí ™ýD0Ìq¼ÆhkbµYEƒ1º¼8á'‡+T 8ï/ª÷OèEÐm\ÄŸ^gTµð˜Xغ‘û[‡1\ó,1½3ÆÌVmMcÆÈ:„éK_~Ì´\W£…3^uÌTì˜ÙVtجᮡwcI}Õuvœ‹ sÄÚ-"΄Ïéó|y¶_(A95˜·¶ÌpžTà§S$<‰›¥\,ï?G|™PÛBî¿\­Ô©÷e#¾Ènƒ ÇÃS»!îÝÑXœêª1i6Ô
+çÓ)xn
+#M{î@v`D:P—
+
+œ‡–AêYåÓ¸0Ò¦;íøIiÅÞs)ƒT3hÏZSÑ,‚ԽԵݔ0R—¾g 9~„Gª$.®j6¡±ÚÈD>&‚Ô£RïNú°ÒîÀF.Ju)Jž¼£Mc!=7l"Mg Qò:¾"B
+Öâ$ÅÓ6Xɽk¤
+œOà‹Ê¥‘vÍ:£%W˜&oª·&9H¿B¶‚×l‡H <¤
+ü¬ÌÌ÷žÚØì
+%¿ëÛl«õJ¡±ž•ù̼ôý¥× "miw?¢HëËêh‚*ðDZ~§m­ü!$Œ´àÄ›d,FzÈ)…™ Ѷ†iåE„Àße[;š'„‘#©á Úï "í—×Y„î/cýíºg¤ÒžÏÖ?oÌÂHK¿‡mÙïsò,í0kÞ‰ø„טÒnÓ–Zms‚H}%³‹õÍ@Ž´#Ÿi®CïF:qêyL£ÿ*Œ ©CÑe¸#-ØF†@"51Hˆ€ý=Þ„~øÄGzÞkÂ4ÒKÔÈ)Öë)¤‰®=Ë„¦Ó¹Á
+œA‹,¤´s¾…‘ºº_¢HÁ&òkçécì±"KG)Üò¦bHgl¤®:°‘Yh}¥D…t¦Ñ¨¤Hû¿#uòÈ µÿûHÓR}Y¯l´‡¢H‘ö/‚؃@û0HÁX8cí‹">»Ä‘Bí_©‡úÿRx¬!Ì"…´ EŠt
+)ä}Z¨S48³:÷ßУOôDè“_³­œvƒëvÇæ}‘–Õqꔟ¶s/éuGK‹s/NKËÖé¶:5–ƒ&B^O´pÑÔc<Ž·®Tó'rÐÍ´±eeïGúÐáQ‡@Düõ/êEÄŸþ&€à§ f'#šhô‘Nÿ1÷4¤ßÜuKÐ0¸ðjÄó>±—°.ç~ÿSck̴Ϭ|oL@,iöpG¹(íS]nVÀ Th¯†u(½ǬT4\Þ=n Ã¶œÇѸ Û\+I¨Sk–×)v—†W-XºE“Cµ±üh<w "9–ýk"ÿŒèðÝyʘŸŸgFˆþˆÎàmþêÊçó§ºÏ@¯1¡Båµ)A,ùó·G듲,dK˜Ýø%EA)ä,÷±‚æ—V³®8;2µ²Ò½ã딤;Pn¾è•ü6å‰/Nšî
+qbqEþQô R\Ñã=ŠgK7Õ˞ؼw™C@ %z„i7€—ŒÜ,ÒSÊDý¡igß Ó®kÆDŶW"œàкÊׇÆÞÅÐàœÑV£øŒÔ鼆>Mìa䊽‹±F5¯X4ôì?ˆÑÀÒÚ¿4 ! Ö0ç:,ÚÞÙ½ÎGz è\‡™&ÓÁ>Vk3¨”Ÿÿ„±‰ïÒ˽1|[9¬•Œœ:ÂÀìr BtgòùnLÊå;i®{åXÙ¹JÎ¥ÊnjÚMðÏ·‘uHñ°,€dÔ¥òBÓ (ÆžPøL(ãˆ~\`|w)ø¸6ÊÕ{—XýBc¹¤{IOÎbƒŒÙæ—¦õA {œÆâ+§d–~¦…É=kßY‹nL2tF¦1Ðf¾/¢Û’e~Ó&™×µ'!ÞŸ¥¹rZ€XÅ=³ñJté͉î•åÝ9*è’¸Bw›:.o•/©X<Óù93¸Ã%‹æ/]Ù2 ¨•ü°úNF¿
+ü•N©þ(
+\b¥ƒµÃÝôþ"
+ÊÝšd Üdò+p^ß÷îö¾
+\HàlB­½”Í>:ÖQ\¦7ÞØ<¾qøìÝØÛþ²ÐÅÅLÍ, ø¼|bfÊÿš˜ÀZØKBV‹ŠÙ?F?p™¹…liŽ%þÌ/¨ILÂ<‡#Oµ…‚ü0
+ÂÄö¼–@uy)„Š yÝm,æ4F7ƒ½í¯êšUÒCSÈLvãFB¾êD+Ìù»c*ÙMì؆k%=Kv·O¥XÒ2% Â1MŸ™!)^6'SX0n–ýñÄfâ8>Ë|”'³ÆäKF/âJ1³wâ¡|)Åå¾ef>·í±„βïúØ£ëì™ãpí;ðÜ$à·MÁ Ù?œX¥EÕ.Ñ,0–¦ÄÏ{ªvÉÎã»>‹!oò,ÿEÅqZ‰§¦‰'ÁH«Ò1ðÚðR%Ö©˜PŠ¦øü=—E0Ks=˜‚ó'7Ïwz–#µ¸ÑéØÇ2DOÌɇXfÊ+Àž¯xùã:XߤØÓ4Qùƒd$ÚŸ(Æuü:®™è”áÖ drÉÓe|¾fÖÕkÂ~Ké|9
+v¬3ÙÞϬ‡ÜÇb"(nL¤˜n)'&²µ—ùŒ÷Ïkç»j@H¼ŸŽà˜žÍ¾L8ròHD¡Pú‚óL®‡H‰øç &׉;ÄãYµlD"žµGÃCijöød"xL(êíé´ÞaC&ùˆåUx— 0Éke„ä˜(vrÙPB‡ô~Í^\hŸ¹ Áy}Dò‘œ$BVy»ØS8âW‹Ü¦%fýx%ðÓÛи^iÞmzÓ£ &ŸI_ìôÔ ¾g¤Ú• aYìbá?e¤Ž•CI¤23RÇʉ—Á“T¿d¤öŽŸÉH…p>‘‘
+óÅÞÏH…P>‘‘
+áÈ»šg Åõ!¿4õå #½™ž}.vÞfC^*œ°ût*Šç—ŒèùD*œà¼|<îï~K.Ťm÷ìÊ7RáØw¡d¸ÿ“T8¯ÂÿA*œ°L¦}Ö•R Ù¢€}²TâÓR4VUÎð\},iybÉ ô‚ Ü²<Š2ȘøÍv/ê0— ¿z¸ÀÊFïÊ<‘ºˆr¡-Áò<¹™ŸW×½‡¿9ܼ£Ûÿ°jœI=¨Äe¤¼LoZã!¡š]S¤_H%³^´@ò“¨·‹Ê†_©S,žñ4rÉ„ešL&¬yX„ ~¸mGø†ÛeÚãÄÍÅÊ:ƒV£ô-žwæ«*ìåÅMv NËE¶ó™…Ô½4à!l/–açêv¤’ÝfVQ¤6²‘Ê°ÃË«âX éHiÚìg!åçbŒg–ŽŸìæê, ÷Ê„¼0¥N*ÙÍná!åÖã nÅ2ì<*ÕÑsí‰%»}Kem¥3ìö†(RMa¾œ‰!K×ã«vÄ‘¦JßiQòªwÎàP i•›a‡f0,=jô‰^ç™í‚Bí(¯§¥«WÑ¥¯Píè­³àPDK7+)B^7ü UÊw,#ä–«ÁwÔX8gDÍþ3©=Ï#Tù^ñ:VG]b‹ItêYpê³8Êzýd%9¡²#wªÈð.IU’{É ÷MÈ‘|’ ‰î„|!®ïY9ɸ>ùëéI9Ññ Õ}{VlDîøž×MôguFxw©½Q?N>¿¤çâ…ˆDÃ]å “öôþ%›îo~˜W³é„ì€[-ÎÏeÓɼæÍl:!Ÿ Ÿ_ÞϦÊ¥ûkæ£x6/^8Òþl:Yhž¼ì­l:!POnùC6Ýwä³é„Îiî{åDzé„réØöþg²é„rédÆö¼M'äk¿U`ÿ\6Ðì"OïG³é„”vìèg²é„réDn›#›î±KK•¸¦ô×l:!åT:›Nhþ¢¡Þ̦ãƒzZSøOÙtbºåg³éäSìl:(þ™ø‡²éþD±—³é$s¬>–M'œ[ýél:!
+"YÏbâuI®(Q]Ž{0õ§.AŠN½TvYªK¢F‚Ne—ED&ÜŸQÔž;ó,"§ÚxRØ\Ú3w¯–%Uèîí2w¬{®dªä)s'Z]./+WIf™;±L.y‰t²$¤ã“™BwïÍUº{SeFÒK%«ÌÝS"$ÌÛeîhFºÐÝÛeî¨Ü·'…îäò¸çÊmÚ¥'Q~žEþc!J‡<£rÿ9— -¬—»>EOy`º§±‡ÏÖ'L£ƒÆ³¤^V"œˆfÅó Cñ Ùù…
+£5z+G˜±÷.QR6„Hù´Þ«T µáñHfâ=FPKZâb1Ôóóú#™)÷›_×$A®Ÿª`^TĈeó~ó¥BÕb§o°ðÚÛêE*ªó8¸%€‚ónµj
+Ê …n8|-Vµ)#‘á¥^½Q€ [2dyàß®pw˯«q÷6®økÝê×*ÜIe>¾Á†·…FW¸ûH¥¼§ùDò*å½›Ot¯”÷>JT¸ãæ%ÉÉùK…;©z¯°ÆÝëîäÞj³ŸÞO¬í5GÌz•X €=Wsrª6Ýßkù™îwýG0¿°(˜ýÚ=WÎäæÃÎGò9‹6y¶ØS8â¹°¼$&ųk&P¹¼WòÛ…’˜Ø§¢P@˜Ø0 {22üZ·ºo2Ó˜ä$1iþ%ÊÚÅä¤1qùÄwÊ€ºÏ‹hS@'+C]Òl¿se@'‡+å$1iÃ;¾òýW»²óR“X¬(±Ó¾¦vèÞ7nåç)†—%vºT ˆOä¸&-â·ŠAûåÕŠt/•{D¤×ǺŸ»šª‹Nß>”ãÚ½šêufr~©Üã“JyÖ?渲øó†!#ëùyŽ+è’ì¬g©WÚ'¢àÆwSý`Æ[&Ü Öödškªy®®À‰¨±ÞïGFø”® ì>²Ó#gmDIPrí²|Çô'N^)Zc#åäÃ)ð³ªÕ­°]UÜ:läwW$N/ž„wºŽv®Læ¥áÙ b¹•Ú[Ó÷Å’ðz¢HÁX”äÁ):V[ZõÕEªÍN,?buØô Ò{&CàŠÛÁBÊMM;k½¦Ž “—å¨×E¿7BH8$0¿æ' oÌÏýc“7ªˆ õ¨pW‰1>X~FÜPiZíGš®ª¾„¢ºo¾ 'µ’´&UG0ßGšJ5INd@«†ïM÷OtæÕ¶rg_¤3imÇί´ ¸ ˆÆÀþ’b¶I0残¯tÞkhowóLJø‰ïÌ°I@T·qª˜=œ”±7Øa~bÓS‰xJÒch•h—àJïTRNjÓóĦó§*³ ‡î“Ê,r=I„th•èÔ=Tfꯄ IeÝñnž|#+íI„&E%…¼õÄ;ôzu|w¯5Ì»{¹Ê¢H—¢´øü"›è/Ei)p©N‰û‰_ë–âqZtbcyâ*Õ‹¸‡×©Çš)3%
+ü%‡®h]?é¡ñ|®ëÇéÈCU¿?T
+Þý‡º~Ò7ÝÏÄ߬ë'¹Ñ%Ñ.öº~Òná—n ’¨ë'mê
+FAÿ¡®ŸPš$ÃzïÕãû@ÎûKuý¤¡@?ÿ'êúI¨(dÃy'ñŸ‘ú׺~ª&«ª»:ƒíº~<6´q«úqîìz9=Åv¯ë÷4ïõ#uý¤«úÝWò›uý¤®Y©oÕõcò·„Ö"ïž«?×õ“& Ò`?P×O‚×Îkç‡êñ¤¡È®Ç÷¤œœìz|oÕõ»CdŸW³9êúIä=<¶¿Z×OZ“‡7|¢®ŸØÞz<Iü[þ– uþv’øŒ ŸÕõ“>oæFü½®—Ú|kñ¯õø^ Ò¯Ç÷ö¹Wõ{¿Ÿœ$¦g·7<Æj¼VŠŸ‘ú^]?Žcû¡ª•1ô~]¿{ª”T]Ñ·ëúI«9"kìåº~ÒUýÞ¾µ‰®ë÷¦¯Of]?y¯¨ë']Õïåz|ÊÂåIËǺ~O†gUõcay«®ß£+™]ÕOì–³WëúI‡wñv±?×õ“tâù”þ\×O`^XUý¤ï…“_×ïï~K.Å^Ïy±+ߨëÇ.¡ª~ŽäÕõ“\¬
+ ïÕõ“V )ëõýº~Ò ±ôŽüv]?î ùUýô1Ù7Yqëú‰+A”óä&+™uýdé0o×õcÏäãŽDv­
+×õ“Þ$³8?–KíûVíáKJG+7…#>ø÷J-©y{%x*îÚ•¼ç¥-rVÓm1§7‘ðOù¦hT:ÎiÙ3„±YËsÏž%Ž%Z8ÃÜ7Çy‰m%‡¿Kj›µ³öR`>¦.F²›n‡Y§ß—ó´Ì‘‘Mß_mtæ­^›8&l™ŸR\u¼&<Xo<s‚O£îö3šbùXs×Ýoîu[\S_‘ìXò–æ.¤]£DmÝŒÎêEO{Ù˜—ÜÇ–n¾P·ãzñ«lÔòëÀ&ÔÙÏ£ºÝqè=â§ØÎSÂÚ¥¨VíÔ¥=øâ;›Õ_—ªo×¾7мr;#ÍF½…YŒƒæø9¶Œ!§6f#ù„\Ls¶´GS;F~ÝéìkgϪgrvO¾+÷Lˆ1l5·-΂_yK{ûµžÎ½=,6çWQ¹Õ2„“_ü* ýñiõ
+ÿL˜© Û­Y…à)ð¿ClÚˆžf8Œ75ÁQréÍâø® ÷
+%H x~gP¿{q„êU¶1‹ÜÃålJ2êÐGÏ6AííCòƒŽº N -ÈÐBçFøÀ3otsEu¿ÓÞauÚ}*›|Ú•LÙ0,žeå;‰Ÿ¶`Œâí«ŽºP8Cí0Òw¥[ãÞ“¬.£gÊd\ ~V»ÀÆ-îí 4=„i•â0&ÕÑÔ]¯É8tþ®“ÑtÑ3¥2MÛføŽ8Â{WçŠÕYI’8Â6Wº­<N%~±Òq a _{Ó`ƒf§&øm\G˜ìéÍûE+z
+6ÿ–=;Ön‰½óM¸ /‘©ÒÒÕ¡?Õ»‚ÙgtKT—jèJø¢Ë¯¬et=Ã÷É9¹©t1çM›£®µ€âÁ•æר†õƒéyqu¿ RŒ¾‡„{g…&Yîa¨‚ÞÌå µ­cÀÊ+ÓFz@Ĺ~ïš#Kçö›}O -û¨îéL÷±Øl¡#NÔ¬!
+ü͉ ¯Ñ½W[Ý
+ ¤RHÙò»Óž¶Çⶼ}ëŸÊf¸;èž0܈y®–W»É¶3‚Þ^7ŸhïÌ%–݉ÏÌ»)б85!OÞºŠ4óN 1‚gßaê@Ŧ³|!%ÿ¦âÛ€QT·ØMIt 0ŽØMºr‘ß>)dõ4oèj]ʨšŸ×Z6uÜEF3ãøIÌ‹{”Ë…Jïö,gÛ)Z¶Æ3OÌqÝ–íýÚÅ#í´b~{ÆúZj}¬ë߀ gƒvˆ!ÈU·pôÈ»¿ê)l°JjGh¬úî‘ÃpýÎ43åôІ—nøÉɸñe˜é ˆi(EôÆš î9òí©ðbÊJ¦ã˜̾¢Ý{ÏÚl›^í³Ù³Ú äh#{,0[òga7ͶÜ:ë§ãù)‘C¹w¸p£}šYøÔ²OÔZEd„ó^ø3Ý+ ˆCçO2ÖXÂK{áÐúã+ìîf4ñاëG n“8Ó⑃Ùû«ÍDX·p˜øKew賡`ª¸>âè¬[ÆÈÕ;†_1—3ÚÔyQ½ò‚x 9ØÄê6£ŽQ©TBšÒÄãÐ×0Â>Í&Ê€_4 ¿ô(;7]˜EAo#V»é°%C4mD Œ= :—5AC1} ÃÈèîî¢è¦U‹¢n ±ù5yL%Êmì(oqã\`ƪ2ªÀÑÿh‹ð~ðè`ýšïí¡Öùøhâ,âä…é’ùx|²aC¾BÛË*‹¤ûÈ 4
+A“»Ó”=Õ¹:‚úE{áX+¯4Ž†iKüÑÕ!àè@þ8¿û@¾*ñ÷ʪØö—§_7F‚Þf
+†@÷RNNthÉÞòvÐeeΑy̾ºf ÿÍ’ƒôr0“´«‘Š_8±Vª=c>¤#Ì_bvM$a iÇpJÚÁDÅ=
+؇q¯^³ÎX#à.¨“áÛ‹¤™yÁ;f`^
+!îf{ÈÛ!JŽåàŸ(Åû¦ã¡,Ìöw¹"¹I
+m‘÷?»I
+m‘
+üÓ›¤ÐÉlœŸÚ$…¶Hn) Û¡ÎC$.;z×Íw?ñ9ìf0£MÆ®§f0ÝŽŠE˜jØ›r\‡òm#‘lL\¼`ÕÑs-Þ
+Ðe[öB¡LàÅÝÈœ
+ÐV"3z™ð”ïEj¾yí¸Tå RÇ,9[†¨Oî¨v¼µ¢ž‰Ü²‘Ä}Jºý»€KBÅØJï ÷32Ur ÃüÍgô’J~“–ÿ·*9¤ØŸ|F/©älÓÿJN]Bêû?VÉἈ¨ÐTÉÙæÿN%XX>±ÿ+•®1 ”ßUò3eôi¥›œú!Ðk)+szê˜nØ ¬è¸Îëã¾µ"eƒ²øzt\¢ÛN ;Tf‰“ RwÑÜÐoÜ3S‡d€×¾€bÜ:ݽ½´´
+Š$ÌN5~Údv~Æäø<e„£,V‰•$è†RŒÍêìòžé¡GK…¢lº ´Ðê~:f¤£Æ?QP)k·S»Û¤Àqð¤>¿\°{˜˜ÿ¬v…ñæ'…ýõ¯ ükÿçúçp»Ñ^?xà†O ….1>Ïõÿ
+;>´ÆOb5½¬ö»ñé?ÿðQ§Xhf‰tÃà?è‡mš‚çzèy‚¾ ¶qð_çÚø|*+l›Ãév
+È/<†ûŸÀ
+ŸèÿöÉgŸþìóß|ú¿×ßãýOþ¯?þþò¿ÿÍÝô›»vÿ—uÿ«ÿñ ž•¿Åg¥oñYákŸõîo%–X xóækC‚@x*Þ¾ÿä‹/þ,Á`W~½dø·ý?áÝñc#¦ /8„Ý·þPFK½[∈7 é÷eay®ýþòLí¾Ä‡°P¼6Þ¨ë_—xÊõ¡…Š­8Úú·55cýø¼;®YÈÌsí‰ã9±­ 3ò=™w¤PÖÓz9þ=ŵmð­û)L^~üξf¿Ë~Îñ¾§z»vÈ/¯øŸùèõ×¹6|Œº¿6ýÛ¯ñié¡æ÷Ø›Ø?¿”‡ÒŽ¿Ú[¾½Û#1¯;îðgà'×4Ùl쿽ŗåõ´8O—¤Tùùû1>pü’Ø«ì‡ïzú˜·Ï|¨OÂ?Ü=3“ÏÍö¼¾Á3«v]ÙÓÊ¿½{OÑs‹°·wÏ¡ð)RoVþÞý»Ï¹™¸üXJk«”Ê}|ÛäPÇÀù[#N8Ší¸þˆô#V^VþÞ>·­su=Ó—yÝ>—‚öÞþZRð[—–”ü}ÎãMÌã7ž˜Çš˜Ço>1wÏ|ìyl‚x]h#7Œ•ÖøÂ}m¶Ø1[!ˆÖË–šñŒ™×Îã}åaA¾áGÊœ|¥µK¨x¥4¾cMÞXcyî ¸¹=äÆWeýkª÷9¯÷Ñ-ù¡§¸>šéÚ Úœ3Œ¥fö‡:'h}4þ”ÚÈk²5²$jám}®·c¨!½òú®Ö8‰½ó× 0ð ùa®Ò¯õuÛÚ:5=¤Q’ÍÇÒ ËxˆkÚïog3½..-•Û½ø§o.X?@°<~`yü†‚å¹õIø÷µÏ’zÉé(…Ç­Œì¨.±ãV«õ¨!k/ü´µ×¼ÏÊßšP•øƒ#Ï¡‘ž×¿á‚­Ü›£gà6÷€O˜Ê\ð…1׬ûj+^×Òû}ic1V®øx(evŒ„–꣄|Ú\)WŸ‰¸@&õ鈕›½™†ó-¹ÒÖ¾ÃXšÖz®„‘¾6·äÊÀ䬻©>ôÊ{jNI_1ê²X×+,PuÉŒ”—dYŸ±¬¸®ïÈ#­²
+¹¼ŽW*$Ä\`*êÃZÇŠ‘ZG”ÌH}uäÒº6d\$SYKY}¯¯Sb¬i¥Ð¨ë`ë„óXzŠî[3Ò×'f€£âHâ.ÍKçÈ ±!ÙÞ.Ðl3³i$®™†ª+í»=7¨¶A7µ0΄3qéâimãû¸vLò3qm4LÙÂi ëuq(öÍ{Auaxøž°,½R“Ž-GïÀ^™ØÏàc½ÞRÜc)U·A»‡¦Eq·¶\N¶KQë¾°ks¯ïÄê¦ýTÄ‘]©úÉg‹äXXŸRZ| ¿ùP›G½¢
+KPŒµZrGk(¹ÞòÁ3Çd:c~Ôä…Ðud­±¥Åòm¬‰Ç@M¦éÎub¤,µÊ!Ÿ–nÃÅŒ%ê4É-Qÿ®™8]¯3Ó"\2ºjêp¬ù^Ö\Óð=a.|GÊtŒ´€ÍÒ´ziûô*PÌÖV ¼
+¿·„ò}\ª™±…ðÖ5ë¼Ê¹ê kg>ïëAØ-}Nj÷ËÐ̶â#ó[Ý ‘e3`B‰®®a½ÖûÆx¬ gòX¯:ÓxÀ<}¨M‚±"Ús)ÖfNí1¼Í>èéËÂ+Ûݽ¹­­¥ª±e@&‰Þ—們ä&NS, ŽiS»––OùL N0ÌAÀÅ}‰îJËʾá²HZ{Ó´z]Ç‚åóC#©dºJèÉa‰@P9yÄ|]?‹IÌÓ´Ÿ‰oxt kKÆV,îzÉu¹Ö¿d8ÖþH¹k¹×ÞŽ<h–"ã¨ä¯éï•zŒPI ¡EwÝø4‹5°ç^*Oj“਴ˆkÖñOTz¬Ÿx/Fawãr
+g™ã²ãé#Eó2„u c ¸†P .cIÌ=q±<@êÄò(k%—12Ê$r.²zós½õù;,Ø4,K¥Beˆ¹dê\/€ê¸uý¥Ü6à‹êËz¦úÀ²´+5Ù…Ë´þH !h
+Û”×*ð0Æ@2e½e3m©ù/TθEe,8êúƜϢ’Ö´{¼l}­à8| ’òü6I³[ô˜dðY& —I±&ï ’³Cr¬ƒã-5å:Í1P¶O«Ø+,œ
+]X¸@/fèÑDeY¿LU£´";’×,YYÒÜB3T”õùœˆ•c¡2x/œ-xRIM½ç}öC;]Òrà|6 a‰ÒuµCÏœ§:‚9˹ððÑØ:\»ëç–ø¤ÝEvä@a9gÜÂ’[e­@·ó¸äÌ­ãèþfÕZ{¨¾
+Xž>Ô&áŠKê“xß–±“YŠŸäWXfØ-ùðÑ®í—׬æp‰1ñª» Kê\”0]Xò_Þ #¼™ó ›Å`¹p–è¢uÃ’Ðà q¦"TÄuÐÃÆK®=`Ó>ú—Ý>a<5S\×9²“ûõ/µK¦iX…š7,sÌHßx¨†¶þ”–J™a-ÒI<àå{¯me‘ŠÑõ X˜G7X®¯àÜU6Çrµ2_CìàæCm®¸ÔL—ŽÉOîqÒ˜«g`N(dÌe‰v>ßÂRpïã [_Óvð  -JŒv¯M:ÕÒ”"Ù[eÔ§u7]–jÈ@ÝQ6wy}Q^óò,—aň–ûï²äî‚e,£8,yR¯Ã˜îGþ|ZVò=€žf,sª”Ÿs‰Y×-ÛZRÀ2 †¢,3îËSn
+Âr"ͱ1Ã%A¿„°¾ÁåªýþfõZû)¿
+`ž>Ô&Ái©bYfN<Ÿß¡%¬?1ª%‹}s½D-ÛÏM®13:—Àl2Cá·ÒHëKPrQZt\ÒÈ… ¥i
+e2ù­RÖ¸šÜ1© GX_ŸÁVÉ4èÄÉ Ø`j&ü ™aT˜‰8äðäï¯Oï
+¤úð,¡š–=½ÄåÒ.‹P¹Œí5F‰P=×ì9T Cec/=&æ(`2ÿ
+HR$p‰g8`×m=J<#F‘D°Œî…°.;œj(²I
+£7‰b’àfíô ¿L0JY
+„z§ þû‡¥N)lÀUÄ©XîFõKgy”úuÂÑLsã¨O¾ƒù;jrjª²v€#ø¥1²­O=hÈ$n4'2  × O´«}ã(¦ÀH…G˜EódèÜ튗@ÀÍ>ü¸KüÒ/ƒw繋üû¥­ue”.3cÌ¢D+‰rjMtN8šëš€*ß7ӡ׫õJC£%¾ærX:c®”¸©çŠÖ¤)ß?ÍyH$.¿¡ÈSdÒêÌ4AݺÍ&¶ž"i®Í¸‘d()ò“Iˆïâw–1¯ÇHÙfîhQئáÔ‘ VT@‚º‰Ä%$Òsî– ª•Iiqh"¦ÒÊ3Ò›A«Íõ! ÆõØ@ZV(ÓöbîKd0p¡»Zfã2aúúå%áL]¤+P°Ž[4Lð¨ƒFéO$^[¼È†Åšä™:»-~/±4[=ű W,•¥©È•Ç?K2Yü£KS¿ßEFg0!ºk`bDNæQΆ“¹£t,™ ¸ûFS«É0^ M±Ç"3VÁµ¢ &¬Ø±¹Ôô¤”ÂÀp‚²³¼"Œ>…/8?pÞõØý6æÌ4¨™Ê·^pFä®m±”h``d¦?AÝjÄ™Ž2¼wÌR$‰T
+®Í@]×ÐF\e0NRŽYæXÌ›¬²ÜŸÀ‚©†qÿ  °Ú^Œ LC€‚Ù(™zaëò¥¥T$¸—¶SÓËÓ¿‰$¼ª\Á%î[ £¿@~ð‘r‡Ä…‡qä×Õ‰2…u6(JÕ£)˜+ó…"ñi¸8Þ)¬H|úÎæ<¯õŽîiQç©Ò"òªæ£w^G¹P0VÜC6ÍÐcç×i|¼.
+}í·oýZG{Tª+L“2½@)úº =VËÚfUhÙ1­Ó5àFôôðûeίŸ6à ïq)¹‘LÚ…<¡4¬»ºr}³T\$¦¹7¼£ô¡Þ#Sù¨)–úΖem:z*±çéà‹ò×׋Í! V:×b*ÓF¬©Z¸ ….ð¯õl85L[`Ý5t4#!6Š¯2äŽÅcdçÉ·ÕÜ»u‡w¡ý¸ÕVÕ*ï)xÒ2d™’é ¹ÄRÉ(ƒ¨IÍ"wEˆJ°Â9:Y·‘Æ°XÃúÄšÁ¥ÁAK‘²Á³F»p ˆ=™”S›ëiÑëÄ.‹2¨ ôuÉãöÈdéQ¯aÈuf«Õ–“´Zý,~ƒ…]šÝ¶V.¯O]c3(pwWqCÍ*¬“ï× Ûa¡ÈDeÅøP]¯²*¦¨Ð`sJ¥è˜¢ÌкU?-øŽ°,žÜ=?rô²NLð¨Žñ‚ƒ6Gi=<7âèИÌ=‚Þ2aÓÔêšJ™Ÿèc·°ef&ME½ºè"å¹î¨AäÉ]Û«Wݪ¸AQTHo!†!8C“MáJàÉKÍu¢¬Ï¦CV(‚y|D+ö[Ó"Š#ñ]{;GˆjM#JJ\€©u¯Nç,ÃÜ´„Ëê †Èüù-Xê•dk¾ŒŸrYS’š!tí¡ÎÚΩ‚‹
+µ }íxŽˆT¡vvà Ç3ÒmF°“‰b¢ÂDpà@õè–!¶’ºv2YX½ÛïŸ~õã \MƒIäòÌW÷7au µÓZeƒ"SŠJßÓDEèKüÇB?ÏgÃY’sŠCÌ„>°V«Naú½Ö¢@£éhJwd2ÈÓ·¿ªÍßØ]²V¢Í³1ƒ5Öôàõ-ݽY
+i“?ýÆÈþ2|OÆñ!œñ4‹Bþ˜?™•…ÀšíõüâJ8ý¼«ZÌÍŒwÍ
+ß¾üu.^lîC/íü戂D‹ D«dv»fvO¡êªÊÂØ {wB ˜²‘ú<OR–Høåó¬Déþ'?ûü$"Š÷}þÇß}ôþwŸ|ñ»ÏÿåçŸþñþþ77¤D‘ÜGFtßó~;Ï>ü;}ó_-RüÊ}þÅþâc2·ýÅÇðZ,½s-©ýùâÁby!W‡÷ºF6¹›`Bž 1Ä·ì"¿]wÜQ]Rt]µ¡ÊwÜb/…«¾Œ’ê£?üáýqMãýGÿöÇßßÿâ“/þøé~÷ÿ}úW ÎúÚOMäšèßüþŸ?ýø£ŸÍÿÛ?ñéþýÓß|ü|ú§uÑšïŸ\áò÷xÿ“¿ýýïßßÿåÿúäŸ|öÅñ¯?ûüwüÝ'ï×ïÚÊ¿òŸ>~úùz% ¿1õ'¿øô“õ¿ùÝgÿí·¿ýâÓ?þZÿŸù§_ý•Á|þ%õe÷ù§÷÷Ûß~úøÇãËýŽýõ?ûü‹?~òùã§ÿô“?~òñÏ~úñÏ?ù_õt‚þîúøoø‰ÛN˜ŒëòÏÿù}/…X
+÷¿¸ †4qýꣻW_ª?S¢þ<}q÷×gB²Û·þDÇLE(.h†uA!ýÛ:˜Ë0+.IJƒzú1‡ õöUÌØ‚¸GhŽzçI}g™î±lN
+>‰ª9³¿Iö%™>b‡Bj¡Ÿ¯BZCLòP ³îù} “è~_PøU¹uï¤6Å€±Š"\;† Áçú`P˜òø9ÑY‡#fWÑÛe9-*yúÿI¥wŸÇòƒe6åè!4G=ŽE”óøˆ9Þñt•½u‚ÛÍ5D.—ñö™Õ}÷Û»¿þå’•Óä≵îƒÉêø²8QßÐ]mÊR‡ß a:-®*EŠh¶Y« AÌío£  U´ØÃpæVÓøk,µîe™ ¡º¤Ú/ðýè):w1Ô.?†1èRx)Y|2j7¿ý¼/§¥{f|¨í­ä#…ýZØc˜Å†¤r^ho§vò··åµAØ^ôÅg!ñãæåmà­©2pí‹àöÔÔøsŽ‘ãçöØñVûIûÕ/Ÿ÷ö«ç@õwÏÎú3KSŸ¼Ô³ ý ·y{÷SÏ‚îéŒ?â+Ò¯x¹Ù½€îäG„ýˆ°çö÷çYëki£ƒ9¯ä-ë65zn¡sß× Tsc“©LžÁ`NÅg</ª·óŒÚVO9=ÆP”{{ÕØgéù?t}?p}?h}?p}.³rƒçæéf6Q]¤Ál—Ég‡¡äÏJ-&j~§JClŒ#Kù±‘˜Ìµ™Ê‹êþD–o‚Ê÷R>&}œ(u“çy­*ý
+ܨs
+ózî`¬8hµ_àÍã?;›=¬!o»yø­ ©`°YÖ^~`…BQ|Mj`«#}>!7ÃûóX}2á’%úPQùøa¢òñEå㇉Êǯž›©—sîø¡âçæréúÅ0ÖPeZ s¯Šñ{ÔÚ]9%TQ(åPÛ" ~á8ŽQçnêqßÖ`µÁd?Ag¼ö¡ô^Mî +ÈdJE]<wyÏf{0f Ø$°½°%öoVEà0H?º^!8H£'81;‘kMy”Ó¶gKá­Å½\u`Ç°^jÚ‰ëÖÝ€n ‘ £qûpçŒMž}!,š¢‡ex,Ø3±Ç—Jþ2þMÄÕì>Kz1DÞ#»µ2¥Ó»…“”o¼¿O:)±… <‡ˆxz8ufÅBŠqj‚v±gÏ\€tÁ/)d^ù¾ÎÍÔK„ŽÇAøÚnÍäµ
+ª L§ƒÂá)íZ܇s,×ÝP€ŒµrU¹€?„ÕY77£œ0+ÀzvHºÜ§±û­ŒüPŒ(¯<=$Á·£`ÇŽ{Ù QÉ:I°zA[1QÌŨéþÉì\W$Ög”ðW†ÊøÜD½”=èxZC„ÛP©Š(¤)
+7¡î5ºþ•¹µ
+†N
+)bÂi ª9rQÞüƒ÷ld¤’#»5h'8¼«‰eöÕ˜\›‘"P£).¤Ä¬žH•,DúØ|­h#1Ž-POü Bsxy£ð4·CáŸÌ‚Á2[v1þ¿ÒÝ5{±ËBY”øUƒõ:6S/§:˜Û!KIùÈÁ1 ¬%µl‡dÉî=˜ÒÜ
+ÔE{ã†Y`S¸`•—®›ª&
+‹‰Äضn?šÄEX׳¢P>‡,ÃucR,€ÛÖ³+R¶ FÌQܼҳ׾´Seà„5Ìsÿ“â…ÇAÉ–ßÄjMUâk›‰iô\=r]yšô²H_2T›ºÛåÈãÄ^×3´üê1}™›©SÏ‹ÝqÓÒè)t¶c°FÓûXK­¡îÀG ¸Ã‹–%L´2M
+–¨æ»½‘‹¥è¢è²éx\F`:y#†ˆÄîÔ¶ ;ZÐÈÚ«[ç]Cc±o·Ù´93ãev/kʤµWŽëëØL½œÚèpnOøcØÈÖįˇ)®pö­÷ÕÈœI;TÖç°Û¸ûÀ§µFj&…KKrŠxØ…5·é;É&U’Ù³ëßæŸ8¤¾u Y9~1ç.jàÍ;1BÏ–yN×UÐh‘'Ð o“6fõ|*š–Ì2Ý~C±…#ï:º¿‰©™Ô s½Qmj/+êT7¯Õ×9°™z9/ÒñÀÈ·«ËÅ y+¦kÙ•˜ÌOTÓát+qNS´‡kÑ}‰Ã¸¬.ðØ$™iìòA:î4´S|#B_ß?™d,&+¡(eÝôD@>{ß Ž.7R2{&_î2j+fSÆJ•¬[¯^O’3»mû˜‡Äwö‘›™½,([ݼrP_çÀfêå¤JÇ©º¨ÎI¯ÛC4P7_3$(šë¡šb9Ú¡IÄ`¨‡y×›«ÕÓ€Þ˜Q Q,3R˜eÀtŒL“ժѠ¬&øÅ&OÅ:ï[åß^À–ëK¸KÅÄ÷4i-ç†À<cÀVò =5›´fé>%G‰ÎKÛ‚V‚¤Ên26Ø$n45¤jè÷é_ÖÕ)'_3¶¯s`3õrr¦SŒ}ôx¨!¶¸•¢ö_[N(.v› Úía§qŸî¦3¦8Øû°øBœÍ–L¡C[µ×D6Ô›s£Š*“[v|¥ŠÜÈÖ{ÙS1à-QÑÜjJ¿¦t6ˆH²ª¡èJæ.v3Òœ;¼‘m‡0‹aœšËX¾Ø.¢ˆ³{³¨eóÑ¿^`?›©ñ;Ï:¥Ó0Å–bÝ㔶ÌÑÃR­W]“rßRhÔ9m+‡³í„C'™·d ·šQŒ¢5±p6±2ÑõfÉÕÐE¸Þ“BÆQp“«Õ†•O±x…W«æ©GIY@oJê š2䘸åÌJR£]
+…r÷/Íbª@qVß^Ö¤—P=|2µ—µìãµ ê§s ‰z95Ô9 ÓwPÍÌï©[kpź©ÐT¹ÆyHn7ܘʋïLŽD)”Ü;ÀÒ{3E%mp’~…n•Ø·¾‘d»1ýÕÔíbÚKõ”æ:’k/Œ×Ñi'»¼BæÇ#‹³»ºK-¶%†ù¶{eØ;aÍ2k“ØgN/äôãÕHÑ”?[f¶]íL@yœµûW“J’Þ°ÇàáD©®«Ï!oKs„uT|Í{â26O/æ¥:iþðNû–`X·¥%¸åøBo¥¨5©žAÖ²R}í–•–ZÕ—KÛ=D3ãå”Ì*±#¡æýH°LMU+kÄÐŽ:¢Ír²‚½]SI,O‰l¥Y‰”"LðH[ÓÊ…~•´ƒ1ɼÜ8%̹Éغ¶MßiòetsÈÌbUœL®‘%ì%Ì#›=¾©±°oÈ´NK»ß@v:ýÚ²å’i{ÿÕéHN¦Wö”„ÁuFG³2vë*3»¾”·HeµÒì
+†¬2°ú¹+ñHŒdoüѬR¿6¬ÄM7MU
+F+4“š·:4éÃôëùÑÜ$@؈ `ä• px8G²{Lª•p¨*‘úÞM.+þÈ,®yèèQÛÁ)!N 3èÿ0@ÇÆOòbº¬Ó/€½e+ÄQ¯–«çÐ-à u‡¢Nñr8«ª¥x•K¯
+8Ÿ6êuE^Jç%E„ñ©í^ï!>öðåP‘3ý`ßkŸæçðñM4 Ü›Ák
+S(š¹àVcž&œw;^7 1f{
+ææõÏ^HóÌbÙêuà¨^(Ätõ9kJj•’]†çÆU i(6dÂ*mhkQÏdBwh<ª©KPìÍéXÝ&Xâ‡:>";ž¬øJK±‡IãPÛ¾ŽV|°5›%µât½è™ ~)× Ü˜¡È
+Õw½`RsÚun'-Ã÷6!*)›Ö‚EÔ
+ðŸAC/s÷ o'T©}Ie¿‰|<¤ŸO?Äkú8Þ…O©õxÛÛïù2º¬§_­ØÌbÑß‘_½.øõúÄhtwÉÔ”ÁI@¿§&†Ô›I5‚h43ïü oØ[7?²ÖM-±úw_Ò‹æ`?ÅN?äCû]ôÓÛOyû¥ßj“ñwO'õé´'oñtù® |ÁÀÛ»§0y¤ ÖÞÞ=…ãÀ^!paÀú`WÅ yE ùûã“£zÀž<ˆùþý‰i‰ãþ÷~ä3žûø!ÓôøÓôøÓôøͧéñîK?ø2!>eº£ÈlÁÄ¥ªkØŽœÉM%³sÝ,"aFk(%Ýg¡Ç£r幬eûá,Ú(ÿðèGËåç¿­4¬å$ÚÞEô‡@u»Ô••Æ’ø9ßä bâXô™–ä‡Ðº¶-Y‚&;P×Í©©„T©4E¾×Ú&åÞ¢íܹ¨.bg9LCbƒ›hŽgôSÍ’J¯‡:ö蟾±¸yüqóøâæñŠ›/ÿVŸYÆŸ–·û‚Ì
+š¤‡Nn!~ÚRÆ÷ÿ*ÄÇå[}6œ¢êC¹Peˆùë’HãÀ$3“Yu_v(a®ÅkZÄ¢ÆGÌÿFO£iéíHöb)°í’7ýÝÂ{ä­b/Õõ\ÿ~l•ž”YtŒ£æþ»¹“_ô¸~ðuBNSF× †‚Ýf% n%°³N^ÀLÜ•¼²;€å¬zƒœt^àÕIeš"··>¦H62†à
+ä1ÄnÕ¿¹(kÒ•Ó”­¨ý9É7’FS[¾BQ*£ò¦±£#ÛA==½!
+0j<)è›4¼&éH‹›oõÙx))¥?®"L]Û@pAô>ü
+;²Ë+%†-ïªzþÃbE¥nU/t;¢°S×Ì¿0>TÑÉ
+§˜úìáï,rOI™0xŒuÓŽU‡¢ ¦Š’V•GIœQ@jPƒ5´cÇñµ~=ñîh<ËÈáúͲö„R^ò×›¥K¤$i€c® ,—´r¤æŸµ±N1¢‹vˆÚq{žƒ‹ ’³Ë¢œP°,—_ào?ÕçâeŽÇÃÊ …öˆd9Ô¶(¿Ä¸r1Puw‚@ÙtVÏÍZס÷Ñ3ÕÐY­¾D$Z¬¯Ø܉MÅÅ0ð”›[Yg<ºÄ2G©"Ï"Ü¥ËA Áž×ÍR¨ñKÈñhéQî§zDó$s°#¦½¤nl–Å—²˜üÐÁ,QþCÌG6²s ,OÅ[,©E ÄGOã+;OÜÍ2)0ÿ: yó­>/ãoô‡5e™™FJN@‹ˆ„
+w¥ô,T7ˆ_¢ ‹ÂÊv²=‹¸MyÛhlB‚p¸A¬—ÝV&°u`5L+ÎÁDA±O›…ó•X·ŸêsñRÖÆÃűÞwƒ’2¯Y{[ ’ZÚˆî’hÄèZ¬dR2†Á¶Â*îRÊz9´ªd%*ürvíÚ<ÅsæÐ窂˜âpRå!D.œÓFL ‘kw%À‹í2Ym
+ô+0€PÝH&E#YrTÎÙÚýuânVjS„¾TÞ|«ÏÆKÉ ¾%2)œ*Öj$¶Îcï3oÌÑ0¨*cf9Îü¥DF ‰Í¹É¬fè|Öu™:ãÚ™:Îï"ç)t<
+½ž’…ù…È0Ì8FÖOÆlÕ`Iä¯àÊ"ëw¶êiÎIF‰aûˆ Oçñ½¤¸"¶lhòZ —X—#LlHÒØT¡æͤ ™££¼@^¾Õ&㥼„ÇÓÈÊîR2Ûì[êaœ…ר2Kwä ÊÐÛÎ{&.²je¹Â,Œƒã'*»"% ~ñu€Fde+LÅÕZ–¿€Læ@ŠìãÑñ9+µ]E@ÃÓ_@Ê÷t#Åñˆ?Š,ÅT_djᇡ*$$„ò®CñEÛf|W¶à?7‰Ç˼¿ÎÛÍ:¥òJ,ïë·úl¼”PðÁ¹oP²¸˜–U02T¸«‰8ºèa.°ÁŒtãB/Ö]¢ì´ºÐ‘$E£ ±%Gƒƒ(ÛÊ7ñE¡b´h…†DêFŒÙP=g¦ÜÔŒ¯Ké\W•3ä$Ø¢èˆ"»!*‘&òd|?FZz”£„%Ó>¬2ì°º a©9ˆ¨Ã]NÄýA(‚%“a>ÎnŸ¹›¥2B WËÛoµÙxàñ¤>vF
+‹ÛØU˺—%¸Á\¸Ãú=ÑL’el j8ôèîŠ×b—f:½aèuZ ]pD.5‰InÝÀÙS€Èµu:9@Š’¬z[Â5¡0]šé›ªV”Ýeçá0ºcÈÃ6/£eRä¨fzèQ ¯'ùéT°—qðÏ|Ïšt_’²v1»”­j›¸›Eª¯èì¾ýV›—úgÜØP‰ú<úAŒ)áÈDx2ÁtÀY! T'…9þ«QWBÃà-ëÔ N¦¤¬~L^C(œ±Y¯Ùšj~S·™H1D ÐR÷$Ü W\¤–$„OßÄ$™ЇÁ½ApŒâåXº Ã;Q#Úȃ ¡8•ìØi¨DtE(6Å*£¸b¯w³Rmö×Oò­6/¤Ô; á&B=oãV,\Lúðl¾: uj##]!*[#™4x b’Ëd‹Òþ+v³–‡ºšÓ4d‘,D!‰‹Ôk€¹)o¸´¢ö“DRÕùW–Ä…!6U.Á‚ÛŒ¶ú¥5ý
+š.4&V UõÈNò˜¦Nò^1#ïL.ÑfÀœÇv#ßG xÎV³UÅÝñ¤Ž~l†P3ËD&ó(¹ô-ô2@4€7I,Å
+‹QQ Í›Ó$µ0PÀ·elfᇒq0è€ 'tÂD2´ Šv­3ŠÙE$D¯NʹƑ=˜àb§¥^èåfò3ˆ~K³
+©$¦P'ùmRT5c3ïª" (`ÿiðÄKz»~/c’ÃÑ“¸ˆ¬Y`èpÓgª#1M}Ó…‘OÇþ0îê€Ñ_¼³(r;ÈoŠr1m!é
+YrÈMdTùåÒ$Èõ«uB¶î:f¡Ö¨š]úü'݉wšÑ³ðØ´ž¨L5
+<éd.Âe,cjE5üÌ‘ž2k >HlHð¼¿Nø ùvÖ Ö)½…:øgL›&ᬹ¢œÑ¹ U9DfGUÊ£ÌczVCrt.²`öQŽ"úE
+
+TsžCIHÁFÍìbPmZ™½’î@aˆê!¶œÃCª pÑ‚¯9…ñÂtÐÜÅ®ŽJö@…Ÿiú¸¹ˆ+MèÀŒkÉýˆH¬Yö‚oä«ŽT_‚5>9r%
+QY Ë?ib€ €Ñ¤ÖÀ‰^fxðŸœAÎbOtó€¾ µ¡9Øõj·³þ+>TñoHgŽRJCÁ€‡Ð™C§É-¾`B*2šž0/ø9‡°F‡!¼ÞÀþÉ3d”…mrn †€ÿ"4!ô2Ç“8Š´§™TM¦&Û0\ó>Ô‘^6DÊ@Gœ…ðëß{Q¹Kµ@&¾¡^®ý;¤%Œ„˜i x[E]³ìlJÀ^ê÷
+5q¨Z2?ý2­‚*s
+×Á1{Yrt[
+XËJ§ å6?¤éVËhæßæ¢ 9^3DTXÍÈ~òò/ä±+`ìñlQ Ï u2ø0ÀYn‘õ¥µÆ'îWØÀÇ\€×tH*™%‹˜^N7Mw P5ä¬Ub[IÚ¼xÓl7¯þRººg m(ØéÆÔ# 2/i*vÛ³#àuK¤W È¥?1$Ÿû®Éç (íœüGTž·ÿˆQ|ü‡4p´èÜ]¦P=.ÇQl—ô*_@ž÷ûr’³"Ô£KŽ( |Çåz—}Ü·¿„?Îðþ=cË{b} ßpÜw‹{j)â°qßI×E*Ò3C°ÀÛönß÷t ¿óvÄùšÉùX6a…q®Ä¸¤ž“"w_ƒ”âx30ôÔ,“Ç;I‚ŽJïî”6‚L+Tý¸,†PÌ…Çæ{9î¿?Þ‰Å)~ ¢nÈ~©žŒÂÈýÔ8aþ݃öQÓZŒd$”¡ô›=áö
+Šî¶¸qaÖlÏ ›çD /vQóµ‚ækÅÌ'6»3eœ#úƒyã؃ Xr<nüìN¥>ô,Wû·MÙVÕ&IGÞ¹
+Fü÷.PŠ×6Ùð§vß\Ú=~ˆ´{ü i÷øU_m3óR"™ý¼,² •ì ¸ì"q¡fÅÖ'@ £ª‡©ë)e½Ù
+ÝJ¿•NåwZ†ZÃ+Ô^._m3óR¾›ÓóXª.øÑÊÙÒ˜ÿ8LUM1Nþ+¯Qĉ¬6Êš\öÙxϱ$y[ÕnŠr7Ê·8«é:ÖÌñž¸5Þ£ÃÅ’úÓweM°ø4vƒòP «‹©϶¬Ë` µ(ÿ­He%4Fzv ƒ){Þ†—Èœ&-ü2M·K£Î㯠—¯¶™y)ÛÍéy¬:¿±Ñ³7àF6U‘\!m„¬v“!Ū-ùÓ™UÿF#Êð;Ô¼(5Œz@“ŸtMfКÆYÜ.®‡½‡©œ5V׋լvëö ¥ 2Õ,T6d6£;YVÏtõ¹9?;2ïìˆ!cé2¦ÉÊVÝîn:'’;’Òåd6ÜXÞ6›—dù«ÃííWÛ̼ çô8±¹hðÛŽ¶è8´
+Ílùlx £÷À¢‹1[C bRÇTk“+ØYc°eƒ@BÙ˜ð Æ#™GK¬öP;Ëvd ç“(‚¶ç.RÖšÏG¥K°ïZñˆÓ$×fŸ”ãD}%K)Þ(×ÝNú1¶?#Z§å˜ÍNç÷²4^xZ>ñ—¼6Ð^¾Úfæ¥l9ûyIÎŽZ7
+FÛ²Âo»T¢Äi‰nj™Ël÷ò™èmî=R &l¦í7+”tÃýƒ$–m•û[= ×–í¬iÄ,iÈœ•ŸB» uÚEI%^¬mõ>ÒkÍ!‘,uû:“·«K}…˜½|µÍÌK)wNÏã"š¨ef,Ö…|ÅÀj3?h¦2¨™6ÉìRe»²U_½_«¸¦)¤.øºð#&M ¶dYl°˜‚¼Ã,0 ÖÂô³w©®“°!Tt/1ø'Í#«‚ *Ò¾‘¤»G<TJâA‹¾L&àÃy5Šç*8j+òWÃ${Ö˜»÷2—·ë^¡zpóÍ6+/åã9=/íp¹e¸û;¶!ürÕO²6É<4ød‹ ¨|5
+ê ,‹¢h5÷
+xYæ™å£ÍY\CI„ÒÉ› Mѳ£Ã½æÍŠF=I}`(0hòÍDž.ä毮O¾šóòžžÓƒÎ©„èôaÛÃâ´Éåðîd¡•°55v˜ò'›ÜÛÞC ¹ŠÐÒð3~‡ÒºÇ÷,ZµÛÕzÂRYhŠ´ž§­•ç©ö ïº1_D3£ì•PZš=¼ y ¬êUÁðœ\¼$[ª¢ ÑÉÒY
+D7Ø0›kzXئ¾™ËÛ…ëÏ,í¬7ßl³òRŸ“èæIllËÍ–" ±Ò`“i2îMàn˘•ü<IÕâÄgÉ!ˆ³¥ ã¶=‡˜UŠÊ³?y— b!æd ~¹O
+£úƒº‡ Õ%’†‡•à½7_9¹
+«5[à{ZLºSšdkâ<apmáÁHƒY¯ßáºÉõX½GœT2ýT/¾g¬[9ÆLŒ¥ˆd£Vàæ³DÞÛÙ1Ϫû줨îÎhÖ*s±”Ù ¶ÚF·›T«¶ •·8Œæ5‰å3¢¨ˆÖ›-^ÖÂðø±9U–2I¾9oóľHr,q³"}ôʺÊì:9•luØdz»b ú}I˜hÒ¢søá «àhí¢‹Õí‹:óôV/¢z}Œäê³í¹lZ˜]SüƒÌQ9 Áš5=Yo'ZúÀ>êŒ{š¦0ÀÓü™ âh5ž$Žyn`f;°YØ }­f¤ÕCÁItAüúhŽ^KR-›¢…ÆV3÷l6›è<¨XïO¼Ê4x'ºAÛò¤ÅÐ5<YQFÙÙŠ;)6™iЛù„5kdx®NMî0ïXu½*º^FúÝw;m/d aº¹™-&_«ÝØYæû G²ÝÑó•†Ñᘠ·ÃÍ9ŽxöRè§UœL]C¬ŒO©b4y ÏvÊÃõÕõ¤M
+öâ<«–¹ØÎÃv>V«zýü+ªp-N/¯ÈŽæê}Qå~ëìUföÛ|Æj)‡ÈœR…É_ä©’³,«ž²­&+Œ1$¯Êg»·iBÖÜgîãÉ»õã,û>‹¼‰Õ˜;ÓNJ%¶Dï°fEšÀ®o1(Ú&¨pì*¤–ÈåÍÅ(˜È]lÌK†v®;è>™¥0·0~ï‰<ð kÆAX(Px"&+×yÕ%+Ü)¨¾ÊÛ
+Óc¹æVˆÆT`y
+rö[”·ÕS]›ÄuUµb|°¸E Ñj‚‰êÖvL6ÏsPBôI+¢>6-g/i©F“­«¬‚Á’ÕÀ°hÂÙÜQ-»8V,…!W+ÔIžGyžÊó`ŠG¡gù¤Vk’k³Z>ü¸”ÐÑÜû“ð„õM-ØziRl3(˜=g*(~ekž­$j±Ø$gØ Tn¾ã¥4=_b~©5s|àL3Ý~à¡8{¶jíô ISƒSpÏDò"ÌàÙ—Wú‘Òç;¦ô!P›8}Š©òÔéŠÁaOý‰ºø= y¤›û?ÌÓ-*P±«ÜŽöÒ=]ô¬õ_^µÿ`Uƒ°÷úañûÀ® K³úzÿ÷‰ÞçÏ ö`k-âB3ñkcYM˜ÞÙ¡m&ÂÓ±ª¸Ï»ó½Ï qF—±ipÌä„w£Y²“iÁã(€ûa¿\U®·¥RåÇò¤Çè;{µ·W¿µ¼ü0™þ §HµÄh­|D§L9ïé*¨›Ô §¼Ze±löá鶖LŽM;¯ÙWoZ½3°tö_ñ‡i¶ôžÑªÇ>?vZ¡}ïscIÍÞ=ƒ£ÿ9XuÞýíÝ_|ô³úñß}þ›wŸüéÓ?¼y£¿ÿí§ÿò»Ï9r V0hµë{×ÿ†ûŠÁwÿ|÷—ÿý“Çÿç“YàÝ— Ž_ýüÝ/öÓûÿr\û7”Æáãuõú§ üÑú¿_ýÇ éÈJ†û{ºÙ3øŸ£f'·4™–k'\ò]ø×5ô÷åþç÷ÿøOáþ7ø¥_Ü1Vˆ:’ØYy?“I‘GÛ‡r÷Ä-¦÷ž­êœ]uA@ÕÚCµ».©è§žÌÌÇPm ÊEÕo<r¾´,õS±µéT¿^>ˆ5†A°k_‚×aNÎ~ _Ùr\n¾ëKYžù~ÝŒ"˜b.þµ˜'±I| ¤š-x»GøÚoLßOɃrü’ ¬»:òFÉf׌jÓ±³Gü—öÀ~™ýßë7½ýªï¶¹ù‡»g&ú™Å˜—×yfIŸ,ûoïžÐSÝâðíÝ3P}
+ç§Ø¸å^êÐŒþ‡ÛíðѸ‰Ûê&ëýF?ÂHÔ@ûëÁªbÈili Þ¥==+z0×虉{ü‰{ü°‰{ü‰{ü°‰{¼ûªf‚luÖq,ó®g/ÈI™#mw¤C<#§j5V[â÷h§ ;句e¿eS
+¨}]Lü¸eqâغ±G)ƒoHпtˆÎ=ÙRØa”OŒÕ5)ü¹ <ãˆöaô¥b¬
+qèß«.Š‰MR’˜Õ9»â«ŸJ¶Ã4•Ì–5#qLõZT¨Guò+Noa"lSžnï?}€Èzü ‘õø"ëñƒDÖW¸O"«ÃK7»Nìd°Ò¢ì½¦ª£›“a½T * (¯–¼È¬]Œß€&¶à {Í)¬
+±2»ØóÁxŠ@@DCv$μ§øi0Ñ0¦ú
+tl€8`Ì“t×42¢F†u;iJ݈ìP¬ È]‹½ª½
+CÀ6Z—á%HYd'\ƒZ6Æ8¸fºíõh¸·w%:‹ÆøúdÏÓ÷é1Ùãô]u·&{N~覘̭Ôi*2Ô™ àsUe5µlѾ„Ó˜çDCÐ#ùnÙ¸RGbè£>¢Ä‡B\A@‰#h„óÈnëȯ"¦é¼2pÞ?ÌŽ|¼U¤çˆóv+öÓ+:Ï}üu‚n¦‘…t Q*
+ìx°˜l4ì˜Ûý1”?ý±If4²ˆ ß JŸ .+züu³5DžU‰hAî‚e²‚)ŸÚ>Òäx£ßÊ¡ ÀÙ¹PVdâA3Ø—eÚRâãëGK7Ÿ?Ë=GÐ
+ ƒ9àh)/ù
+Ñ=(SŽÌP–[š¶{jš³›œ=ªÂ úF’Å%m„;ð}@pžÅÛŸëcµŠhÜŠ@0MtCgæ*àñ†YFÞm¼wo!‚9'°®‚NErsâÚýÓ‰¼YÆì…•¯ ¿O?ܦÇñk¶Ow…×Ry%Ÿa±®yõÉ+%xü|¹cÁô’”q G ’ ¬ŠïÕ°¢%µ6Ö™Êôè¼<äëX&t”á`vÃ¥Ù¥y¬Ù 30Â}…Õ*òÑ¥ˆ:ø¿q %
+ÂͨRb,±!J©Â5ÕLJ†•Dp<ˆ§
+­¯8ióJ(M£Ï¸™½XœÀ¥&!à†§Ù,^fØ•êN+;†ËÊp/ëW[yÊïÓ·é¹wû¶çîˆ=rPfŸ8„N:
+“\øBBrq’”_æªbÄX×&iÜxIPÓÂPÎpÌhÝe‚ÉøC,@xRa­¡X‚F#î{Vö†Nn¸¶ˆbí¬>²›Ä#Š“ò1àð¥•Iš‚{ œîðpNÞpWˆáTþ GséÖ Ãtyà÷Y‹€t :Ò |»ˆ0Q©
+S«¤¾Ä0v¦nÜé`’;-e÷Úå׆áë‡Ûô8†-²0¶¹æ‡ ú>„‘¬1F²Zörª\?=–V” \@í+ëÑš‘ÍÇÁ¤ð¤ãˆZ½e^&$âû$·éFò­2Ÿ\ï@p£ù–Õ “ZÍ¥
+)HD¥YÔƒ‘è0~–•4² aë'Æàb &„Áˆ
+pÈe–E$ˆwF±‹¸áÙp¹ x†Ó”F`{²5Ì‘ºûÓâ†V_£÷ôÃmzÂ2׫÷ÎùÎÛwÛaW‹Ë3Ð0¬‘÷h‡œEA‰nÌfÃÅ™¨>†@Íȇ6Á¡ÊûHʆI€ÉG݇té ÂRN‘»åÎ]²·5eHU’º!®Æ´¦îITÀ4ûò!âš•©Ò­e']¼lœØP c¬XW…2ÅD†"…–¯¹ds ø½”=§^ä,fÃÓ#é>€˜n‘Y7çâ6óe•š¥‘ž×’2ý5‚øúá6=b6oŠ½mÁÈ*€iqy&[܆âÙ´85§2'(ãhDq§c-ËŠ2c®²ÅíÛ†ôF9ÛPç×.# Ãx¶0¯]„(P%f‚K:…TßÏóàHépš5DŽåëÅÝ éA~´d©È¥BaŠ#”ÌŠÿeë2IˆHª*óf2ù‘2óü¢ŽëÛ‘Y«nf&^rƒn@:^¾×8[ïYïŸâÁp找ã*£ç ă¿Õ¬ÝË-ÌŠªú³ÎŒ€jé§ 3$»‰5Šq› Þ°”í>j¬Ý’Û'R¦7ÞMVþ·˜Î»ñù1ÿ+S¯í–Ä›‹|š@a ´œÒ/–-c’HVšéJÎB7&Dú-
+èà‹'¦*Ê[P˜
+‡ìæΓU%
+BKŸŒx0z&ÌÀ9B ÎrÆUÃ8ž¾×83j+ ¾ gž€¾RGsY”g²³øC­oœ ‰*±fUW´ðã³Î2EÞ©^©§I8‘g æ:«ù¯Èx)=ZÈ* fu :_‘ÙÆþlp xÁˆÖ9ϳdë¾$Ü70Súh1e¢€ u:ÛÜäÀ4%ÿV%ò$«ؤ•Á¯]PY~ƒí!‘~­+CågH±¡Á÷c­_•€Á1f‚j7¤M/¶ûOxài×À=“áÄß®^ñDú?Ê©n¬m&Ðð'ÆX–akMpd’Ú]Öwmv¦ˆvëe­]áŠ_g¨ÖŽÜ7K¦U9t<éé²0áx†vÅ
+:;/°dË"¬À˜ÚÀ%±@qdªAV63Œe¶ìÁ9`î“qd*©…ŒÕ‘Tûr”ÿ@a-¸„‘¬Ëé?`æår³ NןvBúѸdG*7Ý>°¢ˆ&°Âô`‰•µºGú=Â8m8Cñú¡iu A‡ŠÂ`å£ê¾™9™‘NR”%©Hfñ¢†L‘©L&á’#ÁXkŽ†ÒìŸ^D‘ûÝ­(š°ÜÑÕÎe†¦‚¨Kˆw#Âym # i±”4Ü<à/Óøh c‡ÄÎâÝÉ:»yסï¤Å#9³µË÷Å#â3~e+½µª7b!ëØ®‰ 9AŽ`¤s‡êlVŽÄàøƒ?*ñ"ÜÅ=Š„&ïÑ…ÜÔŽ ;KŒñ¨eã^û®VgI
+u׃ëwZↈ[…›f¾ÊS½îOuJÛ
+ ª?ŽŒLB‘U«uµi¶uæmC,‹LÆJC¶À
+G,¨JÃÛ´Rég±'a…
+ _USÕz¡ÓŒ Šódk˜Êb/I;u­z:ÐP*epMÅ«´31=!%5­ ”“ª–¼úM%sÑhü±Ñ¦6Ñ™í½Æ°Ï9frŒ„;¬
+g挓ª(Yy¥X–éÀîŸÀaFc× b¸<•äƒ)aþšî[ŸÆí¥ôQÕ÷É(V‰Ãkª¢]…¼Ý¤j\Îd^3.¸"ž,Ëã·ã×+SAþã%h«ñÜÔÚä)U°µ£DW¬Ò3þŸ¯ñm7,ù-k>4nvM`ð&Y@»$ÑôÂHûJ^’€¶âLÐk8žÒÈqb;Ÿb”ø–5/šÛâ¡=¸êºd{d*ݺ'sðø­ø² ¼ð¦Ô’áçV¤œŸÅ)ÖÑ'¥˜µ×ZÆåñµŽŸZ¨ðùbŠvïr´Òí_º;4 «Þ±5Œê¡2ø/œ,šþÙÄ5º~Ê—ÍÐ7ô [ÒÜ°’¦(ŇóQ­ÁÂÒþ4RjÆ^ÿz«Þ&¾pÂ<urÊ85Xóå es4ߌ½¿Sï2_Eë÷)/áÀõÛ¿;§ÔsS鳚ÚCi [“Ô4ì’aœ±ÍÊõ8V5eä!–Õ¯S=ä‚üî¹ Ï·*JÈ=D«­¹G [:…­R6[𞤎ð8ÂciWŒz}Ä1Ãoݳ‡èÉæ·äSØsúþUä‘þ&ºb¿ê~Äí÷ü°¨$ß裾ŒIÒöÎ÷‰Jò>å<Œîy&É+>/èì_ίÆf›ß™ß—¿ƒ¦qh"úH®e&x Qã³óÔŒ¡%©òiÆuŠ@6™UGX²y8’êAöØ;º²ä@ÿ
+lcLk¬ïg'%ùo5ÆSÒ&j»‘Q•¼%2Ø>`‰ü’û"¸°³Ü¾•¤›(€’cQ=<XÒÆ2{wËS„ïNÿܘ/Ð{£ª‹÷?==¥åÝ á<ë±'û’Æ…µ÷-xT/‰A)rD ßÃ
+ôSå·¡XɺÈiQ|DZ•ªã"¸‹˜ è48 ®l·í±iŽ%ödÆêTþêåO6òH*#2¸Ÿ®šjê̘¥¹T¹ð¯U™LÇœ ë&rŒô}×à‡¼}fz}òÑ¿þå:Èý/Q¼´?]ÿ÷‹«Dùè‡+Q¾8ÍÇ™¸ÔIz?€½ô s$ë}J˜†á²ÔÏõW £Æ-­—\öˆgi²wÞ jóÈ‹zãXñTyÕýþõ¯Áê:‘Lz>!7\ŽÕžŒ@‰¾ù¥vz”¨ðIÈ Õ+Ãåqú¤/ã"}òÕú[baîé Ô‘"ešü+w/¾~]Kî¸=à/ùöîBˆª\S=!æË;úÀ[q¬:®AÆŠ¾ßŸsŒ?¶ÇöùƒR½ù’·_õ¡\‘O§ò¹é~ò
+Ï.Ýuy/x{wÁÈS•'û Ÿö¼ú¶ÑoÂëô#D^Dþþ¼Ô3•ìë ®Œ æù-ø`$Ú@¾ƒ!TcŒ||Žá™|þµlmOC©”vs éNoüø‹ðøa‹ðøáñ/Âí \>ø錜g-Älcð!"ŒgG:ë+¨¿’T»‰Ÿ6¤onH¢âÈ°*\æ’êšVí£ú°"éç{Ž±“>~¸‰³Á~Ža–5Gà#Òˆfše3r¬&+G@¿È®k¨@*`JH%TâaCbåËaiCÓEìæ„· ¥i ©÷Šï÷§¡Œù7RàOßTž=~<{ü0yöø äÙãW}¨MƇÒqí瀪À&ŸÝ µhCE1›´²ÅIhDtxÇ)4ÀÎÁÛê4ø‰.o]“¢öÃÖ Ž‘¡+Àg¤-Tõ[ ¥ŒÜsq¸`Úw¦ÕY•È fWDŒ€Œ ¶Pøl¸ÄXtᇆ)vë¯ÍÊõ¬K‚u /…¼­Ú ðNc„d•@,Sg0rÚkmš\bé9‘O‡“=dÐnÚ%››±Y| ó0w¨6-ÛÈ5&ºkĘ U5Q¤oÀE¹­. ùû¡k¤¸$)Š`¬ù×±¯4QÁÖ½W˜ÜBR±ðW°÷.j“ñ-1‹”ÆÏWæ¸ØÃDl—µêê5‡ÊÊgv”wPu®±ë€d
+k$t.®EÚ×®?Êõt&4œ=êb++Œ0+OËÁpÆ–áé²0iâbÍ÷×™¼¬á¸¹ä ÖñÌ\|;<d§‡æ%«Cjdé"QHaJ@'¸SP†éMä X(:""è¾
+0ª¿·aI£+·–ç2ʺ×nñL 3"¥~Fðæ ¢r€f;‹VŠy¶BliSxÒ-=^pÝ.‹›©¼¬â¼¹æ ×ùÜd|Kd`§Çd»8;f°ÉXj&~@Kñ› š­)sS“®:“%èÄ2ò*Ê.ˆ;œ¿„ëTv$» l›¡+xUÆsw=n›âÛƒRX Ìæ¯è‚ÅßçE”R¥@pmÓ|ÊÔc’üÔ‹ó%3ªBN*jÀ×,ÛcCiÙÕW}°çíñ’Ûe4|ýðñzùP›ŒoƒüëäÜõÖ°¤D‚¦hàV¤¬rD$ŠpPÄa†ÃpM€UÚ°øgX4M£(Ó4Y̱‘\`×\841/•dª€xèÞµp5Í­ H'é´ÀOŠ®16=© éâZ{5CžÔo£9
+³ïù¯Ð[áy§ì` U®û
+Y®“y»Žêö
+
+Æ>ˆ!Í ¦Ê–„W¦é›€UtÀêtè^­&OKèBð”VÑåÖЊϖ2èŠ*bF†Ît+øL*ªbÀÈ„É÷¿†ŠŠ•¨
+tl(³ùʹ<¯bqåú‡×'j“ñíwžÊnk×iË0³¡“Ñ%.Œ¹ù™>¯„†²uײæûšµ›Ü*RAÙ°†ú@U•ÀY»bwÖ÷5šIp "Äk·€WNépÕš0¥¡¦ X&~ÕR›S6´q%±Ûƒ’§u >­Ñ”Á`VÛdë ÀèÆ$[‡®Z¶à­Ú:-Kð´†éÿoïÛv$¹®+¿ þ¡^xŠ>÷ËÌ“Ô–
+„"FzÞ>‡&´lz;6š±qžÆQ•¡QŠ%¦,©bôK”3y…ðÉÎ7`ž;¨qyö]¤r*.𢮜5~>õ/Ôø(–¿-æçE…‡ènÉÅ\½) %PýðâìÙG™øJÄŽêÓ-È %4´
+  ¡bZŠuDSÖߨ|¡ 5ÈOhÐåYz‹tãTÎô0«‘èðŽâX³SbP»µÌº”þú¢ËØÛ/pp’¯ãÏ ^ÂÑ¥÷Т2™c
+Jì¼?Ôw•=¶µeo4R©–}l¡)Öä*¡ÔèçHŃŠÆÛ˜¹i».3½ýz³pªq½ âIÅ/May:ê¬,„¤OLÕØåU…iDäx%Ür̆{ÇóÉ4í
+ŠŽ™„„â«QÈgÝ55zªìiÓeë £©¼Øy%¾«½û‹×ŽèÅ
+Q6¼è²x$ôb£XÇßÝ
+hԛ么ìfI;x_Ed&|õ".ùeY/è‰(š’¢%zVh…UÊõ¡•3He/ sÔ)+W†éù6’(Á©$m˜¤õ=×Ø!©ên³v,cZïb¸R½¼[BÓªbÈfWT{Ó¤h¦Çæ©z*7DI!'÷ܦ-A¯¬óZÞˆ ·ÜŠÔNžwÈÁÚ£ÙûÙÂÇÕ‚v¦­.œ£w:PŠ]ñ'«6®Ý”`*däEAZŸ€Äû•dÉšÑÌ bŠœvbµÛßÆ°]Ŭ’¤ú°nOa\ +Û@îšÐè¡än ¹ Õ‘4ê¤ó
+^O¾€a8HÌDÎZ+‚ч¦anOðI,G¯†­8ãÊû
+å¹€ƒ5Œ°¦[’/MÉ4Ì¢.:]¬‡e?<ó²MŽ Z»–tÌF±v*’ªÑí"Ã%^+×é$…Ÿ ±©ïÅf˲ÅÇ uÎ!ÜN³Ý¦1 õïÑ,.±Öc¸Mœ®8`ït҄ઠBpÑh8þDª„g‘<1íÅ$ ´Wx«]‹>k3<- ŠÝXóJO2?I6”u6{H«Ÿ??ö³"¹/Á´VÐÈ^Êj6YÑîql´k×O×·œJ¥;·`pP€X2%;~¥ÉJo¦8ãÊ
+ð21<²é»i-/V4Pöä1»Î w¬dÀ]5»OõQàPUûÙ.ù¢°ùé´÷{;ãœgO ˜ªÀÆ"N:
+¾Ó]t”[xÒò@=š?׬Ý~S‘ön˜ž}d… ?’ນö5£t÷·á†zÂ'½îäøãõF5hå¯Ëvž¦]åôù×µ`mØ»‚¼È°$c§nä…4ê{&RX‡LŒQëol–ÞdÏž"™ÿO§ÝP—E~üÞú{ê³þ7ÝërÅ»Îæ]ãºö›+;søÞá~dŒP´Z‘{‚Š«´é·)T')Ô >úþ|ìýùÈûóqwb|jdĵB×dÉ1—‘ Ê=T@„ÉXÝ°~\$sÇJŽ‘|xÉ^+¤PènÖ˜É@a{!K½Z×_°üEÇÈ°³–x“ȶÊ{šÙÒ°AU
+}3åp5Š8Ìóð«ú\‡›ÓÕœÂYðèQŸ"vº¾(K8±C€baB'>÷¯X‚ b’%|
+Ìã '|Iü)6@/ØéÀä}¸Tf(ï?é¢í›)"àÕc8†Ë´.¶‹¿¹ ×­­‹í·ÿè1_½g9|ÙþåîÚF\Ý®Ëm}uw}ûä1˜^Ý]Ý5h–Ç»p èÃ,]ÐD}÷(Õ ì^`÷Ýa÷óÓ¢àÊ­^
+ör\J.(јËĘ] ¡h Å¿×7h(¦h; Rb —“6OGvÈI˜
+áx<ïÛ×'TÀ©í®,É•…;-pµ¤
+ •›ËsÓìd“k€’¬â" h¯Òt¬-áœÄÑ4C%¸<hÚ‰¬ôõJ¨ËÖp¥ÂÑÔ+œDzƒáÌ.5‰é9RD™b¾žâUæü«r©F¾ìj®ªWã->€™£Þ1Ö—\'ø^xVeò7­x’³@Jò]×|*ôðõó›ÅÎÙùh“’¯\S¤¦{¼¾l¦é¾ =ëÉšŒY=öiN¹HQ‘}‘óxY‘¦ábË2d)M€æô=Ž(¨E›i%ÜQDŠö»°¾É ¼ØК\‘œÎi·1¯'a¹Ð,|k×˯~è*îÊ’\Y¸}«¸Þ–nA—5
+ªqìí¹dóšHâÇo"¿ÚíN`ž=àÕÂЀ*•ÐNÄ»9²Eß„A î2¾“W ¢lÅÉTHQäø£Æ°eϱ\“Ó8Y¨9o‘Æ[F îSî2‡~aŒ¢Þ09°wFÃ#ˆˆ5Ö™EdWNÌåñT.ÿE[=±¾lÏe =\ ="‹0k.}· 7Ì&–#@„*]J
+| HrUZjF“¿pî*«‚ï…3;<0Ž?elò«]VÙýa~nšaŽ5-:IƒÔž4ì͇”‡32Š\§_®fÿ:ÊŒ®)«F1¾Ú'ö‚ƒª&¸S® 0ÏJFxºpÇ0‚Ißí>÷Üù“tŠMfä°^íÆYƒÏ"žëAcÑx²Uð<·£k˜°)·‰R|Îúo™ö‹³&HOûùÕe[xM£ÈËzj5|ÑnÄûyº0¹ü@åQ(¬(a¤ˆî Eu˜·Âòˆ8ñ’·ŽÒ);ê¢àÀ»‘<¹œSZÚÎ÷Ú5b5uqr¦0y`$«<<Ó×ÕëPÞ8=ÛùÐé):<Q8[84*ø£äoˆ˜Õåp%•,âÝ›³4
+›åª¯jz&FB‹ÐD!„™!R'ËJÅJ _ˆ»äø¡Q4Tëòxq/7Wd^°þÄrø²Ý†8ôpÝ©òQ÷é“ôfo7qÝé…OµGÇÚ‹[[Zö—ý€¢—Ø…ëÍ×ÔSÑ*l.yè­{b¸n ÓüºÑÎR¤‰àl+H]"21ê1FÇ: B}ÊVÞ ~³’Wùž¶×öZÓ«£MÕœrJÀUÌ»OÄ'æÕƒÙ–c=áÉÊ-ìYç7÷éEÃ…,bÖ«Œ#~¼¼—ÛKî°__ _´ñ•žÂcÛï`ÏökÌØÚ)4Åâ!­$ü·ìæü´ùÛ8Â@Üq*ähÖDz]H|vÜ„ªÈ.L›æP71,WԄפIÌ¿´º·sê„+î÷°Î•S­˜šÇ=–
+C¤&‘QjØI OcÐŒý[+<…&D_j‘Õ8“Y?cˆä‡c6Ѳ¿ku¬ã稫-íyk³¼È¤?¹¶l7¢;=]x >âPGÃÃ(æÔm8­fw@Ò£Oµ­y¨6£ÅBQˆê6ÍæW¢ÑíS˜ZœëRÎ,¨%ÌH·‹õÚŒ ù ì3ÞDÁ4q«§WBÉÒëì!”‘Ä=
+Ë[F÷3ÞL5­Aoô+8ºâèp¥qÝÚ4Ë\øÐðsš]æ}=4Êy g¶2n¡½h)gŠ÷WV÷r{ÉDõ‚ö'—ÖíFl©'ÿšþŽv4_@¨qëD{oš²Šå*z5´NÅŽæ,ž
+ ‹ß€‡ñïAfY1À½Í覓išŽ8TØú1‰P—¡ŽoÇrª2QÛ²õ-Pcsi±OŒ0†¾Ë*àçÆU±¶kè]ì4ãI®þÀÜÜkí÷®—U2Æ"Ýa3ƒ9ïÙÀžµj¥Óš¾º—ÛË& ´?¹¶l·àZ=]•½¤õɼBôD1’d£›¨{nêµ NÈn1›è#Óm*<Ís–ýÈQ$½É í‰8v°4t{Y™ŠìÄÄ¢Xø‹$²Ö‡øš@#NUxÚ^¹ÚŒ_÷Áf’X†÷¦‰¾th{Õ1“ª€:»€ õ‚òU`}7dȨMw)eC{W¨æ¾Ï|všJWør{Ūô‚ö§–Öí/fj=yc½›×J3?Ô`Òn6f˜$x:%rú
+‰!=ŒeµÓˆÞtzÏÆ Z+âsÇ(hB¨J½ÁòÁ‚íUOCø¨¡§=7 ŽÓœìÍÀXÈCß3«y~\XDšåþ
+fnCb{þ)ÌLt,:ʨp„ÅÙ¥GXŠ‘É
+ÜR-«9¤*—ºãdßbŒ©Ø:lšw¨Œr¨éŸ=ñqª˜<?êw¯Úï©”N„K]‰âÒ܆ ÁPíµ!£±‰Í†Ai ã”Ô ¬ŠÃŽQ<
+¢“Ao1£®ˆÁ¶n/–䙻ƳÎ<0™«¸9ÞšQl®¡ú0£àÿÇöDãd¤áÈÚ"˜S¥?S.OÇ\¥QÛ{¤™FŸÖäµ³Æ}÷xyoÄyË’ó!1¹ ¹Ô˜èGÔ»R³§ªIcPК¢&!–l,ª<c²ùˆæ7 Ô±”¾à› Qöèy˜N¾k`ü‰2cXœ®FW,Ö÷¿i}’Þ—ü¼T8
+w†=d K¹öŠÕîxETkvÚÌäÄ5ËËH_œ-„ÛÛ
+Ì]m–W%3Z¯$ÒŽA²·½4ijÙÒAQ];+AµÕ­æõMòq·˜»üùÒ(äj^+õ›”0ìê¨udöKŒ’pÛ ìðØ’'•§Uñݪ®L™ÜTD½ªØѾc² _ ¦°”ËF¤zÓùQ/ªÑ¬óBž
+‹Ùâ(s·«ukt?oÖèÅÅÒPT@m`y²ªyÓ¸Ft î\k¡®÷†Äñê‰0çLGë:2wœ{Î] kÞ£¨§cl td V Áð—©ï¬Oqú—)“R¢>Ç)de3‘µ˜×ä6LØ,Œ€G[ ì‹;{õ™Æµ3-2ΫJ{B»Xk†rÞò9®5úVú)7€¸FŒ²äó"—Q¬ êŽ,qÁ ìÊQª¬[±ÂØyU|œì6”ÊÜ% Ž8zeD›/§ØàÒ±ò¢Ú.ÁÉ€Ã{Ç+ÊÚ]Ó¤Æ}l 3KÄm“j0|¡õ3Xà¡©Ä"š™Özå©nõüD,„S Ï^vUŒVŸëÖÕ¾<¶9¼Ê}ûÍC
+§5…Ïj·¾ö™6 ô…Zù{¥VV4P!B‘+ïzŒ-®
+ A¬Ï­Gð‹=~RpV“ï˜Ie> ;m'Ýêƒ0­òÚÉ by:³ÍæTFXð”gûg8G0’Ï¡·YO¢¶Wh,!|(μV˜
+Xd¢Ã<ý¢‹,Œ©ÐæúÜ~ó:‹Ë‡$g÷?†/8|ŸÁß«
+ÔpurÔü¬ƒÈïþÕÝQXllÙºVŠ—7/Á+3Ks;~¦ÛʬË,Éá×\¶ß”_hÝøÅýzç
+Ø2ýËÝÕ5¿²1éÊ=]Ùæ+`8CæÕÝu`]…ßãU¿çKÌ_bæ‚w÷»ñ½
+ÒKôÕ@×X½¡ óç
+¶1 †å¨‘øAü-kýkMžuÆHÔãTÐ#hQ$¦D=C¨Þ6Ï4D£9Ó⯘•–ƒæÿ*/
+Ø:ÝŒÕm¿nÊqYBld4}mÛÔƒ^¤5»Ï9§2pÌ&q¥!¹¦RdÉ&ÑPÓDOÕ5r‘úe$UÂ?
+V¨B¶Rós‚)d¦ênŽ´“—“§Þ'[ è묘KûBsòA[µRž†¦i¢ùâ7î:àüd,ŒøeQ&Œa¯ø\ éþñª]îYíGíÅ
+Ø:ÝŒ m¿n‡HãJÉA„b-ŠJq“X¢(íq„2Æ0½4ìÅI›W.^´bÄ4»Tá"%Oè639]uóMN{5 0
+ðt°½@fÆäÆ9q“<ËÀH=Zó|C·/Í‹Yå”UÚ»v?ðÛ[ÏnŒrÿx}/vבóáâûrlnCÁ¶_”äeîMiÊãU/ÝÖ<
+™K\ÏÚ7vXJ˜ìÙÆ°¨[J)¹9må”â².šúAÂ(I4Ö½« W¦ŠLl±§Ý.iCHfɴ٬⠪ÁŒdã'ÞQhiª›=ÎŒz¯˜gñtw²
+Ufï¥P˜‹°ŸK_•¤ ª»Ðfå ‡ŠVÆÞÕ ½Ë^o<ÍéÌæÞ>¨qJ‰“1üñÒ^li ­|Ø ¾\[§›1­í¡øˆ.5WÚ#k›BqOiCªÞµlo¡HDÆ®î[7FöSÑͬàHFßCsÕ[ƒ°]JYAºÚ†gŠ¦ öÜ^íË´¡ìí½’…çèf-°À×Ì÷HœÁBIÚ;ø2PN( 1“Fy&¡—6Ín,F¯Þ¡1‘67­£à ­‘¦¶ž‡Ëµ½ØÙí
+k­æ(†ìlYÆÛœz»Ï­Ds7ùp†w$Ëô¦Þ-‹9[·Rtý&쎕ë,“vv2.š"­Ê:©#ß?^Ù‹me Ù ìË°uº‹Ú~Ùc1ª!Ûl[æŹknf—ÔëÓRH´8t"J]°RFŽÎ¥Äs¨;°‡…H¶—?—ÕqK;;úÕZ³‰ÐK?Ø@öÌÑB$9s‹ëª}š HdûÔ)LO ¶cl„$:‡äBï²è„šM÷qµÍÔHÝdS%ÖÒÆ•œW÷bggK8¶/W@Ët+δcþÆÚM¡ Gk¶G'C;dä6%¾Bx¡´™îºÙ'Ã,Ð$ªMš,‡Nä2L³ÓÓnJÞE›—ÔqÆC1û"3Xiž6™1¥[=Êèv±>Š%Òûn±ÿZ‡bUŸ‰ÍA¶3A.~hÚ!ÆPØ™eóÕàÀ˜&VD¬Rœ@ÌJØò~&ˆÀnÓ÷vS&™å>O»mûrÂDöàÿ‡z(-€­ÒÍÈÕ>.êüTTí|eë=m&EdÃä >í·MªY´Ñt4;¤ùf(͉°/ˆŸv@(Fvxz):<{‘s/ÍÝCÿMð©TŠíû½+iñÖ4h—¦ƒ²úÝqo Øà£m)Tî|UxõGÍ AV†"=˜žc±8¼ò™o>Ë—Z#5¨´Ì® ¹,Ï€ìtò¾c=Á\¢.rÄx ±[1±í?ÄÉËV fb–ä6xgòî⊠–dæT[ ^¬Ùpʼ>áfå„€‘¥ÿ¶cÕœ €·™±Y' ÓÅW=yæ\I¤ž|+ÄD˹hpK¶É=ñ«Uf‘‡O†%yx
+Ûl¨%ÉV9L Ø–$<)ì®Ùž¤£ëÄi–G»­ÌobTüƒ(òäöU§Fœ"™«Èæsƒéë‰É1ÀŽ*KT÷¼XÍÔö#™°¤q ¶9~,øk¥_X/Ê­H׿ëÁ[3“k”xR€A2¯GEf›ŸrçA¦Õ žÚà#œõîæô<ê/_Ì``lܪ[F‹Îõƒiø#åöÁjJñf¼7J”ðk3šhÏ÷¡Œá £k¨š¡I˜†éòjÔYûbŠ©´Ê Ž
+
+‰¸#Š
+â¬*,âk}¥Ñ”{Àû ïéá9\¨sD¼ÀŸIQ´Ï›¬‹´Útõf‡°3ÌCÏ`¶S|‚€ù«§[y•Zõ<å“ÝþiGªš©‰Óô®|‚JàJi£ÿ®ˆ&ìj\¨ÞŽÓÌFõD~P¶…fkñÞö³,ÙXûjkVOhl`¨a
+ìZ£Pœ:¸ÝìgÃÑ˘àê0²U$¯!Ÿ`5Õäìg·ù‘ɹí ï:Ï¡æU{ØÌ“,¦Ϋv+'Г»zDÞôC0k·Î¸VÐŒï™?ºƒïuãS쩈OL6·vôê´PÌŒÎc„²g€—ÕõH):Ž%û»-±x¼´êö­Ù7sYŸó±ç¥ø ¸>·ÝâV1þ…£¾Xع®
+œ ¹bc˜£õ^f·Ò©ï-ñž“·7®®Í¼¬¦¡æ­{¶ÐÚ¦Bmº‘‘­K§xÊÄt#c+–fu[´5Í©˜jaÁ}ôÏy gV3=üªÙDûj³N’fÒ+T¤ßi€úç¬8²d/Ì=,î­Ø¹¬åGVc(‹¨BJ‘
+0000000016 00000 n
+0000045544 00000 n
+0000000004 00000 f
+0000000006 00000 f
+0000055744 00000 n
+0000000007 00000 f
+0000000008 00000 f
+0000000009 00000 f
+0000000010 00000 f
+0000000011 00000 f
+0000000012 00000 f
+0000000013 00000 f
+0000000014 00000 f
+0000000018 00000 f
+0000045595 00000 n
+0000055567 00000 n
+0000055598 00000 n
+0000000019 00000 f
+0000000020 00000 f
+0000000021 00000 f
+0000000022 00000 f
+0000000023 00000 f
+0000000024 00000 f
+0000000025 00000 f
+0000000026 00000 f
+0000000030 00000 f
+0000045664 00000 n
+0000055451 00000 n
+0000055482 00000 n
+0000000031 00000 f
+0000000032 00000 f
+0000000033 00000 f
+0000000034 00000 f
+0000000035 00000 f
+0000000036 00000 f
+0000000040 00000 f
+0000045735 00000 n
+0000055335 00000 n
+0000055366 00000 n
+0000000041 00000 f
+0000000042 00000 f
+0000000043 00000 f
+0000000044 00000 f
+0000000045 00000 f
+0000000046 00000 f
+0000000047 00000 f
+0000000048 00000 f
+0000000049 00000 f
+0000000050 00000 f
+0000000051 00000 f
+0000000052 00000 f
+0000000053 00000 f
+0000000054 00000 f
+0000000055 00000 f
+0000000056 00000 f
+0000000057 00000 f
+0000000058 00000 f
+0000000059 00000 f
+0000000060 00000 f
+0000000061 00000 f
+0000000062 00000 f
+0000000063 00000 f
+0000000064 00000 f
+0000000068 00000 f
+0000045805 00000 n
+0000055219 00000 n
+0000055250 00000 n
+0000000069 00000 f
+0000000070 00000 f
+0000000071 00000 f
+0000000072 00000 f
+0000000073 00000 f
+0000000074 00000 f
+0000000075 00000 f
+0000000076 00000 f
+0000000077 00000 f
+0000000081 00000 f
+0000045874 00000 n
+0000055103 00000 n
+0000055134 00000 n
+0000000082 00000 f
+0000000083 00000 f
+0000000084 00000 f
+0000000085 00000 f
+0000000086 00000 f
+0000000087 00000 f
+0000000088 00000 f
+0000000089 00000 f
+0000000093 00000 f
+0000045945 00000 n
+0000054987 00000 n
+0000055018 00000 n
+0000000094 00000 f
+0000000095 00000 f
+0000000096 00000 f
+0000000097 00000 f
+0000000098 00000 f
+0000000099 00000 f
+0000000100 00000 f
+0000000101 00000 f
+0000000102 00000 f
+0000000103 00000 f
+0000000104 00000 f
+0000000105 00000 f
+0000000106 00000 f
+0000000107 00000 f
+0000000108 00000 f
+0000000109 00000 f
+0000000110 00000 f
+0000000111 00000 f
+0000000112 00000 f
+0000000113 00000 f
+0000000114 00000 f
+0000000115 00000 f
+0000000119 00000 f
+0000046015 00000 n
+0000054869 00000 n
+0000054901 00000 n
+0000000120 00000 f
+0000000121 00000 f
+0000000122 00000 f
+0000000123 00000 f
+0000000124 00000 f
+0000000125 00000 f
+0000000126 00000 f
+0000000127 00000 f
+0000000128 00000 f
+0000000132 00000 f
+0000046087 00000 n
+0000054751 00000 n
+0000054783 00000 n
+0000000133 00000 f
+0000000134 00000 f
+0000000135 00000 f
+0000000136 00000 f
+0000000137 00000 f
+0000000138 00000 f
+0000000139 00000 f
+0000000140 00000 f
+0000000144 00000 f
+0000046161 00000 n
+0000054633 00000 n
+0000054665 00000 n
+0000000145 00000 f
+0000000146 00000 f
+0000000147 00000 f
+0000000148 00000 f
+0000000149 00000 f
+0000000150 00000 f
+0000000151 00000 f
+0000000152 00000 f
+0000000153 00000 f
+0000000154 00000 f
+0000000155 00000 f
+0000000156 00000 f
+0000000157 00000 f
+0000000158 00000 f
+0000000159 00000 f
+0000000160 00000 f
+0000000161 00000 f
+0000000162 00000 f
+0000000163 00000 f
+0000000164 00000 f
+0000000165 00000 f
+0000000166 00000 f
+0000000170 00000 f
+0000046234 00000 n
+0000054515 00000 n
+0000054547 00000 n
+0000000171 00000 f
+0000000172 00000 f
+0000000173 00000 f
+0000000174 00000 f
+0000000175 00000 f
+0000000176 00000 f
+0000000177 00000 f
+0000000178 00000 f
+0000000179 00000 f
+0000000183 00000 f
+0000046306 00000 n
+0000054397 00000 n
+0000054429 00000 n
+0000000184 00000 f
+0000000185 00000 f
+0000000186 00000 f
+0000000187 00000 f
+0000000188 00000 f
+0000000189 00000 f
+0000000190 00000 f
+0000000191 00000 f
+0000000195 00000 f
+0000046380 00000 n
+0000054279 00000 n
+0000054311 00000 n
+0000000196 00000 f
+0000000197 00000 f
+0000000198 00000 f
+0000000199 00000 f
+0000000200 00000 f
+0000000201 00000 f
+0000000202 00000 f
+0000000203 00000 f
+0000000204 00000 f
+0000000205 00000 f
+0000000206 00000 f
+0000000207 00000 f
+0000000208 00000 f
+0000000209 00000 f
+0000000210 00000 f
+0000000211 00000 f
+0000000212 00000 f
+0000000213 00000 f
+0000000214 00000 f
+0000000215 00000 f
+0000000216 00000 f
+0000000217 00000 f
+0000000221 00000 f
+0000046453 00000 n
+0000054161 00000 n
+0000054193 00000 n
+0000000222 00000 f
+0000000223 00000 f
+0000000224 00000 f
+0000000225 00000 f
+0000000226 00000 f
+0000000227 00000 f
+0000000228 00000 f
+0000000229 00000 f
+0000000230 00000 f
+0000000231 00000 f
+0000000232 00000 f
+0000000233 00000 f
+0000000234 00000 f
+0000000235 00000 f
+0000000236 00000 f
+0000000240 00000 f
+0000046525 00000 n
+0000054043 00000 n
+0000054075 00000 n
+0000000241 00000 f
+0000000242 00000 f
+0000000243 00000 f
+0000000244 00000 f
+0000000245 00000 f
+0000000246 00000 f
+0000000247 00000 f
+0000000248 00000 f
+0000000249 00000 f
+0000000250 00000 f
+0000000251 00000 f
+0000000252 00000 f
+0000000253 00000 f
+0000000254 00000 f
+0000000258 00000 f
+0000046599 00000 n
+0000053925 00000 n
+0000053957 00000 n
+0000000259 00000 f
+0000000260 00000 f
+0000000261 00000 f
+0000000262 00000 f
+0000000263 00000 f
+0000000264 00000 f
+0000000265 00000 f
+0000000266 00000 f
+0000000267 00000 f
+0000000268 00000 f
+0000000269 00000 f
+0000000270 00000 f
+0000000271 00000 f
+0000000272 00000 f
+0000000273 00000 f
+0000000274 00000 f
+0000000275 00000 f
+0000000276 00000 f
+0000000277 00000 f
+0000000278 00000 f
+0000000279 00000 f
+0000000280 00000 f
+0000000281 00000 f
+0000000282 00000 f
+0000000283 00000 f
+0000000284 00000 f
+0000000285 00000 f
+0000000286 00000 f
+0000000290 00000 f
+0000046672 00000 n
+0000053807 00000 n
+0000053839 00000 n
+0000000291 00000 f
+0000000292 00000 f
+0000000293 00000 f
+0000000294 00000 f
+0000000295 00000 f
+0000000296 00000 f
+0000000297 00000 f
+0000000298 00000 f
+0000000299 00000 f
+0000000300 00000 f
+0000000301 00000 f
+0000000302 00000 f
+0000000303 00000 f
+0000000304 00000 f
+0000000305 00000 f
+0000000309 00000 f
+0000046744 00000 n
+0000053689 00000 n
+0000053721 00000 n
+0000000310 00000 f
+0000000311 00000 f
+0000000312 00000 f
+0000000313 00000 f
+0000000314 00000 f
+0000000315 00000 f
+0000000316 00000 f
+0000000317 00000 f
+0000000318 00000 f
+0000000319 00000 f
+0000000320 00000 f
+0000000321 00000 f
+0000000322 00000 f
+0000000323 00000 f
+0000000327 00000 f
+0000046818 00000 n
+0000053571 00000 n
+0000053603 00000 n
+0000000328 00000 f
+0000000329 00000 f
+0000000330 00000 f
+0000000331 00000 f
+0000000332 00000 f
+0000000333 00000 f
+0000000334 00000 f
+0000000335 00000 f
+0000000336 00000 f
+0000000337 00000 f
+0000000338 00000 f
+0000000339 00000 f
+0000000340 00000 f
+0000000341 00000 f
+0000000342 00000 f
+0000000343 00000 f
+0000000344 00000 f
+0000000345 00000 f
+0000000346 00000 f
+0000000347 00000 f
+0000000348 00000 f
+0000000349 00000 f
+0000000350 00000 f
+0000000351 00000 f
+0000000352 00000 f
+0000000353 00000 f
+0000000354 00000 f
+0000000355 00000 f
+0000000359 00000 f
+0000046891 00000 n
+0000053453 00000 n
+0000053485 00000 n
+0000000360 00000 f
+0000000361 00000 f
+0000000362 00000 f
+0000000363 00000 f
+0000000364 00000 f
+0000000365 00000 f
+0000000366 00000 f
+0000000367 00000 f
+0000000368 00000 f
+0000000369 00000 f
+0000000370 00000 f
+0000000371 00000 f
+0000000372 00000 f
+0000000373 00000 f
+0000000374 00000 f
+0000000378 00000 f
+0000046963 00000 n
+0000053335 00000 n
+0000053367 00000 n
+0000000379 00000 f
+0000000380 00000 f
+0000000381 00000 f
+0000000382 00000 f
+0000000383 00000 f
+0000000384 00000 f
+0000000385 00000 f
+0000000386 00000 f
+0000000387 00000 f
+0000000388 00000 f
+0000000389 00000 f
+0000000390 00000 f
+0000000391 00000 f
+0000000392 00000 f
+0000000396 00000 f
+0000047037 00000 n
+0000053217 00000 n
+0000053249 00000 n
+0000000397 00000 f
+0000000398 00000 f
+0000000399 00000 f
+0000000400 00000 f
+0000000401 00000 f
+0000000402 00000 f
+0000000403 00000 f
+0000000404 00000 f
+0000000405 00000 f
+0000000406 00000 f
+0000000407 00000 f
+0000000408 00000 f
+0000000409 00000 f
+0000000410 00000 f
+0000000411 00000 f
+0000000412 00000 f
+0000000413 00000 f
+0000000414 00000 f
+0000000415 00000 f
+0000000416 00000 f
+0000000417 00000 f
+0000000418 00000 f
+0000000419 00000 f
+0000000420 00000 f
+0000000421 00000 f
+0000000422 00000 f
+0000000423 00000 f
+0000000424 00000 f
+0000000428 00000 f
+0000047110 00000 n
+0000053099 00000 n
+0000053131 00000 n
+0000000429 00000 f
+0000000430 00000 f
+0000000431 00000 f
+0000000432 00000 f
+0000000433 00000 f
+0000000434 00000 f
+0000000435 00000 f
+0000000436 00000 f
+0000000437 00000 f
+0000000438 00000 f
+0000000439 00000 f
+0000000440 00000 f
+0000000441 00000 f
+0000000442 00000 f
+0000000443 00000 f
+0000000447 00000 f
+0000047182 00000 n
+0000052981 00000 n
+0000053013 00000 n
+0000000448 00000 f
+0000000449 00000 f
+0000000450 00000 f
+0000000451 00000 f
+0000000452 00000 f
+0000000453 00000 f
+0000000454 00000 f
+0000000455 00000 f
+0000000456 00000 f
+0000000457 00000 f
+0000000458 00000 f
+0000000459 00000 f
+0000000460 00000 f
+0000000461 00000 f
+0000000465 00000 f
+0000047254 00000 n
+0000052863 00000 n
+0000052895 00000 n
+0000000466 00000 f
+0000000467 00000 f
+0000000468 00000 f
+0000000469 00000 f
+0000000470 00000 f
+0000000471 00000 f
+0000000472 00000 f
+0000000473 00000 f
+0000000474 00000 f
+0000000475 00000 f
+0000000476 00000 f
+0000000477 00000 f
+0000000478 00000 f
+0000000479 00000 f
+0000000483 00000 f
+0000047328 00000 n
+0000052745 00000 n
+0000052777 00000 n
+0000000484 00000 f
+0000000485 00000 f
+0000000486 00000 f
+0000000487 00000 f
+0000000488 00000 f
+0000000489 00000 f
+0000000490 00000 f
+0000000491 00000 f
+0000000492 00000 f
+0000000493 00000 f
+0000000494 00000 f
+0000000495 00000 f
+0000000496 00000 f
+0000000497 00000 f
+0000000498 00000 f
+0000000499 00000 f
+0000000500 00000 f
+0000000501 00000 f
+0000000502 00000 f
+0000000503 00000 f
+0000000504 00000 f
+0000000505 00000 f
+0000000506 00000 f
+0000000507 00000 f
+0000000508 00000 f
+0000000509 00000 f
+0000000510 00000 f
+0000000511 00000 f
+0000000512 00000 f
+0000000513 00000 f
+0000000514 00000 f
+0000000518 00000 f
+0000047401 00000 n
+0000052627 00000 n
+0000052659 00000 n
+0000000519 00000 f
+0000000520 00000 f
+0000000521 00000 f
+0000000522 00000 f
+0000000523 00000 f
+0000000524 00000 f
+0000000525 00000 f
+0000000526 00000 f
+0000000527 00000 f
+0000000528 00000 f
+0000000529 00000 f
+0000000530 00000 f
+0000000531 00000 f
+0000000532 00000 f
+0000000533 00000 f
+0000000537 00000 f
+0000047473 00000 n
+0000052509 00000 n
+0000052541 00000 n
+0000000538 00000 f
+0000000539 00000 f
+0000000540 00000 f
+0000000541 00000 f
+0000000542 00000 f
+0000000543 00000 f
+0000000544 00000 f
+0000000545 00000 f
+0000000546 00000 f
+0000000547 00000 f
+0000000548 00000 f
+0000000549 00000 f
+0000000550 00000 f
+0000000551 00000 f
+0000000555 00000 f
+0000047545 00000 n
+0000052391 00000 n
+0000052423 00000 n
+0000000556 00000 f
+0000000557 00000 f
+0000000558 00000 f
+0000000559 00000 f
+0000000560 00000 f
+0000000561 00000 f
+0000000562 00000 f
+0000000563 00000 f
+0000000564 00000 f
+0000000565 00000 f
+0000000566 00000 f
+0000000567 00000 f
+0000000568 00000 f
+0000000569 00000 f
+0000000573 00000 f
+0000047619 00000 n
+0000052273 00000 n
+0000052305 00000 n
+0000000574 00000 f
+0000000575 00000 f
+0000000576 00000 f
+0000000577 00000 f
+0000000578 00000 f
+0000000579 00000 f
+0000000580 00000 f
+0000000581 00000 f
+0000000582 00000 f
+0000000583 00000 f
+0000000584 00000 f
+0000000585 00000 f
+0000000586 00000 f
+0000000587 00000 f
+0000000588 00000 f
+0000000589 00000 f
+0000000590 00000 f
+0000000591 00000 f
+0000000592 00000 f
+0000000593 00000 f
+0000000594 00000 f
+0000000595 00000 f
+0000000596 00000 f
+0000000597 00000 f
+0000000598 00000 f
+0000000599 00000 f
+0000000600 00000 f
+0000000601 00000 f
+0000000602 00000 f
+0000000603 00000 f
+0000000604 00000 f
+0000000608 00000 f
+0000047692 00000 n
+0000052155 00000 n
+0000052187 00000 n
+0000000609 00000 f
+0000000610 00000 f
+0000000611 00000 f
+0000000612 00000 f
+0000000613 00000 f
+0000000614 00000 f
+0000000615 00000 f
+0000000616 00000 f
+0000000617 00000 f
+0000000618 00000 f
+0000000619 00000 f
+0000000620 00000 f
+0000000621 00000 f
+0000000622 00000 f
+0000000623 00000 f
+0000000627 00000 f
+0000047764 00000 n
+0000052037 00000 n
+0000052069 00000 n
+0000000628 00000 f
+0000000629 00000 f
+0000000630 00000 f
+0000000631 00000 f
+0000000632 00000 f
+0000000633 00000 f
+0000000634 00000 f
+0000000635 00000 f
+0000000636 00000 f
+0000000637 00000 f
+0000000638 00000 f
+0000000639 00000 f
+0000000640 00000 f
+0000000641 00000 f
+0000000645 00000 f
+0000047836 00000 n
+0000051919 00000 n
+0000051951 00000 n
+0000000646 00000 f
+0000000647 00000 f
+0000000648 00000 f
+0000000649 00000 f
+0000000650 00000 f
+0000000651 00000 f
+0000000652 00000 f
+0000000653 00000 f
+0000000654 00000 f
+0000000655 00000 f
+0000000656 00000 f
+0000000657 00000 f
+0000000658 00000 f
+0000000659 00000 f
+0000000663 00000 f
+0000047910 00000 n
+0000051801 00000 n
+0000051833 00000 n
+0000000664 00000 f
+0000000665 00000 f
+0000000666 00000 f
+0000000667 00000 f
+0000000668 00000 f
+0000000669 00000 f
+0000000670 00000 f
+0000000671 00000 f
+0000000672 00000 f
+0000000673 00000 f
+0000000674 00000 f
+0000000675 00000 f
+0000000676 00000 f
+0000000677 00000 f
+0000000678 00000 f
+0000000679 00000 f
+0000000680 00000 f
+0000000681 00000 f
+0000000682 00000 f
+0000000683 00000 f
+0000000684 00000 f
+0000000685 00000 f
+0000000686 00000 f
+0000000687 00000 f
+0000000688 00000 f
+0000000689 00000 f
+0000000690 00000 f
+0000000691 00000 f
+0000000692 00000 f
+0000000693 00000 f
+0000000694 00000 f
+0000000698 00000 f
+0000047983 00000 n
+0000051683 00000 n
+0000051715 00000 n
+0000000702 00000 f
+0000048054 00000 n
+0000051565 00000 n
+0000051597 00000 n
+0000000703 00000 f
+0000000704 00000 f
+0000000705 00000 f
+0000000706 00000 f
+0000000707 00000 f
+0000000708 00000 f
+0000000709 00000 f
+0000000710 00000 f
+0000000711 00000 f
+0000000712 00000 f
+0000000713 00000 f
+0000000714 00000 f
+0000000715 00000 f
+0000000716 00000 f
+0000000717 00000 f
+0000000721 00000 f
+0000048126 00000 n
+0000051447 00000 n
+0000051479 00000 n
+0000000722 00000 f
+0000000723 00000 f
+0000000724 00000 f
+0000000725 00000 f
+0000000726 00000 f
+0000000727 00000 f
+0000000728 00000 f
+0000000729 00000 f
+0000000730 00000 f
+0000000731 00000 f
+0000000732 00000 f
+0000000733 00000 f
+0000000734 00000 f
+0000000738 00000 f
+0000048198 00000 n
+0000051329 00000 n
+0000051361 00000 n
+0000000739 00000 f
+0000000740 00000 f
+0000000741 00000 f
+0000000742 00000 f
+0000000743 00000 f
+0000000744 00000 f
+0000000745 00000 f
+0000000746 00000 f
+0000000747 00000 f
+0000000748 00000 f
+0000000749 00000 f
+0000000750 00000 f
+0000000751 00000 f
+0000000752 00000 f
+0000000756 00000 f
+0000048272 00000 n
+0000051211 00000 n
+0000051243 00000 n
+0000000757 00000 f
+0000000758 00000 f
+0000000759 00000 f
+0000000760 00000 f
+0000000761 00000 f
+0000000762 00000 f
+0000000763 00000 f
+0000000764 00000 f
+0000000765 00000 f
+0000000766 00000 f
+0000000767 00000 f
+0000000768 00000 f
+0000000769 00000 f
+0000000770 00000 f
+0000000771 00000 f
+0000000772 00000 f
+0000000773 00000 f
+0000000774 00000 f
+0000000775 00000 f
+0000000776 00000 f
+0000000777 00000 f
+0000000778 00000 f
+0000000779 00000 f
+0000000780 00000 f
+0000000781 00000 f
+0000000782 00000 f
+0000000783 00000 f
+0000000784 00000 f
+0000000788 00000 f
+0000048345 00000 n
+0000051093 00000 n
+0000051125 00000 n
+0000000792 00000 f
+0000048416 00000 n
+0000050975 00000 n
+0000051007 00000 n
+0000000793 00000 f
+0000000794 00000 f
+0000000795 00000 f
+0000000796 00000 f
+0000000797 00000 f
+0000000798 00000 f
+0000000799 00000 f
+0000000800 00000 f
+0000000801 00000 f
+0000000802 00000 f
+0000000803 00000 f
+0000000804 00000 f
+0000000805 00000 f
+0000000806 00000 f
+0000000807 00000 f
+0000000811 00000 f
+0000048488 00000 n
+0000050857 00000 n
+0000050889 00000 n
+0000000812 00000 f
+0000000813 00000 f
+0000000814 00000 f
+0000000815 00000 f
+0000000816 00000 f
+0000000817 00000 f
+0000000818 00000 f
+0000000819 00000 f
+0000000820 00000 f
+0000000821 00000 f
+0000000822 00000 f
+0000000823 00000 f
+0000000824 00000 f
+0000000828 00000 f
+0000048560 00000 n
+0000050739 00000 n
+0000050771 00000 n
+0000000829 00000 f
+0000000830 00000 f
+0000000831 00000 f
+0000000832 00000 f
+0000000833 00000 f
+0000000834 00000 f
+0000000835 00000 f
+0000000836 00000 f
+0000000837 00000 f
+0000000838 00000 f
+0000000839 00000 f
+0000000840 00000 f
+0000000841 00000 f
+0000000842 00000 f
+0000000846 00000 f
+0000048634 00000 n
+0000050621 00000 n
+0000050653 00000 n
+0000000847 00000 f
+0000000848 00000 f
+0000000849 00000 f
+0000000850 00000 f
+0000000851 00000 f
+0000000852 00000 f
+0000000853 00000 f
+0000000854 00000 f
+0000000855 00000 f
+0000000856 00000 f
+0000000857 00000 f
+0000000858 00000 f
+0000000859 00000 f
+0000000860 00000 f
+0000000861 00000 f
+0000000862 00000 f
+0000000863 00000 f
+0000000864 00000 f
+0000000865 00000 f
+0000000866 00000 f
+0000000867 00000 f
+0000000868 00000 f
+0000000869 00000 f
+0000000870 00000 f
+0000000871 00000 f
+0000000872 00000 f
+0000000873 00000 f
+0000000874 00000 f
+0000000875 00000 f
+0000000876 00000 f
+0000000877 00000 f
+0000000881 00001 f
+0000048707 00000 n
+0000050503 00000 n
+0000050535 00000 n
+0000000885 00000 f
+0000048778 00000 n
+0000050385 00000 n
+0000050417 00000 n
+0000000886 00000 f
+0000000887 00000 f
+0000000888 00000 f
+0000000889 00000 f
+0000000890 00000 f
+0000000891 00000 f
+0000000892 00000 f
+0000000893 00000 f
+0000000894 00000 f
+0000000895 00000 f
+0000000896 00000 f
+0000000897 00000 f
+0000000898 00000 f
+0000000899 00000 f
+0000000900 00000 f
+0000000904 00000 f
+0000048850 00000 n
+0000050267 00000 n
+0000050299 00000 n
+0000000905 00000 f
+0000000906 00000 f
+0000000907 00000 f
+0000000908 00000 f
+0000000909 00000 f
+0000000910 00000 f
+0000000911 00000 f
+0000000912 00000 f
+0000000913 00000 f
+0000000914 00000 f
+0000000915 00000 f
+0000000916 00000 f
+0000000917 00000 f
+0000000921 00000 f
+0000048922 00000 n
+0000050149 00000 n
+0000050181 00000 n
+0000000922 00000 f
+0000000923 00000 f
+0000000924 00000 f
+0000000925 00000 f
+0000000926 00000 f
+0000000927 00000 f
+0000000928 00000 f
+0000000929 00000 f
+0000000930 00000 f
+0000000931 00000 f
+0000000932 00000 f
+0000000933 00000 f
+0000000934 00000 f
+0000000935 00000 f
+0000000939 00000 f
+0000048996 00000 n
+0000050031 00000 n
+0000050063 00000 n
+0000000940 00000 f
+0000000941 00000 f
+0000000942 00000 f
+0000000943 00000 f
+0000000944 00000 f
+0000000945 00000 f
+0000000946 00000 f
+0000000947 00000 f
+0000000948 00000 f
+0000000949 00000 f
+0000000950 00000 f
+0000000951 00000 f
+0000000952 00000 f
+0000000953 00000 f
+0000000954 00000 f
+0000000955 00000 f
+0000000956 00000 f
+0000000957 00001 f
+0000000958 00000 f
+0000000959 00000 f
+0000000960 00000 f
+0000000961 00000 f
+0000000969 00000 f
+0000174447 00000 n
+0000174523 00000 n
+0000174747 00000 n
+0000175772 00000 n
+0000183336 00000 n
+0000248925 00000 n
+0000314514 00000 n
+0000000974 00001 f
+0000055683 00000 n
+0000049069 00000 n
+0000049913 00000 n
+0000049945 00000 n
+0000000987 00001 f
+0000049140 00000 n
+0000049795 00000 n
+0000049827 00000 n
+0000058541 00000 n
+0000159418 00000 n
+0000160143 00000 n
+0000173817 00000 n
+0000060510 00000 n
+0000060547 00000 n
+0000160206 00000 n
+0000159481 00000 n
+0000063196 00000 n
+0000000997 00001 f
+0000147299 00000 n
+0000148055 00000 n
+0000159355 00000 n
+0000148118 00000 n
+0000147362 00000 n
+0000063689 00000 n
+0000049212 00000 n
+0000049677 00000 n
+0000049709 00000 n
+0000001004 00001 f
+0000132732 00000 n
+0000133484 00000 n
+0000147235 00000 n
+0000133547 00000 n
+0000132795 00000 n
+0000064172 00000 n
+0000001014 00001 f
+0000120560 00000 n
+0000121317 00000 n
+0000132668 00000 n
+0000121381 00000 n
+0000120624 00000 n
+0000064671 00000 n
+0000049284 00000 n
+0000049557 00000 n
+0000049590 00000 n
+0000001021 00001 f
+0000106070 00000 n
+0000106822 00000 n
+0000120496 00000 n
+0000106886 00000 n
+0000106134 00000 n
+0000065160 00000 n
+0000001022 00001 f
+0000001032 00001 f
+0000093888 00000 n
+0000094644 00000 n
+0000106006 00000 n
+0000094708 00000 n
+0000093952 00000 n
+0000065664 00000 n
+0000049361 00000 n
+0000049437 00000 n
+0000049470 00000 n
+0000001039 00001 f
+0000079247 00000 n
+0000080008 00000 n
+0000093824 00000 n
+0000080072 00000 n
+0000079311 00000 n
+0000066147 00000 n
+0000000000 00001 f
+0000067133 00000 n
+0000067894 00000 n
+0000079183 00000 n
+0000067958 00000 n
+0000067197 00000 n
+0000066651 00000 n
+0000058655 00000 n
+0000058947 00000 n
+0000056363 00000 n
+0000362622 00000 n
+0000059211 00000 n
+0000173880 00000 n
+0000173932 00000 n
+0000058383 00000 n
+0000000982 00000 n
+trailer <</Size 1055/Root 1 0 R/Info 1049 0 R/ID[<DFAD564605F549E3ABF1FC747EBA5A34><45AB28F93CAE4209BAB0B0278EBC9323>]>> startxref 362760 %%EOF \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
new file mode 100644
index 0000000000..17d1caeb66
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
@@ -0,0 +1 @@
+jquery=1.4.2
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
new file mode 100644
index 0000000000..6d81b8271e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -0,0 +1,190 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import scala.collection._
+import comment._
+
+/** Some entity of the Scaladoc model. */
+trait Entity {
+ def name : String
+ def inTemplate: TemplateEntity
+ def toRoot: List[Entity]
+ def qualifiedName: String
+ override def toString = qualifiedName
+ def universe: Universe
+}
+
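+// A minimal usage sketch, assuming `toRoot` lists this entity followed by its
+// enclosing templates up to the root package; the `describe` helper below is
+// hypothetical and only illustrates how `toRoot` and `qualifiedName` combine:
+//
+//   def describe(e: Entity): String =
+//     ("  " * (e.toRoot.length - 1)) + e.qualifiedName
+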
+/** A class, trait, object or package. A package is represented as an instance
+ * of the `Package` subclass. A class, trait, object or package may be
+ * directly an instance of `NoDocTemplate` if it is not ''documentable''
+ * (that is, if there is no documentation page for it in the current site);
+ * otherwise, it is represented as an instance of the `DocTemplateEntity`
+ * subclass. */
+trait TemplateEntity extends Entity {
+ def isPackage: Boolean
+ def isRootPackage: Boolean
+ def isTrait: Boolean
+ def isClass: Boolean
+ def isObject: Boolean
+ def isDocTemplate: Boolean
+ def isCaseClass: Boolean
+ def selfType : Option[TypeEntity]
+}
+trait NoDocTemplate extends TemplateEntity
+
+/** A member of a class, trait, object or package. */
+trait MemberEntity extends Entity {
+ def comment: Option[Comment]
+ def inTemplate: DocTemplateEntity
+ def toRoot: List[MemberEntity]
+ def inDefinitionTemplates: List[TemplateEntity]
+ def definitionName: String
+ def visibility: Visibility
+ def flags: List[Paragraph]
+ def deprecation: Option[Body]
+ def inheritedFrom: List[TemplateEntity]
+ def resultType: TypeEntity
+ def isDef: Boolean
+ def isVal: Boolean
+ def isLazyVal: Boolean
+ def isVar: Boolean
+ def isImplicit: Boolean
+ def isAbstract: Boolean
+ def isConstructor: Boolean
+ def isAliasType: Boolean
+ def isAbstractType: Boolean
+ def isTemplate: Boolean
+}
+
+trait HigherKinded extends Entity {
+ def typeParams: List[TypeParam]
+}
+
+/** A ''documentable'' class, trait or object (that is, a documentation page
+ * will be generated for it in the current site). */
+trait DocTemplateEntity extends TemplateEntity with MemberEntity {
+ def toRoot: List[DocTemplateEntity]
+ def inSource: Option[(io.AbstractFile, Int)]
+ def sourceUrl: Option[java.net.URL]
+ def parentType: Option[TypeEntity]
+ def linearization: List[(TemplateEntity, TypeEntity)]
+ def linearizationTemplates: List[TemplateEntity]
+ def linearizationTypes: List[TypeEntity]
+ def subClasses: List[DocTemplateEntity]
+ def members: List[MemberEntity]
+ def templates: List[DocTemplateEntity]
+ def methods: List[Def]
+ def values: List[Val]
+ def abstractTypes: List[AbstractType]
+ def aliasTypes: List[AliasType]
+ def companion: Option[DocTemplateEntity]
+}
+
+/** A ''documentable'' trait. */
+trait Trait extends DocTemplateEntity with HigherKinded
+
+/** A ''documentable'' class. */
+trait Class extends Trait with HigherKinded {
+ def primaryConstructor: Option[Constructor]
+ def constructors: List[Constructor]
+ def valueParams: List[List[ValueParam]]
+}
+
+/** A ''documentable'' object. */
+trait Object extends DocTemplateEntity
+
+/** A package that contains at least one ''documentable'' class, trait,
+ * object or package. */
+trait Package extends Object {
+ def inTemplate: Package
+ def toRoot: List[Package]
+ def packages: List[Package]
+}
+
+/** The root package, at the top of the entity hierarchy. */
+trait RootPackage extends Package
+
+trait NonTemplateMemberEntity extends MemberEntity {
+ def isUseCase: Boolean
+}
+
+/** A method (`def`) of a ''documentable'' class, trait or object. */
+trait Def extends NonTemplateMemberEntity with HigherKinded {
+ def valueParams : List[List[ValueParam]]
+}
+
+trait Constructor extends NonTemplateMemberEntity {
+ def isPrimary: Boolean
+ def valueParams : List[List[ValueParam]]
+}
+
+/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a
+ * ''documentable'' class, trait or object. */
+trait Val extends NonTemplateMemberEntity
+
+/** An abstract type of a ''documentable'' class, trait or object. */
+trait AbstractType extends NonTemplateMemberEntity with HigherKinded {
+ def lo: Option[TypeEntity]
+ def hi: Option[TypeEntity]
+}
+
+/** An alias type of a ''documentable'' class, trait or object. */
+trait AliasType extends NonTemplateMemberEntity with HigherKinded {
+ def alias: TypeEntity
+}
+
+trait ParameterEntity extends Entity {
+ def isTypeParam: Boolean
+ def isValueParam: Boolean
+}
+
+/** A type parameter to a class or trait or to a method. */
+trait TypeParam extends ParameterEntity with HigherKinded {
+ def variance: String
+ def lo: Option[TypeEntity]
+ def hi: Option[TypeEntity]
+}
+
+/** A value parameter to a constructor or to a method. */
+trait ValueParam extends ParameterEntity {
+ def resultType: TypeEntity
+ def defaultValue: Option[TreeEntity]
+ def isImplicit: Boolean
+}
+
+/** A type that represents the visibility of members. */
+sealed trait Visibility {
+ def isProtected: Boolean = false
+ def isPublic: Boolean = false
+}
+
+/** The visibility of `private[this]` members. */
+case class PrivateInInstance() extends Visibility
+
+/** The visibility of `protected[this]` members. */
+case class ProtectedInInstance() extends Visibility {
+ override def isProtected = true
+}
+
+/** The visibility of `private[owner]` members. An unqualified private member
+ * is encoded with `owner` equal to the member's `inTemplate`. */
+case class PrivateInTemplate(owner: TemplateEntity) extends Visibility
+
+/** The visibility of `protected[owner]` members. An unqualified protected
+ * member is encoded with `owner` equal to the member's `inTemplate`.
+ * Note that whilst the member is visible in any template owned by `owner`,
+ * it is only visible in subclasses of the member's `inTemplate`. */
+case class ProtectedInTemplate(owner: TemplateEntity) extends Visibility {
+ override def isProtected = true
+}
+
+/** The visibility of public members. */
+case class Public() extends Visibility {
+ override def isPublic = true
+}
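For readers of the model API above, a minimal hypothetical sketch of how a documentation generator could turn a `Visibility` value into the Scala modifier it denotes; the `renderVisibility` name is an assumption and not part of this file:

  // Hypothetical helper (illustration only): maps a model Visibility to its Scala modifier.
  def renderVisibility(v: Visibility): String = v match {
    case PrivateInInstance()        => "private[this]"
    case ProtectedInInstance()      => "protected[this]"
    case PrivateInTemplate(owner)   => "private[" + owner.name + "]"
    case ProtectedInTemplate(owner) => "protected[" + owner.name + "]"
    case Public()                   => ""
  }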
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
new file mode 100644
index 0000000000..68830f7915
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -0,0 +1,573 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2010 LAMP/EPFL */
+
+package scala.tools.nsc
+package doc
+package model
+
+import comment._
+
+import scala.collection._
+import scala.util.matching.Regex
+
+import symtab.Flags
+
+import model.{ RootPackage => RootPackageEntity }
+
+/** This class extracts all the information required for documentation from the compilation units. */
+class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory: ModelFactory with CommentFactory with TreeFactory =>
+
+ import global._
+ import definitions.{ ObjectClass, ScalaObjectClass, RootPackage, EmptyPackage, NothingClass, AnyClass, AnyRefClass }
+
+ private var droppedPackages = 0
+ def templatesCount = templatesCache.size - droppedPackages
+
+ private var modelFinished = false
+ private var universe: Universe = null
+
+ /** */
+ def makeModel: Universe = {
+ val universe = new Universe { thisUniverse =>
+ thisFactory.universe = thisUniverse
+ val settings = thisFactory.settings
+ val rootPackage =
+ makeRootPackage getOrElse { throw new Error("no documentable class found in compilation units") }
+ }
+ modelFinished = true
+ thisFactory.universe = null
+ universe
+ }
+
+ /** */
+ protected val templatesCache =
+ new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
+
+ def findTemplate(query: String): Option[DocTemplateImpl] = {
+ if (!modelFinished) throw new Error("cannot find template in unfinished universe")
+ templatesCache.values find { tpl => tpl.qualifiedName == query && !tpl.isObject }
+ }
+
+ def optimize(str: String): String =
+ if (str.length < 16) str.intern else str
+
+ /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
+
+ /** Provides a default implementation for instances of the `Entity` type. */
+ abstract class EntityImpl(val sym: Symbol, inTpl: => TemplateImpl) extends Entity {
+ val name = optimize(sym.nameString)
+ def inTemplate: TemplateImpl = inTpl
+ def toRoot: List[EntityImpl] = this :: inTpl.toRoot
+ def qualifiedName = name
+ val universe = thisFactory.universe
+ }
+
+  /** Provides a default implementation for instances of the `TemplateEntity` type. The compiler symbol that
+    * underlies the entity is supplied at construction time. */
+ trait TemplateImpl extends EntityImpl with TemplateEntity {
+ override def qualifiedName: String =
+ if (inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
+ def isPackage = sym.isPackage
+ def isTrait = sym.isTrait
+ def isClass = sym.isClass && !sym.isTrait
+ def isObject = sym.isModule && !sym.isPackage
+ def isCaseClass = sym.isClass && sym.hasFlag(Flags.CASE)
+ def isRootPackage = false
+ def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
+ }
+
+  /** Provides a default implementation for templates that are not ''documentable'', that is, templates for which no
+    * documentation page is generated. */
+ class NoDocTemplateImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with NoDocTemplate {
+ def isDocTemplate = false
+ }
+
+  /** Provides a default implementation for instances of the `MemberEntity` type. The compiler symbol that underlies
+    * the entity is supplied at construction time. */
+ abstract class MemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ lazy val comment =
+ if (inTpl == null) None else thisFactory.comment(sym, inTpl)
+ override def inTemplate = inTpl
+ override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
+ def inDefinitionTemplates =
+ if (inTpl == null)
+ makeRootPackage.toList
+ else
+ makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
+ def visibility = {
+ if (sym.isPrivateLocal) PrivateInInstance()
+ else if (sym.isProtectedLocal) ProtectedInInstance()
+ else {
+ val qual =
+ if (sym.privateWithin != null && sym.privateWithin != NoSymbol)
+ Some(makeTemplate(sym.privateWithin))
+ else None
+ if (sym.isPrivate) PrivateInTemplate(inTpl)
+ else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl)
+ else if (qual.isDefined) PrivateInTemplate(qual.get)
+ else Public()
+ }
+ }
+ def flags = {
+ val fgs = mutable.ListBuffer.empty[Paragraph]
+ if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
+ if (sym hasFlag Flags.SEALED) fgs += Paragraph(Text("sealed"))
+ if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
+ if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
+ if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
+ fgs.toList
+ }
+ def deprecation =
+ if (sym.isDeprecated)
+ Some(sym.deprecationMessage match {
+ case Some(msg) => parseWiki(msg, NoPosition)
+          case None => Body(Nil)
+ })
+ else
+ comment flatMap { _.deprecated }
+ def inheritedFrom =
+ if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else
+ makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) })
+ def resultType = {
+ def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone
+ case PolyType(_, res) => resultTpe(res)
+ case MethodType(_, res) => resultTpe(res)
+ case _ => tpe
+ }
+ makeType(resultTpe(sym.tpe), inTemplate, sym)
+ }
+ def isDef = false
+ def isVal = false
+ def isLazyVal = false
+ def isVar = false
+ def isImplicit = sym.isImplicit
+ def isConstructor = false
+ def isAliasType = false
+ def isAbstractType = false
+ def isAbstract =
+ ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED))) ||
+ sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
+ def isTemplate = false
+ }
+
+  /** Provides a default implementation for instances of the `DocTemplateEntity` type. The compiler symbol that
+    * underlies the entity is supplied at construction time, and the instance registers itself with the
+    * `templatesCache` at the very start of its instantiation.
+    *
+    * The instantiation of `DocTemplateImpl` triggers the creation of the following entities:
+    *  - the owner of the template (as a full template);
+    *  - all ancestors of the template (as weak templates);
+    *  - all non-package members (including other templates, as full templates). */
+ abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
+ //if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
+ templatesCache += (sym -> this)
+ lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
+ override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
+ def inSource = if (sym.sourceFile != null) Some(sym.sourceFile, sym.pos.line) else None
+ def sourceUrl = {
+ def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
+ val assumedSourceRoot: String = {
+ val fixed = fixPath(settings.sourcepath.value)
+ if (fixed endsWith "/") fixed.dropRight(1) else fixed
+ }
+ if (!settings.docsourceurl.isDefault)
+ inSource map { case (file, _) =>
+ val filePath = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "").stripSuffix(".scala")
+ val tplOwner = this.inTemplate.qualifiedName
+ val tplName = this.name
+ val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""")
+ val patchedString = patches.replaceAllIn(settings.docsourceurl.value, { m => m.group(1) match {
+ case "FILE_PATH" => filePath
+ case "TPL_OWNER" => tplOwner
+ case "TPL_NAME" => tplName
+ }
+ })
+ new java.net.URL(patchedString)
+ }
+ else None
+ }
+ def parentType = {
+ if (sym.isPackage) None else {
+ val tps =
+ (sym.tpe.parents filter (_ != ScalaObjectClass.tpe)) map { _.asSeenFrom(sym.thisType, sym) }
+ Some(makeType(RefinedType(tps, EmptyScope), inTpl))
+ }
+ }
+ val linearization: List[(TemplateEntity, TypeEntity)] = {
+ val acs = sym.ancestors filter { _ != ScalaObjectClass }
+ val tps = acs map { cls => makeType(sym.info.baseType(cls), this) }
+ val tpls = acs map { makeTemplate(_) }
+ tpls map {
+ case dtpl: DocTemplateImpl => dtpl.registerSubClass(this)
+ case _ =>
+ }
+ tpls zip tps
+ }
+ def linearizationTemplates = linearization map { _._1 }
+ def linearizationTypes = linearization map { _._2 }
+ private lazy val subClassesCache = mutable.Buffer.empty[DocTemplateEntity]
+ def registerSubClass(sc: DocTemplateEntity): Unit = {
+ assert(subClassesCache != null)
+ subClassesCache += sc
+ }
+ def subClasses = subClassesCache.toList
+ protected lazy val memberSyms =
+ // Only this class's constructors are part of its members, inherited constructors are not.
+ sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym))
+ val members = memberSyms flatMap (makeMember(_, this))
+ val templates = members collect { case c: DocTemplateEntity => c }
+ val methods = members collect { case d: Def => d }
+ val values = members collect { case v: Val => v }
+ val abstractTypes = members collect { case t: AbstractType => t }
+ val aliasTypes = members collect { case t: AliasType => t }
+ override def isTemplate = true
+ def isDocTemplate = true
+ def companion = sym.companionSymbol match {
+ case NoSymbol => None
+ case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) =>
+ Some(makeDocTemplate(comSym, inTpl))
+ case _ => None
+ }
+ }
+
+ abstract class PackageImpl(sym: Symbol, inTpl: => PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
+ override def inTemplate = inTpl
+ override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
+ val packages = members collect { case p: Package => p }
+ }
+
+ abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
+
+ abstract class NonTemplateMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
+ override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
+ lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
+ def isUseCase = sym.isSynthetic
+ }
+
+ abstract class NonTemplateParamMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, inTpl) {
+ def valueParams =
+ sym.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
+ if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
+ }}
+ }
+
+ abstract class ParameterImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
+ override def inTemplate = inTpl
+ }
+
+ private trait TypeBoundsImpl extends EntityImpl {
+ def lo = sym.info.bounds match {
+ case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
+ Some(makeType(appliedType(lo, sym.info.typeParams map {_.tpe}), inTemplate))
+ case _ => None
+ }
+ def hi = sym.info.bounds match {
+ case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
+ Some(makeType(appliedType(hi, sym.info.typeParams map {_.tpe}), inTemplate))
+ case _ => None
+ }
+ }
+
+ trait HigherKindedImpl extends EntityImpl with HigherKinded {
+ def typeParams =
+ sym.typeParams map (makeTypeParam(_, inTemplate))
+ }
+
+ /* ============== MAKER METHODS ============== */
+
+ /** */
+ def normalizeTemplate(aSym: Symbol): Symbol = aSym match {
+ case null | EmptyPackage | NoSymbol =>
+ normalizeTemplate(RootPackage)
+ case ScalaObjectClass | ObjectClass =>
+ normalizeTemplate(AnyRefClass)
+ case _ if aSym.isModuleClass || aSym.isPackageObject =>
+ normalizeTemplate(aSym.sourceModule)
+ case _ =>
+ aSym
+ }
+
+ def makeRootPackage: Option[PackageImpl] =
+ makePackage(RootPackage, null)
+
+  /** Creates a package entity for the given symbol, or returns `None` if the symbol does not denote a package that
+    * contains at least one ''documentable'' class, trait or object. */
+ def makePackage(aSym: Symbol, inTpl: => PackageImpl): Option[PackageImpl] = {
+ val bSym = normalizeTemplate(aSym)
+ if (templatesCache isDefinedAt (bSym))
+ Some(templatesCache(bSym) match {case p: PackageImpl => p})
+ else {
+ val pack =
+ if (bSym == RootPackage)
+ new RootPackageImpl(bSym) {
+ override val name = "root"
+ override def inTemplate = this
+ override def toRoot = this :: Nil
+ override def qualifiedName = "_root_"
+ override def inheritedFrom = Nil
+ override def isRootPackage = true
+ override protected lazy val memberSyms =
+ (bSym.info.members ++ EmptyPackage.info.members) filter { s =>
+ s != EmptyPackage && s != RootPackage
+ }
+ }
+ else
+ new PackageImpl(bSym, inTpl) {}
+ if (pack.templates.isEmpty) {
+ droppedPackages += 1
+ None
+ }
+ else Some(pack)
+ }
+
+ }
+
+ /** */
+ def makeTemplate(aSym: Symbol): TemplateImpl = {
+ val bSym = normalizeTemplate(aSym)
+ if (bSym == RootPackage)
+ makeRootPackage.get
+ else if (bSym.isPackage)
+ makeTemplate(bSym.owner) match {
+ case inPkg: PackageImpl => makePackage(bSym, inPkg) getOrElse (new NoDocTemplateImpl(bSym, inPkg))
+ case _ => throw new Error("'" + bSym + "' must be in a package")
+ }
+ else if (templateShouldDocument(bSym))
+ makeTemplate(bSym.owner) match {
+ case inDTpl: DocTemplateImpl => makeDocTemplate(bSym, inDTpl)
+ case _ => throw new Error("'" + bSym + "' must be in documentable template")
+ }
+ else
+ new NoDocTemplateImpl(bSym, makeTemplate(bSym.owner))
+ }
+
+ /** */
+ def makeDocTemplate(aSym: Symbol, inTpl: => DocTemplateImpl): DocTemplateImpl = {
+ val bSym = normalizeTemplate(aSym)
+ val minimumInTpl =
+ if (bSym.owner != inTpl.sym)
+ makeTemplate(aSym.owner) match {
+ case inDTpl: DocTemplateImpl => inDTpl
+ case inNDTpl => throw new Error("'" + bSym + "' is owned by '" + inNDTpl + "' which is not documented")
+ }
+ else
+ inTpl
+ if (templatesCache isDefinedAt (bSym))
+ templatesCache(bSym)
+ else if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
+ new DocTemplateImpl(bSym, minimumInTpl) with Object
+ else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
+ new DocTemplateImpl(bSym, minimumInTpl) with Trait
+ else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
+ new DocTemplateImpl(bSym, minimumInTpl) with Class {
+ def valueParams =
+ // we don't want params on a class (non case class) signature
+ if (isCaseClass) List(sym.constrParamAccessors map (makeValueParam(_, this)))
+ else List.empty
+ val constructors =
+ members collect { case d: Constructor => d }
+ def primaryConstructor = constructors find { _.isPrimary }
+ }
+ else
+ throw new Error("'" + bSym + "' that isn't a class, trait or object cannot be built as a documentable template")
+ }
+
+ /** */
+ def makeMember(aSym: Symbol, inTpl: => DocTemplateImpl): List[MemberImpl] = {
+
+ def makeMember0(bSym: Symbol): Option[MemberImpl] = {
+ if (bSym.isGetter && bSym.isLazy)
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ override def isLazyVal = true
+ })
+ else if (bSym.isGetter && bSym.accessed.isMutable)
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ override def isVar = true
+ })
+ else if (bSym.isMethod && !bSym.isGetterOrSetter && !bSym.isConstructor && !bSym.isModule)
+ Some(new NonTemplateParamMemberImpl(bSym, inTpl) with HigherKindedImpl with Def {
+ override def isDef = true
+ })
+ else if (bSym.isConstructor)
+ Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Constructor {
+ override def isConstructor = true
+ def isPrimary = sym.isPrimaryConstructor
+ })
+ else if (bSym.isGetter) // Scala field accessor or Java field
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ override def isVal = true
+ })
+ else if (bSym.isAbstractType)
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
+ override def isAbstractType = true
+ })
+ else if (bSym.isAliasType)
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with HigherKindedImpl with AliasType {
+ override def isAliasType = true
+ def alias = makeType(sym.tpe.dealias, inTpl, sym)
+ })
+ else if (bSym.isPackage)
+ inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
+ else if ((bSym.isClass || bSym.isModule) && templateShouldDocument(bSym))
+ Some(makeDocTemplate(bSym, inTpl))
+ else
+ None
+ }
+
+ if (!localShouldDocument(aSym) || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor)
+ Nil
+ else {
+ val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
+ addCommentBody(bSym, inTpl, bComment, bPos)
+ }
+ (allSyms :+ aSym) flatMap { makeMember0(_) }
+ }
+
+ }
+
+ /** */
+ def makeTypeParam(aSym: Symbol, inTpl: => TemplateImpl): TypeParam =
+ new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
+ def isTypeParam = true
+ def isValueParam = false
+ def variance: String = {
+ if (sym hasFlag Flags.COVARIANT) "+"
+ else if (sym hasFlag Flags.CONTRAVARIANT) "-"
+ else ""
+ }
+ }
+
+ /** */
+ def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl): ValueParam = {
+ makeValueParam(aSym, inTpl, aSym.nameString)
+ }
+
+ /** */
+ def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl, newName: String): ValueParam =
+ new ParameterImpl(aSym, inTpl) with ValueParam {
+ override val name = newName
+ def isTypeParam = false
+ def isValueParam = true
+ def defaultValue =
+ if (aSym.hasDefault)
+ // units.filter should return only one element
+ (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match {
+ case List(unit) =>
+ (unit.body find (_.symbol == aSym)) match {
+ case Some(ValDef(_,_,_,rhs)) =>
+ Some(makeTree(rhs))
+ case _ => None
+ }
+ case _ => None
+ }
+ else None
+ def resultType =
+ makeType(sym.tpe, inTpl, sym)
+ def isImplicit = aSym.isImplicit
+ }
+
+ /** */
+ def makeType(aType: Type, inTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
+ def ownerTpl(sym: Symbol): Symbol =
+ if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
+ val tpe =
+ if (thisFactory.settings.useStupidTypes.value) aType else {
+ def ownerTpl(sym: Symbol): Symbol =
+ if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
+ val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym
+ aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym))
+ }
+ makeType(tpe, inTpl)
+ }
+
+ /** */
+ def makeType(aType: Type, inTpl: => TemplateImpl): TypeEntity =
+ new TypeEntity {
+ private val nameBuffer = new StringBuilder
+ private var refBuffer = new immutable.TreeMap[Int, (TemplateEntity, Int)]
+ private def appendTypes0(types: List[Type], sep: String): Unit = types match {
+ case Nil =>
+ case tp :: Nil =>
+ appendType0(tp)
+ case tp :: tps =>
+ appendType0(tp)
+ nameBuffer append sep
+ appendTypes0(tps, sep)
+ }
+ private def checkFunctionType(tpe: TypeRef): Boolean = {
+ val TypeRef(_, sym, args) = tpe
+ (args.length > 0) && (args.length - 1 <= definitions.MaxFunctionArity) &&
+ (sym == definitions.FunctionClass(args.length - 1))
+ }
+ private def appendType0(tpe: Type): Unit = tpe match {
+ /* Type refs */
+ case tp: TypeRef if (checkFunctionType(tp)) =>
+ nameBuffer append '('
+ appendTypes0(tp.args.init, ", ")
+ nameBuffer append ") ⇒ "
+ appendType0(tp.args.last)
+ case tp: TypeRef if (tp.typeSymbol == definitions.RepeatedParamClass) =>
+ appendType0(tp.args.head)
+ nameBuffer append '*'
+ case tp: TypeRef if (tp.typeSymbol == definitions.ByNameParamClass) =>
+ nameBuffer append "⇒ "
+ appendType0(tp.args.head)
+ case tp: TypeRef if (definitions.isTupleType(tp)) =>
+ nameBuffer append '('
+ appendTypes0(tp.args, ", ")
+ nameBuffer append ')'
+ case TypeRef(pre, aSym, targs) =>
+ val bSym = normalizeTemplate(aSym)
+ if (bSym.isNonClassType)
+ nameBuffer append bSym.name
+ else {
+ val tpl = makeTemplate(bSym)
+ val pos0 = nameBuffer.length
+ refBuffer += pos0 -> (tpl, tpl.name.length)
+ nameBuffer append tpl.name
+ }
+ if (!targs.isEmpty) {
+ nameBuffer append '['
+ appendTypes0(targs, ", ")
+ nameBuffer append ']'
+ }
+ /* Refined types */
+ case RefinedType(parents, defs) =>
+ appendTypes0((if (parents.length > 1) parents filterNot (_ == ObjectClass.tpe) else parents), " with ")
+ if (!defs.isEmpty) {
+ nameBuffer append " {...}" // TODO: actually print the refinement
+ }
+ /* Polymorphic types */
+ case PolyType(tparams, result) if tparams nonEmpty =>
+// throw new Error("Polymorphic type '" + tpe + "' cannot be printed as a type")
+ def typeParamsToString(tps: List[Symbol]): String = if(tps isEmpty) "" else
+ tps.map{tparam =>
+ tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams)
+ }.mkString("[", ", ", "]")
+ nameBuffer append typeParamsToString(tparams)
+ appendType0(result)
+ case PolyType(tparams, result) if (tparams.isEmpty) =>
+ nameBuffer append '⇒'
+ appendType0(result)
+ case tpen =>
+ nameBuffer append tpen.toString
+ }
+ appendType0(aType)
+ val refEntity = refBuffer
+ val name = optimize(nameBuffer.toString)
+ }
+
+ def templateShouldDocument(aSym: Symbol): Boolean = {
+ // TODO: document sourceless entities (e.g., Any, etc), based on a new Setting to be added
+ (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) &&
+ ( aSym.owner == NoSymbol || templateShouldDocument(aSym.owner) ) && !isEmptyJavaObject(aSym)
+ }
+
+ def isEmptyJavaObject(aSym: Symbol): Boolean = {
+ def hasMembers = aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
+ aSym.isModule && aSym.hasFlag(Flags.JAVA) && !hasMembers
+ }
+
+ def localShouldDocument(aSym: Symbol): Boolean = {
+ !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
+ }
+}
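To see what the `€{...}` patching in `DocTemplateImpl.sourceUrl` above does to the `-doc-source-url` setting, here is a minimal stand-alone sketch with invented values; it mirrors only the regex-substitution step, nothing else from the factory:

  import scala.util.matching.Regex

  // Illustration only: the same €{...} substitution as in sourceUrl, with made-up inputs.
  val patches  = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""")
  val template = "http://example.org/browse€{FILE_PATH}.scala#€{TPL_NAME}"
  val patched  = patches.replaceAllIn(template, { m => m.group(1) match {
    case "FILE_PATH" => "/scala/Option"
    case "TPL_OWNER" => "scala"
    case "TPL_NAME"  => "Option"
  }})
  // patched == "http://example.org/browse/scala/Option.scala#Option"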
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
new file mode 100644
index 0000000000..7a0c8c7961
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
@@ -0,0 +1,16 @@
+
+
+
+/** A TreeEntity stores the text of a tree together with the hyperlinks to the model entities referenced in it. */
+
+package scala.tools.nsc
+package doc
+package model
+
+import scala.collection.immutable.TreeMap
+
+
+class TreeEntity {
+ var expression:String = ""
+ var refs = new TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
new file mode 100755
index 0000000000..6a5c8f6e8d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -0,0 +1,89 @@
+package scala.tools.nsc
+package doc
+package model
+
+/** The goal of this trait is, using `makeTree`, to browse a tree and
+ * 1. build the string of the complete tree (`tree.expression`), and
+ * 2. fill in the references needed to create hyperlinks later in `html.pageTemplate`.
+ *
+ * It is used by `ModelFactory.makeTree`.
+ */
+
+trait TreeFactory {
+ thisTreeFactory: ModelFactory with TreeFactory =>
+ val global:Global
+
+ import global._
+
+ def makeTree(rhs:Tree):TreeEntity = {
+ val printSteps:Boolean = false
+ val tree = new TreeEntity
+ val firstIndex = rhs.pos.startOrPoint
+
+ /** Gets the full string of the right hand side of a parameter, without links */
+ def makeExpression(rhs:Tree){
+ val start = rhs.pos.startOrPoint
+ val end = rhs.pos.endOrPoint
+ var expr = ""
+ for (i <- start until end) expr += rhs.pos.source.content.apply(i)
+ rhs match {
+ case Block(r,s) => expr += "}"
+ case _ =>
+ }
+ tree.expression += expr
+ }
+
+ val traverser = new Traverser{
+      /** Finds the entity to which a link will later be made,
+       * and stores it in tree.refs together with its position.
+       */
+ def makeLink(rhs:Tree){
+ var start = rhs.pos.point - firstIndex
+ val end = rhs.pos.endOrPoint - firstIndex
+ if(start != end) {
+ var asym = rhs.symbol
+ if (asym.isClass) makeTemplate(asym) match{
+ case docTmpl: DocTemplateImpl =>
+ tree.refs += ((start,(docTmpl,end)))
+ case _ =>
+ }
+ else if (asym.isTerm && asym.owner.isClass){
+ if (asym.isSetter) asym = asym.getter(asym.owner)
+ makeTemplate(asym.owner) match {
+ case docTmpl:DocTemplateImpl =>
+ val mbrs:List[MemberImpl] = makeMember(asym,docTmpl)
+ mbrs foreach {mbr =>
+ tree.refs += ((start,(mbr,end)))
+ }
+ case _ =>
+ }
+ }
+ }
+ }
+      /**
+       * Goes through the tree and creates links whenever a Select occurs.
+       * The New(_) case is ignored because the object we want to link to
+       * is reached by recursion, and we do not want a link on the "new" keyword itself.
+       * If a link is not created for a tree, its case is probably not handled here.
+       */
+ override def traverse(tree:Tree) = tree match {
+ case Select(qualifier, name) =>
+ qualifier match {
+ case New(_) =>
+ case _ => makeLink(tree)
+ }
+ traverse(qualifier)
+ case Ident(_) => makeLink(tree)
+ case _ =>
+ super.traverse(tree)
+ }
+ }
+
+ makeExpression(rhs)
+ traverser.traverse(rhs)
+ return tree
+ }
+
+}
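To make the `(start, (entity, end))` offsets kept in `TreeEntity.refs` concrete, a small hypothetical sketch of how a page generator might splice links into the stored expression; the `renderWithLinks` helper and its Markdown-like output are assumptions:

  // Hypothetical rendering helper (illustration only): wraps every referenced range
  // of the expression in a link to the entity's qualified name.
  def renderWithLinks(tree: TreeEntity): String = {
    val sb = new StringBuilder
    var i = 0
    for ((start, (entity, end)) <- tree.refs) {
      sb append tree.expression.substring(i, start)
      sb append "[" append tree.expression.substring(start, end)
      sb append "](" append entity.qualifiedName append ")"
      i = end
    }
    sb append tree.expression.substring(i)
    sb.toString
  }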
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
new file mode 100644
index 0000000000..989dfa048e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
@@ -0,0 +1,25 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import scala.collection._
+
+abstract class TypeEntity {
+
+ /** A string representation of this type. */
+ def name: String
+
+  /** Maps which parts of this type's name reference other entities. The map is indexed by the position of the first
+    * character that references some entity, and contains the referenced entity together with the length of the
+    * referenced name. The referenced character ranges do not overlap or nest. The map is sorted by position. */
+ def refEntity: SortedMap[Int, (TemplateEntity, Int)]
+
+ override def toString =
+ name
+
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
new file mode 100644
index 0000000000..255c61095e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
@@ -0,0 +1,73 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2010 LAMP/EPFL */
+
+package scala.tools.nsc
+package doc
+package model
+package comment
+
+import scala.collection._
+
+import java.net.URL
+
+/** A body of text. A comment has a single body, which is composed of at least one block. Inside every body is exactly
+ * one summary (see [[scala.tools.nsc.doc.model.comment.Summary]]). */
+final case class Body(blocks: Seq[Block]) {
+
+ /** The summary text of the comment body. */
+ lazy val summary: Option[Inline] = {
+ def summaryInBlock(block: Block): Seq[Inline] = block match {
+ case Title(text, _) => summaryInInline(text)
+ case Paragraph(text) => summaryInInline(text)
+ case UnorderedList(items) => items flatMap { summaryInBlock(_) }
+ case OrderedList(items, _) => items flatMap { summaryInBlock(_) }
+ case DefinitionList(items) => items.values.toSeq flatMap { summaryInBlock(_) }
+ case _ => Nil
+ }
+ def summaryInInline(text: Inline): Seq[Inline] = text match {
+ case Summary(text) => List(text)
+ case Chain(items) => items flatMap { summaryInInline(_) }
+ case Italic(text) => summaryInInline(text)
+ case Bold(text) => summaryInInline(text)
+ case Underline(text) => summaryInInline(text)
+ case Superscript(text) => summaryInInline(text)
+ case Subscript(text) => summaryInInline(text)
+ case Link(_, title) => summaryInInline(title)
+ case _ => Nil
+ }
+ (blocks flatMap { summaryInBlock(_) }).toList match {
+ case Nil => None
+ case inline :: Nil => Some(inline)
+ case inlines => Some(Chain(inlines))
+ }
+ }
+
+}
+
+/** A block-level element of text, such as a paragraph or code block. */
+sealed abstract class Block
+
+final case class Title(text: Inline, level: Int) extends Block
+final case class Paragraph(text: Inline) extends Block
+final case class Code(data: String) extends Block
+final case class UnorderedList(items: Seq[Block]) extends Block
+final case class OrderedList(items: Seq[Block], style: String) extends Block
+final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
+final case class HorizontalRule() extends Block
+
+/** A section of text inside a block, possibly with formatting. */
+sealed abstract class Inline
+
+final case class Chain(items: Seq[Inline]) extends Inline
+final case class Italic(text: Inline) extends Inline
+final case class Bold(text: Inline) extends Inline
+final case class Underline(text: Inline) extends Inline
+final case class Superscript(text: Inline) extends Inline
+final case class Subscript(text: Inline) extends Inline
+final case class Link(target: String, title: Inline) extends Inline
+final case class EntityLink(target: TemplateEntity) extends Inline
+final case class Monospace(text: String) extends Inline
+final case class Text(text: String) extends Inline
+final case class HtmlTag(data: String) extends Inline
+
+/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
+final case class Summary(text: Inline) extends Inline
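A small hypothetical example of the summary extraction defined above; the inline values are invented and only show what `Body.summary` evaluates to:

  // Illustration only: a paragraph whose first sentence was wrapped in Summary by the
  // comment parser, followed by a second sentence.
  val body = Body(Seq(Paragraph(Chain(List(
    Summary(Text("Returns the first element of the list.")),
    Text(" Fails if the list is empty."))))))

  // body.summary == Some(Text("Returns the first element of the list."))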
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
new file mode 100644
index 0000000000..e2841e0db1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
@@ -0,0 +1,77 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2010 LAMP/EPFL */
+
+package scala.tools.nsc
+package doc
+package model
+package comment
+
+import scala.collection._
+
+/** A Scaladoc comment and all its tags.
+ *
+ * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
+ *
+ * @author Gilles Dubochet
+ * @author Manohar Jonnalagedda */
+abstract class Comment {
+
+ /** The main body of the comment that describes what the entity does and is. */
+ def body: Body
+
+ /** A shorter version of the body. Usually, this is the first sentence of the body. */
+ def short: Inline = body.summary getOrElse Text("")
+
+ /** A list of authors. The empty list is used when no author is defined. */
+ def authors: List[Body]
+
+ /** A list of other resources to see, including links to other entities or to external documentation. The empty list
+    * is used when no other resource is mentioned. */
+ def see: List[Body]
+
+ /** A description of the result of the entity. Typically, this provides additional information on the domain of the
+ * result, contractual post-conditions, etc. */
+ def result: Option[Body]
+
+ /** A map of exceptions that the entity can throw when accessed, and a description of what they mean. */
+ def throws: Map[String, Body]
+
+ /** A map of value parameters, and a description of what they are. Typically, this provides additional information on
+ * the domain of the parameters, contractual pre-conditions, etc. */
+ def valueParams: Map[String, Body]
+
+ /** A map of type parameters, and a description of what they are. Typically, this provides additional information on
+ * the domain of the parameters. */
+ def typeParams: Map[String, Body]
+
+ /** The version number of the entity. There is no formatting or further meaning attached to this value. */
+ def version: Option[Body]
+
+  /** The version number of the containing entity in which this member was introduced. */
+ def since: Option[Body]
+
+ /** An annotation as to expected changes on this entity. */
+ def todo: List[Body]
+
+  /** Whether the entity is deprecated. Using the `@deprecated` Scala annotation is preferable to using this Scaladoc
+    * tag. */
+ def deprecated: Option[Body]
+
+ /** An additional note concerning the contract of the entity. */
+ def note: List[Body]
+
+ /** A usage example related to the entity. */
+ def example: List[Body]
+
+ /** The comment as it appears in the source text. */
+ def source: Option[String]
+
+ /** A description for the primary constructor */
+ def constructor: Option[Body]
+
+ override def toString =
+ body.toString + "\n" +
+ (authors map ("@author " + _.toString)).mkString("\n") +
+ (result map ("@return " + _.toString)).mkString("\n") +
+ (version map ("@version " + _.toString)).mkString
+
+}
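As a hypothetical consumer of `Comment.short`, a sketch that flattens an `Inline` tree to plain text, as an index page might do; the `plainText` name is an assumption:

  // Illustration only: reduces an Inline tree (such as Comment.short) to a plain string.
  def plainText(i: Inline): String = i match {
    case Chain(items)    => items map plainText mkString ""
    case Summary(text)   => plainText(text)
    case Text(text)      => text
    case Bold(text)      => plainText(text)
    case Italic(text)    => plainText(text)
    case Underline(text) => plainText(text)
    case Monospace(text) => text
    case Link(_, title)  => plainText(title)
    case EntityLink(tpl) => tpl.name
    case _               => ""
  }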
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
new file mode 100644
index 0000000000..2938fc163f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -0,0 +1,865 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2010 LAMP/EPFL */
+
+package scala.tools.nsc
+package doc
+package model
+package comment
+
+import reporters.Reporter
+import scala.collection._
+import scala.util.matching.Regex
+import scala.annotation.switch
+import util.{NoPosition, Position}
+
+/** The comment parser transforms raw comment strings into `Comment` objects. Call `parse` to run the parser. Note that
+ * the parser is stateless and should only be built once for a given Scaladoc run.
+ *
+ * @param reporter The reporter on which user messages (error, warnings) should be printed.
+ *
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet */
+trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
+
+ val global: Global
+ import global.reporter
+
+ protected val commentCache = mutable.HashMap.empty[(global.Symbol, TemplateImpl), Comment]
+
+ def addCommentBody(sym: global.Symbol, inTpl: => TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = {
+ commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos)
+ sym
+ }
+
+ def comment(sym: global.Symbol, inTpl: => DocTemplateImpl): Option[Comment] = {
+ val key = (sym, inTpl)
+ if (commentCache isDefinedAt key)
+ Some(commentCache(key))
+ else { // not reached for use-case comments
+ val c = defineComment(sym, inTpl)
+ if (c isDefined) commentCache += (sym, inTpl) -> c.get
+ c
+ }
+ }
+
+  /** A comment is usually created by the parser; however, in some special cases the comment of a symbol has to be
+    * completed with information taken from the comment of its inTpl (its parent class, for example).
+    * This function handles some of those cases: parameter accessors and the primary constructor. */
+ def defineComment(sym: global.Symbol, inTpl: => DocTemplateImpl):Option[Comment] =
+ //param accessor case
+ // We just need the @param argument, we put it into the body
+ if( sym.isParamAccessor &&
+ inTpl.comment.isDefined &&
+ inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) {
+ val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName))
+ Some(createComment(body0=comContent))
+ }
+
+ // Primary constructor case
+ // We need some content of the class definition : @constructor for the body,
+ // @param and @deprecated, we can add some more if necessary
+ else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) {
+ val tplComment = inTpl.comment.get
+ // If there is nothing to put into the comment there is no need to create it
+ if(tplComment.constructor.isDefined ||
+ tplComment.throws != Map.empty ||
+ tplComment.valueParams != Map.empty ||
+ tplComment.typeParams != Map.empty ||
+ tplComment.deprecated.isDefined
+ )
+ Some(createComment( body0 = tplComment.constructor,
+ throws0 = tplComment.throws,
+ valueParams0 = tplComment.valueParams,
+ typeParams0 = tplComment.typeParams,
+ deprecated0 = tplComment.deprecated
+ ))
+ else None
+ }
+ //other comment cases
+ // parse function will make the comment
+ else {
+ val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
+ if (rawComment != "") {
+ val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym))
+ Some(c)
+ }
+ else None
+ }
+
+ /* Creates comments with necessary arguments */
+ def createComment(body0: Option[Body] = None,
+ authors0: List[Body] = List.empty,
+ see0: List[Body] = List.empty,
+ result0: Option[Body] = None,
+ throws0: Map[String,Body] = Map.empty,
+ valueParams0: Map[String,Body] = Map.empty,
+ typeParams0: Map[String,Body] = Map.empty,
+ version0: Option[Body] = None,
+ since0: Option[Body] = None,
+ todo0: List[Body] = List.empty,
+ deprecated0: Option[Body] = None,
+ note0: List[Body] = List.empty,
+ example0: List[Body] = List.empty,
+ constructor0: Option[Body] = None,
+ source0: Option[String] = None
+ ):Comment =
+ new Comment{
+ val body = if(body0 isDefined) body0.get else Body(Seq.empty)
+ val authors = authors0
+ val see = see0
+ val result = result0
+ val throws = throws0
+ val valueParams = valueParams0
+ val typeParams = typeParams0
+ val version = version0
+ val since = since0
+ val todo = todo0
+ val deprecated = deprecated0
+ val note = note0
+ val example = example0
+ val constructor = constructor0
+ val source = source0
+
+ }
+
+ protected val endOfText = '\u0003'
+ protected val endOfLine = '\u000A'
+
+ /** Something that should not have happened, happened, and Scaladoc should exit. */
+ protected def oops(msg: String): Nothing =
+ throw FatalError("program logic: " + msg)
+
+ /** The body of a line, dropping the (optional) start star-marker, one leading whitespace and all trailing whitespace. */
+ protected val CleanCommentLine =
+ new Regex("""(?:\s*\*\s?)?(.*)""")
+
+ /** Dangerous HTML tags that should be replaced by something safer, such as wiki syntax, or that should be dropped. */
+ protected val DangerousTags =
+ new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
+
+ /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string if it cannot be salvaged. */
+ protected def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ case "p" | "div" => "\n\n"
+ case "h1" => "\n= "
+ case "/h1" => " =\n"
+ case "h2" => "\n== "
+ case "/h2" => " ==\n"
+ case "h3" => "\n=== "
+ case "/h3" => " ===\n"
+ case "h4" | "h5" | "h6" => "\n==== "
+ case "/h4" | "/h5" | "/h6" => " ====\n"
+ case "li" => "\n * - "
+ case _ => ""
+ }
+
+ /** Javadoc tags that should be replaced by something useful, such as wiki syntax, or that should be dropped. */
+ protected val JavadocTags =
+ new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
+
+ /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
+ protected def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ case "code" => "`" + mtch.group(2) + "`"
+ case "docRoot" => ""
+ case "inheritDoc" => ""
+ case "link" => "`" + mtch.group(2) + "`"
+ case "linkplain" => "`" + mtch.group(2) + "`"
+ case "literal" => mtch.group(2)
+ case "value" => "`" + mtch.group(2) + "`"
+ case _ => ""
+ }
+
+ /** Safe HTML tags that can be kept. */
+ protected val SafeTags =
+ new Regex("""((<code( [^>]*)?>.*</code>)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
+
+ protected val safeTagMarker = '\u000E'
+
+ /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
+ protected val SimpleTag =
+ new Regex("""\s*@(\S+)\s+(.*)""")
+
+ /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name of the symbol, and the rest of the
+ * line. */
+ protected val SymbolTag =
+ new Regex("""\s*@(param|tparam|throws)\s+(\S*)\s*(.*)""")
+
+ /** The start of a scaladoc code block */
+ protected val CodeBlockStart =
+ new Regex("""(.*)\{\{\{(.*)""")
+
+ /** The end of a scaladoc code block */
+ protected val CodeBlockEnd =
+ new Regex("""(.*)\}\}\}(.*)""")
+
+ /** A key used for a tag map. The key is built from the name of the tag and from the linked symbol if the tag has one.
+ * Equality on tag keys is structural. */
+ protected sealed abstract class TagKey {
+ def name: String
+ }
+
+ protected final case class SimpleTagKey(name: String) extends TagKey
+ protected final case class SymbolTagKey(name: String, symbol: String) extends TagKey
+
+ /** Parses a raw comment string into a `Comment` object.
+ * @param comment The expanded comment string (including start and end markers) to be parsed.
+ * @param src The raw comment source string.
+ * @param pos The position of the comment in source. */
+ protected def parse(comment: String, src: String, pos: Position): Comment = {
+
+ /** The cleaned raw comment as a list of lines. Cleaning removes comment start and end markers, line start markers
+ * and unnecessary whitespace. */
+ def clean(comment: String): List[String] = {
+ def cleanLine(line: String): String = {
+ //replaceAll removes trailing whitespaces
+ line.replaceAll("""\s+$""", "") match {
+ case CleanCommentLine(ctl) => ctl
+ case tl => tl
+ }
+ }
+ val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
+ val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
+ val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
+ val markedTagComment =
+ SafeTags.replaceAllIn(javadoclessComment, { mtch =>
+ java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
+ })
+ markedTagComment.lines.toList map (cleanLine(_))
+ }
+
+ /** Parses a comment (in the form of a list of lines) to a Comment instance, recursively on lines. To do so, it
+ * splits the whole comment into main body and tag bodies, then runs the `WikiParser` on each body before creating
+ * the comment instance.
+ *
+ * @param docBody The body of the comment parsed until now.
+ * @param tags All tags parsed until now.
+ * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
+ * are part of the previous tag or, if none exists, of the body.
+ * @param remaining The lines that must still recursively be parsed.
+ * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */
+ def parse0(docBody: String, tags: Map[TagKey, List[String]], lastTagKey: Option[TagKey], remaining: List[String], inCodeBlock: Boolean): Comment = {
+ remaining match {
+
+ case CodeBlockStart(before, after) :: ls if (!inCodeBlock) =>
+ if (before.trim != "")
+ parse0(docBody, tags, lastTagKey, before :: ("{{{" + after) :: ls, false)
+ else if (after.trim != "")
+ parse0(docBody, tags, lastTagKey, "{{{" :: after :: ls, true)
+ else
+ parse0(docBody + endOfLine + "{{{", tags, lastTagKey, ls, true)
+
+ case CodeBlockEnd(before, after) :: ls =>
+ if (before.trim != "")
+ parse0(docBody, tags, lastTagKey, before :: ("}}}" + after) :: ls, true)
+ else if (after.trim != "")
+ parse0(docBody, tags, lastTagKey, "}}}" :: after :: ls, false)
+ else
+ parse0(docBody + endOfLine + "}}}", tags, lastTagKey, ls, false)
+
+ case SymbolTag(name, sym, body) :: ls if (!inCodeBlock) =>
+ val key = SymbolTagKey(name, sym)
+ val value = body :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+ case SimpleTag(name, body) :: ls if (!inCodeBlock) =>
+ val key = SimpleTagKey(name)
+ val value = body :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+ case line :: ls if (lastTagKey.isDefined) =>
+ val key = lastTagKey.get
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + line) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
+
+ case line :: ls =>
+ val newBody = if (docBody == "") line else docBody + endOfLine + line
+ parse0(newBody, tags, lastTagKey, ls, inCodeBlock)
+
+ case Nil =>
+
+ val bodyTags: mutable.Map[TagKey, List[Body]] =
+ mutable.Map(tags mapValues (_ map (parseWiki(_, pos))) toSeq: _*)
+
+ def oneTag(key: SimpleTagKey): Option[Body] =
+ ((bodyTags remove key): @unchecked) match {
+ case Some(r :: rs) =>
+ if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
+ Some(r)
+ case None => None
+ }
+
+ def allTags(key: SimpleTagKey): List[Body] =
+ (bodyTags remove key) getOrElse Nil
+
+ def allSymsOneTag(key: TagKey): Map[String, Body] = {
+ val keys: Seq[SymbolTagKey] =
+ bodyTags.keys.toSeq flatMap {
+ case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
+ case stk: SimpleTagKey if (stk.name == key.name) =>
+ reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
+ None
+ case _ => None
+ }
+ val pairs: Seq[(String, Body)] =
+ for (key <- keys) yield {
+ val bs = (bodyTags remove key).get
+ if (bs.length > 1)
+ reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
+ (key.symbol, bs.head)
+ }
+ Map.empty[String, Body] ++ pairs
+ }
+
+ val com = createComment (
+ body0 = Some(parseWiki(docBody, pos)),
+ authors0 = allTags(SimpleTagKey("author")),
+ see0 = allTags(SimpleTagKey("see")),
+ result0 = oneTag(SimpleTagKey("return")),
+ throws0 = allSymsOneTag(SimpleTagKey("throws")),
+ valueParams0 = allSymsOneTag(SimpleTagKey("param")),
+ typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
+ version0 = oneTag(SimpleTagKey("version")),
+ since0 = oneTag(SimpleTagKey("since")),
+ todo0 = allTags(SimpleTagKey("todo")),
+ deprecated0 = oneTag(SimpleTagKey("deprecated")),
+ note0 = allTags(SimpleTagKey("note")),
+ example0 = allTags(SimpleTagKey("example")),
+ constructor0 = oneTag(SimpleTagKey("constructor")),
+ source0 = Some(clean(src).mkString("\n"))
+ )
+
+ for ((key, _) <- bodyTags)
+ reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
+
+ com
+
+ }
+ }
+
+ parse0("", Map.empty, None, clean(comment), false)
+
+ }
+
+  /** Parses a string containing wiki syntax into a `Body` object. Note that the string is assumed to be clean:
+ * - Removed Scaladoc start and end markers.
+ * - Removed start-of-line star and one whitespace afterwards (if present).
+ * - Removed all end-of-line whitespace.
+ * - Only `endOfLine` is used to mark line endings. */
+ def parseWiki(string: String, pos: Position): Body =
+ new WikiParser(string.toArray, pos).document()
+
+ /** TODO
+ *
+ * @author Ingo Maier
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet */
+ protected final class WikiParser(val buffer: Array[Char], pos: Position) extends CharReader(buffer) { wiki =>
+
+ var summaryParsed = false
+
+ def document(): Body = {
+ nextChar()
+ val blocks = new mutable.ListBuffer[Block]
+ while(char != endOfText)
+ blocks += block()
+ Body(blocks.toList)
+ }
+
+ /* BLOCKS */
+
+    /** {{{ block ::= code | title | hrule | listBlock | para }}} */
+ def block(): Block = {
+ if (checkSkipInitWhitespace("{{{"))
+ code()
+ else if (checkSkipInitWhitespace("="))
+ title()
+ else if (checkSkipInitWhitespace("----"))
+ hrule()
+ else if (checkList)
+ listBlock
+ else {
+ para()
+ }
+ }
+
+ /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
+ * Characters used to build lists and their constructors */
+ protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion?
+ "- " -> ( UnorderedList(_) ),
+ "1. " -> ( OrderedList(_,"decimal") ),
+ "I. " -> ( OrderedList(_,"upperRoman") ),
+ "i. " -> ( OrderedList(_,"lowerRoman") ),
+ "A. " -> ( OrderedList(_,"upperAlpha") ),
+ "a. " -> ( OrderedList(_,"lowerAlpha") )
+ )
+
+    /** Checks whether the current line starts with at least one space followed by one of the list styles. */
+ def checkList =
+ (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
+
+ /** {{{
+ * nListBlock ::= nLine { mListBlock }
+ * nLine ::= nSpc listStyle para '\n'
+ * }}}
+ * Where n and m stand for the number of spaces. When m > n, a new list is nested. */
+ def listBlock: Block = {
+
+      /** Consumes one list item block and returns it, or `None` if the next block is not a list item or belongs to a different list. */
+ def listLine(indent: Int, style: String): Option[Block] =
+ if (countWhitespace > indent && checkList)
+ Some(listBlock)
+ else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
+ None
+ else {
+ jumpWhitespace()
+ jump(style)
+ val p = Paragraph(inline(false))
+ blockEnded("end of list line ")
+ Some(p)
+ }
+
+ /** Consumes all list item blocks (possibly with nested lists) of the same list and returns the list block. */
+ def listLevel(indent: Int, style: String): Block = {
+ val lines = mutable.ListBuffer.empty[Block]
+ var line: Option[Block] = listLine(indent, style)
+ while (line.isDefined) {
+ lines += line.get
+ line = listLine(indent, style)
+ }
+ val constructor = listStyles(style)
+ constructor(lines)
+ }
+
+ val indent = countWhitespace
+ val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
+ listLevel(indent, style)
+
+ }
+
+ def code(): Block = {
+ jumpWhitespace()
+ jump("{{{")
+ readUntil("}}}")
+ if (char == endOfText)
+ reportError(pos, "unclosed code block")
+ else
+ jump("}}}")
+ blockEnded("code block")
+ Code(getRead)
+ }
+
+ /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
+ def title(): Block = {
+ jumpWhitespace()
+ val inLevel = repeatJump("=")
+ val text = inline(check(Array.fill(inLevel)('=')))
+ val outLevel = repeatJump("=", inLevel)
+ if (inLevel != outLevel)
+ reportError(pos, "unbalanced or unclosed heading")
+ blockEnded("heading")
+ Title(text, inLevel)
+ }
+
+ /** {{{ hrule ::= "----" { '-' } '\n' }}} */
+ def hrule(): Block = {
+ jumpWhitespace()
+ repeatJump("-")
+ blockEnded("horizontal rule")
+ HorizontalRule()
+ }
+
+ /** {{{ para ::= inline '\n' }}} */
+ def para(): Block = {
+ val p =
+ if (summaryParsed)
+ Paragraph(inline(false))
+ else {
+ val s = summary()
+ val r =
+ if (checkParaEnded) List(s) else List(s, inline(false))
+ summaryParsed = true
+ Paragraph(Chain(r))
+ }
+ while (char == endOfLine && char != endOfText)
+ nextChar()
+ p
+ }
+
+ /* INLINES */
+
+ def inline(isInlineEnd: => Boolean): Inline = {
+
+ def inline0(): Inline = {
+ if (char == safeTagMarker) htmlTag()
+ else if (check("'''")) bold()
+ else if (check("''")) italic()
+ else if (check("`")) monospace()
+ else if (check("__")) underline()
+ else if (check("^")) superscript()
+ else if (check(",,")) subscript()
+ else if (check("[[")) link()
+ else {
+ readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine }
+ Text(getRead())
+ }
+ }
+
+ val inlines: List[Inline] = {
+ val iss = mutable.ListBuffer.empty[Inline]
+ iss += inline0()
+ while(!isInlineEnd && !checkParaEnded) {
+ if (char == endOfLine) nextChar()
+ val current = inline0()
+ (iss.last, current) match {
+ case (Text(t1), Text(t2)) =>
+ iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
+ case _ => iss += current
+ }
+ }
+ iss.toList
+ }
+
+ inlines match {
+ case Nil => Text("")
+ case i :: Nil => i
+ case is => Chain(is)
+ }
+
+ }
+
+ def htmlTag(): Inline = {
+ jump(safeTagMarker)
+ readUntil(safeTagMarker)
+ if (char != endOfText) jump(safeTagMarker)
+ var read = getRead
+ HtmlTag(read)
+ }
+
+ def bold(): Inline = {
+ jump("'''")
+ val i = inline(check("'''"))
+ jump("'''")
+ Bold(i)
+ }
+
+ def italic(): Inline = {
+ jump("''")
+ val i = inline(check("''"))
+ jump("''")
+ Italic(i)
+ }
+
+ def monospace(): Inline = {
+ jump("`")
+ readUntil { char == '`' }
+ jump("`")
+ Monospace(getRead())
+ }
+
+ def underline(): Inline = {
+ jump("__")
+ val i = inline(check("__"))
+ jump("__")
+ Underline(i)
+ }
+
+ def superscript(): Inline = {
+ jump("^")
+ val i = inline(check("^"))
+ jump("^")
+ Superscript(i)
+ }
+
+ def subscript(): Inline = {
+ jump(",,")
+ val i = inline(check(",,"))
+ jump(",,")
+ Subscript(i)
+ }
+
+ def summary(): Inline = {
+ val i = inline(check("."))
+ Summary(
+ if (jump("."))
+ Chain(List(i, Text(".")))
+ else
+ i
+ )
+ }
+
+ def entityLink(query: String): Inline = findTemplate(query) match {
+ case Some(tpl) =>
+ EntityLink(tpl)
+ case None =>
+ Text(query)
+ }
+
+ def link(): Inline = {
+ val SchemeUri = new Regex("""([^:]+:.*)""")
+ jump("[[")
+ readUntil { check("]]") || check(" ") }
+ val target = getRead()
+ val title =
+ if (!check("]]")) Some({
+ jump(" ")
+ inline(check("]]"))
+ })
+ else None
+ jump("]]")
+ (target, title) match {
+ case (SchemeUri(uri), Some(title)) =>
+ Link(uri, title)
+ case (SchemeUri(uri), None) =>
+ Link(uri, Text(uri))
+ case (qualName, None) =>
+ entityLink(qualName)
+ case (qualName, Some(text)) =>
+ reportError(pos, "entity link to " + qualName + " cannot have a custom title'" + text + "'")
+ entityLink(qualName)
+ }
+
+ }
+
+ /* UTILITY */
+
+ /** {{{ eol ::= { whitespace } '\n' }}} */
+ def blockEnded(blockType: String): Unit = {
+ if (char != endOfLine && char != endOfText) {
+ reportError(pos, "no additional content on same line after " + blockType)
+ jumpUntil(endOfLine)
+ }
+ while (char == endOfLine)
+ nextChar()
+ }
+
+ def checkParaEnded(): Boolean = {
+ (char == endOfText) ||
+ ((char == endOfLine) && {
+ val poff = offset
+ val pc = char
+ nextChar() // read EOL
+ val ok = {
+ checkSkipInitWhitespace(Array(endOfLine)) ||
+ checkSkipInitWhitespace(Array('=')) ||
+ checkSkipInitWhitespace(Array('{', '{', '{')) ||
+ checkList ||
+ checkSkipInitWhitespace(Array('\u003D'))
+ }
+ offset = poff
+ char = pc
+ ok
+ })
+ }
+
+ def reportError(pos: Position, message: String): Unit =
+ reporter.warning(pos, message)
+
+ }
+
+ protected sealed class CharReader(buffer: Array[Char]) { reader =>
+
+ var char: Char = _
+ var offset: Int = 0
+
+ final def nextChar(): Unit = {
+ if (offset >= buffer.length)
+ char = endOfText
+ else {
+ char = buffer(offset)
+ offset += 1
+ }
+ }
+
+ implicit def strintToChars(s: String): Array[Char] = s.toArray
+
+ def store(body: => Unit): String = {
+ val pre = offset
+ body
+ val post = offset
+ buffer.toArray.slice(pre, post).toString
+ }
+
+ final def check(chars: Array[Char]): Boolean = {
+ val poff = offset
+ val pc = char
+ val ok = jump(chars)
+ offset = poff
+ char = pc
+ ok
+ }
+
+ def checkSkipInitWhitespace(chars: Array[Char]): Boolean = {
+ val poff = offset
+ val pc = char
+ jumpWhitespace()
+ val (ok0, chars0) =
+ if (chars.head == ' ')
+ (offset > poff, chars.tail)
+ else
+ (true, chars)
+ val ok = ok0 && jump(chars0)
+ offset = poff
+ char = pc
+ ok
+ }
+
+ def countWhitespace: Int = {
+ var count = 0
+ val poff = offset
+ val pc = char
+ while (isWhitespace(char) && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ offset = poff
+ char = pc
+ count
+ }
+
+ /* JUMPERS */
+
+ /** jumps a character and consumes it
+ * @return true only if the correct character has been jumped */
+ final def jump(ch: Char): Boolean = {
+ if (char == ch) {
+ nextChar()
+ true
+ }
+ else false
+ }
+
+ /** jumps all the characters in chars, consuming them in the process.
+ * @return true only if the correct characters have been jumped */
+ final def jump(chars: Array[Char]): Boolean = {
+ var index = 0
+ while (index < chars.length && char == chars(index) && char != endOfText) {
+ nextChar()
+ index += 1
+ }
+ index == chars.length
+ }
+
+ final def checkedJump(chars: Array[Char]): Boolean = {
+ val poff = offset
+ val pc = char
+ val ok = jump(chars)
+ if (!ok) {
+ offset = poff
+ char = pc
+ }
+ ok
+ }
+
+ final def repeatJump(chars: Array[Char], max: Int): Int = {
+ var count = 0
+ var more = true
+ while (more && count < max) {
+ if (!checkedJump(chars))
+ more = false
+ else
+ count += 1
+ }
+ count
+ }
+
+ final def repeatJump(chars: Array[Char]): Int = {
+ var count = 0
+ var more = true
+ while (more) {
+ if (!checkedJump(chars))
+ more = false
+ else
+ count += 1
+ }
+ count
+ }
+
+ final def jumpUntil(ch: Char): Int = {
+ var count = 0
+ while(char != ch && char != endOfText) {
+ nextChar()
+ count=count+1
+ }
+ count
+ }
+
+ final def jumpUntil(chars: Array[Char]): Int = {
+ assert(chars.length > 0)
+ var count = 0
+ val c = chars(0)
+ while(!check(chars) && char != endOfText) {
+ nextChar()
+ while (char != c && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ }
+ count
+ }
+
+ final def jumpUntil(pred: => Boolean): Int = {
+ var count = 0
+ while (!pred && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ count
+ }
+
+ def jumpWhitespace() = jumpUntil(!isWhitespace(char))
+
+ /* READERS */
+
+ private val readBuilder = new mutable.StringBuilder
+
+ final def getRead(): String = {
+ val bld = readBuilder.toString
+ readBuilder.clear()
+ if (bld.length < 6) bld.intern else bld
+ }
+
+ final def readUntil(ch: Char): Int = {
+ var count = 0
+ while(char != ch && char != endOfText) {
+ readBuilder += char
+ nextChar()
+ }
+ count
+ }
+
+ final def readUntil(chars: Array[Char]): Int = {
+ assert(chars.length > 0)
+ var count = 0
+ val c = chars(0)
+ while(!check(chars) && char != endOfText) {
+ readBuilder += char
+ nextChar()
+ while (char != c && char != endOfText) {
+ readBuilder += char
+ nextChar()
+ }
+ }
+ count
+ }
+
+ final def readUntil(pred: => Boolean): Int = {
+ var count = 0
+ while (!pred && char != endOfText) {
+ readBuilder += char
+ nextChar()
+ }
+ count
+ }
+
+ /* CHARS CLASSES */
+
+ def isWhitespace(c: Char) = (c: @switch) match {
+ case ' ' | '\t' => true
+ case _ => false
+ }
+
+ }
+
+}
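
As a rough illustration of the save/restore lookahead idiom the CharReader above is built on, here is a minimal, self-contained Scala sketch; MiniReader and the demo input are invented for illustration and are not part of the patch:

object MiniReaderDemo {
  class MiniReader(buffer: Array[Char]) {
    val endOfText = '\u0003'
    var char: Char = _
    var offset: Int = 0
    nextChar()

    def nextChar(): Unit =
      if (offset >= buffer.length) char = endOfText
      else { char = buffer(offset); offset += 1 }

    // consume the given chars only if all of them match, and report whether they did
    def jump(chars: Array[Char]): Boolean = {
      var i = 0
      while (i < chars.length && char == chars(i) && char != endOfText) {
        nextChar(); i += 1
      }
      i == chars.length
    }

    // peek: run jump, then restore offset and char so nothing is consumed
    def check(chars: Array[Char]): Boolean = {
      val (poff, pc) = (offset, char)
      val ok = jump(chars)
      offset = poff; char = pc
      ok
    }
  }

  def main(args: Array[String]): Unit = {
    val r = new MiniReader("'''bold'''".toCharArray)
    println(r.check("'''".toCharArray)) // true, and the position is unchanged
    println(r.jump("'''".toCharArray))  // true, and the marker is consumed
    println(r.char)                     // 'b'
  }
}
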
diff --git a/src/compiler/scala/tools/nsc/doc/script.js b/src/compiler/scala/tools/nsc/doc/script.js
deleted file mode 100644
index 54c69b1c42..0000000000
--- a/src/compiler/scala/tools/nsc/doc/script.js
+++ /dev/null
@@ -1,112 +0,0 @@
-<!--
-// NSC -- new Scala compiler
-// Copyright 2005-2009 LAMP/EPFL
-// @author Stephane Micheloud
-
-// $Id$
-
-function setWindowTitle(title) {
- parent.document.title = title;
-}
-
-var java_api_root = 'http://java.sun.com/javase/6/docs/api/';
-//var java_api_root = 'http://java.sun.com/j2se/1.5.0/docs/api/';
-//var java_api_root = 'http://lamp.epfl.ch/~linuxsoft/java/jdk1.5/docs/api/';
-
-var javax_servlet_api_root = 'http://java.sun.com/products/servlet/2.3/javadoc/';
-
-var scala_doc_url = parent.document.URL;
-var scala_api_root = scala_doc_url.substring(0, scala_doc_url.lastIndexOf("/")+1);
-
-var ant_api_root = 'http://lamp.epfl.ch/~linuxsoft/ant/manual/api/';
-//var ant_api_root = 'http://www.net-freaks.org/doc/ant-1.6.5/manual/api/';
-
-var eclipse_api_root = 'http://help.eclipse.org/help32/topic/org.eclipse.platform.doc.isv/reference/api/';
-
-var fjbg_api_root = 'http://lamp.epfl.ch/~linuxsoft/fjbg/api/';
-
-var liftweb_api_root = 'http://lamp.epfl.ch/~linuxsoft/liftweb/apidocs/';
-
-function get_api_root(key) {
- root = null;
- if (key.indexOf("ch/epfl/lamp/fjbg/") == 0) { root = fjbg_api_root; }
- else if (key.indexOf("java/" ) == 0) { root = java_api_root; }
- else if (key.indexOf("javax/" ) == 0) { root = java_api_root; }
- else if (key.indexOf("javax/servlet/" ) == 0) { root = javax_servlet_api_root; }
- else if (key.indexOf("scala/" ) == 0) { root = scala_api_root; }
- else if (key.indexOf("org/apache/tools/" ) == 0) { root = ant_api_root; }
- else if (key.indexOf("org/eclipse/" ) == 0) { root = eclipse_api_root; }
- else if (key.indexOf("net/liftweb/" ) == 0) { root = liftweb_api_root; }
- return root;
-}
-
-var scala_src_root = 'http://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/';
-var lib_src_root = scala_src_root + 'src/library/';
-var comp_src_root = scala_src_root + 'src/compiler/';
-var actors_src_root = scala_src_root + 'src/actors/';
-var dbc_src_root = scala_src_root + 'src/dbc/';
-var swing_src_root = scala_src_root + 'src/swing/';
-
-//var liftweb_src_root = 'http://liftweb.googlecode.com/svn/trunk/liftweb/lift/src/main/scala/';
-
-function get_src_root(key) {
- root = null;
- if (key.indexOf("scala/actors/") == 0) { root = actors_src_root; }
- else if (key.indexOf("scala/dbc/" ) == 0) { root = dbc_src_root; }
- else if (key.indexOf("scala/swing/" ) == 0) { root = swing_src_root; }
- else if (key.indexOf("scala/tools/" ) == 0) { root = comp_src_root; }
- else if (key.indexOf("scala/" ) == 0) { root = lib_src_root; }
- //else if (key.indexOf("net/liftweb/" ) == 0) { root = liftweb_src_root; }
- return root;
-}
-
-function init() {
- elems = document.getElementsByTagName('a');
- for (i = 0; i < elems.length; i++) {
- try {
- key = elems[i].getAttribute('class');
- href = elems[i].getAttribute('href');
- api_root = get_api_root(key);
- if (api_root != null) {
- href1 = href.substring(href.lastIndexOf("#"))
- value = api_root + key + ".html" + href1;
- elems[i].setAttribute('href', value);
- }
- src_root = get_src_root(key);
- if (src_root != null) {
- value = src_root + key + '.scala?view=markup';
- elems[i].setAttribute('href', value);
- elems[i].setAttribute('target' , '_top');
- }
- }
- catch (e) {
- // do nothing
- }
- }
-}
-
-function getLocation() {
- kinds = parent.navigationFrame.document.getElementById("kinds");
- oldLocation = parent.classesFrame.window.location.href;
- //alert("oldLocation="+oldLocation);
- pos = oldLocation.lastIndexOf("#");
- classesURL = (pos > 0) ? oldLocation.substring(0, pos) : oldLocation;
- newLocation = classesURL + kinds.options[kinds.selectedIndex].value;
- //alert("newLocation="+newLocation);
- return newLocation;
-}
-
-function gotoKind() {
- parent.classesFrame.window.location = getLocation();
-}
-
-function resetKind() {
- kinds = parent.navigationFrame.document.getElementById("kinds");
- kinds.selectedIndex = 0;
-}
-
-function gotoName(letter) {
- parent.classesFrame.window.location = getLocation() + "_" + letter;
-}
--->
-
diff --git a/src/compiler/scala/tools/nsc/doc/style.css b/src/compiler/scala/tools/nsc/doc/style.css
deleted file mode 100644
index e243249ca5..0000000000
--- a/src/compiler/scala/tools/nsc/doc/style.css
+++ /dev/null
@@ -1,148 +0,0 @@
-/* Scaladoc style sheet */
-
-a:link {
- color: #0000ee;
-}
-
-a:visited {
- color: #551a8b;
-}
-
-a:active {
- color: #0000ee;
-}
-
-body {
- background-color: #ffffff;
-}
-
-div.entity {
- margin: 18px 0px 18px 0px;
- font-size: x-large;
- font-weight: bold;
-}
-
-div.doctitle {
- font-weight: bold;
- font-style: italic;
-}
-
-div.doctitle-larger {
- margin: 0px 0px 10px 0px;
- font-size: larger;
- font-weight: bold;
-}
-
-div.kinds {
- margin: 0.6em 0 0 0; /* top right bottom left */
- font-weight: bold;
-}
-
-div.page-title {
- margin: 15px 0px 15px 0px;
- font-size: x-large;
- font-weight: bold;
- text-align: center;
-}
-
-div.source {
- font-size: smaller;
- color: gray;
-}
-
-span.entity {
- color: #ff6666;
-}
-
-table.member {
- margin: 0 0 1.2em 0; /* top rigth bottom left */
- border-collapse: collapse;
- border: 2px inset #888888;
- width: 100%;
-}
-
-table.member td.title {
- border: 2px inset #888888;
- background-color: #ccccff;
- font-size: x-large;
- font-weight: bold;
-}
-
-table.inherited {
- margin: 0 0 1.2em 0; /* top rigth bottom left */
- border-collapse: collapse;
- border: 2px inset #888888;
- width: 100%;
-}
-
-table.inherited td.title {
- background-color: #eeeeff;
- font-weight: bold;
-}
-
-table.member-detail {
- margin: 10px 0px 0px 0px;
- border-collapse: collapse;
- border: 2px inset #888888;
- background-color: #ffffff;
- width: 100%;
-}
-
-table.member-detail td.title {
- border: 2px inset #888888;
- background-color: #ccccff;
- font-size: x-large;
- font-weight: bold;
-}
-
-table.navigation {
- border-collapse: collapse;
- width: 100%;
- font-family: Arial,Helvetica,Sans-Serif;
-}
-
-td.inherited-members {
- border-top: 2px inset #888888;
- border-right: 0px;
-}
-
-td.inherited-owner {
- background-color: #eeeeff;
- font-weight: bold;
-}
-
-td.modifiers {
- border-top: 2px inset #888888;
- border-right: 2px inset #888888;
- width: 50px;
- text-align: right;
-}
-
-td.navigation-enabled {
- font-weight: bold;
- color: #000000;
- background-color: #eeeeff;
-}
-
-td.navigation-links {
- width: 100%;
- background-color: #eeeeff;
-}
-
-td.navigation-selected {
- font-weight: bold;
- color: #ffffff;
- background-color: #00008b;
-}
-
-td.signature {
- border-top: 2px inset #888888;
- width: 90%;
-}
-
-ul.list {
- margin: 0;
- padding: 0;
- list-style: none;
-}
-
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index e56bfec433..6771c5da64 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -1,3 +1,8 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Iulian Dragos
+ * @author Hubert Plocinicak
+ */
package scala.tools.nsc
package interactive
@@ -51,7 +56,7 @@ object BuildManagerTest extends EvalLoop {
def prompt = "builder > "
- def error(msg: String) {
+ private def buildError(msg: String) {
println(msg + "\n scalac -help gives more information")
}
@@ -63,12 +68,13 @@ object BuildManagerTest extends EvalLoop {
}
val result = fs.foldRight((List[AbstractFile](), List[String]()))(partition)
if (!result._2.isEmpty)
- println("No such file(s): " + result._2.mkString(","))
+ Console.err.println("No such file(s): " + result._2.mkString(","))
Set.empty ++ result._1
}
- val settings = new Settings(error)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val settings = new Settings(buildError)
+ settings.Ybuildmanagerdebug.value = true
+ val command = new CompilerCommand(args.toList, settings)
// settings.make.value = "off"
// val buildManager: BuildManager = new SimpleBuildManager(settings)
val buildManager: BuildManager = new RefinedBuildManager(settings)
@@ -78,7 +84,7 @@ object BuildManagerTest extends EvalLoop {
// enter resident mode
loop { line =>
val args = line.split(' ').toList
- val command = new CompilerCommand(args, new Settings(error), error, true)
+ val command = new CompilerCommand(args, settings)
buildManager.update(command.files, Set.empty)
}
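
For readers following the BuildManagerTest changes above, this is a minimal sketch of the new wiring, assuming only the constructors that appear in the hunk (Settings taking an error callback, the two-argument CompilerCommand); the main method is purely for demonstration:

import scala.tools.nsc.{ Settings, CompilerCommand }

object BuildWiringSketch {
  private def buildError(msg: String): Unit =
    Console.err.println(msg + "\n  scalac -help gives more information")

  def main(args: Array[String]): Unit = {
    val settings = new Settings(buildError)        // errors go through the callback, as in the test
    settings.Ybuildmanagerdebug.value = true       // the flag the patch turns on
    val command  = new CompilerCommand(args.toList, settings)
    println("files passed to the build manager: " + command.files)
  }
}
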
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index ab02ae7460..b466cfe2db 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -1,71 +1,101 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
package scala.tools.nsc
package interactive
-import scala.concurrent.SyncVar
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{SourceFile, Position, WorkScheduler}
import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
/** Interface of interactive compiler to a client such as an IDE
+ * The model of the presentation compiler consists of the following parts:
+ *
+ * unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map.
+ *
+ * manipulated by: removeUnitOf, reloadSources.
+ *
+ * A call to reloadSources will add the given sources to the loaded units, and
+ * start a new background compiler pass to compile all loaded units (with the indicated sources first).
+ * Each background compiler pass has its own typer run.
+ * The background compiler thread can be interrupted each time an AST node is
+ * completely typechecked in the following ways:
+ *
+ * 1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run.
+ * 2. by a call to askTypeTree. This starts a new typer run if the forceReload parameter = true
+ * 3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType.
+ * 4. by raising an exception in the scheduler.
+ * 5. by passing a high-priority action wrapped in ask { ... }.
+ *
+ * Actions under 1-3 can themselves be interrupted if they involve typechecking
+ * AST nodes. High-priority actions under 5 cannot; they always run to completion.
+ * So these high-priority actions should be short.
+ *
+ * Normally, an interrupted action continues after the interrupting action is finished.
+ * However, if the interrupting action created a new typer run, the interrupted
+ * action is aborted. If there's an outstanding response, it will be set to
+ * a Right value with a FreshRunReq exception.
*/
trait CompilerControl { self: Global =>
- /** Response {
- override def toString = "TypeMember("+sym+","+tpe+","+accessible+","+inherited+","+viaView+")"
- }{
- override def toString = "TypeMember("+sym+","+tpe+","+accessible+","+inherited+","+viaView+")"
- }wrapper to client
- */
- type Response[T] = SyncVar[Either[T, Throwable]]
+ import syntaxAnalyzer.UnitParser
- abstract class WorkItem extends (() => Unit)
+ type Response[T] = scala.tools.nsc.interactive.Response[T]
- /** Info given for every member found by completion
+ /** The scheduler by which client and compiler communicate
+ * Must be initialized before starting compilerRunner
*/
- abstract class Member {
- val sym: Symbol
- val tpe: Type
- val accessible: Boolean
- }
+ protected[interactive] val scheduler = new WorkScheduler
- case class TypeMember(sym: Symbol, tpe: Type, accessible: Boolean, inherited: Boolean, viaView: Symbol) extends Member
- case class ScopeMember(sym: Symbol, tpe: Type, accessible: Boolean, viaImport: Tree) extends Member
+ /** Return the compilation unit attached to a source file, or None
+ * if source is not loaded.
+ */
+ def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s)
- /** The scheduler by which client and compiler communicate
- * Must be initialized before starting compilerRunner
+ /** Run operation `op` on a compilation unit associated with given `source`.
+ * If source has a loaded compilation unit, this one is passed to `op`.
+ * Otherwise a new compilation unit is created, but not added to the set of loaded units.
*/
- protected val scheduler = new WorkScheduler
+ def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T =
+ op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source)))
/** The compilation unit corresponding to a source file
+ * if it does not yet exist, create a new one atomically.
+ * Note: We want to get rid of this operation as it messes up compiler invariants.
*/
- def unitOf(s: SourceFile): RichCompilationUnit = unitOfFile get s.file match {
- case Some(unit) =>
- unit
- case None =>
- val unit = new RichCompilationUnit(s)
- unitOfFile(s.file) = unit
- unit
- }
+ @deprecated("use getUnitOf(s) or onUnitOf(s) instead")
+ def unitOf(s: SourceFile): RichCompilationUnit = getOrCreateUnitOf(s)
/** The compilation unit corresponding to a position */
- def unitOf(pos: Position): RichCompilationUnit = unitOf(pos.source)
+ @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead")
+ def unitOf(pos: Position): RichCompilationUnit = getOrCreateUnitOf(pos.source)
- /** Remove the CompilationUnit corresponding to the given SourceFile
+ /** Removes the CompilationUnit corresponding to the given SourceFile
* from consideration for recompilation.
*/
- def removeUnitOf(s: SourceFile) = unitOfFile remove s.file
+ def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
+
+ /** Returns the top level classes and objects that were deleted
+ * in the editor since last time recentlyDeleted() was called.
+ */
+ def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized {
+ val result = deletedTopLevelSyms
+ deletedTopLevelSyms.clear()
+ result.toList
+ }
/** Locate smallest tree that encloses position
+ * @pre Position must be loaded
*/
- def locateTree(pos: Position): Tree =
- new Locator(pos) locateIn unitOf(pos).body
+ def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body }
/** Locates smallest context that encloses position as an optional value.
*/
def locateContext(pos: Position): Option[Context] =
- locateContext(unitOf(pos).contexts, pos)
+ for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx
/** Returns the smallest context that contains given `pos`, throws FatalError if none exists.
*/
@@ -73,74 +103,268 @@ trait CompilerControl { self: Global =>
throw new FatalError("no context found for "+pos)
}
- /** Make sure a set of compilation units is loaded and parsed.
- * Return () to syncvar `result` on completion.
- */
- def askReload(sources: List[SourceFile], result: Response[Unit]) =
- scheduler postWorkItem new WorkItem {
- def apply() = reload(sources, result)
- override def toString = "reload "+sources
- }
+ private def postWorkItem(item: WorkItem) =
+ if (item.onCompilerThread) item() else scheduler.postWorkItem(item)
- /** Set sync var `result` to a fully attributed tree located at position `pos`
+ /** Makes sure a set of compilation units is loaded and parsed.
+ * Returns () to syncvar `response` on completion.
+ * Afterwards a new background compiler run is started with
+ * the given sources at the head of the list of to-be-compiled sources.
*/
- def askTypeAt(pos: Position, result: Response[Tree]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getTypedTreeAt(pos, result)
- override def toString = "typeat "+pos.source+" "+pos.show
+ def askReload(sources: List[SourceFile], response: Response[Unit]) = {
+ val superseeded = scheduler.dequeueAll {
+ case ri: ReloadItem if ri.sources == sources => Some(ri)
+ case _ => None
}
+ superseeded.foreach(_.response.set())
+ postWorkItem(new ReloadItem(sources, response))
+ }
- def askType(source: SourceFile, forceReload: Boolean, result: Response[Tree]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getTypedTree(source, forceReload, result)
- override def toString = "typecheck"
+ /** Removes source files and toplevel symbols, and issues a new typer run.
+ * Returns () to syncvar `response` on completion.
+ */
+ def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]) = {
+ postWorkItem(new FilesDeletedItem(sources, response))
}
- /** Set sync var `result' to list of members that are visible
+ /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
+ * Note: Unlike for most other ask... operations, the source file belonging to `pos` need not be loaded.
+ */
+ def askTypeAt(pos: Position, response: Response[Tree]) =
+ postWorkItem(new AskTypeAtItem(pos, response))
+
+ /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
+ * @pre `source` needs to be loaded.
+ */
+ def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
+ postWorkItem(new AskTypeItem(source, forceReload, response))
+
+ /** Sets sync var `response` to the position of the definition of the given link in
+ * the given sourcefile.
+ *
+ * @param sym The symbol referenced by the link (might come from a classfile)
+ * @param source The source file that's supposed to contain the definition
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition that is referenced by the given link
+ * the position of that definition, otherwise NoPosition.
+ * Note: This operation does not automatically load `source`. If `source`
+ * is unloaded, it stays that way.
+ */
+ def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
+ postWorkItem(new AskLinkPosItem(sym, source, response))
+
+ /** Sets sync var `response' to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
- * - if `selection` is false, as identifiers in the scope enclosing `pos`
+ * @pre source is loaded
*/
- def askTypeCompletion(pos: Position, result: Response[List[Member]]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getTypeCompletion(pos, result)
- override def toString = "type completion "+pos.source+" "+pos.show
- }
+ def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
+ postWorkItem(new AskTypeCompletionItem(pos, response))
- /** Set sync var `result' to list of members that are visible
+ /** Sets sync var `response' to list of members that are visible
* as members of the scope enclosing `pos`.
+ * @pre source is loaded
*/
- def askScopeCompletion(pos: Position, result: Response[List[Member]]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getScopeCompletion(pos, result)
- override def toString = "scope completion "+pos.source+" "+pos.show
- }
+ def askScopeCompletion(pos: Position, response: Response[List[Member]]) =
+ postWorkItem(new AskScopeCompletionItem(pos, response))
- /** Ask to do unit first on present and subsequent type checking passes */
- def askToDoFirst(f: SourceFile) = {
- scheduler postWorkItem new WorkItem {
- def apply() = moveToFront(List(f))
- override def toString = "dofirst "+f
+ /** Asks to do the unit corresponding to the given source file first on present and subsequent type checking passes.
+ * If the file is in the 'crashedFiles' ignore list it is removed and typechecked normally.
+ */
+ def askToDoFirst(source: SourceFile) =
+ postWorkItem(new AskToDoFirstItem(source))
+
+ /** If source is not yet loaded, loads it, and starts a new run, otherwise
+ * continues with current pass.
+ * Waits until source is fully type checked and returns body in response.
+ * @param source The source file that needs to be fully typed.
+ * @param response The response, which is set to the fully attributed tree of `source`.
+ * If the unit corresponding to `source` has been removed in the meantime
+ * a NoSuchUnitError is raised in the response.
+ */
+ def askLoadedTyped(source: SourceFile, response: Response[Tree]) =
+ postWorkItem(new AskLoadedTypedItem(source, response))
+
+ /** If source is not yet loaded, get an outline view with askParsedEntered.
+ * If source is loaded, wait for it to be typechecked.
+ * In both cases, set response to parsed (and possibly typechecked) tree.
+ * @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
+ */
+ def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = {
+ getUnit(source) match {
+ case Some(_) => askLoadedTyped(source, response)
+ case None => askParsedEntered(source, keepSrcLoaded, response)
}
}
- /** Cancel currently pending high-priority jobs */
- def askCancel() =
- scheduler.raise(new CancelActionReq)
+ /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered.
+ * @param source The source file to be analyzed
+ * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
+ * If keepLoaded is `false` the operation is run at low priority, only after
+ * everything is brought up to date in a regular type checker run.
+ * @param response The response.
+ */
+ def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) =
+ postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response))
- /** Cancel current compiler run and start a fresh one where everything will be re-typechecked
+ /** Cancels current compiler run and start a fresh one where everything will be re-typechecked
* (but not re-loaded).
*/
- def askReset() =
- scheduler.raise(new FreshRunReq)
+ def askReset() = scheduler raise (new FreshRunReq)
- /** Tell the compile server to shutdown, and do not restart again */
- def askShutdown() =
- scheduler.raise(new ShutdownReq)
+ /** Tells the compile server to shutdown, and not to restart again */
+ def askShutdown() = scheduler raise ShutdownReq
- // ---------------- Interpreted exeptions -------------------
+ @deprecated("use parseTree(source) instead") // deleted 2nd parameter, as thius has to run on 2.8 also.
+ def askParse(source: SourceFile, response: Response[Tree]) = respond(response) {
+ parseTree(source)
+ }
+
+ /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
+ * Can be called asynchronously from presentation compiler.
+ */
+ def parseTree(source: SourceFile): Tree = ask { () =>
+ getUnit(source) match {
+ case Some(unit) if unit.status >= JustParsed =>
+ unit.body
+ case _ =>
+ new UnitParser(new CompilationUnit(source)).parse()
+ }
+ }
+
+ /** Asks for a computation to be done quickly on the presentation compiler thread */
+ def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op
- class CancelActionReq extends Exception with ControlException
- class FreshRunReq extends Exception with ControlException
- class ShutdownReq extends Exception with ControlException
+ def onCompilerThread = Thread.currentThread == compileRunner
+
+ /** Info given for every member found by completion
+ */
+ abstract class Member {
+ val sym: Symbol
+ val tpe: Type
+ val accessible: Boolean
+ def implicitlyAdded = false
+ }
+
+ case class TypeMember(
+ sym: Symbol,
+ tpe: Type,
+ accessible: Boolean,
+ inherited: Boolean,
+ viaView: Symbol) extends Member {
+ override def implicitlyAdded = viaView != NoSymbol
+ }
+
+ case class ScopeMember(
+ sym: Symbol,
+ tpe: Type,
+ accessible: Boolean,
+ viaImport: Tree) extends Member
+
+ // items that get sent to scheduler
+
+ abstract class WorkItem extends (() => Unit) {
+ val onCompilerThread = self.onCompilerThread
+
+ /** Raise a MissingResponse if the work item carries a response. */
+ def raiseMissing(): Unit
+ }
+
+ case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
+ def apply() = reload(sources, response)
+ override def toString = "reload "+sources
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
+ def apply() = filesDeleted(sources, response)
+ override def toString = "files deleted "+sources
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskTypeAtItem(val pos: Position, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getTypedTreeAt(pos, response)
+ override def toString = "typeat "+pos.source+" "+pos.show
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskTypeItem(val source: SourceFile, val forceReload: Boolean, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getTypedTree(source, forceReload, response)
+ override def toString = "typecheck"
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskTypeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ def apply() = self.getTypeCompletion(pos, response)
+ override def toString = "type completion "+pos.source+" "+pos.show
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskScopeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ def apply() = self.getScopeCompletion(pos, response)
+ override def toString = "scope completion "+pos.source+" "+pos.show
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ class AskToDoFirstItem(val source: SourceFile) extends WorkItem {
+ def apply() = {
+ moveToFront(List(source))
+ enableIgnoredFile(source.file)
+ }
+ override def toString = "dofirst "+source
+
+ def raiseMissing() = ()
+ }
+
+ case class AskLinkPosItem(val sym: Symbol, val source: SourceFile, response: Response[Position]) extends WorkItem {
+ def apply() = self.getLinkPos(sym, source, response)
+ override def toString = "linkpos "+sym+" in "+source
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem {
+ def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread)
+ override def toString = "wait loaded & typed "+source
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskParsedEnteredItem(val source: SourceFile, val keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
+ override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
}
+
+ // ---------------- Interpreted exceptions -------------------
+
+/** Signals a request for a fresh background compiler run.
+ * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+class FreshRunReq extends ControlThrowable
+
+/** Signals a request for a shutdown of the presentation compiler.
+ * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+object ShutdownReq extends ControlThrowable
+
+class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file)
+
+class MissingResponse extends Exception("response missing")
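
To make the ask* protocol above concrete, here is a hypothetical client-side sketch. It assumes an already constructed interactive Global, that Response exposes a blocking get returning Either[T, Throwable], and that rangePos comes from the RangePositions mixin; none of this is prescribed by the patch itself:

import scala.tools.nsc.interactive.{ Global, Response }
import scala.tools.nsc.util.SourceFile

object AskSketch {
  /** Reloads `source`, then asks for the smallest typed tree enclosing `offset`. */
  def showTypeAt(compiler: Global, source: SourceFile, offset: Int): Unit = {
    val loaded = new Response[Unit]
    compiler.askReload(List(source), loaded)      // schedule a reload + background run
    loaded.get                                    // block until the reload is acknowledged (assumed Response.get)

    val typedAt = new Response[compiler.Tree]
    compiler.askTypeAt(compiler.rangePos(source, offset, offset, offset), typedAt)
    typedAt.get match {
      case Left(tree) => println("typed tree at offset: " + tree)
      case Right(ex)  => println("request failed: " + ex) // e.g. superseded by a FreshRunReq
    }
  }
}
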
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
index af50e4e468..fc48d4819c 100644
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
@@ -1,3 +1,7 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
package scala.tools.nsc
package interactive
@@ -17,7 +21,7 @@ trait ContextTrees { self: Global =>
* 3. The `pos` field of a context is the same as `context.tree.pos`, unless that
* position is transparent. In that case, `pos` equals the position of
* one of the solid descendants of `context.tree`.
- * 4. Children of a context have non-overlapping increasining positions.
+ * 4. Children of a context have non-overlapping increasing positions.
* 5. No context in the tree has a transparent position.
*/
class ContextTree(val pos: Position, val context: Context, val children: ArrayBuffer[ContextTree]) {
@@ -27,7 +31,7 @@ trait ContextTrees { self: Global =>
/** Optionally returns the smallest context that contains given `pos`, or None if none exists.
*/
- def locateContext(contexts: Contexts, pos: Position): Option[Context] = {
+ def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized {
def locateNearestContextTree(contexts: Contexts, pos: Position, recent: Array[ContextTree]): Option[ContextTree] = {
locateContextTree(contexts, pos) match {
case Some(x) =>
@@ -46,7 +50,7 @@ trait ContextTrees { self: Global =>
if (contexts.isEmpty) None
else {
val hi = contexts.length - 1
- if ((contexts(hi).pos precedes pos) || (pos precedes contexts(0).pos)) None
+ if ((contexts(hi).pos properlyPrecedes pos) || (pos properlyPrecedes contexts(0).pos)) None
else {
def loop(lo: Int, hi: Int): Option[ContextTree] = {
val mid = (lo + hi) / 2
@@ -70,7 +74,7 @@ trait ContextTrees { self: Global =>
* If the `context` has a transparent position, add it multiple times
* at the positions of all its solid descendant trees.
*/
- def addContext(contexts: Contexts, context: Context) {
+ def addContext(contexts: Contexts, context: Context): Unit = {
val cpos = context.tree.pos
if (cpos.isTransparent)
for (t <- context.tree.children flatMap solidDescendants)
@@ -82,7 +86,7 @@ trait ContextTrees { self: Global =>
/** Insert a context with non-transparent position `cpos`
* at correct position into a buffer of context trees.
*/
- def addContext(contexts: Contexts, context: Context, cpos: Position) {
+ def addContext(contexts: Contexts, context: Context, cpos: Position): Unit = synchronized {
try {
if (!cpos.isRange) {}
else if (contexts.isEmpty) contexts += new ContextTree(cpos, context)
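
The locateContextTree change above still relies on the same binary search over children with non-overlapping, increasing positions. A standalone sketch of that search, reduced to plain integer ranges (all names invented), may help:

object ContextSearchSketch {
  final case class Span(start: Int, end: Int)      // half-open [start, end)

  /** Index of the span containing `pos`, if any. Assumes spans are
   *  non-overlapping and sorted by start, mirroring invariant 4 above. */
  def locate(spans: IndexedSeq[Span], pos: Int): Option[Int] = {
    def loop(lo: Int, hi: Int): Option[Int] =
      if (lo > hi) None
      else {
        val mid = (lo + hi) / 2
        val s = spans(mid)
        if (pos < s.start) loop(lo, mid - 1)
        else if (pos >= s.end) loop(mid + 1, hi)
        else Some(mid)
      }
    if (spans.isEmpty) None else loop(0, spans.length - 1)
  }

  def main(args: Array[String]): Unit = {
    val spans = IndexedSeq(Span(0, 10), Span(10, 25), Span(40, 60))
    println(locate(spans, 12))   // Some(1)
    println(locate(spans, 30))   // None: the position falls between children
  }
}
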
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index be3af86d53..1bbaef9912 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -1,101 +1,254 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
package scala.tools.nsc
package interactive
-import java.io.{ PrintWriter, StringWriter }
+import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
+import collection.mutable.{ArrayBuffer, ListBuffer, SynchronizedBuffer, HashMap}
-import scala.collection.mutable.{LinkedHashMap, SynchronizedMap}
+import scala.collection.mutable
+import mutable.{LinkedHashMap, SynchronizedMap, HashSet, LinkedHashSet, SynchronizedSet}
import scala.concurrent.SyncVar
-import scala.util.control.ControlException
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{SourceFile, Position, RangePosition, OffsetPosition, NoPosition, WorkScheduler}
+import scala.util.control.ControlThrowable
+import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
+import scala.tools.nsc.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition, WorkScheduler, MultiHashMap }
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
+import scala.tools.nsc.io.Pickler._
+import scala.tools.nsc.typechecker.DivergentImplicit
+import scala.annotation.tailrec
+import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
-class Global(settings: Settings, reporter: Reporter)
+class Global(settings: Settings, reporter: Reporter, projectName: String = "")
extends scala.tools.nsc.Global(settings, reporter)
with CompilerControl
with RangePositions
with ContextTrees
- with RichCompilationUnits {
-self =>
+ with RichCompilationUnits
+ with Picklers {
import definitions._
- final val debugIDE = false
+ val debugIDE: Boolean = settings.YpresentationDebug.value
+ val verboseIDE: Boolean = settings.YpresentationVerbose.value
- override def onlyPresentation = true
+ private def replayName = settings.YpresentationReplay.value
+ private def logName = settings.YpresentationLog.value
+ private def afterTypeDelay = settings.YpresentationDelay.value
+ private final val SleepTime = 10
- /** A list indicating in which order some units should be typechecked.
- * All units in firsts are typechecked before any unit not in this list
- * Modified by askToDoFirst, reload, typeAtTree.
- */
- var firsts: List[SourceFile] = List()
+ val log =
+ if (replayName != "") new Replayer(new FileReader(replayName))
+ else if (logName != "") new Logger(new FileWriter(logName))
+ else NullLogger
+
+ import log.logreplay
+ debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
+ debugLog("classpath: "+classPath)
+
+ private var curTime = System.nanoTime
+ private def timeStep = {
+ val last = curTime
+ curTime = System.nanoTime
+ ", delay = " + (curTime - last) / 1000000 + "ms"
+ }
+
+ /** Print msg only when debugIDE is true. */
+ @inline final def debugLog(msg: => String) =
+ if (debugIDE) println("[%s] %s".format(projectName, msg))
+
+ /** Inform with msg only when verboseIDE is true. */
+ @inline final def informIDE(msg: => String) =
+ if (verboseIDE) println("[%s][%s]".format(projectName, msg))
+
+ override def forInteractive = true
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
- SynchronizedMap[AbstractFile, RichCompilationUnit]
+ SynchronizedMap[AbstractFile, RichCompilationUnit] {
+ override def put(key: AbstractFile, value: RichCompilationUnit) = {
+ val r = super.put(key, value)
+ if (r.isEmpty) debugLog("added unit for "+key)
+ r
+ }
+ override def remove(key: AbstractFile) = {
+ val r = super.remove(key)
+ if (r.nonEmpty) debugLog("removed unit for "+key)
+ r
+ }
+ }
+
+ /** A set containing all those files that need to be removed
+ * Units are removed by getUnit, typically once a unit has finished compiling.
+ */
+ protected val toBeRemoved: mutable.Set[AbstractFile] =
+ new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
+
+ /** A set containing all those files that need to be removed after a full background compiler run
+ */
+ protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] =
+ new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
+
+ class ResponseMap extends MultiHashMap[SourceFile, Response[Tree]] {
+ override def += (binding: (SourceFile, Set[Response[Tree]])) = {
+ assert(interruptsEnabled, "delayed operation within an ask")
+ super.+=(binding)
+ }
+ }
+
+ /** A map that associates with each abstract file the set of responses that are waiting
+ * (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked.
+ */
+ protected val waitLoadedTypeResponses = new ResponseMap
+
+ /** A map that associates with each abstract file the set of responses that are waiting
+ * (via build) for the unit associated with the abstract file to be parsed and entered
+ */
+ protected var getParsedEnteredResponses = new ResponseMap
+
+ private def cleanResponses(rmap: ResponseMap): Unit = {
+ for ((source, rs) <- rmap.toList) {
+ for (r <- rs) {
+ if (getUnit(source).isEmpty)
+ r raise new NoSuchUnitError(source.file)
+ if (r.isComplete)
+ rmap(source) -= r
+ }
+ if (rmap(source).isEmpty)
+ rmap -= source
+ }
+ }
+
+ private def cleanAllResponses() {
+ cleanResponses(waitLoadedTypeResponses)
+ cleanResponses(getParsedEnteredResponses)
+ }
+
+ private def checkNoOutstanding(rmap: ResponseMap): Unit =
+ for ((_, rs) <- rmap.toList; r <- rs) {
+ debugLog("ERROR: missing response, request will be discarded")
+ r raise new MissingResponse
+ }
+
+ def checkNoResponsesOutstanding() {
+ checkNoOutstanding(waitLoadedTypeResponses)
+ checkNoOutstanding(getParsedEnteredResponses)
+ }
+
+ /** The compilation unit corresponding to a source file
+ * if it does not yet exist, create a new one atomically.
+ * Note: We want to remove this.
+ */
+ protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit =
+ unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) })
+
+ /** Work through the toBeRemoved list to remove any units.
+ * Then optionally return the unit associated with the given source.
+ */
+ protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = {
+ toBeRemoved.synchronized {
+ for (f <- toBeRemoved) {
+ informIDE("removed: "+s)
+ unitOfFile -= f
+ allSources = allSources filter (_.file != f)
+ }
+ toBeRemoved.clear()
+ }
+ unitOfFile get s.file
+ }
+
+ /** A list giving all files to be typechecked in the order they should be checked.
+ */
+ protected var allSources: List[SourceFile] = List()
+
+ private var lastException: Option[Throwable] = None
+
+ /** A list of files that crashed the compiler. They will be ignored during background
+ * compilation until they are removed from this list.
+ */
+ private var ignoredFiles: Set[AbstractFile] = Set()
+
+ /** Flush the buffer of sources that are ignored during background compilation. */
+ def clearIgnoredFiles() {
+ ignoredFiles = Set()
+ }
+
+ /** Remove a crashed file from the ignore buffer. Background compilation will take it into account
+ * and errors will be reported against it. */
+ def enableIgnoredFile(file: AbstractFile) {
+ ignoredFiles -= file
+ debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles))
+ }
/** The currently active typer run */
private var currentTyperRun: TyperRun = _
+ newTyperRun()
- /** Is a background compiler run needed? */
+ /** Is a background compiler run needed?
+ * Note: outOfDate is true as long as there is a background compile scheduled or going on.
+ */
private var outOfDate = false
- /** Units compiled by a run with id >= minRunId are considered up-to-date */
- private[interactive] var minRunId = 1
+ def isOutOfDate: Boolean = outOfDate
+
+ def demandNewCompilerRun() = {
+ if (outOfDate) throw new FreshRunReq // cancel background compile
+ else outOfDate = true // proceed normally and enable new background compile
+ }
- /** Is a reload/background compiler currently running? */
- private var acting = false
+ protected[interactive] var minRunId = 1
+
+ private var interruptsEnabled = true
+
+ private val NoResponse: Response[_] = new Response[Any]
+
+ /** The response that is currently pending, i.e. the compiler
+ * is working on providing an answer for it.
+ */
+ private var pendingResponse: Response[_] = NoResponse
// ----------- Overriding hooks in nsc.Global -----------------------
- /** Called from typechecker, which signal hereby that a node has been completely typechecked.
- * If the node is included in unit.targetPos, abandons run and returns newly attributed tree.
+ /** Called from parser, which signals hereby that a method definition has been parsed.
+ */
+ override def signalParseProgress(pos: Position) {
+ checkForMoreWork(pos)
+ }
+
+ /** Called from typechecker, which signals hereby that a node has been completely typechecked.
+ * If the node includes unit.targetPos, abandons run and returns newly attributed tree.
* Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq.
* @param context The context that typechecked the node
* @param old The original node
* @param result The transformed node
*/
override def signalDone(context: Context, old: Tree, result: Tree) {
- def integrateNew() {
- context.unit.body = new TreeReplacer(old, result) transform context.unit.body
- }
- if (activeLocks == 0) {
+ if (interruptsEnabled && analyzer.lockedCount == 0) {
if (context.unit != null &&
result.pos.isOpaqueRange &&
(result.pos includes context.unit.targetPos)) {
- integrateNew()
- var located = new Locator(context.unit.targetPos) locateIn result
+ var located = new TypedLocator(context.unit.targetPos) locateIn result
if (located == EmptyTree) {
println("something's wrong: no "+context.unit+" in "+result+result.pos)
located = result
}
throw new TyperResult(located)
}
- val typerRun = currentTyperRun
-
- while(true)
- try {
- pollForWork()
- if (typerRun == currentTyperRun)
- return
-
- // @Martin
- // Guard against NPEs in integrateNew if context.unit == null here.
- // But why are we doing this at all? If it was non-null previously
- // integrateNew will already have been called. If it was null previously
- // it will still be null now?
- if (context.unit != null)
- integrateNew()
- throw new FreshRunReq
- } catch {
- case ex : ValidateError => // Ignore, this will have been reported elsewhere
- case t : Throwable => throw t
- }
+ try {
+ checkForMoreWork(old.pos)
+ } catch {
+ case ex: ValidateException => // Ignore, this will have been reported elsewhere
+ debugLog("validate exception caught: "+ex)
+ case ex: Throwable =>
+ log.flush()
+ throw ex
+ }
}
}
@@ -107,39 +260,131 @@ self =>
case _ =>
}
+ /** The top level classes and objects currently seen in the presentation compiler
+ */
+ private val currentTopLevelSyms = new mutable.LinkedHashSet[Symbol]
+
+ /** The top level classes and objects no longer seen in the presentation compiler
+ */
+ val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol]
+
+ /** Called from typechecker every time a top-level class or object is entered.
+ */
+ override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym }
+
+ /** Symbol loaders in the IDE parse all source files loaded from a package for
+ * top-level idents. Therefore, we can detect top-level symbols that have a name
+ * different from their source file.
+ */
+ override lazy val loaders = new BrowsingLoaders {
+ val global: Global.this.type = Global.this
+ }
+
// ----------------- Polling ---------------------------------------
+ case class WorkEvent(atNode: Int, atMillis: Long)
+
+ private var moreWorkAtNode: Int = -1
+ private var nodesSeen = 0
+ private var lastWasReload = false
+
+ /** The number of pollForWork calls after which the presentation compiler yields.
+ * Yielding improves responsiveness on systems with few cores because it
+ * gives the UI thread a chance to get new tasks and interrupt the presentation
+ * compiler with them.
+ */
+ private final val yieldPeriod = 10
+
/** Called from runner thread and signalDone:
- * Poll for exeptions.
- * Poll for work reload/typedTreeAt/doFirst commands during background checking.
+ * Poll for interrupts and execute them immediately.
+ * Then, poll for exceptions and execute them.
+ * Then, poll for work reload/typedTreeAt/doFirst commands during background checking.
+ * @param pos The position of the tree if polling while typechecking, NoPosition otherwise
+ *
*/
- def pollForWork() {
- scheduler.pollException() match {
- case Some(ex: CancelActionReq) => if (acting) throw ex
- case Some(ex: FreshRunReq) =>
- currentTyperRun = new TyperRun()
- minRunId = currentRunId
- if (outOfDate) throw ex
- else outOfDate = true
- case Some(ex: Throwable) => throw ex
- case _ =>
- }
- scheduler.nextWorkItem() match {
- case Some(action) =>
- try {
- acting = true
- if (debugIDE) println("picked up work item: "+action)
- action()
- if (debugIDE) println("done with work item: "+action)
- } catch {
- case ex: CancelActionReq =>
- if (debugIDE) println("cancelled work item: "+action)
- } finally {
- if (debugIDE) println("quitting work item: "+action)
- acting = false
- }
+ private[interactive] def pollForWork(pos: Position) {
+ if (!interruptsEnabled) return
+ if (pos == NoPosition || nodesSeen % yieldPeriod == 0)
+ Thread.`yield`()
+
+ def nodeWithWork(): Option[WorkEvent] =
+ if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis))
+ else None
+
+ nodesSeen += 1
+ logreplay("atnode", nodeWithWork()) match {
+ case Some(WorkEvent(id, _)) =>
+ debugLog("some work at node "+id+" current = "+nodesSeen)
+// assert(id >= nodesSeen)
+ moreWorkAtNode = id
case None =>
}
+
+ if (nodesSeen >= moreWorkAtNode) {
+
+ logreplay("asked", scheduler.pollInterrupt()) match {
+ case Some(ir) =>
+ try {
+ interruptsEnabled = false
+ debugLog("ask started"+timeStep)
+ ir.execute()
+ } finally {
+ debugLog("ask finished"+timeStep)
+ interruptsEnabled = true
+ }
+ pollForWork(pos)
+ case _ =>
+ }
+
+ if (logreplay("cancelled", pendingResponse.isCancelled)) {
+ throw CancelException
+ }
+
+ logreplay("exception thrown", scheduler.pollThrowable()) match {
+ case Some(ex: FreshRunReq) =>
+ newTyperRun()
+ minRunId = currentRunId
+ demandNewCompilerRun()
+
+ case Some(ShutdownReq) =>
+ scheduler.synchronized { // lock the work queue so no more items are posted while we clean it up
+ val units = scheduler.dequeueAll {
+ case item: WorkItem => Some(item.raiseMissing())
+ case _ => Some(())
+ }
+ debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
+ debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
+ .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
+ checkNoResponsesOutstanding()
+
+ log.flush();
+ throw ShutdownReq
+ }
+
+ case Some(ex: Throwable) => log.flush(); throw ex
+ case _ =>
+ }
+
+ lastWasReload = false
+
+ logreplay("workitem", scheduler.nextWorkItem()) match {
+ case Some(action) =>
+ try {
+ debugLog("picked up work item at "+pos+": "+action+timeStep)
+ action()
+ debugLog("done with work item: "+action)
+ } finally {
+ debugLog("quitting work item: "+action+timeStep)
+ }
+ case None =>
+ }
+ }
+ }
+
+ protected def checkForMoreWork(pos: Position) {
+ val typerRun = currentTyperRun
+ pollForWork(pos)
+ if (typerRun != currentTyperRun) demandNewCompilerRun()
}
def debugInfo(source : SourceFile, start : Int, length : Int): String = {
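
The polling machinery above follows a common shape: between typechecked nodes the background thread yields periodically and drains a queue of client requests. A simplified, hypothetical sketch of that loop (not the compiler's actual scheduler):

import java.util.concurrent.ConcurrentLinkedQueue

object PollingSketch {
  private val workQueue   = new ConcurrentLinkedQueue[() => Unit]()
  private val yieldPeriod = 10
  private var nodesSeen   = 0

  /** Called from a client thread: schedule work for the background thread. */
  def postWorkItem(item: () => Unit): Unit = workQueue.add(item)

  /** Called by the background thread after each unit of work (e.g. a typechecked node). */
  def pollForWork(): Unit = {
    nodesSeen += 1
    if (nodesSeen % yieldPeriod == 0) Thread.`yield`()  // give the client thread a chance to post requests
    var item = workQueue.poll()
    while (item != null) {
      item()                 // run pending high-priority requests in between nodes
      item = workQueue.poll()
    }
  }
}
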
@@ -150,7 +395,7 @@ self =>
val tree = locateTree(pos)
val sw = new StringWriter
val pw = new PrintWriter(sw)
- treePrinters.create(pw).print(tree)
+ newTreePrinter(pw).print(tree)
pw.flush
val typed = new Response[Tree]
@@ -159,7 +404,7 @@ self =>
case Some(tree) =>
val sw = new StringWriter
val pw = new PrintWriter(sw)
- treePrinters.create(pw).print(tree)
+ newTreePrinter(pw).print(tree)
pw.flush
sw.toString
case None => "<None>"
@@ -179,257 +424,591 @@ self =>
// ----------------- The Background Runner Thread -----------------------
+ private var threadId = 0
+
/** The current presentation compiler runner */
- private var compileRunner = newRunnerThread
+ @volatile private[interactive] var compileRunner = newRunnerThread()
/** Create a new presentation compiler runner.
*/
- def newRunnerThread: Thread = new Thread("Scala Presentation Compiler") {
- override def run() {
+ private def newRunnerThread(): Thread = {
+ threadId += 1
+ compileRunner = new PresentationCompilerThread(this, projectName)
+ compileRunner.start()
+ compileRunner
+ }
+
+ /** Compile all loaded source files in the order given by `allSources`.
+ */
+ private[interactive] final def backgroundCompile() {
+ informIDE("Starting new presentation compiler type checking pass")
+ reporter.reset()
+
+ // remove any files in allSources that are no longer maintained by the presentation compiler (i.e. closed)
+ allSources = allSources filter (s => unitOfFile contains (s.file))
+
+ // ensure all loaded units are parsed
+ for (s <- allSources; unit <- getUnit(s)) {
+ checkForMoreWork(NoPosition)
+ if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+ parseAndEnter(unit)
+ serviceParsedEntered()
+ }
+
+ // sleep window
+ if (afterTypeDelay > 0 && lastWasReload) {
+ val limit = System.currentTimeMillis() + afterTypeDelay
+ while (System.currentTimeMillis() < limit) {
+ Thread.sleep(SleepTime)
+ checkForMoreWork(NoPosition)
+ }
+ }
+
+ // ensure all loaded units are typechecked
+ for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) {
try {
- while (true) {
- scheduler.waitForMoreWork()
- pollForWork()
- while (outOfDate) {
- try {
- backgroundCompile()
- outOfDate = false
- } catch {
- case ex: FreshRunReq =>
- }
- }
- }
+ if (!unit.isUpToDate)
+ if (unit.problems.isEmpty || !settings.YpresentationStrict.value)
+ typeCheck(unit)
+ else debugLog("%s has syntax errors. Skipped typechecking".format(unit))
+ else debugLog("already up to date: "+unit)
+ for (r <- waitLoadedTypeResponses(unit.source))
+ r set unit.body
+ serviceParsedEntered()
} catch {
- case ex: ShutdownReq =>
- ;
+ case ex: FreshRunReq => throw ex // propagate a new run request
+ case ShutdownReq => throw ShutdownReq // propagate a shutdown request
+
case ex =>
- outOfDate = false
- compileRunner = newRunnerThread
- ex match {
- case _ : FreshRunReq => // This shouldn't be reported
- case _ : ValidateError => // This will have been reported elsewhere
- case _ => ex.printStackTrace(); inform("Fatal Error: "+ex)
+ println("[%s]: exception during background compile: ".format(unit.source) + ex)
+ ex.printStackTrace()
+ for (r <- waitLoadedTypeResponses(unit.source)) {
+ r.raise(ex)
}
+ serviceParsedEntered()
+
+ lastException = Some(ex)
+ ignoredFiles += unit.source.file
+ println("[%s] marking unit as crashed (crashedFiles: %s)".format(unit, ignoredFiles))
+
+ reporter.error(unit.body.pos, "Presentation compiler crashed while type checking this file: %s".format(ex.toString()))
}
}
- start()
+
+ // move units removable after this run to the "to-be-removed" buffer
+ toBeRemoved ++= toBeRemovedAfterRun
+
+ // clean out stale waiting responses
+ cleanAllResponses()
+
+ // wind down
+ if (waitLoadedTypeResponses.nonEmpty || getParsedEnteredResponses.nonEmpty) {
+ // need another cycle to treat those
+ newTyperRun()
+ backgroundCompile()
+ } else {
+ outOfDate = false
+ informIDE("Everything is now up to date")
+ }
}
- /** Compile all given units
+ /** Service all pending getParsedEntered requests
*/
- private def backgroundCompile() {
- if (debugIDE) inform("Starting new presentation compiler type checking pass")
- reporter.reset
- firsts = firsts filter (s => unitOfFile contains (s.file))
- val prefix = firsts map unitOf
- val units = prefix ::: (unitOfFile.valuesIterator.toList diff prefix) filter (!_.isUpToDate)
- recompile(units)
- if (debugIDE) inform("Everything is now up to date")
- }
-
- /** Reset unit to just-parsed state */
- def reset(unit: RichCompilationUnit): Unit =
- if (unit.status > JustParsed) {
- unit.depends.clear()
- unit.defined.clear()
- unit.synthetics.clear()
- unit.toCheck.clear()
- unit.targetPos = NoPosition
- unit.contexts.clear()
- unit.body = EmptyTree
- unit.status = NotLoaded
- }
-
- /** Parse unit and create a name index. */
- def parse(unit: RichCompilationUnit): Unit = {
- currentTyperRun.compileLate(unit)
- if (!reporter.hasErrors) validatePositions(unit.body)
- //println("parsed: [["+unit.body+"]]")
- unit.status = JustParsed
- }
-
- /** Make sure symbol and type attributes are reset and recompile units.
+ private def serviceParsedEntered() {
+ var atOldRun = true
+ for ((source, rs) <- getParsedEnteredResponses; r <- rs) {
+ if (atOldRun) { newTyperRun(); atOldRun = false }
+ getParsedEnteredNow(source, r)
+ }
+ getParsedEnteredResponses.clear()
+ }
+
+ /** Reset unit to unloaded state */
+ private def reset(unit: RichCompilationUnit): Unit = {
+ unit.depends.clear()
+ unit.defined.clear()
+ unit.synthetics.clear()
+ unit.toCheck.clear()
+ unit.targetPos = NoPosition
+ unit.contexts.clear()
+ unit.problems.clear()
+ unit.body = EmptyTree
+ unit.status = NotLoaded
+ }
+
+ /** Parse unit and create a name index, unless this has already been done */
+ private def parseAndEnter(unit: RichCompilationUnit): Unit =
+ if (unit.status == NotLoaded) {
+ debugLog("parsing: "+unit)
+ currentTyperRun.compileLate(unit)
+ if (debugIDE && !reporter.hasErrors) validatePositions(unit.body)
+ if (!unit.isJava) syncTopLevelSyms(unit)
+ unit.status = JustParsed
+ }
+
+ /** Make sure unit is typechecked
*/
- def recompile(units: List[RichCompilationUnit]) {
- for (unit <- units) {
- reset(unit)
- if (debugIDE) inform("parsing: "+unit)
- parse(unit)
+ private def typeCheck(unit: RichCompilationUnit) {
+ debugLog("type checking: "+unit)
+ parseAndEnter(unit)
+ unit.status = PartiallyChecked
+ currentTyperRun.typeCheck(unit)
+ unit.lastBody = unit.body
+ unit.status = currentRunId
+ }
+
+ /** Update deleted and current top-level symbols sets */
+ def syncTopLevelSyms(unit: RichCompilationUnit) {
+ val deleted = currentTopLevelSyms filter { sym =>
+ /** We sync after the namer phase, which resets to NoPeriod
+ * all top-level symbols that survive the new parsing round.
+ */
+ sym.sourceFile == unit.source.file &&
+ sym.validTo != NoPeriod &&
+ runId(sym.validTo) < currentRunId
}
- for (unit <- units) {
- if (debugIDE) inform("type checking: "+unit)
- activeLocks = 0
- currentTyperRun.typeCheck(unit)
- unit.status = currentRunId
+ for (d <- deleted) {
+ d.owner.info.decls unlink d
+ deletedTopLevelSyms += d
+ currentTopLevelSyms -= d
}
}
- /** Move list of files to front of firsts */
+ /** Move list of files to front of allSources */
def moveToFront(fs: List[SourceFile]) {
- firsts = fs ::: (firsts diff fs)
+ allSources = fs ::: (allSources diff fs)
}
- // ----------------- Implementations of client commmands -----------------------
+ // ----------------- Implementations of client commands -----------------------
def respond[T](result: Response[T])(op: => T): Unit =
+ respondGradually(result)(Stream(op))
+
+ def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = {
+ val prevResponse = pendingResponse
try {
- result set Left(op)
- return
+ pendingResponse = response
+ if (!response.isCancelled) {
+ var results = op
+ while (!response.isCancelled && results.nonEmpty) {
+ val result = results.head
+ results = results.tail
+ if (results.isEmpty) {
+ response set result
+ debugLog("responded"+timeStep)
+ } else response setProvisionally result
+ }
+ }
} catch {
- case ex : FreshRunReq =>
- scheduler.postWorkItem(() => respond(result)(op))
+ case CancelException =>
+ debugLog("cancelled")
+ case ex: FreshRunReq =>
+ if (debugIDE) {
+ println("FreshRunReq thrown during response")
+ ex.printStackTrace()
+ }
+ response raise ex
throw ex
case ex =>
- result set Right(ex)
- throw ex
+ if (debugIDE) {
+ println("exception thrown during response: "+ex)
+ ex.printStackTrace()
+ }
+ response raise ex
+ } finally {
+ pendingResponse = prevResponse
}
+ }
+
+ private def reloadSource(source: SourceFile) {
+ val unit = new RichCompilationUnit(source)
+ unitOfFile(source.file) = unit
+ toBeRemoved -= source.file
+ toBeRemovedAfterRun -= source.file
+ reset(unit)
+ //parseAndEnter(unit)
+ }
/** Make sure a set of compilation units is loaded and parsed */
- def reloadSources(sources: List[SourceFile]) {
- currentTyperRun = new TyperRun()
- for (source <- sources) {
- val unit = new RichCompilationUnit(source)
- unitOfFile(source.file) = unit
- parse(unit)
- }
+ private def reloadSources(sources: List[SourceFile]) {
+ newTyperRun()
+ minRunId = currentRunId
+ sources foreach reloadSource
moveToFront(sources)
}
/** Make sure a set of compilation units is loaded and parsed */
- def reload(sources: List[SourceFile], result: Response[Unit]) {
- respond(result)(reloadSources(sources))
- if (outOfDate) throw new FreshRunReq
- else outOfDate = true
+ private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) {
+ informIDE("reload: " + sources)
+ lastWasReload = true
+ respond(response)(reloadSources(sources))
+ demandNewCompilerRun()
}
- /** A fully attributed tree located at position `pos` */
- def typedTreeAt(pos: Position): Tree = {
- val unit = unitOf(pos)
- val sources = List(unit.source)
- if (unit.status == NotLoaded) reloadSources(sources)
- moveToFront(sources)
- val typedTree = currentTyperRun.typedTreeAt(pos)
- new Locator(pos) locateIn typedTree
+ private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) {
+ informIDE("files deleted: " + sources)
+ val deletedFiles = sources.map(_.file).toSet
+ val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile}
+ for (d <- deletedSyms) {
+ d.owner.info.decls unlink d
+ deletedTopLevelSyms += d
+ currentTopLevelSyms -= d
+ }
+ sources foreach (removeUnitOf(_))
+ minRunId = currentRunId
+ respond(response) ()
+ demandNewCompilerRun()
+ }
+
+ /** Arrange for the unit to be removed after the run, so that it still gets a chance to be typechecked fully.
+ * If we just called removeUnit, problems with default parameters could ensue.
+ * Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
+ */
+ private def afterRunRemoveUnitOf(source: SourceFile) {
+ toBeRemovedAfterRun += source.file
+ }
+
+ /** A fully attributed tree located at position `pos` */
+ private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
+ case None =>
+ reloadSources(List(pos.source))
+ try typedTreeAt(pos)
+ finally afterRunRemoveUnitOf(pos.source)
+ case Some(unit) =>
+ informIDE("typedTreeAt " + pos)
+ parseAndEnter(unit)
+ val tree = locateTree(pos)
+ debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show)
+ tree match {
+ case Import(expr, _) =>
+ debugLog("import found"+expr.tpe+" "+expr.tpe.members)
+ case _ =>
+ }
+ if (stabilizedType(tree) ne null) {
+ debugLog("already attributed: "+tree.symbol+" "+tree.tpe)
+ tree
+ } else {
+ unit.targetPos = pos
+ try {
+ debugLog("starting targeted type check")
+ typeCheck(unit)
+ println("tree not found at "+pos)
+ EmptyTree
+ } catch {
+ case ex: TyperResult => new Locator(pos) locateIn ex.tree
+ } finally {
+ unit.targetPos = NoPosition
+ }
+ }
}
/** A fully attributed tree corresponding to the entire compilation unit */
- def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
- val unit = unitOf(source)
- val sources = List(source)
- if (unit.status == NotLoaded || forceReload) reloadSources(sources)
- moveToFront(sources)
- currentTyperRun.typedTree(unitOf(source))
+ private def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
+ informIDE("typedTree " + source + " forceReload: " + forceReload)
+ val unit = getOrCreateUnitOf(source)
+ if (forceReload) reset(unit)
+ parseAndEnter(unit)
+ if (unit.status <= PartiallyChecked) typeCheck(unit)
+ unit.body
+ }
+
+ /** Set sync var `response` to a fully attributed tree located at position `pos` */
+ private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) {
+ respond(response)(typedTreeAt(pos))
}
- /** Set sync var `result` to a fully attributed tree located at position `pos` */
- def getTypedTreeAt(pos: Position, result: Response[Tree]) {
- respond(result)(typedTreeAt(pos))
+ /** Set sync var `response` to a fully attributed tree corresponding to the
+ * entire compilation unit */
+ private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) {
+ respond(response)(typedTree(source, forceReload))
}
- /** Set sync var `result` to a fully attributed tree corresponding to the entire compilation unit */
- def getTypedTree(source : SourceFile, forceReload: Boolean, result: Response[Tree]) {
- respond(result)(typedTree(source, forceReload))
+ /** Implements CompilerControl.askLinkPos */
+ private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
+
+ /** Find position of symbol `sym` in unit `unit`. Pre: `unit` is loaded. */
+ def findLinkPos(unit: RichCompilationUnit): Position = {
+ val originalTypeParams = sym.owner.typeParams
+ parseAndEnter(unit)
+ val pre = adaptToNewRunMap(ThisType(sym.owner))
+ val newsym = pre.typeSymbol.info.decl(sym.name) filter { alt =>
+ sym.isType || {
+ try {
+ val tp1 = pre.memberType(alt) onTypeError NoType
+ val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
+ matchesType(tp1, tp2, false)
+ } catch {
+ case ex: Throwable =>
+ println("error in hyperlinking: " + ex)
+ ex.printStackTrace()
+ false
+ }
+ }
+ }
+ if (newsym == NoSymbol) {
+ debugLog("link not found " + sym + " " + source + " " + pre)
+ NoPosition
+ } else if (newsym.isOverloaded) {
+ settings.uniqid.value = true
+ debugLog("link ambiguous " + sym + " " + source + " " + pre + " " + newsym.alternatives)
+ NoPosition
+ } else {
+ debugLog("link found for " + newsym + ": " + newsym.pos)
+ newsym.pos
+ }
+ }
+
+ informIDE("getLinkPos "+sym+" "+source)
+ respond(response) {
+ if (sym.owner.isClass) {
+ getUnit(source) match {
+ case None =>
+ reloadSources(List(source))
+ try findLinkPos(getUnit(source).get)
+ finally afterRunRemoveUnitOf(source)
+ case Some(unit) =>
+ findLinkPos(unit)
+ }
+ } else {
+ debugLog("link not in class "+sym+" "+source+" "+sym.owner)
+ NoPosition
+ }
+ }
}
def stabilizedType(tree: Tree): Type = tree match {
- case Ident(_) if tree.symbol.isStable => singleType(NoPrefix, tree.symbol)
- case Select(qual, _) if tree.symbol.isStable => singleType(qual.tpe, tree.symbol)
+ case Ident(_) if tree.symbol.isStable =>
+ singleType(NoPrefix, tree.symbol)
+ case Select(qual, _) if qual.tpe != null && tree.symbol.isStable =>
+ singleType(qual.tpe, tree.symbol)
+ case Import(expr, selectors) =>
+ tree.symbol.info match {
+ case analyzer.ImportType(expr) => expr match {
+ case s@Select(qual, name) => singleType(qual.tpe, s.symbol)
+ case i : Ident => i.tpe
+ case _ => tree.tpe
+ }
+ case _ => tree.tpe
+ }
+
case _ => tree.tpe
}
import analyzer.{SearchResult, ImplicitSearch}
- def getScopeCompletion(pos: Position, result: Response[List[Member]]) {
- respond(result) { scopeMembers(pos) }
+ private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]) {
+ informIDE("getScopeCompletion" + pos)
+ respond(response) { scopeMembers(pos) }
}
- val Dollar = newTermName("$")
+ private val Dollar = newTermName("$")
+
+ private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
+ override def default(key: Name) = Set()
+
+ private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m =>
+ (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe))
+ }
+
+ private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean =
+ m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+ !sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+ (!implicitlyAdded || m.implicitlyAdded)
+
+ def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
+ if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
+ add(sym.accessed, pre, implicitlyAdded)(toMember)
+ } else if (!sym.name.decode.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
+ val symtpe = pre.memberType(sym) onTypeError ErrorType
+ matching(sym, symtpe, this(sym.name)) match {
+ case Some(m) =>
+ if (keepSecond(m, sym, implicitlyAdded)) {
+ //print(" -+ "+sym.name)
+ this(sym.name) = this(sym.name) - m + toMember(sym, symtpe)
+ }
+ case None =>
+ //print(" + "+sym.name)
+ this(sym.name) = this(sym.name) + toMember(sym, symtpe)
+ }
+ }
+ }
+
+ def addNonShadowed(other: Members[M]) = {
+ for ((name, ms) <- other)
+ if (ms.nonEmpty && this(name).isEmpty) this(name) = ms
+ }
+
+ def allMembers: List[M] = values.toList.flatten
+ }
/** Return all members visible without prefix in context enclosing `pos`. */
- def scopeMembers(pos: Position): List[ScopeMember] = {
+ private def scopeMembers(pos: Position): List[ScopeMember] = {
typedTreeAt(pos) // to make sure context is entered
val context = doLocateContext(pos)
- val locals = new LinkedHashMap[Name, ScopeMember]
+ val locals = new Members[ScopeMember]
+ val enclosing = new Members[ScopeMember]
def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
- if (!sym.name.decode.containsName(Dollar) &&
- !sym.hasFlag(Flags.SYNTHETIC) &&
- !locals.contains(sym.name)) {
- //println("adding scope member: "+pre+" "+sym)
- locals(sym.name) = new ScopeMember(
- sym,
- pre.memberType(sym),
- context.isAccessible(sym, pre, false),
- viaImport)
+ locals.add(sym, pre, false) { (s, st) =>
+ new ScopeMember(s, st, context.isAccessible(s, pre, false), viaImport)
}
+ def localsToEnclosing() = {
+ enclosing.addNonShadowed(locals)
+ locals.clear()
+ }
+ //print("add scope members")
var cx = context
while (cx != NoContext) {
for (sym <- cx.scope)
addScopeMember(sym, NoPrefix, EmptyTree)
- cx = cx.enclClass
- val pre = cx.prefix
- for (sym <- pre.members)
- addScopeMember(sym, pre, EmptyTree)
+ localsToEnclosing()
+ if (cx == cx.enclClass) {
+ val pre = cx.prefix
+ for (sym <- pre.members)
+ addScopeMember(sym, pre, EmptyTree)
+ localsToEnclosing()
+ }
cx = cx.outer
}
+ //print("\nadd imported members")
for (imp <- context.imports) {
val pre = imp.qual.tpe
- for (sym <- imp.allImportedSymbols) {
+ for (sym <- imp.allImportedSymbols)
addScopeMember(sym, pre, imp.qual)
- }
+ localsToEnclosing()
}
- val result = locals.valuesIterator.toList
- if (debugIDE) for (m <- result) println(m)
+ // println()
+ val result = enclosing.allMembers
+// if (debugIDE) for (m <- result) println(m)
result
}
- def getTypeCompletion(pos: Position, result: Response[List[Member]]) {
- respond(result) { typeMembers(pos) }
- if (debugIDE) scopeMembers(pos)
+ private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) {
+ informIDE("getTypeCompletion " + pos)
+ respondGradually(response) { typeMembers(pos) }
+ //if (debugIDE) typeMembers(pos)
}
- def typeMembers(pos: Position): List[TypeMember] = {
- val tree = typedTreeAt(pos)
- println("typeMembers at "+tree+" "+tree.tpe)
+ private def typeMembers(pos: Position): Stream[List[TypeMember]] = {
+ var tree = typedTreeAt(pos)
+
+ // if tree consists of just x. or x.fo where fo is not yet a full member name
+ // ignore the selection and look in just x.
+ tree match {
+ case Select(qual, name) if tree.tpe == ErrorType => tree = qual
+ case _ =>
+ }
+
val context = doLocateContext(pos)
+
+ if (tree.tpe == null)
+ // TODO: guard with try/catch to deal with ill-typed qualifiers.
+ tree = analyzer.newTyper(context).typedQualifier(tree)
+
+ debugLog("typeMembers at "+tree+" "+tree.tpe)
+
val superAccess = tree.isInstanceOf[Super]
- val scope = new Scope
- val members = new LinkedHashMap[Symbol, TypeMember]
- def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) {
- val symtpe = pre.memberType(sym)
- if (scope.lookupAll(sym.name) forall (sym => !(members(sym).tpe matches symtpe))) {
- scope enter sym
- members(sym) = new TypeMember(
- sym,
- symtpe,
- context.isAccessible(sym, pre, superAccess && (viaView == NoSymbol)),
+ val members = new Members[TypeMember]
+
+ def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = {
+ val implicitlyAdded = viaView != NoSymbol
+ members.add(sym, pre, implicitlyAdded) { (s, st) =>
+ new TypeMember(s, st,
+ context.isAccessible(s, pre, superAccess && !implicitlyAdded),
inherited,
viaView)
}
}
+
+ /** Create a function application of a given view function to `tree` and typecheck it.
+ */
def viewApply(view: SearchResult): Tree = {
assert(view.tree != EmptyTree)
- try {
- analyzer.newTyper(context.makeImplicit(false)).typed(Apply(view.tree, List(tree)) setPos tree.pos)
- } catch {
- case ex: TypeError => EmptyTree
- }
+ analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false))
+ .typed(Apply(view.tree, List(tree)) setPos tree.pos)
+ .onTypeError(EmptyTree)
}
+
val pre = stabilizedType(tree)
- for (sym <- tree.tpe.decls)
- addTypeMember(sym, pre, false, NoSymbol)
- for (sym <- tree.tpe.members)
- addTypeMember(sym, pre, true, NoSymbol)
- val applicableViews: List[SearchResult] =
- new ImplicitSearch(tree, functionType(List(tree.tpe), AnyClass.tpe), true, context.makeImplicit(false))
- .allImplicits
- for (view <- applicableViews) {
- val vtree = viewApply(view)
- val vpre = stabilizedType(vtree)
- for (sym <- vtree.tpe.members) {
- addTypeMember(sym, vpre, false, view.tree.symbol)
+
+ val ownerTpe = tree.tpe match {
+ case analyzer.ImportType(expr) => expr.tpe
+ case null => pre
+ case MethodType(List(), rtpe) => rtpe
+ case _ => tree.tpe
+ }
+
+ //print("add members")
+ for (sym <- ownerTpe.members)
+ addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
+ members.allMembers #:: {
+ //print("\nadd pimped")
+ val applicableViews: List[SearchResult] =
+ if (ownerTpe.isErroneous) List()
+ else new ImplicitSearch(
+ tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true,
+ context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
+ for (view <- applicableViews) {
+ val vtree = viewApply(view)
+ val vpre = stabilizedType(vtree)
+ for (sym <- vtree.tpe.members) {
+ addTypeMember(sym, vpre, false, view.tree.symbol)
+ }
+ }
+ //println()
+ Stream(members.allMembers)
+ }
+ }
+
+ /** Implements CompilerControl.askLoadedTyped */
+ private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], onSameThread: Boolean = true) {
+ getUnit(source) match {
+ case Some(unit) =>
+ if (unit.isUpToDate) {
+ debugLog("already typed");
+ response set unit.body
+ } else if (ignoredFiles(source.file)) {
+ response.raise(lastException.getOrElse(CancelException))
+ } else if (onSameThread) {
+ getTypedTree(source, forceReload = false, response)
+ } else {
+ debugLog("wait for later")
+ outOfDate = true
+ waitLoadedTypeResponses(source) += response
+ }
+ case None =>
+ debugLog("load unit and type")
+ try reloadSources(List(source))
+ finally waitLoadedTyped(source, response, onSameThread)
+ }
+ }
+
+ /** Implements CompilerControl.askParsedEntered */
+ private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true) {
+ getUnit(source) match {
+ case Some(unit) =>
+ getParsedEnteredNow(source, response)
+ case None =>
+ try {
+ if (keepLoaded || outOfDate && onSameThread)
+ reloadSources(List(source))
+ } finally {
+ if (keepLoaded || !outOfDate || onSameThread)
+ getParsedEnteredNow(source, response)
+ else
+ getParsedEnteredResponses(source) += response
+ }
+ }
+ }
+
+ /** Parses and enters the given source file, storing the parse tree in the response */
+ private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
+ respond(response) {
+ onUnitOf(source) { unit =>
+ parseAndEnter(unit)
+ unit.body
}
}
- members.valuesIterator.toList
}
// ---------------- Helper classes ---------------------------
@@ -443,66 +1022,19 @@ self =>
}
}
- /** A traverser that resets all type and symbol attributes in a tree
- object ResetAttrs extends Transformer {
- override def transform(t: Tree): Tree = {
- if (t.hasSymbol) t.symbol = NoSymbol
- t match {
- case EmptyTree =>
- t
- case tt: TypeTree =>
- if (tt.original != null) tt.original
- else t
- case _ =>
- t.tpe = null
- super.transform(t)
- }
- }
- }
- */
-
/** The typer run */
class TyperRun extends Run {
// units is always empty
- // symSource, symData are ignored
- override def compiles(sym: Symbol) = false
-
- def typeCheck(unit: CompilationUnit): Unit = applyPhase(typerPhase, unit)
- def enterNames(unit: CompilationUnit): Unit = applyPhase(namerPhase, unit)
-
- /** Return fully attributed tree at given position
- * (i.e. largest tree that's contained by position)
+ /** canRedefine is used to detect double declarations of classes and objects
+ * in multiple source files.
+ * Since the IDE rechecks units several times in the same run, these tests
+ * are disabled by always returning true here.
*/
- def typedTreeAt(pos: Position): Tree = {
- println("starting typedTreeAt")
- val tree = locateTree(pos)
- println("at pos "+pos+" was found: "+tree+tree.pos.show)
- if (tree.tpe ne null) {
- println("already attributed")
- tree
- } else {
- val unit = unitOf(pos)
- assert(unit.status >= JustParsed)
- unit.targetPos = pos
- try {
- println("starting targeted type check")
- typeCheck(unit)
- throw new FatalError("tree not found")
- } catch {
- case ex: TyperResult =>
- ex.tree
- } finally {
- unit.targetPos = NoPosition
- }
- }
- }
+ override def canRedefine(sym: Symbol) = true
- def typedTree(unit: RichCompilationUnit): Tree = {
- assert(unit.status >= JustParsed)
- unit.targetPos = NoPosition
- typeCheck(unit)
- unit.body
+ def typeCheck(unit: CompilationUnit): Unit = {
+ applyPhase(typerPhase, unit)
}
/** Apply a phase to a compilation unit
@@ -510,17 +1042,35 @@ self =>
*/
private def applyPhase(phase: Phase, unit: CompilationUnit) {
val oldSource = reporter.getSource
- try {
- reporter.setSource(unit.source)
+ reporter.withSource(unit.source) {
atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
- } finally {
- reporter setSource oldSource
}
}
}
- class TyperResult(val tree: Tree) extends Exception with ControlException
+ def newTyperRun() {
+ currentTyperRun = new TyperRun
+ }
+
+ class TyperResult(val tree: Tree) extends ControlThrowable
assert(globalPhase.id == 0)
+
+ implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
+
+ class OnTypeError[T](op: => T) {
+ def onTypeError(alt: => T) = try {
+ op
+ } catch {
+ case ex: TypeError =>
+ debugLog("type error caught: "+ex)
+ alt
+ case ex: DivergentImplicit =>
+ debugLog("divergent implicit caught: "+ex)
+ alt
+ }
+ }
}
+object CancelException extends Exception
+
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
new file mode 100644
index 0000000000..397e83a362
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import collection.mutable.ArrayBuffer
+import util.Position
+import reporters.Reporter
+
+case class Problem(pos: Position, msg: String, severityLevel: Int)
+
+abstract class InteractiveReporter extends Reporter {
+
+ def compiler: Global
+
+ val otherProblems = new ArrayBuffer[Problem]
+
+ override def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = try {
+ severity.count += 1
+ val problems =
+ if (compiler eq null) {
+ otherProblems
+ } else if (pos.isDefined) {
+ compiler.getUnit(pos.source) match {
+ case Some(unit) =>
+ compiler.debugLog(pos.source.file.name + ":" + pos.line + ": " + msg)
+ unit.problems
+ case None =>
+ compiler.debugLog(pos.source.file.name + "[not loaded] :" + pos.line + ": " + msg)
+ otherProblems
+ }
+ } else {
+ compiler.debugLog("[no position] :" + msg)
+ otherProblems
+ }
+ problems += Problem(pos, msg, severity.id)
+ } catch {
+ case ex: UnsupportedOperationException =>
+ }
+
+ override def reset() {
+ super.reset()
+ otherProblems.clear()
+ }
+}
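The reporter above routes each diagnostic either into the problem buffer of the compilation unit that owns its position or, if no unit is loaded for that source, into the shared otherProblems buffer. Below is a minimal, self-contained sketch of that routing rule; the names (Pos, ProblemRouter, loadedUnits) are illustrative and not part of the compiler's API.

    import scala.collection.mutable
    import scala.collection.mutable.ArrayBuffer

    // Toy stand-ins for the compiler's Position and Problem types.
    case class Pos(fileName: String, line: Int)
    case class Problem(pos: Pos, msg: String, severityLevel: Int)

    // Routes problems to the owning unit's buffer when that unit is loaded,
    // and to a shared buffer otherwise (mirrors the decision in info0 above).
    class ProblemRouter(loadedUnits: Set[String]) {
      val otherProblems = new ArrayBuffer[Problem]
      val unitProblems  = mutable.Map.empty[String, ArrayBuffer[Problem]]

      def report(pos: Pos, msg: String, severityLevel: Int): Unit = {
        val buffer =
          if (loadedUnits(pos.fileName))
            unitProblems.getOrElseUpdate(pos.fileName, new ArrayBuffer)
          else
            otherProblems
        buffer += Problem(pos, msg, severityLevel)
      }
    }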
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
new file mode 100644
index 0000000000..561fa47e94
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -0,0 +1,184 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import util.{SourceFile, BatchSourceFile, InterruptReq}
+import io.{AbstractFile, PlainFile}
+
+import util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition, EmptyAction}
+import io.{Pickler, CondPickler}
+import io.Pickler._
+import collection.mutable
+import mutable.ListBuffer
+
+trait Picklers { self: Global =>
+
+ lazy val freshRunReq =
+ unitPickler
+ .wrapped { _ => new FreshRunReq } { x => () }
+ .labelled ("FreshRunReq")
+ .cond (_.isInstanceOf[FreshRunReq])
+
+ lazy val shutdownReq = singletonPickler(ShutdownReq)
+
+ def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true }
+
+ implicit lazy val throwable: Pickler[Throwable] =
+ freshRunReq | shutdownReq | defaultThrowable
+
+ implicit def abstractFile: Pickler[AbstractFile] =
+ pkl[String]
+ .wrapped[AbstractFile] { new PlainFile(_) } { _.path }
+ .asClass (classOf[PlainFile])
+
+ private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] {
+ override def default(key: AbstractFile) = Array()
+ }
+
+ type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/)
+
+ def delta(f: AbstractFile, cs: Array[Char]): Diff = {
+ val bs = sourceFilesSeen(f)
+ var start = 0
+ while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
+ var end = bs.length
+ var end2 = cs.length
+ while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
+ sourceFilesSeen(f) = cs
+ (start, end, cs.slice(start, end2).mkString(""))
+ }
+
+ def patch(f: AbstractFile, d: Diff): Array[Char] = {
+ val (start, end, replacement) = d
+ val patched = sourceFilesSeen(f).patch(start, replacement, end - start)
+ sourceFilesSeen(f) = patched
+ patched
+ }
+
+ implicit lazy val sourceFile: Pickler[SourceFile] =
+ (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] {
+ case f ~ d => new BatchSourceFile(f, patch(f, d))
+ } {
+ f => f.file ~ delta(f.file, f.content)
+ }.asClass (classOf[BatchSourceFile])
+
+ lazy val offsetPosition: CondPickler[OffsetPosition] =
+ (pkl[SourceFile] ~ pkl[Int])
+ .wrapped { case x ~ y => new OffsetPosition(x, y) } { p => p.source ~ p.point }
+ .asClass (classOf[OffsetPosition])
+
+ lazy val rangePosition: CondPickler[RangePosition] =
+ (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+ .wrapped { case source ~ start ~ point ~ end => new RangePosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
+ .asClass (classOf[RangePosition])
+
+ lazy val transparentPosition: CondPickler[TransparentPosition] =
+ (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+ .wrapped { case source ~ start ~ point ~ end => new TransparentPosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
+ .asClass (classOf[TransparentPosition])
+
+ lazy val noPosition = singletonPickler(NoPosition)
+
+ implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition
+
+ implicit lazy val namePickler: Pickler[Name] =
+ pkl[String] .wrapped {
+ str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str)
+ } {
+ name => if (name.isTypeName) name.toString+"!" else name.toString
+ }
+
+ implicit lazy val symPickler: Pickler[Symbol] = {
+ def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = {
+ if (!sym.isRoot) {
+ ownerNames(sym.owner, buf)
+ buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
+ if (!sym.isType && !sym.isStable) {
+ val sym1 = sym.owner.info.decl(sym.name)
+ if (sym1.isOverloaded) {
+ val index = sym1.alternatives.indexOf(sym)
+ assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
+ buf += index.toString
+ }
+ }
+ }
+ buf
+ }
+ def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match {
+ case List() =>
+ root
+ case name :: rest =>
+ val sym = root.info.decl(name)
+ if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail)
+ else makeSymbol(sym, rest)
+ }
+ pkl[List[Name]] .wrapped { makeSymbol(definitions.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
+ }
+
+ implicit def workEvent: Pickler[WorkEvent] = {
+ (pkl[Int] ~ pkl[Long])
+ .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis }
+ }
+
+ implicit def interruptReq: Pickler[InterruptReq] = {
+ val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () }
+ pkl[Unit] .wrapped { _ => emptyIR } { _ => () }
+ }
+
+ implicit def reloadItem: CondPickler[ReloadItem] =
+ pkl[List[SourceFile]]
+ .wrapped { ReloadItem(_, new Response) } { _.sources }
+ .asClass (classOf[ReloadItem])
+
+ implicit def askTypeAtItem: CondPickler[AskTypeAtItem] =
+ pkl[Position]
+ .wrapped { new AskTypeAtItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskTypeAtItem])
+
+ implicit def askTypeItem: CondPickler[AskTypeItem] =
+ (pkl[SourceFile] ~ pkl[Boolean])
+ .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload }
+ .asClass (classOf[AskTypeItem])
+
+ implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] =
+ pkl[Position]
+ .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskTypeCompletionItem])
+
+ implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] =
+ pkl[Position]
+ .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskScopeCompletionItem])
+
+ implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] =
+ pkl[SourceFile]
+ .wrapped { new AskToDoFirstItem(_) } { _.source }
+ .asClass (classOf[AskToDoFirstItem])
+
+ implicit def askLinkPosItem: CondPickler[AskLinkPosItem] =
+ (pkl[Symbol] ~ pkl[SourceFile])
+ .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
+ .asClass (classOf[AskLinkPosItem])
+
+ implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
+ pkl[SourceFile]
+ .wrapped { source => new AskLoadedTypedItem(source, new Response) } { _.source }
+ .asClass (classOf[AskLoadedTypedItem])
+
+ implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] =
+ (pkl[SourceFile] ~ pkl[Boolean])
+ .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source ~ w.keepLoaded }
+ .asClass (classOf[AskParsedEnteredItem])
+
+ implicit def emptyAction: CondPickler[EmptyAction] =
+ pkl[Unit]
+ .wrapped { _ => new EmptyAction } { _ => () }
+ .asClass (classOf[EmptyAction])
+
+ implicit def action: Pickler[() => Unit] =
+ reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
+ askToDoFirstItem | askLinkPosItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
+}
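The delta/patch pair above ships only the changed middle slice of a source file: a common prefix and suffix are skipped, and the replacement string in between is recorded. A self-contained sketch of the same idea, independent of the pickling machinery (all names below are illustrative):

    object DiffSketch {
      type Diff = (Int /*start*/, Int /*end in old*/, String /*replacement*/)

      // Longest common prefix and suffix; everything in between is the replacement.
      def delta(old: Array[Char], now: Array[Char]): Diff = {
        var start = 0
        while (start < old.length && start < now.length && old(start) == now(start)) start += 1
        var end  = old.length
        var end2 = now.length
        while (end > start && end2 > start && old(end - 1) == now(end2 - 1)) { end -= 1; end2 -= 1 }
        (start, end, now.slice(start, end2).mkString)
      }

      // Applying the diff to the old text reproduces the new text.
      def patch(old: Array[Char], d: Diff): Array[Char] = {
        val (start, end, replacement) = d
        old.patch(start, replacement.toCharArray, end - start)
      }

      def main(args: Array[String]): Unit = {
        val before = "val x = 1".toCharArray
        val after  = "val x = 42".toCharArray
        val d = delta(before, after)               // (8, 9, "42")
        assert(patch(before, d).sameElements(after))
      }
    }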
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
new file mode 100644
index 0000000000..098884dab1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
@@ -0,0 +1,51 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ * @author Iulian Dragos
+ */
+package scala.tools.nsc.interactive
+
+/** A presentation compiler thread. This is a lightweight class, delegating most
+ * of its functionality to the compiler instance.
+ *
+ */
+final class PresentationCompilerThread(var compiler: Global, name: String = "")
+ extends Thread("Scala Presentation Compiler [" + name + "]") {
+
+ /** The presentation compiler loop.
+ */
+ override def run() {
+ compiler.debugLog("starting new runner thread")
+ while (compiler ne null) try {
+ compiler.checkNoResponsesOutstanding()
+ compiler.log.logreplay("wait for more work", { compiler.scheduler.waitForMoreWork(); true })
+ compiler.pollForWork(compiler.NoPosition)
+ while (compiler.isOutOfDate) {
+ try {
+ compiler.backgroundCompile()
+ } catch {
+ case ex: FreshRunReq =>
+ compiler.debugLog("fresh run req caught, starting new pass")
+ }
+ compiler.log.flush()
+ }
+ } catch {
+ case ex @ ShutdownReq =>
+ compiler.debugLog("exiting presentation compiler")
+ compiler.log.close()
+
+ // make sure we don't keep around stale instances
+ compiler = null
+ case ex =>
+ compiler.log.flush()
+
+ ex match {
+ case ex: FreshRunReq =>
+ compiler.debugLog("fresh run req caught outside presentation compiler loop; ignored") // This shouldn't be reported
+ case _ : Global#ValidateException => // This will have been reported elsewhere
+ compiler.debugLog("validate exception caught outside presentation compiler loop; ignored")
+ case _ => ex.printStackTrace(); compiler.informIDE("Fatal Error: "+ex)
+ }
+ }
+ }
+}
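A toy, self-contained model of the loop above, showing only its shape: block until work arrives, run it, then keep doing background passes while the state is marked out of date. Everything here (ToyLoop, the queue, the flag) is illustrative and does not reflect the real WorkScheduler API.

    import java.util.concurrent.LinkedBlockingQueue

    class ToyLoop {
      private val scheduler = new LinkedBlockingQueue[() => Unit]()
      @volatile var outOfDate = false

      def postWorkItem(item: () => Unit): Unit = scheduler.put(item)

      def run(): Unit =
        while (true) {
          val item = scheduler.take()   // wait for more work
          item()                        // poll for work
          while (outOfDate) {           // keep compiling until everything is up to date
            outOfDate = false
            backgroundPass()
          }
        }

      private def backgroundPass(): Unit =
        println("background compile pass")
    }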
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index f3b1900ef2..2690a6c79b 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -1,3 +1,7 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
package scala.tools.nsc
package interactive
@@ -20,15 +24,15 @@ object REPL {
var reporter: ConsoleReporter = _
- def error(msg: String) {
+ private def replError(msg: String) {
reporter.error(/*new Position */FakePos("scalac"),
msg + "\n scalac -help gives more information")
}
def process(args: Array[String]) {
- val settings = new Settings(error)
+ val settings = new Settings(replError)
reporter = new ConsoleReporter(settings)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
reporter.info(null, versionMsg, true)
else {
@@ -80,9 +84,12 @@ object REPL {
* complete file off1 off2?
*/
def run(comp: Global) {
- val reloadResult = new comp.Response[Unit]
- val typeatResult = new comp.Response[comp.Tree]
- val completeResult = new comp.Response[List[comp.Member]]
+ val reloadResult = new Response[Unit]
+ val typeatResult = new Response[comp.Tree]
+ val completeResult = new Response[List[comp.Member]]
+ val typedResult = new Response[comp.Tree]
+ val structureResult = new Response[comp.Tree]
+
def makePos(file: String, off1: String, off2: String) = {
val source = toSourceFile(file)
comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt)
@@ -95,11 +102,28 @@ object REPL {
comp.askTypeCompletion(pos, completeResult)
show(completeResult)
}
+ def doTypedTree(file: String) {
+ comp.askType(toSourceFile(file), true, typedResult)
+ show(typedResult)
+ }
+ def doStructure(file: String) {
+ comp.askParsedEntered(toSourceFile(file), false, structureResult)
+ show(structureResult)
+ }
+
loop { line =>
(line split " ").toList match {
case "reload" :: args =>
comp.askReload(args map toSourceFile, reloadResult)
show(reloadResult)
+ case "reloadAndAskType" :: file :: millis :: Nil =>
+ comp.askReload(List(toSourceFile(file)), reloadResult)
+ Thread.sleep(millis.toInt)
+ println("ask type now")
+ comp.askType(toSourceFile(file), false, typedResult)
+ typedResult.get
+ case List("typed", file) =>
+ doTypedTree(file)
case List("typeat", file, off1, off2) =>
doTypeAt(makePos(file, off1, off2))
case List("typeat", file, off1) =>
@@ -109,7 +133,11 @@ object REPL {
case List("complete", file, off1) =>
doComplete(makePos(file, off1, off1))
case List("quit") =>
- System.exit(1)
+ comp.askShutdown()
+ // dropped the sys. prefix, as this has to run on 2.8 as well
+ exit(1)
+ case List("structure", file) =>
+ doStructure(file)
case _ =>
println("unrecongized command")
}
@@ -118,11 +146,11 @@ object REPL {
def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name)))
- def show[T](svar: SyncVar[Either[T, Throwable]]) {
+ def show[T](svar: Response[T]) {
svar.get match {
case Left(result) => println("==> "+result)
- case Right(exc/*: Throwable ??*/) => exc.printStackTrace; println("ERROR: "+exc)
+ case Right(exc) => exc.printStackTrace; println("ERROR: "+exc)
}
- svar.unset()
+ svar.clear()
}
}
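For reference, an illustrative session with the command loop above (file name and offsets are made up; each line corresponds to one of the cases parsed in loop):

    reload Foo.scala Bar.scala
    typeat Foo.scala 120 125
    complete Foo.scala 130
    typed Foo.scala
    structure Foo.scala
    reloadAndAskType Foo.scala 500
    quit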
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 2e6041f4c4..873ef50007 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -1,9 +1,13 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
package scala.tools.nsc
package interactive
import ast.Trees
import symtab.Positions
-import scala.tools.nsc.util.{SourceFile, Position, RangePosition, OffsetPosition, NoPosition, WorkScheduler}
+import scala.tools.nsc.util.{SourceFile, Position, RangePosition, NoPosition, WorkScheduler}
import scala.collection.mutable.ListBuffer
/** Handling range positions
@@ -71,7 +75,7 @@ self: scala.tools.nsc.Global =>
Range(new RangePosition(null, lo, lo, hi), EmptyTree)
/** The maximal free range */
- private lazy val maxFree: Range = free(0, Math.MAX_INT)
+ private lazy val maxFree: Range = free(0, Int.MaxValue)
/** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
private def maybeFree(lo: Int, hi: Int) =
@@ -103,7 +107,7 @@ self: scala.tools.nsc.Global =>
/** Ensure that given tree has no positions that overlap with
* any of the positions of `others`. This is done by
- * shortening the range or assinging TransparentPositions
+ * shortening the range or assigning TransparentPositions
* to some of the nodes in `tree`.
*/
override def ensureNonOverlapping(tree: Tree, others: List[Tree]) {
@@ -190,7 +194,7 @@ self: scala.tools.nsc.Global =>
inform("")
}
- def error(msg: String)(body : => Unit) {
+ def positionError(msg: String)(body : => Unit) {
inform("======= Bad positions: "+msg)
inform("")
body
@@ -199,21 +203,21 @@ self: scala.tools.nsc.Global =>
inform(tree.toString)
inform("")
inform("=======")
- throw new ValidateError(msg)
+ throw new ValidateException(msg)
}
def validate(tree: Tree, encltree: Tree): Unit = {
if (!tree.isEmpty) {
if (!tree.pos.isDefined)
- error("Unpositioned tree ["+tree.id+"]") { reportTree("Unpositioned", tree) }
+ positionError("Unpositioned tree ["+tree.id+"]") { reportTree("Unpositioned", tree) }
if (tree.pos.isRange) {
if (!encltree.pos.isRange)
- error("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
+ positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
reportTree("Enclosing", encltree)
reportTree("Enclosed", tree)
}
if (!(encltree.pos includes tree.pos))
- error("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
+ positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
reportTree("Enclosing", encltree)
reportTree("Enclosed", tree)
}
@@ -221,7 +225,7 @@ self: scala.tools.nsc.Global =>
findOverlapping(tree.children flatMap solidDescendants) match {
case List() => ;
case xs => {
- error("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
+ positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
reportTree("Ancestor", tree)
for((x, y) <- xs) {
reportTree("First overlapping", x)
@@ -238,7 +242,7 @@ self: scala.tools.nsc.Global =>
validate(tree, tree)
}
- class ValidateError(msg : String) extends Exception(msg)
+ class ValidateException(msg : String) extends Exception(msg)
// ---------------- Locating trees ----------------------------------
@@ -253,18 +257,24 @@ self: scala.tools.nsc.Global =>
traverse(root)
this.last
}
+ protected def isEligible(t: Tree) = !t.pos.isTransparent
override def traverse(t: Tree) {
- if (t.pos includes pos) {
- if (!t.pos.isTransparent) last = t
- super.traverse(t)
- } else if (t.symbol != null) {
- for(annot <- t.symbol.annotations if !annot.pos.isTransparent) {
- last = Annotated(TypeTree(annot.atp) setPos annot.pos, t)
- last.setType(annot.atp)
- last.setPos(annot.pos)
- traverseTrees(annot.args)
- }
+ t match {
+ case tt : TypeTree if tt.original != null => traverse(tt.original)
+ case _ =>
+ if (t.pos includes pos) {
+ if (isEligible(t)) last = t
+ super.traverse(t)
+ } else t match {
+ case mdef: MemberDef =>
+ traverseTrees(mdef.mods.annotations)
+ case _ =>
+ }
}
}
}
+
+ class TypedLocator(pos: Position) extends Locator(pos) {
+ override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
+ }
}
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index fdc2fd3a24..3a1a9d5bd9 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -1,19 +1,20 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
- * @author Martin Odersky
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Iulian Dragos
+ * @author Hubert Plocinicak
*/
-// $Id$
-
package scala.tools.nsc
package interactive
import scala.collection._
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.util.control.Breaks._
+import scala.tools.nsc.symtab.Flags
import dependencies._
-import util.FakePos
+import util.{FakePos, ClassPath}
import io.AbstractFile
+import scala.tools.util.PathResolver
/** A more defined build manager, based on change sets. For each
* updated source file, it computes the set of changes to its
@@ -32,26 +33,44 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
super.computeInternalPhases
phasesSet += dependencyAnalysis
}
+ lazy val _classpath: ClassPath[_] = new NoSourcePathPathResolver(settings).result
+ override def classPath: ClassPath[_] = _classpath
def newRun() = new Run()
}
+ class NoSourcePathPathResolver(settings: Settings) extends PathResolver(settings) {
+ override def containers = Calculated.basis.dropRight(1).flatten.distinct
+ }
+
protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
val compiler = newCompiler(settings)
- import compiler.Symbol
+ import compiler.{Symbol, Type, atPhase, currentRun}
+ import compiler.dependencyAnalysis.Inherited
+
+ private case class SymWithHistory(sym: Symbol, befErasure: Type)
/** Managed source files. */
private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
- private val definitions: mutable.Map[AbstractFile, List[Symbol]] =
- new mutable.HashMap[AbstractFile, List[Symbol]] {
+ private val definitions: mutable.Map[AbstractFile, List[SymWithHistory]] =
+ new mutable.HashMap[AbstractFile, List[SymWithHistory]] {
override def default(key: AbstractFile) = Nil
}
/** External references used by source file. */
private var references: mutable.Map[AbstractFile, immutable.Set[String]] = _
+ /** External references for inherited members */
+ private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _
+
+ /** Reverse of definitions, used for caching */
+ private var classes: mutable.Map[String, AbstractFile] =
+ new mutable.HashMap[String, AbstractFile] {
+ override def default(key: String) = null
+ }
+
/** Add the given source files to the managed build process. */
def addSourceFiles(files: Set[AbstractFile]) {
sources ++= files
@@ -69,8 +88,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
*/
private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
val changes = new mutable.HashMap[Symbol, List[Change]]
- for (f <- files; sym <- definitions(f))
- changes += sym -> List(Removed(Class(sym.fullNameString)))
+ for (f <- files; SymWithHistory(sym, _) <- definitions(f))
+ changes += sym -> List(Removed(Class(sym.fullName)))
invalidated(files, changes)
}
@@ -87,80 +106,144 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
* of the dependency analysis.
*/
private def update(files: Set[AbstractFile]) = {
- def update0(files: Set[AbstractFile], updated: Set[AbstractFile]): Unit = if (!files.isEmpty) {
+ val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] =
+ mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]()
+ compiler.reporter.reset()
+
+ // See if we really have corresponding symbols, not just those
+ // which share the name
+ def isCorrespondingSym(from: Symbol, to: Symbol): Boolean =
+ (from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) && // has to run in 2.8, so no hasTraitFlag
+ (from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE))
+
+ // For testing purposes only, order irrelevant for compilation
+ def toStringSet(set: Set[AbstractFile]): String =
+ set.toList sortBy (_.name) mkString("Set(", ", ", ")")
+
+ def update0(files: Set[AbstractFile]): Unit = if (!files.isEmpty) {
deleteClassfiles(files)
val run = compiler.newRun()
- compiler.inform("compiling " + files)
+ if (settings.Ybuildmanagerdebug.value)
+ compiler.inform("compiling " + toStringSet(files))
buildingFiles(files)
run.compileFiles(files.toList)
if (compiler.reporter.hasErrors) {
- compiler.reporter.reset
return
}
- val changesOf = new mutable.HashMap[Symbol, List[Change]]
+ // Deterministic behaviour required by partest
+ val changesOf = new mutable.HashMap[Symbol, List[Change]] {
+ override def toString: String = {
+ val changesOrdered =
+ toList.map(e => {
+ e._1.toString + " -> " +
+ e._2.sortBy(_.toString).mkString("List(", ", ", ")")
+ })
+ changesOrdered.sorted.mkString("Map(", ", ", ")")
+ }
+ }
val additionalDefs: mutable.HashSet[AbstractFile] = mutable.HashSet.empty
val defs = compiler.dependencyAnalysis.definitions
for (src <- files) {
if (definitions(src).isEmpty)
- additionalDefs ++= compiler.dependencyAnalysis.
- dependencies.dependentFiles(1, mutable.Set(src))
+ additionalDefs ++= compiler.dependencyAnalysis.
+ dependencies.dependentFiles(1, mutable.Set(src))
else {
val syms = defs(src)
for (sym <- syms) {
- definitions(src).find(_.fullNameString == sym.fullNameString) match {
- case Some(oldSym) =>
- changesOf(oldSym) = changeSet(oldSym, sym)
+ definitions(src).find(
+ s => (s.sym.fullName == sym.fullName) &&
+ isCorrespondingSym(s.sym, sym)) match {
+ case Some(SymWithHistory(oldSym, info)) =>
+ val changes = changeSet(oldSym.info, sym)
+ val changesErasure =
+ atPhase(currentRun.erasurePhase.prev) {
+ changeSet(info, sym)
+ }
+ changesOf(oldSym) = (changes ++ changesErasure).distinct
case _ =>
- // a new top level definition, no need to process
+ // a new top level definition
+ changesOf(sym) =
+ sym.info.parents.filter(_.typeSymbol.isSealed).map(
+ p => changeChangeSet(p.typeSymbol,
+ sym+" extends a sealed "+p.typeSymbol))
}
}
// Create a change for the top level classes that were removed
- val removed = definitions(src) remove ((s: Symbol) =>
- syms.find(_.fullNameString == s.fullNameString) match {
- case None => false
- case _ => true
- })
- for (sym <- removed) {
- changesOf(sym) = List(removeChangeSet(sym))
+ val removed = definitions(src) filterNot ((s:SymWithHistory) =>
+ syms.find(_.fullName == (s.sym.fullName)) != None)
+ for (s <- removed) {
+ changesOf(s.sym) = List(removeChangeSet(s.sym))
}
}
}
- println("Changes: " + changesOf)
+ if (settings.Ybuildmanagerdebug.value)
+ compiler.inform("Changes: " + changesOf)
updateDefinitions(files)
- val compiled = updated ++ files
- val invalid = invalidated(files, changesOf, additionalDefs ++ compiled)
- update0(invalid -- compiled, compiled)
+ val invalid = invalidated(files, changesOf, additionalDefs)
+ update0(checkCycles(invalid, files, coll))
}
- update0(files, immutable.Set())
+ update0(files)
+ // remove the current run in order to save some memory
+ compiler.dropRun()
}
+ // Attempt to break cyclic reference dependencies as soon as possible and reduce
+ // the number of compilations to a minimum, without overly coarse-grained rules
+ private def checkCycles(files: Set[AbstractFile], initial: Set[AbstractFile],
+ collect: mutable.Map[AbstractFile, immutable.Set[AbstractFile]]):
+ Set[AbstractFile] = {
+ def followChain(set: Set[AbstractFile], rest: immutable.Set[AbstractFile]):
+ immutable.Set[AbstractFile] = {
+ val deps:Set[AbstractFile] = set.flatMap(
+ s => collect.get(s) match {
+ case Some(x) => x
+ case _ => Set[AbstractFile]()
+ })
+ val newDeps = deps -- rest
+ if (newDeps.isEmpty) rest else followChain(newDeps, rest ++ newDeps)
+ }
+ var res:Set[AbstractFile] = mutable.Set()
+ files.foreach( f =>
+ if (collect contains f) {
+ val chain = followChain(Set(f), immutable.Set()) ++ files
+ chain.foreach((fc: AbstractFile) => collect += fc -> chain)
+ res ++= chain
+ } else
+ res += f
+ )
+
+ initial.foreach((f: AbstractFile) => collect += (f -> (collect.getOrElse(f, immutable.Set()) ++ res)))
+ if (res.subsetOf(initial)) Set() else res
+ }
/** Return the set of source files that are invalidated by the given changes. */
def invalidated(files: Set[AbstractFile], changesOf: collection.Map[Symbol, List[Change]],
- processed: Set[AbstractFile] = Set.empty): Set[AbstractFile] = {
+ processed: Set[AbstractFile] = Set.empty):
+ Set[AbstractFile] = {
val buf = new mutable.HashSet[AbstractFile]
val newChangesOf = new mutable.HashMap[Symbol, List[Change]]
var directDeps =
compiler.dependencyAnalysis.dependencies.dependentFiles(1, files)
- directDeps --= processed
def invalidate(file: AbstractFile, reason: String, change: Change) = {
- println("invalidate " + file + " because " + reason + " [" + change + "]")
+ if (settings.Ybuildmanagerdebug.value)
+ compiler.inform("invalidate " + file + " because " + reason + " [" + change + "]")
buf += file
directDeps -= file
- for (sym <- definitions(file))
- newChangesOf(sym) = List(change)
+ for (syms <- definitions(file)) // fixes #2557
+ newChangesOf(syms.sym) = List(change, parentChangeSet(syms.sym))
break
}
for ((oldSym, changes) <- changesOf; change <- changes) {
def checkParents(cls: Symbol, file: AbstractFile) {
- val parentChange = cls.info.parents.exists(_.typeSymbol.fullNameString == oldSym.fullNameString)
-// println("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
+ val parentChange = cls.info.parents.exists(_.typeSymbol.fullName == oldSym.fullName)
+ // if (settings.buildmanagerdebug.value)
+ // compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
change match {
case Changed(Class(_)) if parentChange =>
invalidate(file, "parents have changed", change)
@@ -181,10 +264,10 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
def checkInterface(cls: Symbol, file: AbstractFile) {
change match {
case Added(Definition(name)) =>
- if (cls.info.decls.iterator.exists(_.fullNameString == name))
+ if (cls.info.decls.iterator.exists(_.fullName == name))
invalidate(file, "of new method with existing name", change)
case Changed(Class(name)) =>
- if (cls.info.typeSymbol.fullNameString == name)
+ if (cls.info.typeSymbol.fullName == name)
invalidate(file, "self type changed", change)
case _ =>
()
@@ -192,7 +275,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
def checkReferences(file: AbstractFile) {
-// println(file + ":" + references(file))
+ //if (settings.buildmanagerdebug.value)
+ // compiler.inform(file + ":" + references(file))
val refs = references(file)
if (refs.isEmpty)
invalidate(file, "it is a direct dependency and we don't yet have finer-grained dependency information", change)
@@ -213,26 +297,51 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
}
+ def checkInheritedReferences(file: AbstractFile) {
+ val refs = inherited(file)
+ if (!refs.isEmpty)
+ change match {
+ case ParentChanged(Class(name)) =>
+ for (Inherited(q, member) <- refs.find(p => (p != null && p.qualifier == name));
+ classFile <- classes.get(q);
+ defs <- definitions.get(classFile);
+ s <- defs.find(p => p.sym.fullName == q)
+ if ((s.sym).tpe.nonPrivateMember(member) == compiler.NoSymbol))
+ invalidate(file, "it references invalid (no longer inherited) definition", change)
+ ()
+ case _ => ()
+ }
+ }
+
for (file <- directDeps) {
breakable {
- for (cls <- definitions(file)) checkParents(cls, file)
- for (cls <- definitions(file)) checkInterface(cls, file)
+ for (cls <- definitions(file)) checkParents(cls.sym, file)
+ for (cls <- definitions(file)) checkInterface(cls.sym, file)
checkReferences(file)
+ checkInheritedReferences(file)
}
}
}
if (buf.isEmpty)
processed
else
- invalidated(buf -- processed, newChangesOf, processed ++ buf)
+ invalidated(buf.clone() --= processed, newChangesOf, processed ++ buf)
}
/** Update the map of definitions per source file */
private def updateDefinitions(files: Set[AbstractFile]) {
for (src <- files; val localDefs = compiler.dependencyAnalysis.definitions(src)) {
- definitions(src) = (localDefs map (_.cloneSymbol))
+ definitions(src) = (localDefs map (s => {
+ this.classes += s.fullName -> src
+ SymWithHistory(
+ s.cloneSymbol,
+ atPhase(currentRun.erasurePhase.prev) {
+ s.info.cloneInfo(s)
+ })
+ }))
}
this.references = compiler.dependencyAnalysis.references
+ this.inherited = compiler.dependencyAnalysis.inherited
}
/** Load saved dependency information. */
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/compiler/scala/tools/nsc/interactive/Response.scala
new file mode 100644
index 0000000000..fbb07b15ad
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/Response.scala
@@ -0,0 +1,105 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+/** Typical interaction, given a predicate <user-input>, a function <display>,
+ * and an exception handler <handle>:
+ *
+ * val TIMEOUT = 100 // (milliseconds) or something like that
+ * val r = new Response()
+ * while (!r.isComplete && !r.isCancelled) {
+ * if (<user-input>) r.cancel()
+ * else r.get(TIMEOUT) match {
+ * case Some(Left(data)) => <display>(data)
+ * case Some(Right(exc)) => <handle>(exc)
+ * case None =>
+ * }
+ * }
+ */
+class Response[T] {
+
+ private var data: Option[Either[T, Throwable]] = None
+ private var complete = false
+ private var cancelled = false
+
+ /** Set provisional data, more to come
+ */
+ def setProvisionally(x: T) = synchronized {
+ data = Some(Left(x))
+ }
+
+ /** Set final data, and mark response as complete.
+ */
+ def set(x: T) = synchronized {
+ data = Some(Left(x))
+ complete = true
+ notifyAll()
+ }
+
+ /** Store raised exception in data, and mark response as complete.
+ */
+ def raise(exc: Throwable) = synchronized {
+ data = Some(Right(exc))
+ complete = true
+ notifyAll()
+ }
+
+ /** Get final data, wait as long as necessary.
+ * When interrupted, returns Right(InterruptedException).
+ */
+ def get: Either[T, Throwable] = synchronized {
+ while (!complete) {
+ try {
+ wait()
+ } catch {
+ case exc: InterruptedException => raise(exc)
+ }
+ }
+ data.get
+ }
+
+ /** Optionally get data within `timeout` milliseconds.
+ * When interrupted, returns Some(Right(InterruptedException)).
+ * When the timeout ends, returns the last stored provisional result,
+ * or None if no provisional result was stored.
+ */
+ def get(timeout: Long): Option[Either[T, Throwable]] = synchronized {
+ val start = System.currentTimeMillis
+ var current = start
+ while (!complete && start + timeout > current) {
+ try {
+ wait(timeout - (current - start))
+ } catch {
+ case exc: InterruptedException => raise(exc)
+ }
+ current = System.currentTimeMillis
+ }
+ data
+ }
+
+ /** Whether final data was stored
+ */
+ def isComplete = synchronized { complete }
+
+ /** Cancel action computing this response (Only the
+ * party that calls get on a response may cancel).
+ */
+ def cancel() = synchronized { cancelled = true }
+
+ /** A cancel request for this response has been issued
+ */
+ def isCancelled = synchronized { cancelled }
+
+ def clear() = synchronized {
+ data = None
+ complete = false
+ cancelled = false
+ }
+}
+
+
+
+
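A minimal usage sketch for the Response class above, written as a hypothetical driver (it assumes scala.tools.nsc.interactive.Response is on the classpath): one thread publishes a provisional and then a final value, while the other blocks in get.

    import scala.tools.nsc.interactive.Response

    object ResponseDemo {
      def main(args: Array[String]): Unit = {
        val r = new Response[Int]
        new Thread(new Runnable {
          def run(): Unit = {
            r.setProvisionally(1)   // partial result, more to come
            r.set(42)               // final result, completes the response
          }
        }).start()
        r.get match {               // blocks until set or raise is called
          case Left(result) => println("==> " + result)
          case Right(exc)   => println("ERROR: " + exc)
        }
      }
    }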
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
index 5c71ab4f73..9ef7d33549 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
@@ -1,15 +1,23 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
package scala.tools.nsc
package interactive
import scala.tools.nsc.util.{SourceFile, Position, NoPosition}
+import collection.mutable.ArrayBuffer
trait RichCompilationUnits { self: Global =>
/** The status value of a unit that has not yet been loaded */
- final val NotLoaded = -1
+ final val NotLoaded = -2
/** The status value of a unit that has not yet been typechecked */
- final val JustParsed = 0
+ final val JustParsed = -1
+
+ /** The status value of a unit that has been partially typechecked */
+ final val PartiallyChecked = 0
class RichCompilationUnit(source: SourceFile) extends CompilationUnit(source) {
@@ -30,6 +38,9 @@ trait RichCompilationUnits { self: Global =>
/** the current edit point offset */
var editPoint: Int = -1
+ /** The problems reported for this unit */
+ val problems = new ArrayBuffer[Problem]
+
/** The position of a targeted type check
* If this is different from NoPosition, the type checking
* will stop once a tree that contains this position range
@@ -41,5 +52,7 @@ trait RichCompilationUnits { self: Global =>
var contexts: Contexts = new Contexts
+ /** The last fully type-checked body of this unit */
+ var lastBody: Tree = EmptyTree
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
index 2805bcf6ee..121f356c68 100644
--- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
@@ -1,3 +1,7 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
package scala.tools.nsc
package interactive
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
index cf41852652..0c3d24d46f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
*/
-// $Id$
package scala.tools.nsc
package interpreter
import scala.tools.nsc.io.AbstractFile
-import scala.util.ScalaClassLoader
+import util.ScalaClassLoader
/**
* A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
@@ -18,7 +17,7 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
extends ClassLoader(parent)
with ScalaClassLoader
{
- override def findClass(name: String): Class[_] = {
+ def getBytesForClass(name: String): Array[Byte] = {
def onull[T](x: T): T = if (x == null) throw new ClassNotFoundException(name) else x
var file: AbstractFile = root
val pathParts = name.split("[./]").toList
@@ -27,7 +26,11 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
file = onull(file.lookupName(dirPart, true))
file = onull(file.lookupName(pathParts.last+".class", false))
- val bytes = file.toByteArray
+ file.toByteArray
+ }
+
+ override def findClass(name: String): Class[_] = {
+ val bytes = getBytesForClass(name)
defineClass(name, bytes, 0, bytes.length)
}
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
new file mode 100644
index 0000000000..a42b8347a7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
@@ -0,0 +1,64 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.io.File
+import java.lang.reflect
+import java.util.jar.{ JarEntry, JarFile }
+import java.util.concurrent.ConcurrentHashMap
+import util.ScalaClassLoader
+import ScalaClassLoader.getSystemLoader
+
+object ByteCode {
+ /** Until I figure out why I can't get scalap onto the classpath such
+ * that the compiler will bootstrap, we have to use reflection.
+ */
+ private lazy val DECODER: Option[AnyRef] =
+ for (clazz <- getSystemLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield
+ clazz.getField("MODULE$").get()
+
+ private def decoderMethod(name: String, args: Class[_]*): Option[reflect.Method] = {
+ for (decoder <- DECODER ; m <- Option(decoder.getClass.getMethod(name, args: _*))) yield m
+ }
+
+ private lazy val aliasMap = {
+ for (module <- DECODER ; method <- decoderMethod("typeAliases", classOf[String])) yield
+ method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
+ }
+
+ /** Scala signature annotation bytes for the class at the given path.
+ */
+ def scalaSigBytesForPath(path: String) =
+ for {
+ module <- DECODER
+ method <- decoderMethod("scalaSigAnnotationBytes", classOf[String])
+ names <- method.invoke(module, path).asInstanceOf[Option[Array[Byte]]]
+ }
+ yield names
+
+ /** Attempts to retrieve case parameter names for given class name.
+ */
+ def caseParamNamesForPath(path: String) =
+ for {
+ module <- DECODER
+ method <- decoderMethod("caseParamNames", classOf[String])
+ names <- method.invoke(module, path).asInstanceOf[Option[List[String]]]
+ }
+ yield names
+
+ def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg))
+
+ /** Attempts to find type aliases in package objects.
+ */
+ def aliasForType(path: String): Option[String] = {
+ val (pkg, name) = (path lastIndexOf '.') match {
+ case -1 => return None
+ case idx => (path take idx, path drop (idx + 1))
+ }
+ aliasesForPackage(pkg) flatMap (_ get name)
+ }
+}
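A brief sketch of how the reflective decoder above might be queried (illustration only, not part of the patch; ByteCode silently yields None whenever scala.tools.scalap cannot be loaded at runtime, so both calls below are best-effort):

    import scala.tools.nsc.interpreter.ByteCode

    object ByteCodeDemo {
      def main(args: Array[String]): Unit = {
        // case parameter names, recovered from the Scala signature if scalap is present
        println(ByteCode caseParamNamesForPath "scala.Some")  // e.g. Some(List(x)), or None
        // package-object type aliases, e.g. for scala.List
        println(ByteCode aliasForType "scala.List")            // Some(...), or None
      }
    }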
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
index fed2a6c5c1..22a95a4bf8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
@@ -1,225 +1,366 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Paul Phillips
*/
-// $Id$
-
-//
-// TODO, if practical:
-//
-// 1) Types: val s: String = x.<tab> should only show members which result in a String.
-// Possible approach: evaluate buffer as if current identifier is
-// 2) Implicits: x.<tab> should show not only x's members but those of anything for which
-// there is an implicit conversion from x.
-// 3) Chaining: x.foo(bar).<tab> should complete on foo's result type.
-// 4) Imports: after import scala.collection.mutable._, HashMap should be among
-// my top level identifiers.
-// 5) Caching: it's silly to parse all the jars on every startup, we should have
-// a peristent store somewhere we can write and only check last-mod dates.
-// 6) Security: Are we using the filesystem unnecessarily?
-//
+
package scala.tools.nsc
package interpreter
import jline._
-import java.net.URL
-import java.util.concurrent.ConcurrentHashMap
-import scala.concurrent.DelayedLazyVal
-
-// REPL completor - queries supplied interpreter for valid completions
-// based on current contents of buffer.
-class Completion(val interpreter: Interpreter) extends Completor {
- import Completion._
- import java.util.{ List => JList }
- import interpreter.compilerClasspath
-
- // it takes a little while to look through the jars so we use a future and a concurrent map
- class CompletionAgent {
- val dottedPaths = new ConcurrentHashMap[String, List[String]]
- val topLevelPackages = new DelayedLazyVal(
- () => enumToList(dottedPaths.keys) filterNot (_ contains '.'),
- getDottedPaths(dottedPaths, interpreter)
- )
+import java.util.{ List => JList }
+import util.returning
+
+object Completion {
+ def looksLikeInvocation(code: String) = (
+ (code != null)
+ && (code startsWith ".")
+ && !(code == ".")
+ && !(code startsWith "./")
+ && !(code startsWith "..")
+ )
+
+ object Forwarder {
+ def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
+ def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
+ override def follow(s: String) = forwardTo() flatMap (_ follow s)
+ }
+ }
+}
+import Completion._
+
+// REPL completor - queries supplied interpreter for valid
+// completions based on current contents of buffer.
+class Completion(val repl: Interpreter) extends CompletionOutput {
+ // verbosity goes up with consecutive tabs
+ private var verbosity: Int = 0
+ def resetVerbosity() = verbosity = 0
+
+ def isCompletionDebug = repl.isCompletionDebug
+ def DBG(msg: => Any) = if (isCompletionDebug) println(msg.toString)
+ def debugging[T](msg: String): T => T = (res: T) => returning[T](res)(x => DBG(msg + x))
+
+ lazy val global: repl.compiler.type = repl.compiler
+ import global._
+ import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+
+ // XXX not yet used.
+ lazy val dottedPaths = {
+ def walk(tp: Type): scala.List[Symbol] = {
+ val pkgs = tp.nonPrivateMembers filter (_.isPackage)
+ pkgs ++ (pkgs map (_.tpe) flatMap walk)
+ }
+ walk(RootClass.tpe)
+ }
+
+ def getType(name: String, isModule: Boolean) = {
+ val f = if (isModule) definitions.getModule(_: Name) else definitions.getClass(_: Name)
+ try Some(f(name).tpe)
+ catch { case _: MissingRequirementError => None }
}
- val agent = new CompletionAgent
- import agent._
-
- // One instance of a command line
- class Buffer(s: String) {
- val buffer = if (s == null) "" else s
- val segments = buffer.split("\\.", -1).toList
- val lastDot = buffer.lastIndexOf('.')
- val hasDot = segments.size > 0 && segments.last == ""
-
- // given foo.bar.baz, path = foo.bar and stub = baz
- val (path, stub) = segments.size match {
- case 0 => ("", "")
- case 1 => (segments.head, "")
- case _ => (segments.init.mkString("."), segments.last)
+
+ def typeOf(name: String) = getType(name, false)
+ def moduleOf(name: String) = getType(name, true)
+
+ trait CompilerCompletion {
+ def tp: Type
+ def effectiveTp = tp match {
+ case MethodType(Nil, resType) => resType
+ case PolyType(Nil, resType) => resType
+ case _ => tp
}
- def filt(xs: List[String]) = xs filter (_ startsWith stub)
+ // for some reason Any's members don't show up in subclasses, which
+ // we need so that 5.<tab> offers asInstanceOf etc.
+ private def anyMembers = AnyClass.tpe.nonPrivateMembers
+ def anyRefMethodsToShow = List("isInstanceOf", "asInstanceOf", "toString")
+
+ def tos(sym: Symbol) = sym.name.decode.toString
+ def memberNamed(s: String) = members find (x => tos(x) == s)
+ def hasMethod(s: String) = methods exists (x => tos(x) == s)
+
+ // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
+ // compiler to crash for reasons not yet known.
+ def members = (effectiveTp.nonPrivateMembers ++ anyMembers) filter (_.isPublic)
+ def methods = members filter (_.isMethod)
+ def packages = members filter (_.isPackage)
+ def aliases = members filter (_.isAliasType)
+
+ def memberNames = members map tos
+ def methodNames = methods map tos
+ def packageNames = packages map tos
+ def aliasNames = aliases map tos
+ }
- case class Result(candidates: List[String], position: Int) {
- def getCandidates() = (candidates map (_.trim) removeDuplicates) sort (_ < _)
+ object TypeMemberCompletion {
+ def apply(tp: Type): TypeMemberCompletion = {
+ if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
+ else new TypeMemberCompletion(tp)
}
+ def imported(tp: Type) = new ImportCompletion(tp)
+ }
- // work out completion candidates and position
- def analyzeBuffer(clist: JList[String]): Result = {
- lazy val ids = idsStartingWith(path)
- lazy val pkgs = pkgsStartingWith(path)
- lazy val count = (ids ::: pkgs).size
-
- def doSimple(): Result = count match {
- case 0 => Result(Nil, 0)
- case 1 if pkgs.size > 0 => Result(pkgs, 0)
- case 1 if buffer.length < ids.head.length => Result(ids, 0)
- case 1 => Result(ids, 0)
- // XXX for now commented out "dot inference" because it's overcomplicated
- // val members = membersOfId(ids.head) filter (_ startsWith stub)
- // if (members.isEmpty) Result(Nil, 0)
- // else Result(members, path.length + 1)
- case _ => Result(ids ::: pkgs, 0)
- }
+ class TypeMemberCompletion(val tp: Type) extends CompletionAware
+ with CompilerCompletion {
+ def excludeEndsWith: List[String] = Nil
+ def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
+ def excludeNames: List[String] =
+ anyref.methodNames.filterNot(anyRefMethodsToShow contains) ++ List("_root_")
+
+ def methodSignatureString(sym: Symbol) = {
+ def asString = new MethodSymbolOutput(sym).methodString()
- // a few keywords which don't appear as methods via reflection
- val memberKeywords = List("isInstanceOf", "asInstanceOf")
- def doDotted(): Result = {
- lazy val pkgs = filt(membersOfPath(path))
- lazy val ids = filt(membersOfId(path))
- lazy val idExtras = filt(memberKeywords) // isInstanceOf and asInstanceOf
- lazy val statics = filt(completeStaticMembers(path))
-
- if (!pkgs.isEmpty) Result(pkgs, path.length + 1)
- else if (!ids.isEmpty) Result(ids ::: idExtras, path.length + 1)
- else Result(statics ::: idExtras, path.length + 1)
+ if (isCompletionDebug)
+ repl.power.showAtAllPhases(asString)
+
+ atPhase(currentRun.typerPhase)(asString)
+ }
+
+ def exclude(name: String): Boolean = (
+ (name contains "$") ||
+ (excludeNames contains name) ||
+ (excludeEndsWith exists (name endsWith _)) ||
+ (excludeStartsWith exists (name startsWith _))
+ )
+ def filtered(xs: List[String]) = xs filterNot exclude distinct
+
+ def completions(verbosity: Int) =
+ debugging(tp + " completions ==> ")(filtered(memberNames))
+
+ override def follow(s: String): Option[CompletionAware] =
+ debugging(tp + " -> '" + s + "' ==> ")(memberNamed(s) map (x => TypeMemberCompletion(x.tpe)))
+
+ override def alternativesFor(id: String): List[String] =
+ debugging(id + " alternatives ==> ") {
+ val alts = members filter (x => x.isMethod && tos(x) == id) map methodSignatureString
+
+ if (alts.nonEmpty) "" :: alts else Nil
}
- segments.size match {
- case 0 => Result(Nil, 0)
- case 1 => doSimple()
- case _ => doDotted()
+ override def toString = "TypeMemberCompletion(%s)".format(tp)
+ }
+
+ class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
+ override def excludeNames = anyref.methodNames
+ }
+
+ class LiteralCompletion(lit: Literal) extends TypeMemberCompletion(lit.value.tpe) {
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(memberNames)
+ case _ => memberNames
+ }
+ }
+
+ class ImportCompletion(tp: Type) extends TypeMemberCompletion(tp) {
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(members filterNot (_.isSetter) map tos)
+ case _ => super.completions(verbosity)
+ }
+ }
+
+ // not for completion but for excluding
+ object anyref extends TypeMemberCompletion(AnyRefClass.tpe) { }
+
+ // the unqualified vals/defs/etc visible in the repl
+ object ids extends CompletionAware {
+ override def completions(verbosity: Int) = repl.unqualifiedIds ::: List("classOf")
+ // we try to use the compiler and fall back on reflection if necessary
+ // (which at present is for anything defined in the repl session.)
+ override def follow(id: String) =
+ if (completions(0) contains id) {
+ for (clazz <- repl clazzForIdent id) yield {
+ // XXX The isMemberClass check is a workaround for the crasher described
+ // in the comments of #3431. The issue as described by iulian is:
+ //
+ // Inner classes exist as symbols
+ // inside their enclosing class, but also inside their package, with a mangled
+ // name (A$B). The mangled names should never be loaded, and exist only for the
+ // optimizer, which sometimes cannot get the right symbol, but it doesn't care
+ // and loads the bytecode anyway.
+ //
+ // So this solution is incorrect, but in the short term the simple fix is
+ // to skip the compiler any time completion is requested on a nested class.
+ if (clazz.isMemberClass) new InstanceCompletion(clazz)
+ else (typeOf(clazz.getName) map TypeMemberCompletion.apply) getOrElse new InstanceCompletion(clazz)
+ }
}
+ else None
+ }
+
+ // wildcard imports in the repl like "import global._" or "import String._"
+ private def imported = repl.wildcardImportedTypes map TypeMemberCompletion.imported
+
+ // literal Ints, Strings, etc.
+ object literals extends CompletionAware {
+ def simpleParse(code: String): Tree = {
+ val unit = new CompilationUnit(new util.BatchSourceFile("<console>", code))
+ val scanner = new syntaxAnalyzer.UnitParser(unit)
+ val tss = scanner.templateStatSeq(false)._2
+
+ if (tss.size == 1) tss.head else EmptyTree
}
- def isValidId(s: String) = interpreter.unqualifiedIds contains s
- def membersOfId(s: String) = interpreter membersOfIdentifier s
-
- def isValidPath(s: String) = dottedPaths containsKey s
- def membersOfPath(s: String) = if (isValidPath(s)) dottedPaths get s else Nil
-
- // XXX generalize this to look through imports
- def membersOfScala() = membersOfPath("scala")
- def membersOfJavaLang() = membersOfPath("java.lang")
- def membersOfPredef() = membersOfId("scala.Predef")
- def defaultMembers = {
- val xs = membersOfScala ::: membersOfJavaLang ::: membersOfPredef
- val excludes = List("""Tuple\d+""".r, """Product\d+""".r, """Function\d+""".r,
- """.*Exception$""".r, """.*Error$""".r)
- xs filter (x => excludes forall (r => r.findFirstMatchIn(x).isEmpty))
+ def completions(verbosity: Int) = Nil
+
+ override def follow(id: String) = simpleParse(id) match {
+ case x: Literal => Some(new LiteralCompletion(x))
+ case _ => None
}
+ }
- def pkgsStartingWith(s: String) = topLevelPackages() filter (_ startsWith s)
- def idsStartingWith(s: String) = (interpreter.unqualifiedIds ::: defaultMembers) filter (_ startsWith s)
+ // top level packages
+ object rootClass extends TypeMemberCompletion(RootClass.tpe) { }
+ // members of Predef
+ object predef extends TypeMemberCompletion(PredefModule.tpe) {
+ override def excludeEndsWith = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
+ override def excludeStartsWith = super.excludeStartsWith ++ List("wrap")
+ override def excludeNames = anyref.methodNames
- def complete(clist: JList[String]): Int = {
- val res = analyzeBuffer(clist)
- res.getCandidates foreach (clist add _)
- res.position
+ override def exclude(name: String) = super.exclude(name) || (
+ (name contains "2")
+ )
+
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => Nil
+ case _ => super.completions(verbosity)
}
}
+ // members of scala.*
+ object scalalang extends PackageCompletion(ScalaPackage.tpe) {
+ def arityClasses = List("Product", "Tuple", "Function")
+ def skipArity(name: String) = arityClasses exists (x => name != x && (name startsWith x))
+ override def exclude(name: String) = super.exclude(name) || (
+ skipArity(name)
+ )
- // jline's completion comes through here - we ask a Buffer for the candidates.
- override def complete(_buffer: String, cursor: Int, candidates: JList[String]): Int =
- new Buffer(_buffer).complete(candidates)
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(packageNames ++ aliasNames)
+ case _ => super.completions(verbosity)
+ }
+ }
+ // members of java.lang.*
+ object javalang extends PackageCompletion(JavaLangPackage.tpe) {
+ override lazy val excludeEndsWith = super.excludeEndsWith ++ List("Exception", "Error")
+ override lazy val excludeStartsWith = super.excludeStartsWith ++ List("CharacterData")
+
+ override def completions(verbosity: Int) = verbosity match {
+ case 0 => filtered(packageNames)
+ case _ => super.completions(verbosity)
+ }
+ }
- def completeStaticMembers(path: String): List[String] = {
- import java.lang.reflect.Modifier.{ isPrivate, isProtected, isStatic }
- def isVisible(x: Int) = !isPrivate(x) && !isProtected(x)
- def isSingleton(x: Int, isJava: Boolean) = !isJava || isStatic(x)
+ // the list of completion aware objects which should be consulted
+ lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
+ def topLevel = topLevelBase ++ imported
- def getMembers(c: Class[_], isJava: Boolean): List[String] =
- c.getMethods.toList .
- filter (x => isVisible(x.getModifiers)) .
- filter (x => isSingleton(x.getModifiers, isJava)) .
- map (_.getName) .
- filter (isValidCompletion)
+ // the first tier of top level objects (doesn't include file completion)
+ def topLevelFor(parsed: Parsed) = topLevel flatMap (_ completionsFor parsed)
- def getClassObject(path: String): Option[Class[_]] =
- (interpreter getClassObject path) orElse
- (interpreter getClassObject ("scala." + path)) orElse
- (interpreter getClassObject ("java.lang." + path))
+ // the most recent result
+ def lastResult = Forwarder(() => ids follow repl.mostRecentVar)
- // java style, static methods
- val js = getClassObject(path) map (getMembers(_, true)) getOrElse Nil
- // scala style, methods on companion object
- val ss = getClassObject(path + "$") map (getMembers(_, false)) getOrElse Nil
+ def lastResultFor(parsed: Parsed) = {
+ /** The logic is a little tortured right now because normally '.' is
+ * ignored as a delimiter, but on .<tab> it needs to be propagated.
+ */
+ val xs = lastResult completionsFor parsed
+ if (parsed.isEmpty) xs map ("." + _) else xs
+ }
+
+ // chasing down results which won't parse
+ def execute(line: String): Option[Any] = {
+ val parsed = Parsed(line)
+ def noDotOrSlash = line forall (ch => ch != '.' && ch != '/')
- js ::: ss
+ if (noDotOrSlash) None // we defer all unqualified ids to the repl.
+ else {
+ (ids executionFor parsed) orElse
+ (rootClass executionFor parsed) orElse
+ (FileCompletion executionFor line)
+ }
}
-}
-object Completion
-{
- import java.io.File
- import java.util.jar.{ JarEntry, JarFile }
-
- def enumToList[T](e: java.util.Enumeration[T]): List[T] = enumToList(e, Nil)
- def enumToList[T](e: java.util.Enumeration[T], xs: List[T]): List[T] =
- if (e == null || !e.hasMoreElements) xs else enumToList(e, e.nextElement :: xs)
-
- // methods to leave out of completion
- val excludeMethods = List("", "hashCode", "equals", "wait", "notify", "notifyAll")
-
- private def exists(path: String) = new File(path) exists
-
- def isValidCompletion(x: String) = !(x contains "$$") && !(excludeMethods contains x)
- def isClass(x: String) = x endsWith ".class"
- def dropClass(x: String) = x.substring(0, x.length - 6) // drop .class
-
- def getClassFiles(path: String): List[String] = {
- if (!exists(path)) return Nil
-
- enumToList(new JarFile(path).entries) .
- map (_.getName) .
- filter (isClass) .
- map (dropClass) .
- filter (isValidCompletion)
- }
-
- // all the dotted path to classfiles we can find by poking through the jars
- def getDottedPaths(
- map: ConcurrentHashMap[String, List[String]],
- interpreter: Interpreter): Unit =
- {
- val cp =
- interpreter.compilerClasspath.map(_.getPath) ::: // compiler jars, scala-library.jar etc.
- interpreter.settings.bootclasspath.value.split(':').toList // boot classpath, java.lang.* etc.
-
- val jars = cp.removeDuplicates filter (_ endsWith ".jar")
-
- // for e.g. foo.bar.baz.C, returns (foo -> bar), (foo.bar -> baz), (foo.bar.baz -> C)
- // and scala.Range$BigInt needs to go scala -> Range -> BigInt
- def subpaths(s: String): List[(String, String)] = {
- val segs = s.split('.')
- for (i <- List.range(0, segs.length - 1)) yield {
- val k = segs.take(i+1).mkString(".")
- val v = segs(i+1)
- (k -> v)
+ // generic interface for querying (e.g. interpreter loop, testing)
+ def completions(buf: String): List[String] =
+ topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
+
+ // jline's entry point
+ lazy val jline: ArgumentCompletor =
+ returning(new ArgumentCompletor(new JLineCompletion, new JLineDelimiter))(_ setStrict false)
+
+ /** This gets a little bit hairy. It's no small feat delegating everything
+ * and also keeping track of exactly where the cursor is and where it's supposed
+ * to end up. The alternatives mechanism is a little hacky: if there is an empty
+ * string in the list of completions, that means we are expanding a unique
+ * completion, so don't update the "last" buffer because it'll be wrong.
+ */
+ class JLineCompletion extends Completor {
+ // For recording the buffer on the last tab hit
+ private var lastBuf: String = ""
+ private var lastCursor: Int = -1
+
+ // Does this represent two consecutive tabs?
+ def isConsecutiveTabs(buf: String, cursor: Int) =
+ cursor == lastCursor && buf == lastBuf
+
+ // Longest common prefix
+ def commonPrefix(xs: List[String]) =
+ if (xs.isEmpty) ""
+ else xs.reduceLeft(_ zip _ takeWhile (x => x._1 == x._2) map (_._1) mkString)
+
+ // This is jline's entry point for completion.
+ override def complete(_buf: String, cursor: Int, candidates: JList[String]): Int = {
+ val buf = onull(_buf)
+ verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
+ DBG("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
+
+ // we don't try lower priority completions unless higher ones return no results.
+ def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Int] = {
+ completionFunction(p) match {
+ case Nil => None
+ case xs =>
+ // modify in place and return the position
+ xs foreach (candidates add _)
+
+ // update the last buffer unless this is an alternatives list
+ if (xs contains "") Some(p.cursor)
+ else {
+ val advance = commonPrefix(xs)
+ lastCursor = p.position + advance.length
+ lastBuf = (buf take p.position) + advance
+
+ DBG("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(p, lastBuf, lastCursor, p.position))
+ Some(p.position)
+ }
+ }
}
- }
- def oneJar(jar: String): Unit = {
- def cleanup(s: String): String = s map { c => if (c == '/' || c == '$') '.' else c } toString
- val classfiles = Completion getClassFiles jar map cleanup
+ def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
+ def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity
+
+ // a single dot is special cased to completion on the previous result
+ def lastResultCompletion =
+ if (!looksLikeInvocation(buf)) None
+ else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
- for (cl <- classfiles; (k, v) <- subpaths(cl)) {
- if (map containsKey k) map.put(k, v :: map.get(k))
- else map.put(k, List(v))
+ def regularCompletion = tryCompletion(mkDotted, topLevelFor)
+ def fileCompletion = tryCompletion(mkUndelimited, FileCompletion completionsFor _.buffer)
+
+ /** This is the kickoff point for all manner of theoretically possible compiler
+ * unhappiness - fault may be here or elsewhere, but we don't want to crash the
+ * repl regardless. Hopefully catching Exception is enough, but because the
+ * compiler still throws some Errors it may not be.
+ */
+ try {
+ (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse cursor
+ }
+ catch {
+ case ex: Exception =>
+ DBG("Error: complete(%s, %s, _) provoked %s".format(_buf, cursor, ex))
+ candidates add " "
+ candidates add "<completion error>"
+ cursor
}
}
-
- jars foreach oneJar
}
}
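The new companion object's looksLikeInvocation heuristic is self-contained enough to exercise directly; a quick sketch (illustration only, assuming the patched compiler jar is on the classpath):

    import scala.tools.nsc.interpreter.Completion.looksLikeInvocation

    object LooksLikeInvocationDemo {
      def main(args: Array[String]): Unit = {
        println(looksLikeInvocation(".toUpperCase"))  // true  - continues on the previous result
        println(looksLikeInvocation("."))             // false - a lone dot
        println(looksLikeInvocation("./bin/scala"))   // false - a relative path
        println(looksLikeInvocation("..foo"))         // false - starts with ".."
        println(looksLikeInvocation(null))            // false - null is tolerated
      }
    }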
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
new file mode 100644
index 0000000000..cfd3b5e05f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -0,0 +1,130 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.NameTransformer
+
+/** An interface for objects which are aware of tab completion and
+ * will supply their own candidates and resolve their own paths.
+ */
+trait CompletionAware {
+ /** The delimiters which are meaningful when this CompletionAware
+ * object is in control.
+ */
+ // TODO
+ // def delimiters(): List[Char] = List('.')
+
+ /** The complete list of unqualified Strings to which this
+ * object will complete.
+ */
+ def completions(verbosity: Int): List[String]
+
+ /** Default filter to apply to completions.
+ */
+ def filterNotFunction(s: String): Boolean = false
+
+ /** Default sort.
+ */
+ def sortFunction(s1: String, s2: String): Boolean = s1 < s2
+
+ /** Default map.
+ */
+ def mapFunction(s: String) = NameTransformer decode s
+
+ /** The next completor in the chain.
+ */
+ def follow(id: String): Option[CompletionAware] = None
+
+ /** What to return if this completion is given as a command. It
+ * returns None by default, which means to allow the repl to interpret
+ * the line normally. Returning Some(_) means the line will never
+ * reach the scala interpreter.
+ */
+ def execute(id: String): Option[Any] = None
+
+ /** A list of useful information regarding a specific uniquely
+ * identified completion. This is specifically written for the
+ * following situation, but should be useful elsewhere too:
+ *
+ * x.y.z.methodName<tab>
+ *
+ * If "methodName" is among z's completions, and verbosity > 0
+ * indicating tab has been pressed twice consecutively, then we
+ * call alternativesFor and show a list of overloaded method
+ * signatures.
+ */
+ def alternativesFor(id: String): List[String] = Nil
+
+ /** Given string 'buf', return a list of all the strings
+ * to which it can complete. This may involve delegating
+ * to other CompletionAware objects.
+ */
+ def completionsFor(parsed: Parsed): List[String] = {
+ import parsed._
+
+ val comps = completions(verbosity) filter (_ startsWith buffer)
+ val results =
+ if (isEmpty) comps
+ else if (isUnqualified && !isLastDelimiter) {
+ if (verbosity > 0 && (comps contains buffer)) alternativesFor(buffer)
+ else comps
+ }
+ else follow(bufferHead) map (_ completionsFor bufferTail) getOrElse Nil
+
+ results filterNot filterNotFunction map mapFunction sortWith (sortFunction _)
+ }
+
+ /** TODO - unify this and completionsFor under a common traverser.
+ */
+ def executionFor(parsed: Parsed): Option[Any] = {
+ import parsed._
+
+ if (isUnqualified && !isLastDelimiter && (completions(verbosity) contains buffer)) execute(buffer)
+ else if (!isQualified) None
+ else follow(bufferHead) flatMap (_ executionFor bufferTail)
+ }
+}
+
+object CompletionAware {
+ val Empty = new CompletionAware { def completions(verbosity: Int) = Nil }
+
+ // class Forwarder(underlying: CompletionAware) extends CompletionAware {
+ // override def completions() = underlying.completions()
+ // override def filterNotFunction(s: String) = underlying.filterNotFunction(s)
+ // override def sortFunction(s1: String, s2: String) = underlying.sortFunction(s1, s2)
+ // override def mapFunction(s: String) = underlying.mapFunction(s)
+ // override def follow(id: String) = underlying.follow(id)
+ // override def execute(id: String) = underlying.execute(id)
+ // override def completionsFor(parsed: Parsed) = underlying.completionsFor(parsed)
+ // override def executionFor(parsed: Parsed) = underlying.executionFor(parsed)
+ // }
+ //
+
+ def unapply(that: Any): Option[CompletionAware] = that match {
+ case x: CompletionAware => Some((x))
+ case _ => None
+ }
+
+ /** Create a CompletionAware object from the given functions.
+ * The first should generate the list of completions whenever queried,
+ * and the second should return Some(CompletionAware) object if
+ * subcompletions are possible.
+ */
+ def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware =
+ new CompletionAware {
+ def completions = terms()
+ def completions(verbosity: Int) = completions
+ override def follow(id: String) = followFunction(id)
+ }
+
+ /** Convenience factories.
+ */
+ def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
+ def apply(map: collection.Map[String, CompletionAware]): CompletionAware =
+ apply(() => map.keys.toList, map.get _)
+}
+
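A sketch of the convenience factories above (illustration only, not part of the patch): plain functions and a map are enough to build a small two-level completion space, assuming the patched jar and the Parsed class from this same change are available.

    import scala.tools.nsc.interpreter.{ CompletionAware, Parsed }

    object CompletionAwareDemo {
      def main(args: Array[String]): Unit = {
        // leaf completor: a fixed set of names
        val colors = CompletionAware(() => List("red", "green", "blue"))
        // root completor: delegates to the leaf under the "colors" key
        val root   = CompletionAware(Map("colors" -> colors))

        println(root completionsFor Parsed.dotted("col", 3))        // roughly List(colors)
        println(root completionsFor Parsed.dotted("colors.gr", 9))  // roughly List(green)
      }
    }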
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
new file mode 100644
index 0000000000..9b9d9a36f1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -0,0 +1,88 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** This has a lot of duplication with other methods in Symbols and Types,
+ * but the repl completion utility is very sensitive to precise output. The
+ * best thing would be to abstract an interface for how such things are
+ * printed, as is also in progress with error messages.
+ */
+trait CompletionOutput {
+ self: Completion =>
+
+ import global._
+ import definitions.{ NothingClass, AnyClass, isTupleType, isFunctionType, isRepeatedParamType }
+
+ /** Reducing fully qualified noise for some common packages.
+ */
+ val typeTransforms = List(
+ "java.lang." -> "",
+ "scala.collection.immutable." -> "immutable.",
+ "scala.collection.mutable." -> "mutable.",
+ "scala.collection.generic." -> "generic."
+ )
+
+ def quietString(tp: String): String =
+ typeTransforms.foldLeft(tp) {
+ case (str, (prefix, replacement)) =>
+ if (str startsWith prefix) replacement + (str stripPrefix prefix)
+ else str
+ }
+
+ class MethodSymbolOutput(method: Symbol) {
+ val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
+
+ def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
+ def relativize(tp: Type): String = relativize(tp.normalize.toString)
+ def relativize(sym: Symbol): String = relativize(sym.info)
+
+ def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
+ def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
+
+ def methodTypeToString(mt: MethodType) =
+ (mt.paramss map paramsString mkString "") + ": " + relativize(mt.finalResultType)
+
+ def typeToString(tp: Type): String = relativize(
+ tp match {
+ case x if isFunctionType(x) => functionString(x)
+ case x if isTupleType(x) => tupleString(x)
+ case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
+ case mt @ MethodType(_, _) => methodTypeToString(mt)
+ case x => x.toString
+ }
+ )
+
+ def tupleString(tp: Type) = parenList(tp.normalize.typeArgs map relativize)
+ def functionString(tp: Type) = tp.normalize.typeArgs match {
+ case List(t, r) => t + " => " + r
+ case xs => parenList(xs.init) + " => " + xs.last
+ }
+
+ def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString))
+ def paramsString(params: List[Symbol]) = {
+ def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": "
+ def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.normalize)
+
+ val isImplicit = params.nonEmpty && params.head.isImplicit
+ val strs = (params map paramString) match {
+ case x :: xs if isImplicit => ("implicit " + x) :: xs
+ case xs => xs
+ }
+ parenList(strs)
+ }
+
+ def methodString() =
+ method.keyString + " " + method.nameString + (method.info.normalize match {
+ case PolyType(Nil, resType) => ": " + typeToString(resType) // nullary method
+ case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType)
+ case mt @ MethodType(_, _) => methodTypeToString(mt)
+ case x =>
+ DBG("methodString(): %s / %s".format(x.getClass, x))
+ x.toString
+ })
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
new file mode 100644
index 0000000000..cdf5a343da
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
@@ -0,0 +1,36 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import jline.ArgumentCompletor.{ ArgumentDelimiter, ArgumentList }
+
+class JLineDelimiter extends ArgumentDelimiter {
+ def delimit(buffer: String, cursor: Int) = Parsed(buffer, cursor).asJlineArgumentList
+ def isDelimiter(buffer: String, cursor: Int) = Parsed(buffer, cursor).isDelimiter
+}
+
+trait Delimited {
+ self: Parsed =>
+
+ def delimited: Char => Boolean
+ def escapeChars: List[Char] = List('\\')
+ def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"'))
+
+ /** Break String into args based on delimiting function.
+ */
+ protected def toArgs(s: String): List[String] =
+ if (s == "") Nil
+ else (s indexWhere isDelimiterChar) match {
+ case -1 => List(s)
+ case idx => (s take idx) :: toArgs(s drop (idx + 1))
+ }
+
+ def isDelimiterChar(ch: Char) = delimited(ch)
+ def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
+ def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch
+ def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
new file mode 100644
index 0000000000..c564562a63
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
@@ -0,0 +1,54 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** TODO
+ * Spaces, dots, and other things in filenames are not correctly handled.
+ * Space-escaping, knowing when we're inside quotes, etc., would be nice.
+ */
+
+import io.{ Directory, Path }
+
+/** This isn't 100% clean right now, but it works and is simple. Rather
+ * than delegate to new objects on each '/' in the path, we treat the
+ * buffer like a path and process it directly.
+ */
+object FileCompletion {
+ def executionFor(buffer: String): Option[Path] = {
+ val p = Path(buffer)
+ if (p.exists) Some(p) else None
+ }
+
+ private def fileCompletionForwarder(buffer: String, where: Directory): List[String] = {
+ completionsFor(where.path + buffer) map (_ stripPrefix where.path) toList
+ }
+
+ private def homeCompletions(buffer: String): List[String] = {
+ require(buffer startsWith "~/")
+ val home = Directory.Home getOrElse (return Nil)
+ fileCompletionForwarder(buffer.tail, home) map ("~" + _)
+ }
+ private def cwdCompletions(buffer: String): List[String] = {
+ require(buffer startsWith "./")
+ val cwd = Directory.Current getOrElse (return Nil)
+ fileCompletionForwarder(buffer.tail, cwd) map ("." + _)
+ }
+
+ def completionsFor(buffer: String): List[String] =
+ if (buffer startsWith "~/") homeCompletions(buffer)
+ else if (buffer startsWith "./") cwdCompletions(buffer)
+ else {
+ val p = Path(buffer)
+ val (dir, stub) =
+ // don't want /foo/. expanding "."
+ if (p.name == ".") (p.parent, ".")
+ else if (p.isDirectory) (p.toDirectory, "")
+ else (p.parent, p.name)
+
+ dir.list filter (_.name startsWith stub) map (_.path) toList
+ }
+} \ No newline at end of file
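Since completion here just walks the local filesystem, results are entirely machine-dependent; a small sketch of the calls (illustration only, not part of the patch):

    import scala.tools.nsc.interpreter.FileCompletion

    object FileCompletionDemo {
      def main(args: Array[String]): Unit = {
        // candidates under the current working directory
        println(FileCompletion completionsFor "./sr")   // e.g. List(./src) in a source checkout
        // candidates under the home directory
        println(FileCompletion completionsFor "~/")
        // executionFor returns the Path when the buffer names something that exists
        println(FileCompletion executionFor "/tmp")     // Some(/tmp) on most Unix systems
      }
    }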
diff --git a/src/compiler/scala/tools/nsc/interpreter/History.scala b/src/compiler/scala/tools/nsc/interpreter/History.scala
new file mode 100644
index 0000000000..7bd4e89095
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/History.scala
@@ -0,0 +1,36 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.io.File
+import jline.{ ConsoleReader, History => JHistory }
+import scala.collection.JavaConversions.asBuffer
+import Properties.userHome
+
+/** Primarily, a wrapper for JLine's History.
+ */
+class History(val jhistory: JHistory) {
+ def asJavaList = jhistory.getHistoryList
+ def asList: List[String] = asBuffer(asJavaList).toList
+ def index = jhistory.getCurrentIndex
+
+ def grep(s: String) = asList filter (_ contains s)
+}
+
+object History {
+ val ScalaHistoryFile = ".scala_history"
+
+ def apply(reader: ConsoleReader): History =
+ if (reader == null) apply()
+ else new History(reader.getHistory)
+
+ def apply(): History = new History(
+ try new JHistory(new File(userHome, ScalaHistoryFile))
+ // do not store history if error
+ catch { case _: Exception => new JHistory() }
+ )
+}
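A sketch of the wrapper above (illustration only, requires jline on the classpath; when ~/.scala_history cannot be opened the history simply starts out empty):

    import scala.tools.nsc.interpreter.History

    object HistoryDemo {
      def main(args: Array[String]): Unit = {
        val h = History()            // backed by ~/.scala_history when readable
        println(h.asList take 5)     // the first few remembered lines, if any
        println(h grep "import")     // every remembered line containing "import"
      }
    }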
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
index 7e56986900..92df6a8736 100644
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stepan Koltsov
*/
-// $Id$
package scala.tools.nsc
package interpreter
@@ -23,6 +22,13 @@ trait InteractiveReader {
catching(handler) { readOneLine(prompt) }
}
+ // override if history is available
+ def history: Option[History] = None
+ def historyList = history map (_.asList) getOrElse Nil
+
+ // override if completion is available
+ def completion: Option[Completion] = None
+
// hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP
private def restartSystemCall(e: Exception): Boolean =
Properties.isMac && (e.getMessage == msgEINTR)
@@ -39,8 +45,11 @@ object InteractiveReader {
* library is available, but otherwise uses a <code>SimpleReader</code>.
*/
def createDefault(interpreter: Interpreter): InteractiveReader =
- catching(exes: _*)
- . opt (new JLineReader(interpreter))
- . getOrElse (new SimpleReader)
+ try new JLineReader(interpreter)
+ catch {
+ case e @ (_: Exception | _: NoClassDefFoundError) =>
+ // println("Failed to create JLineReader(%s): %s".format(interpreter, e))
+ new SimpleReader
+ }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index d1613b9cd8..ab11a53d43 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -1,39 +1,27 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stepan Koltsov
*/
-// $Id$
package scala.tools.nsc
package interpreter
import java.io.File
-import jline.{ History, ConsoleReader, ArgumentCompletor }
+import jline.{ ConsoleReader, ArgumentCompletor, History => JHistory }
/** Reads from the console using JLine */
class JLineReader(interpreter: Interpreter) extends InteractiveReader {
def this() = this(null)
+
+ override lazy val history = Some(History(consoleReader))
+ override lazy val completion = Option(interpreter) map (x => new Completion(x))
+
val consoleReader = {
- val history = try {
- new jline.History(new File(System.getProperty("user.home"), ".scala_history"))
- } catch {
- // do not store history if error
- case _ => new jline.History()
- }
val r = new jline.ConsoleReader()
- r setHistory history
+ r setHistory (History().jhistory)
r setBellEnabled false
-
- if (interpreter != null) {
- // have to specify all delimiters for completion to work nicely
- val delims = new ArgumentCompletor.AbstractArgumentDelimiter {
- val delimChars = "(){}[],`;'\" \t".toArray
- def isDelimiterChar(s: String, pos: Int) = delimChars contains s.charAt(pos)
- }
- val comp = new ArgumentCompletor(new Completion(interpreter), delims)
- comp setStrict false
- r addCompletor comp
- // XXX make this use a setting
+ completion foreach { c =>
+ r addCompletor c.jline
r setAutoprintThreshhold 250
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
new file mode 100644
index 0000000000..84f5477c21
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
@@ -0,0 +1,68 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import jline.ArgumentCompletor.{ ArgumentDelimiter, ArgumentList }
+import util.returning
+
+/** One instance of a command buffer.
+ */
+class Parsed private (
+ val buffer: String,
+ val cursor: Int,
+ val delimited: Char => Boolean
+) extends Delimited {
+ def isEmpty = args.isEmpty
+ def isUnqualified = args.size == 1
+ def isQualified = args.size > 1
+ def isAtStart = cursor <= 0
+
+ private var _verbosity = 0
+ def verbosity = _verbosity
+ def withVerbosity(v: Int): this.type = returning[this.type](this)(_ => _verbosity = v)
+
+ def args = toArgs(buffer take cursor).toList
+ def bufferHead = args.head
+ def headLength = bufferHead.length + 1
+ def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity
+
+ def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity
+ def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity
+ def currentChar = buffer(cursor)
+ def currentArg = args.last
+ def position =
+ if (isEmpty) 0
+ else if (isLastDelimiter) cursor
+ else cursor - currentArg.length
+
+ def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head)
+ def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last)
+ def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else ""
+ def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else ""
+
+ def isQuoted = false // TODO
+ def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar)
+ def isDelimiter = !isQuoted && !isEscaped && isDelimiterChar(currentChar)
+
+ def asJlineArgumentList =
+ if (isEmpty) new ArgumentList(Array[String](), 0, 0, cursor)
+ else new ArgumentList(args.toArray, args.size - 1, currentArg.length, cursor)
+
+ override def toString = "Parsed(%s / %d)".format(buffer, cursor)
+}
+
+object Parsed {
+ def apply(s: String): Parsed = apply(onull(s), onull(s).length)
+ def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, "{},`; \t" contains _)
+ def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed =
+ new Parsed(onull(s), cursor, delimited)
+
+ def dotted(s: String): Parsed = dotted(onull(s), onull(s).length)
+ def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.')
+
+ def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false)
+}
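A sketch of how the dotted parse carves up a REPL buffer (illustration only, not part of the patch):

    import scala.tools.nsc.interpreter.Parsed

    object ParsedDemo {
      def main(args: Array[String]): Unit = {
        val p = Parsed.dotted("scala.collection.mut", 20)
        println(p.args)             // List(scala, collection, mut)
        println(p.bufferHead)       // scala
        println(p.currentArg)       // mut
        println(p.position)         // 17 -- where the segment being completed starts
        println(p.isLastDelimiter)  // false; true only when the buffer ends in '.'
      }
    }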
diff --git a/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
new file mode 100644
index 0000000000..6c066580ae
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
@@ -0,0 +1,44 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+class SeqCompletion[T](elems: Seq[T]) extends CompletionAware {
+ lazy val completions = elems.indices.toList map ("(%d)" format _)
+ def completions(verbosity: Int) = completions
+ private def elemAt(name: String) =
+ if (completions contains name) Some(elems(name drop 1 dropRight 1 toInt)) else None
+
+ override def execute(name: String) = elemAt(name)
+ override def follow(name: String) = elemAt(name) map (x => ProductCompletion(x))
+}
+
+/** TODO - deal with non-case products by giving them _1 _2 etc. */
+class ProductCompletion(root: Product) extends CompletionAware {
+ lazy val caseFields: List[Any] = root.productIterator.toList
+ lazy val caseNames: List[String] = ByteCode caseParamNamesForPath root.getClass.getName getOrElse Nil
+ private def isValid = caseFields.length == caseNames.length
+
+ private def fieldForName(s: String) = (completions indexOf s) match {
+ case idx if idx > -1 && isValid => Some(caseFields(idx))
+ case _ => None
+ }
+
+ lazy val completions = caseNames
+ def completions(verbosity: Int) = completions
+ override def execute(name: String) = fieldForName(name)
+ override def follow(name: String) = fieldForName(name) map (x => ProductCompletion(x))
+}
+
+object ProductCompletion {
+ /** TODO: other traversables. */
+ def apply(elem: Any): CompletionAware = elem match {
+ case x: Seq[_] => new SeqCompletion[Any](x)
+ case x: Product => new ProductCompletion(x)
+ // case x: Map[_, _] =>
+ case _ => CompletionAware.Empty
+ }
+}
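A sketch of both completors above (illustration only; Server is a made-up case class, and case field names appear only when the ByteCode/scalap machinery is reachable, otherwise the list degrades to Nil):

    import scala.tools.nsc.interpreter.ProductCompletion

    case class Server(host: String, port: Int)  // hypothetical case class for the demo

    object ProductCompletionDemo {
      def main(args: Array[String]): Unit = {
        // case classes complete to their field names (when scalap is available)
        val pc = ProductCompletion(Server("localhost", 8080))
        println(pc completions 0)    // e.g. List(host, port), or Nil without scalap
        println(pc execute "host")   // e.g. Some(localhost), or None

        // sequences complete to their indices
        val sc = ProductCompletion(List("a", "b", "c"))
        println(sc completions 0)    // List((0), (1), (2))
      }
    }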
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala
new file mode 100644
index 0000000000..f9ff894d59
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala
@@ -0,0 +1,112 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.lang.reflect
+import reflect.{ Modifier, AccessibleObject }
+import Modifier.{ isPrivate, isProtected, isStatic }
+import scala.reflect.NameTransformer
+import scala.collection.mutable.HashMap
+import ReflectionCompletion._
+
+trait ReflectionCompletion extends CompletionAware {
+ def clazz: Class[_]
+ protected def visibleMembers: List[AccessibleObject]
+ protected def memberCompletions = visibleMembers filter isPublic map reflectName
+
+ def reflectName(m: AccessibleObject) = m match {
+ case x: reflect.Method => x.getName
+ case x: reflect.Field => x.getName
+ case x => error(x.toString)
+ }
+ def isPublic(m: AccessibleObject) = m match {
+ case x: reflect.Method => Modifier isPublic x.getModifiers
+ case x: reflect.Field => Modifier isPublic x.getModifiers
+ case x => error(x.toString)
+ }
+
+ lazy val (staticMethods, instanceMethods) = clazz.getMethods.toList partition (x => isStatic(x.getModifiers))
+ lazy val (staticFields, instanceFields) = clazz.getFields.toList partition (x => isStatic(x.getModifiers))
+
+ /** Oops, mirror classes don't descend from ScalaObject.
+ */
+ def isScalaClazz(cl: Class[_]) = {
+ (allInterfacesFor(cl) exists (_.getName == "scala.ScalaObject")) ||
+ (classForName(cl.getName + "$").isDefined)
+ }
+ def allInterfacesFor(cl: Class[_]): List[Class[_]] = allInterfacesFor(cl, Nil)
+
+ private def allInterfacesFor(cl: Class[_], acc: List[Class[_]]): List[Class[_]] = {
+ if (cl == null) acc.distinct
+ else allInterfacesFor(cl.getSuperclass, acc ::: cl.getInterfaces.toList)
+ }
+}
+
+/** A completion aware object representing a single instance of some class.
+ * It completes to instance fields and methods, and delegates to another
+ * InstanceCompletion object if it can determine the result type of the element.
+ */
+class InstanceCompletion(val clazz: Class[_]) extends ReflectionCompletion {
+ protected def visibleMembers = instanceMethods ::: instanceFields
+ def extras = List("isInstanceOf", "asInstanceOf", "toString")
+ lazy val completions = memberCompletions ::: extras
+ def completions(verbosity: Int) = completions
+
+ val (zeroArg, otherArg) = instanceMethods partition (_.getParameterTypes.size == 0)
+ override def follow(id: String) = {
+ val nextClazz = zeroArg find (m => m.getName == id) map (_.getReturnType)
+ if (nextClazz.isDefined) nextClazz map (x => new InstanceCompletion(x))
+ else instanceFields find (_.getName == id) map (x => new InstanceCompletion(x.getType))
+ }
+}
+
+/** The complementary class to InstanceCompletion. It has logic to deal with
+ * java static members and scala companion object members.
+ */
+class StaticCompletion(val clazz: Class[_]) extends ReflectionCompletion {
+ protected def visibleMembers = whichMethods ::: whichFields
+ lazy val completions = memberCompletions
+ def completions(verbosity: Int) = completions
+
+ private def aliasForPath(path: String) = ByteCode aliasForType path flatMap (x => classForName(x + "$"))
+ def className = clazz.getName
+ def isJava = !isScalaClazz(clazz)
+
+ private def whichMethods = if (isJava) staticMethods else instanceMethods
+ private def whichFields = if (isJava) staticFields else instanceFields
+ val (zeroArg, otherArg) = whichMethods partition (_.getParameterTypes.size == 0)
+
+ override def follow(id: String) = {
+ val nextClazz = zeroArg find (m => m.getName == id) map (_.getReturnType)
+ if (nextClazz.isDefined) nextClazz map (x => new InstanceCompletion(x))
+ else staticFields find (_.getName == id) map (x => new InstanceCompletion(x.getType))
+ }
+
+ override def toString = "StaticCompletion(%s) => %s".format(clazz.getName, completions)
+}
+
+object ReflectionCompletion {
+ import java.io.File
+ import java.util.jar.{ JarEntry, JarFile }
+ import scala.tools.nsc.io.Streamable
+
+ // XXX at the moment this is imperfect because scala's protected semantics
+ // differ from java's, so protected methods appear public via reflection;
+ // yet scala enforces the protection. The result is that protected members
+ // appear in completion yet cannot actually be called. Fixing this
+ // properly requires a scala.reflect.* API. Fixing it uglily is possible
+ * too (cast to a structural type!) but I deem it a poor use of energy.
+ private def skipModifiers(m: reflect.Method) = {
+ import java.lang.reflect.Modifier._
+ val flags = STATIC | PRIVATE | PROTECTED
+ (m.getModifiers & flags) == 0
+ }
+ private def getAnyClass(x: Any): Class[_] = x.asInstanceOf[AnyRef].getClass
+
+ def methodsOf(target: Any): List[String] =
+ getAnyClass(target).getMethods filter skipModifiers map (_.getName) toList
+}
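A sketch exercising the two completors above against ordinary JDK classes (illustration only, not part of the patch):

    import scala.tools.nsc.interpreter.{ InstanceCompletion, StaticCompletion }

    object ReflectionCompletionDemo {
      def main(args: Array[String]): Unit = {
        // instance members of java.lang.String, found via plain java reflection
        val inst = new InstanceCompletion(classOf[String])
        println(inst completions 0 filter (_ startsWith "to"))     // e.g. toCharArray, toLowerCase, toString, ...

        // static members of java.lang.Integer
        val stat = new StaticCompletion(classOf[java.lang.Integer])
        println(stat completions 0 filter (_ startsWith "parse"))  // parseInt, once per overload
      }
    }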
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
index 403c0b996c..c350468c3b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -1,13 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stepan Koltsov
*/
-// $Id$
package scala.tools.nsc
package interpreter
-import java.io.{BufferedReader, PrintWriter}
+import java.io.{ BufferedReader, PrintWriter }
+import io.{ Path, File, Directory }
/** Reads using standard JDK API */
class SimpleReader(
@@ -16,7 +16,9 @@ class SimpleReader(
val interactive: Boolean)
extends InteractiveReader {
def this() = this(Console.in, new PrintWriter(Console.out), true)
+ def this(in: File, out: PrintWriter, interactive: Boolean) = this(in.bufferedReader(), out, interactive)
+ def close() = in.close()
def readOneLine(prompt: String): String = {
if (interactive) {
out.print(prompt)
diff --git a/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
new file mode 100644
index 0000000000..f2af57cc36
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
@@ -0,0 +1,44 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import xml.{ XML, Group, Node, NodeSeq }
+import XMLCompletion._
+import scala.collection.mutable.HashMap
+
+class XMLCompletion(root: Node) extends CompletionAware {
+ private val nodeCache = new HashMap[String, Node]
+ private def getNode(s: String): Option[Node] = {
+ completions // make sure cache is populated
+ nodeCache get s
+ }
+
+ lazy val completions: List[String] = {
+ def children = root.child.toList
+ def uniqueTags = children groupBy (_.label) filter (_._2.size == 1) map (_._1)
+ val uniqs = uniqueTags.toList
+
+ children.foldLeft(List[String]())((res, node) => {
+ val name = node.label
+ def count = res filter (_ startsWith (name + "[")) size // ]
+ val suffix = if (uniqs contains name) "" else "[%d]" format (count + 1)
+ val s = name + suffix
+
+ nodeCache(s) = node
+
+ s :: res
+ }).sorted
+ }
+ def completions(verbosity: Int) = completions
+
+ override def execute(id: String) = getNode(id)
+ override def follow(id: String) = getNode(id) map (x => new XMLCompletion(x))
+}
+
+object XMLCompletion {
+ def apply(x: Node) = new XMLCompletion(x)
+}
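A sketch of completion over an XML literal (illustration only): children with a unique label complete by name, repeated labels get an index suffix.

    import scala.tools.nsc.interpreter.XMLCompletion

    object XMLCompletionDemo {
      def main(args: Array[String]): Unit = {
        val book = <book><title>Programming in Scala</title><author/><author/></book>
        val xc = XMLCompletion(book)

        println(xc completions 0)   // List(author[1], author[2], title)
        println(xc follow "title")  // Some(...) wrapping the <title> node
      }
    }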
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
new file mode 100644
index 0000000000..eaf736c5b7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/package.scala
@@ -0,0 +1,30 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+
+package object interpreter {
+ /** Tracing */
+ def tracing[T](msg: String)(x: T): T = { println("(" + msg + ") " + x) ; x }
+
+ /** Frequency counter */
+ def freq[T](seq: Seq[T]) = seq groupBy identity mapValues (_.length)
+
+ /** null becomes "", otherwise identity */
+ def onull(s: String) = if (s == null) "" else s
+
+ /** Heuristically strip interpreter wrapper prefixes
+ * from an interpreter output string.
+ */
+ def stripWrapperGunk(str: String): String = {
+ val wrapregex = """(line[0-9]+\$object[$.])?(\$iw[$.])*"""
+ str.replaceAll(wrapregex, "")
+ }
+
+ /** Class objects */
+ def classForName(name: String): Option[Class[_]] =
+ try Some(Class forName name)
+ catch { case _: ClassNotFoundException | _: SecurityException => None }
+}
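A few one-off calls exercising the new package-object helpers; the inputs are made up, the helpers are the ones defined above:

    import scala.tools.nsc.interpreter._

    tracing("sum")(1 + 2)                          // prints "(sum) 3", returns 3
    freq("abracadabra".toList)                     // e.g. Map(a -> 5, r -> 2, b -> 2, c -> 1, d -> 1)
    onull(null)                                    // ""
    stripWrapperGunk("line4$object$$iw$$iw$Foo")   // "Foo"
    classForName("java.lang.String")               // Some(class java.lang.String)
    classForName("no.such.Class")                  // None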
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index 9e40feeca9..6f350233e2 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -1,14 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package io
-import java.io.{ File => JFile, FileOutputStream, IOException, InputStream, OutputStream }
+import java.io.{ File => JFile, FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
import java.net.URL
import PartialFunction._
@@ -20,8 +19,6 @@ import scala.collection.mutable.ArrayBuffer
*/
object AbstractFile
{
- def isJarOrZip(f: Path) = cond(f.extension) { case "zip" | "jar" => true }
-
/** Returns "getFile(new File(path))". */
def getFile(path: String): AbstractFile = getFile(Path(path))
def getFile(path: Path): AbstractFile = getFile(path.toFile)
@@ -46,7 +43,7 @@ object AbstractFile
*/
def getDirectory(file: File): AbstractFile =
if (file.isDirectory) new PlainFile(file)
- else if (file.isFile && isJarOrZip(file)) ZipArchive fromFile file
+ else if (file.isFile && Path.isJarOrZip(file)) ZipArchive fromFile file
else null
/**
@@ -58,7 +55,7 @@ object AbstractFile
* @return ...
*/
def getURL(url: URL): AbstractFile =
- Option(url) partialMap { case url: URL if isJarOrZip(url.getPath) => ZipArchive fromURL url } orNull
+ Option(url) collect { case url: URL if Path.isJarOrZip(url.getPath) => ZipArchive fromURL url } orNull
}
/**
@@ -93,6 +90,9 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
/** Returns the path of this abstract file. */
def path: String
+ /** Checks extension case insensitively. */
+ def hasExtension(other: String) = Path(path) hasExtension other
+
/** The absolute file, if this is a relative file. */
def absolute: AbstractFile
@@ -101,12 +101,16 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
/** Returns the underlying File if any and null otherwise. */
def file: JFile
- def sfile = File(file) // XXX
+ def sfile = Option(file) map (x => File(x)) // XXX
+
+ /** An underlying source, if known. Mostly, a zip/jar file. */
+ def underlyingSource: Option[AbstractFile] = None
/** Does this abstract file denote an existing file? */
- def exists: Boolean =
- if (file ne null) file.exists
- else true
+ def exists: Boolean = (file eq null) || file.exists
+
+ /** Does this abstract file represent something which can contain classfiles? */
+ def isClassContainer = isDirectory || (sfile exists (Path isJarOrZip _))
/** Create a file on disk, if one does not exist already. */
def create: Unit
@@ -126,6 +130,9 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
/** Returns an output stream for writing the file */
def output: OutputStream
+ /** Returns a buffered output stream for writing the file - defaults to out */
+ def bufferedOutput: BufferedOutputStream = new BufferedOutputStream(output)
+
/** size of this file if it is a concrete file. */
def sizeOption: Option[Int] = None
@@ -221,8 +228,8 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
* creating an empty file if it does not already existing.
*/
def fileNamed(name: String): AbstractFile = {
- assert(isDirectory)
- Option(lookupName(name, false)) getOrElse new PlainFile((sfile / name).createFile())
+ assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
+ Option(lookupName(name, false)) getOrElse new PlainFile((sfile.get / name).createFile())
}
/**
@@ -230,10 +237,13 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
* does not already exist.
*/
def subdirectoryNamed(name: String): AbstractFile = {
- assert (isDirectory)
- Option(lookupName(name, true)) getOrElse new PlainFile((sfile / name).createDirectory())
+ assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
+ Option(lookupName(name, true)) getOrElse new PlainFile((sfile.get / name).createDirectory())
}
+ protected def unsupported(): Nothing = unsupported(null)
+ protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg)
+
/** Returns the path of this abstract file. */
override def toString() = path
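A short, hedged sketch of the new AbstractFile helpers in use; the jar path is a placeholder and the null check mirrors getFile's contract:

    import scala.tools.nsc.io.AbstractFile

    val af = AbstractFile.getFile("lib/scala-library.jar")   // null if no such plain file
    if (af ne null) {
      af.hasExtension("JAR")   // true: the comparison is case-insensitive
      af.isClassContainer      // true for directories and zip/jar files
    }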
diff --git a/src/compiler/scala/tools/nsc/io/Directory.scala b/src/compiler/scala/tools/nsc/io/Directory.scala
index 3a01277fef..7c279a79e2 100644
--- a/src/compiler/scala/tools/nsc/io/Directory.scala
+++ b/src/compiler/scala/tools/nsc/io/Directory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,11 +10,16 @@ package scala.tools.nsc
package io
import java.io.{ File => JFile }
-import collection.Traversable
-object Directory
-{
- def apply(path: Path) = path.toDirectory
+object Directory {
+ import scala.util.Properties.{ tmpDir, userHome, userDir }
+
+ private def normalizePath(s: String) = Some(apply(Path(s).normalize))
+ def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir)
+ def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome)
+ def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir)
+
+ def apply(path: Path): Directory = path.toDirectory
// Like File.makeTemp but creates a directory instead
def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null): Directory = {
@@ -30,11 +35,12 @@ import Path._
* @author Paul Phillips
* @since 2.8
*/
-class Directory(jfile: JFile) extends Path(jfile)
-{
+class Directory(jfile: JFile) extends Path(jfile) {
+ override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
override def toDirectory: Directory = this
override def toFile: File = new File(jfile)
override def isValid = jfile.isDirectory() || !jfile.exists()
+ override def normalize: Directory = super.normalize.toDirectory
/** An iterator over the contents of this directory.
*/
@@ -44,8 +50,14 @@ class Directory(jfile: JFile) extends Path(jfile)
case xs => xs.iterator map Path.apply
}
- def dirs: Iterator[Directory] = list partialMap { case x: Directory => x }
- def files: Iterator[File] = list partialMap { case x: File => x }
+ def dirs: Iterator[Directory] = list collect { case x: Directory => x }
+ def files: Iterator[File] = list collect { case x: File => x }
+
+ override def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ list filter cond flatMap (_ walkFilter cond)
+
+ def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList())
+ def deepFiles: Iterator[File] = Path.onlyFiles(deepList())
/** If optional depth argument is not given, will recurse
* until it runs out of contents.
@@ -59,7 +71,7 @@ class Directory(jfile: JFile) extends Path(jfile)
* to the (optionally) given depth.
*/
def subdirs(depth: Int = 1): Iterator[Directory] =
- deepList(depth) partialMap { case x: Directory => x }
+ deepList(depth) collect { case x: Directory => x }
/** Deletes the directory recursively. Returns false on failure.
* Use with caution!
@@ -72,6 +84,4 @@ class Directory(jfile: JFile) extends Path(jfile)
}
f.delete()
}
-
- override def toString() = "Directory(%s)".format(path)
}
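The new companion values and the deep*/walkFilter additions, sketched against a hypothetical working directory:

    import scala.tools.nsc.io.Directory

    // Current/Home/TmpDir are Options because the underlying properties may be unset
    Directory.Current foreach { cwd =>
      // every *.scala file anywhere below the working directory
      cwd.deepFiles filter (_ hasExtension "scala") foreach println

      // walkFilter prunes whole subtrees: anything named ".git" is skipped entirely
      cwd walkFilter (_.name != ".git") foreach println
    }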
diff --git a/src/compiler/scala/tools/nsc/io/File.scala b/src/compiler/scala/tools/nsc/io/File.scala
index 294139ba44..e9741ed5cb 100644
--- a/src/compiler/scala/tools/nsc/io/File.scala
+++ b/src/compiler/scala/tools/nsc/io/File.scala
@@ -1,40 +1,51 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.nsc
package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, IOException, File => JFile }
-import java.nio.channels.FileChannel
-import collection.Traversable
+ BufferedInputStream, BufferedOutputStream, IOException, PrintStream, File => JFile }
+import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
-object File
-{
+object File {
def pathSeparator = JFile.pathSeparator
+ def separator = JFile.separator
- def apply(path: Path)(implicit codec: Codec = null) =
- if (codec != null) new File(path.jfile)(codec)
- else path.toFile
+ def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec)
// Create a temporary file
def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null) =
apply(JFile.createTempFile(prefix, suffix, dir))
- import java.nio.channels.Channel
type Closeable = { def close(): Unit }
def closeQuietly(target: Closeable) {
try target.close() catch { case e: IOException => }
}
+
+ // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
+ // we are using a static initializer to statically initialize a java class so we don't
+ // trigger java.lang.InternalErrors later when using it concurrently. We ignore all
+ // the exceptions so as not to cause spurious failures when no write access is available,
+ // e.g. google app engine.
+ try {
+ val tmp = JFile.createTempFile("bug6503430", null, null)
+ val in = new FileInputStream(tmp).getChannel()
+ val out = new FileOutputStream(tmp, true).getChannel()
+ out.transferFrom(in, 0, 0)
+ tmp.delete()
+ }
+ catch {
+ case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
+ }
}
import File._
import Path._
@@ -42,22 +53,25 @@ import Path._
/** An abstraction for files. For character data, a Codec
* can be supplied at either creation time or when a method
* involving character data is called (with the latter taking
- * precdence if supplied.) If neither is available, the value
+ * precedence if supplied.) If neither is available, the value
* of scala.io.Codec.default is used.
*
* @author Paul Phillips
* @since 2.8
*/
-class File(jfile: JFile)(implicit val creationCodec: Codec = null)
-extends Path(jfile)
-with Streamable.Chars
-{
+class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars {
+ override val creationCodec = constructorCodec
def withCodec(codec: Codec): File = new File(jfile)(codec)
+
+ override def addExtension(ext: String): File = super.addExtension(ext).toFile
+ override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile
override def toDirectory: Directory = new Directory(jfile)
override def toFile: File = this
-
+ override def normalize: File = super.normalize.toFile
override def isValid = jfile.isFile() || !jfile.exists()
override def length = super[Path].length
+ override def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ if (cond(this)) Iterator.single(this) else Iterator.empty
/** Obtains an InputStream. */
def inputStream() = new FileInputStream(jfile)
@@ -65,32 +79,43 @@ with Streamable.Chars
/** Obtains a OutputStream. */
def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append)
def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append))
+ def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true)
/** Obtains an OutputStreamWriter wrapped around a FileOutputStream.
* This should behave like a less broken version of java.io.FileWriter,
* in that unlike the java version you can specify the encoding.
*/
- def writer(append: Boolean = false, codec: Codec = getCodec()) =
+ def writer(): OutputStreamWriter = writer(false)
+ def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec)
+ def writer(append: Boolean, codec: Codec): OutputStreamWriter =
new OutputStreamWriter(outputStream(append), codec.charSet)
/** Wraps a BufferedWriter around the result of writer().
*/
- def bufferedWriter(append: Boolean = false, codec: Codec = getCodec()) =
+ def bufferedWriter(): BufferedWriter = bufferedWriter(false)
+ def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec)
+ def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter =
new BufferedWriter(writer(append, codec))
- /** Writes all the Strings in the given iterator to the file. */
- def writeAll(xs: Traversable[String], append: Boolean = false, codec: Codec = getCodec()): Unit = {
- val out = bufferedWriter(append, codec)
- try xs foreach (out write _)
+ /** Creates a new file and writes all the Strings to it. */
+ def writeAll(strings: String*): Unit = {
+ val out = bufferedWriter()
+ try strings foreach (out write _)
finally out close
}
- def copyFile(destPath: Path, preserveFileDate: Boolean = false) = {
- val FIFTY_MB = 1024 * 1024 * 50
+ def appendAll(strings: String*): Unit = {
+ val out = bufferedWriter(append = true)
+ try strings foreach (out write _)
+ finally out close
+ }
+
+ def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = {
+ val CHUNK = 1024 * 1024 * 16 // 16 MB
val dest = destPath.toFile
if (!isValid) fail("Source %s is not a valid file." format name)
if (this.normalize == dest.normalize) fail("Source and destination are the same.")
- if (!dest.parent.map(_.exists).getOrElse(false)) fail("Destination cannot be created.")
+ if (!dest.parent.exists) fail("Destination cannot be created.")
if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.")
if (dest.isDirectory) fail("Destination exists but is a directory.")
@@ -103,7 +128,7 @@ with Streamable.Chars
val size = in.size()
var pos, count = 0L
while (pos < size) {
- count = (size - pos) min FIFTY_MB
+ count = (size - pos) min CHUNK
pos += out.transferFrom(in, pos, count)
}
}
@@ -115,8 +140,18 @@ with Streamable.Chars
if (preserveFileDate)
dest.lastModified = this.lastModified
- ()
+ true
}
- override def toString() = "File(%s)".format(path)
+ /** Reflection since we're into the java 6+ API.
+ */
+ def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = {
+ type JBoolean = java.lang.Boolean
+ val method =
+ try classOf[JFile].getMethod("setExecutable", classOf[Boolean], classOf[Boolean])
+ catch { case _: NoSuchMethodException => return false }
+
+ try method.invoke(jfile, executable: JBoolean, ownerOnly: JBoolean).asInstanceOf[JBoolean].booleanValue
+ catch { case _: Exception => false }
+ }
}
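A sketch of the reworked File API (explicit codec, varargs writeAll/appendAll, Boolean copyTo); the file names are examples only:

    import scala.io.Codec
    import scala.tools.nsc.io.{ File, Path }

    implicit val codec: Codec = Codec.default      // File's constructor now requires a codec

    val log = File(Path("target/build.log"))
    log.writeAll("build started\n")                // creates/overwrites the file
    log.appendAll("build finished\n")              // appends using the same codec

    // copyTo streams in 16 MB chunks and reports success as a Boolean
    val copied: Boolean = log copyTo Path("target/build.log.bak")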
diff --git a/src/compiler/scala/tools/nsc/io/FileOperationException.scala b/src/compiler/scala/tools/nsc/io/FileOperationException.scala
index f4983cd156..2863a485df 100644
--- a/src/compiler/scala/tools/nsc/io/FileOperationException.scala
+++ b/src/compiler/scala/tools/nsc/io/FileOperationException.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.nsc
package io
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
new file mode 100644
index 0000000000..262aac7809
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Lexer.scala
@@ -0,0 +1,301 @@
+package scala.tools.nsc.io
+
+import java.io.{Reader, Writer, StringReader, StringWriter}
+import scala.collection.mutable.{Buffer, ArrayBuffer}
+import scala.math.BigInt
+
+/** Companion object of class `Lexer` which defines tokens and some utility concepts
+ * used for tokens and lexers
+ */
+object Lexer {
+
+ /** An exception raised if the input does not correspond to what's expected
+ * @param rdr the lexer from which the bad input is read
+ * @param msg the error message
+ */
+ class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
+
+ /** The class of tokens, i.e. descriptions of input words (or: lexemes).
+ * @param str the characters making up this token
+ */
+ class Token(val str: String) {
+ override def toString = str
+ }
+
+ /** A subclass of `Token` representing single-character delimiters
+ * @param char the delimiter character making up this token
+ */
+ case class Delim(char: Char) extends Token("'"+char.toString+"'")
+
+ /** A subclass of token representing integer literals */
+ case class IntLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representing floating point literals */
+ case class FloatLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representing string literals */
+ case class StringLit(override val str: String) extends Token(str) {
+ override def toString = quoted(str)
+ }
+
+ /** The `true` token */
+ val TrueLit = new Token("true")
+
+ /** The `false` token */
+ val FalseLit = new Token("false")
+
+ /** The `null` token */
+ val NullLit = new Token("null")
+
+ /** The '`(`' token */
+ val LParen = new Delim('(')
+
+ /** The '`)`' token */
+ val RParen = new Delim(')')
+
+ /** The '`{`' token */
+ val LBrace = new Delim('{')
+
+ /** The '`}`' token */
+ val RBrace = new Delim('}')
+
+ /** The '`[`' token */
+ val LBracket = new Delim('[')
+
+ /** The '`]`' token */
+ val RBracket = new Delim(']')
+
+ /** The '`,`' token */
+ val Comma = new Delim(',')
+
+ /** The '`:`' token */
+ val Colon = new Delim(':')
+
+ /** The token representing end of input */
+ val EOF = new Token("<end of input>")
+
+ private def toUDigit(ch: Int): Char = {
+ val d = ch & 0xF
+ (if (d < 10) d + '0' else d - 10 + 'A').toChar
+ }
+
+ private def addToStr(buf: StringBuilder, ch: Char) {
+ ch match {
+ case '"' => buf ++= "\\\""
+ case '\b' => buf ++= "\\b"
+ case '\f' => buf ++= "\\f"
+ case '\n' => buf ++= "\\n"
+ case '\r' => buf ++= "\\r"
+ case '\t' => buf ++= "\\t"
+ case '\\' => buf ++= "\\\\"
+ case _ =>
+ if (' ' <= ch && ch < 128) buf += ch
+ else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch)
+ }
+ }
+
+ /** Returns given string enclosed in `"`-quotes with all string characters escaped
+ * so that they correspond to the JSON standard.
+ * Characters that are escaped are: `"`, `\b`, `\f`, `\n`, `\r`, `\t`, `\`.
+ * Furthermore, every other character which is not in the ASCII range 32-127 is
+ * escaped as a four hex-digit unicode character of the form `\ u x x x x`.
+ * @param str the string to be quoted
+ */
+ def quoted(str: String): String = {
+ val buf = new StringBuilder += '\"'
+ str foreach (addToStr(buf, _))
+ buf += '\"'
+ buf.toString
+ }
+
+ private val BUF_SIZE = 2 << 16
+}
+
+import Lexer._
+
+/** A simple lexer for tokens as they are used in JSON, plus parens `(`, `)`
+ * Tokens understood are:
+ *
+ * `(`, `)`, `[`, `]`, `{`, `}`, `:`, `,`, `true`, `false`, `null`,
+ * strings (syntax as in JSON),
+ * integer numbers (syntax as in JSON: -?(0|\d+)),
+ * floating point numbers (syntax as in JSON: -?(0|\d+)(\.\d+)?((e|E)(+|-)?\d+)?).
+ * The end of input is represented as its own token, EOF.
+ * Lexers can keep one token of lookahead.
+ *
+ * @param rd the reader from which characters are read.
+ */
+class Lexer(rd: Reader) {
+
+ /** The last-read character */
+ var ch: Char = 0
+
+ /** The number of characters read so far */
+ var pos: Long = 0
+
+ /** The last-read token */
+ var token: Token = _
+
+ /** The number of characters read before the start of the last-read token */
+ var tokenPos: Long = 0
+
+ private var atEOF: Boolean = false
+ private val buf = new Array[Char](BUF_SIZE)
+ private var nread: Int = 0
+ private var bp = 0
+
+ /** Reads next character into `ch` */
+ def nextChar() {
+ assert(!atEOF)
+ if (bp == nread) {
+ nread = rd.read(buf)
+ bp = 0
+ if (nread <= 0) { ch = 0; atEOF = true; return }
+ }
+ ch = buf(bp)
+ bp += 1
+ pos += 1
+ }
+
+ /** If last-read character equals given character, reads next character,
+ * otherwise raises an error
+ * @param c the given character to compare with last-read character
+ * @throws MalformedInput if character does not match
+ */
+ def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected")
+
+ private val sb = new StringBuilder
+
+ private def putChar() {
+ sb += ch; nextChar()
+ }
+
+ private def putAcceptString(str: String) {
+ str foreach acceptChar
+ sb ++= str
+ }
+
+ /** Skips whitespace and reads next lexeme into `token`
+ * @throws MalformedInput if lexeme not recognized as a valid token
+ */
+ def nextToken() {
+ sb.clear()
+ while (!atEOF && ch <= ' ') nextChar()
+ tokenPos = pos - 1
+ if (atEOF) token = EOF
+ else ch match {
+ case '(' => putChar(); token = LParen
+ case ')' => putChar(); token = RParen
+ case '{' => putChar(); token = LBrace
+ case '}' => putChar(); token = RBrace
+ case '[' => putChar(); token = LBracket
+ case ']' => putChar(); token = RBracket
+ case ',' => putChar(); token = Comma
+ case ':' => putChar(); token = Colon
+ case 't' => putAcceptString("true"); token = TrueLit
+ case 'f' => putAcceptString("false"); token = FalseLit
+ case 'n' => putAcceptString("null"); token = NullLit
+ case '"' => getString()
+ case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
+ case _ => error("unrecognized start of token: '"+ch+"'")
+ }
+ //println("["+token+"]")
+ }
+
+ /** Reads a string literal, and forms a `StringLit` token from it.
+ * Last-read input character `ch` must be opening `"`-quote.
+ * @throws MalformedInput if lexeme not recognized as a string literal.
+ */
+ def getString() {
+ def udigit() = {
+ nextChar()
+ if ('0' <= ch && ch <= '9') ch - '0'
+ else if ('A' <= ch && ch <= 'F') ch - 'A' + 10
+ else if ('a' <= ch && ch <= 'f') ch - 'a' + 10
+ else error("illegal unicode escape character: '"+ch+"'")
+ }
+ val delim = ch
+ nextChar()
+ while (ch != delim && ch >= ' ') {
+ if (ch == '\\') {
+ nextChar()
+ ch match {
+ case '\'' => sb += '\''
+ case '"' => sb += '"'
+ case '\\' => sb += '\\'
+ case '/' => sb += '/'
+ case 'b' => sb += '\b'
+ case 'f' => sb += '\f'
+ case 'n' => sb += '\n'
+ case 'r' => sb += '\r'
+ case 't' => sb += '\t'
+ case 'u' => sb += (udigit() << 12 | udigit() << 8 | udigit() << 4 | udigit()).toChar
+ case _ => error("illegal escape character: '"+ch+"'")
+ }
+ nextChar()
+ } else {
+ putChar()
+ }
+ }
+ acceptChar(delim)
+ token = StringLit(sb.toString)
+ }
+
+ /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it.
+ * Last-read input character `ch` must be either `-` or a digit.
+ * @throws MalformedInput if lexeme not recognized as a numeric literal.
+ */
+ def getNumber() {
+ def digit() =
+ if ('0' <= ch && ch <= '9') putChar()
+ else error("<digit> expected")
+ def digits() =
+ do { digit() } while ('0' <= ch && ch <= '9')
+ var isFloating = false
+ if (ch == '-') putChar()
+ if (ch == '0') digit()
+ else digits()
+ if (ch == '.') {
+ isFloating = true
+ putChar()
+ digits()
+ }
+ if (ch == 'e' || ch == 'E') {
+ isFloating = true
+ putChar()
+ if (ch == '+' || ch == '-') putChar()
+ digits()
+ }
+ token = if (isFloating) FloatLit(sb.toString) else IntLit(sb.toString)
+ }
+
+ /** If current token equals given token, reads next token, otherwise raises an error.
+ * @param t the given token to compare current token with
+ * @throws MalformedInput if the two tokens do not match.
+ */
+ def accept(t: Token) {
+ if (token == t) nextToken()
+ else error(t+" expected, but "+token+" found")
+ }
+
+ /** If the current token is a delimiter consisting of the given character, reads the next token,
+ * otherwise raises an error.
+ * @param c the given delimiter character to compare current token with
+ * @throws MalformedInput if the current token `token` is not a delimiter, or
+ * consists of a character different from `c`.
+ */
+ def accept(ch: Char) {
+ token match {
+ case Delim(`ch`) => nextToken()
+ case _ => accept(Delim(ch))
+ }
+ }
+
+ /** Always throws a `MalformedInput` exception with given error message.
+ * @param msg the error message
+ */
+ def error(msg: String) = throw new MalformedInput(this, msg)
+
+ nextChar()
+ nextToken()
+}
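A quick way to see the lexer in action on a JSON fragment; the input string is arbitrary:

    import java.io.StringReader
    import scala.tools.nsc.io.Lexer
    import scala.tools.nsc.io.Lexer._

    val lexer = new Lexer(new StringReader("""{ "n": 42, "pi": 3.14, "ok": true }"""))
    // the constructor has already read the first token; iterate until EOF
    while (lexer.token != EOF) {
      println(lexer.token)   // '{', "n", ':', 42, ',', "pi", ':', 3.14, ..., '}'
      lexer.nextToken()
    }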
diff --git a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala b/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
new file mode 100644
index 0000000000..9340796a83
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
@@ -0,0 +1,19 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package io
+
+import java.io.{ PrintStream, ByteArrayOutputStream }
+
+/** A sink for when you want to discard all output.
+ */
+class NullPrintStream extends PrintStream(new ByteArrayOutputStream()) { }
+
+object NullPrintStream extends NullPrintStream {
+ def setOut() = Console setOut this
+ def setErr() = Console setErr this
+ def setOutAndErr() = { setOut() ; setErr() }
+}
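The intended use is temporary silencing of Console output, roughly as follows; restoring the previous streams afterwards is the caller's job:

    import scala.tools.nsc.io.NullPrintStream

    val (savedOut, savedErr) = (Console.out, Console.err)
    NullPrintStream.setOutAndErr()
    try println("this goes nowhere")
    finally { Console setOut savedOut ; Console setErr savedErr }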
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index 37cc64cf75..afef38be3c 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
*/
package scala.tools.nsc
@@ -7,11 +7,9 @@ package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, File => JFile }
+ BufferedInputStream, BufferedOutputStream, RandomAccessFile, File => JFile }
import java.net.{ URI, URL }
-import collection.{ Seq, Traversable }
-import PartialFunction._
-import scala.util.Random.nextASCIIString
+import scala.util.Random.alphanumeric
/** An abstraction for filesystem paths. The differences between
* Path, File, and Directory are primarily to communicate intent.
@@ -30,6 +28,16 @@ import scala.util.Random.nextASCIIString
object Path
{
+ private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
+
+ /** If examineFile is true, it will look at the first four bytes of the file
+ * and see if the magic number indicates it may be a jar or zip.
+ */
+ private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, false)
+ def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
+ f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
+
// not certain these won't be problematic, but looks good so far
implicit def string2path(s: String): Path = apply(s)
implicit def jfile2path(jfile: JFile): Path = apply(jfile)
@@ -47,15 +55,22 @@ object Path
// true
// }
+ def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
+ def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
+ def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
+ def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
+
def roots: List[Path] = JFile.listRoots().toList map Path.apply
+ def apply(segments: Seq[String]): Path = apply(segments mkString JFile.separator)
def apply(path: String): Path = apply(new JFile(path))
def apply(jfile: JFile): Path =
if (jfile.isFile) new File(jfile)
else if (jfile.isDirectory) new Directory(jfile)
else new Path(jfile)
- private[io] def randomPrefix = nextASCIIString(6)
+ /** Avoiding any shell/path issues by only using alphanumerics. */
+ private[io] def randomPrefix = alphanumeric take 6 mkString
private[io] def fail(msg: String) = throw FileOperationException(msg)
}
import Path._
@@ -63,9 +78,9 @@ import Path._
/** The Path constructor is private so we can enforce some
* semantics regarding how a Path might relate to the world.
*/
-class Path private[io] (val jfile: JFile)
-{
+class Path private[io] (val jfile: JFile) {
val separator = JFile.separatorChar
+ val separatorStr = JFile.separator
// Validation: this verifies that the type of this object and the
// contents of the filesystem are in agreement. All objects are
@@ -79,35 +94,98 @@ class Path private[io] (val jfile: JFile)
def toAbsolute: Path = if (isAbsolute) this else Path(jfile.getAbsolutePath())
def toURI: URI = jfile.toURI()
def toURL: URL = toURI.toURL()
+ /** If this path is absolute, returns it: otherwise, returns an absolute
+ * path made up of root / this.
+ */
+ def toAbsoluteWithRoot(root: Path) = if (isAbsolute) this else root.toAbsolute / this
/** Creates a new Path with the specified path appended. Assumes
* the type of the new component implies the type of the result.
*/
- def /(child: Path): Path = new Path(new JFile(jfile, child.path))
+ def /(child: Path): Path = if (isEmpty) child else new Path(new JFile(jfile, child.path))
def /(child: Directory): Directory = /(child: Path).toDirectory
def /(child: File): File = /(child: Path).toFile
+ /** If this path is a container, recursively iterate over its contents.
+ * The supplied condition is a filter which is applied to each element,
+ * with that branch of the tree being closed off if it is false. So for
+ * example if the condition is false for some subdirectory, nothing
+ * under that directory will be in the Iterator; but otherwise each
+ * file and subdirectory underneath it will appear.
+ */
+ def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ if (isFile) toFile walkFilter cond
+ else if (isDirectory) toDirectory walkFilter cond
+ else Iterator.empty
+
+ /** Equivalent to walkFilter(_ => true).
+ */
+ def walk: Iterator[Path] = walkFilter(_ => true)
+
// identity
def name: String = jfile.getName()
def path: String = jfile.getPath()
def normalize: Path = Path(jfile.getCanonicalPath())
- // todo -
- // def resolve(other: Path): Path
- // def relativize(other: Path): Path
+ def isRootPath: Boolean = roots exists (_ isSame this)
+
+ def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
+ def relativize(other: Path) = {
+ assert(isAbsolute == other.isAbsolute, "Paths not of same type: "+this+", "+other)
+
+ def createRelativePath(baseSegs: List[String], otherSegs: List[String]) : String = {
+ (baseSegs, otherSegs) match {
+ case (b :: bs, o :: os) if b == o => createRelativePath(bs, os)
+ case (bs, os) => ((".."+separator)*bs.length)+os.mkString(separatorStr)
+ }
+ }
+
+ Path(createRelativePath(segments, other.segments))
+ }
// derived from identity
def root: Option[Path] = roots find (this startsWith _)
- def segments: List[String] = (path split separator).toList filterNot (_.isEmpty)
- def parent: Option[Path] = Option(jfile.getParent()) map Path.apply
- def parents: List[Path] = parent match {
- case None => Nil
- case Some(p) => p :: p.parents
+ def segments: List[String] = (path split separator).toList filterNot (_.length == 0)
+ /**
+ * @return The path of the parent directory, or root if path is already root
+ */
+ def parent: Directory = path match {
+ case "" | "." => Directory("..")
+ case _ =>
+ // the only solution <-- a comment which could have used elaboration
+ if (segments.nonEmpty && segments.last == "..")
+ (path / "..").toDirectory
+ else jfile.getParent match {
+ case null =>
+ if (isAbsolute) toDirectory // it should be a root. BTW, don't need to worry about relative pathed root
+ else Directory(".") // a dir under pwd
+ case x =>
+ Directory(x)
+ }
+ }
+ def parents: List[Directory] = {
+ val p = parent
+ if (p isSame this) Nil else p :: p.parents
}
// if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise ""
def extension: String = (name lastIndexOf '.') match {
case -1 => ""
case idx => name drop (idx + 1)
}
+ // compares against extensions in a CASE INSENSITIVE way.
+ def hasExtension(ext: String, exts: String*) = {
+ val xs = (ext +: exts) map (_.toLowerCase)
+ xs contains extension.toLowerCase
+ }
+ // returns the filename without the extension.
+ def stripExtension: String = name stripSuffix ("." + extension)
+ // returns the Path with the extension.
+ def addExtension(ext: String): Path = Path(path + "." + ext)
+ // changes the existing extension out for a new one
+ def changeExtension(ext: String): Path = Path((path stripSuffix extension) + ext)
+
+ // conditionally execute
+ def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None
+ def ifDirectory[T](f: Directory => T): Option[T] = if (isDirectory) Some(f(toDirectory)) else None
// Boolean tests
def canRead = jfile.canRead()
@@ -119,10 +197,11 @@ class Path private[io] (val jfile: JFile)
def isDirectory = jfile.isDirectory()
def isAbsolute = jfile.isAbsolute()
def isHidden = jfile.isHidden()
- def isSymlink = parent.isDefined && {
- val x = parent.get / name
+ def isSymlink = {
+ val x = parent / name
x.normalize != x.toAbsolute
}
+ def isEmpty = path.length == 0
// Information
def lastModified = jfile.lastModified()
@@ -132,7 +211,7 @@ class Path private[io] (val jfile: JFile)
// Boolean path comparisons
def endsWith(other: Path) = segments endsWith other.segments
def startsWith(other: Path) = segments startsWith other.segments
- def isSame(other: Path) = toAbsolute == other.toAbsolute
+ def isSame(other: Path) = normalize == other.normalize
def isFresher(other: Path) = lastModified > other.lastModified
// creations
@@ -152,12 +231,25 @@ class Path private[io] (val jfile: JFile)
// deletions
def delete() = jfile.delete()
def deleteIfExists() = if (jfile.exists()) delete() else false
+ def truncate() =
+ isFile && {
+ val raf = new RandomAccessFile(jfile, "rw")
+ raf setLength 0
+ raf.close()
+ length == 0
+ }
+
+ def touch(modTime: Long = System.currentTimeMillis) = {
+ createFile()
+ if (isFile)
+ lastModified = modTime
+ }
// todo
// def copyTo(target: Path, options ...): Boolean
// def moveTo(target: Path, options ...): Boolean
- override def toString() = "Path(%s)".format(path)
+ override def toString() = path
override def equals(other: Any) = other match {
case x: Path => path == x.path
case _ => false
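A handful of expressions showing the new Path operations (extension helpers, resolve/relativize, parent); the paths are relative and purely illustrative, and printed forms use the platform separator:

    import scala.tools.nsc.io.Path

    val src  = Path("src") / "compiler" / "scala"
    val file = src / "Predef.scala"

    file.extension                        // "scala"
    file hasExtension ("java", "scala")   // true; comparison is case-insensitive
    file.stripExtension                   // "Predef"
    file changeExtension "class"          // .../Predef.class

    src relativize file                   // Predef.scala
    file.parent                           // the src/compiler/scala directory
    Path("src") resolve Path("library")   // src/library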
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
new file mode 100644
index 0000000000..78c1369b64
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -0,0 +1,455 @@
+package scala.tools.nsc.io
+
+import annotation.unchecked
+import Lexer._
+import java.io.Writer
+
+/** An abstract class for writing and reading Scala objects to and
+ * from a legible representation. The representation follows the following grammar:
+ * {{{
+ * Pickled = `true' | `false' | `null' | NumericLit | StringLit |
+ * Labelled | Pickled `,' Pickled
+ * Labelled = StringLit `(' Pickled? `)'
+ * }}}
+ *
+ * All ...Lit classes are as in JSON. @see scala.tools.nsc.io.Lexer
+ *
+ * Subclasses of `Pickler` each can write and read individual classes
+ * of values.
+ *
+ * @param T the type of values handled by this pickler.
+ *
+ * These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
+ * Iulian Dragos' picklers for Scala to XML. See:
+ *
+ * <a href="http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide">
+ * http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide
+ * </a>
+ */
+abstract class Pickler[T] {
+
+ import Pickler._
+
+ /** Writes value in pickled form
+ * @param wr the writer to which pickled form is written
+ * @param x the value to write
+ */
+ def pickle(wr: Writer, x: T)
+
+ /** Reads value from pickled form.
+ *
+ * @param rd the lexer from which lexemes are read
+ * @return An `UnpickleSuccess` value if the current input corresponds to the
+ * kind of value that is unpickled by the current subclass of `Pickler`,
+ * an `UnpickleFailure` value otherwise.
+ * @throws `Lexer.MalformedInput` if input is invalid, or if
+ * an `Unpickle
+ */
+ def unpickle(rd: Lexer): Unpickled[T]
+
+ /** A pickler representing a `~`-pair of values as two consecutive pickled
+ * strings, separated by a comma.
+ * @param that the second pickler which together with the current pickler makes
+ * up the pair `this ~ that` to be pickled.
+ */
+ def ~ [U] (that: => Pickler[U]): Pickler[T ~ U] = seqPickler(this, that)
+
+ /** A pickler that adds a label to the current pickler, using the representation
+ * `label ( <current pickler> )`
+ *
+ * @param label the string to be added as a label.
+ */
+ def labelled(label: String): Pickler[T] = labelledPickler(label, this)
+
+ /** A pickler obtained from the current pickler by a pair of transformer functions
+ * @param in the function that maps values handled by the current pickler to
+ * values handled by the wrapped pickler.
+ * @param out the function that maps values handled by the wrapped pickler to
+ * values handled by the current pickler.
+ */
+ def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
+
+ /** A pickler obtained from the current pickler by also admitting `null` as
+ * a handled value, represented as the token `null`.
+ *
+ * @param fromNull an implicit evidence parameter ensuring that the type of values
+ * handled by this pickler contains `null`.
+ */
+ def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this)
+
+ /** A conditional pickler obtained from the current pickler.
+ * @param cond the condition to test to find out whether pickler can handle
+ * some Scala value.
+ */
+ def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
+
+ /** A conditional pickler handling values of some Scala class. It adds the
+ * class name as a label to the representation of the current pickler and
+ * @param c the class of values handled by this pickler.
+ */
+ def asClass[U <: T](c: Class[U]): CondPickler[T] = this.labelled(c.getName).cond(c isInstance _)
+}
+
+object Pickler {
+
+ var picklerDebugMode = false
+
+ /** A base class representing unpickler result. It has two subclasses:
+ * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
+ * where a value of the given type `T` could not be unpickled from input.
+ * @param T the type of unpickled values in case of success.
+ */
+ abstract class Unpickled[+T] {
+ /** Transforms success values to success values using given function,
+ * leaves failures alone
+ * @param f the function to apply.
+ */
+ def map[U](f: T => U): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => UnpickleSuccess(f(x))
+ case f: UnpickleFailure => f
+ }
+ /** Transforms success values to successes or failures using given function,
+ * leaves failures alone.
+ * @param f the function to apply.
+ */
+ def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => f(x)
+ case f: UnpickleFailure => f
+ }
+ /** Tries alternate expression if current result is a failure
+ * @param alt the alternate expression to be tried in case of failure
+ */
+ def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => this
+ case f: UnpickleFailure => alt
+ }
+
+ /** Transforms failures into thrown `MalformedInput` exceptions.
+ * @throws MalformedInput if current result is a failure
+ */
+ def requireSuccess: UnpickleSuccess[T] = this match {
+ case s @ UnpickleSuccess(x) => s
+ case f: UnpickleFailure =>
+ throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg)
+ }
+ }
+
+ /** A class representing successful unpicklings
+ * @param T the type of the unpickled value
+ * @param result the unpickled value
+ */
+ case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
+
+ /** A class representing unpickle failures
+ * @param msg an error message describing what failed.
+ * @param rd the lexer unpickled values were read from (can be used to get
+ * error position, for instance).
+ */
+ class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] {
+ def errMsg = msg
+ override def toString = "Failure at "+rd.tokenPos+":\n"+msg
+ }
+
+ private def errorExpected(rd: Lexer, msg: => String) =
+ new UnpickleFailure("expected: "+msg+"\n" +
+ "found : "+rd.token,
+ rd)
+
+ private def nextSuccess[T](rd: Lexer, result: T) = {
+ rd.nextToken()
+ UnpickleSuccess(result)
+ }
+
+ /** The implicit `Pickler` value for type `T`. Equivalent to `implicitly[Pickler[T]]`.
+ */
+ def pkl[T: Pickler] = implicitly[Pickler[T]]
+
+ /** A class representing `~`-pairs */
+ case class ~[S, T](fst: S, snd: T)
+
+ /** A wrapper class to be able to use `~` as an infix method */
+ class TildeDecorator[S](x: S) {
+ /** Infix method that forms a `~`-pair. */
+ def ~ [T](y: T): S ~ T = new ~ (x, y)
+ }
+
+ /** An implicit wrapper that adds `~` as a method to any value. */
+ implicit def tildeDecorator[S](x: S): TildeDecorator[S] = new TildeDecorator(x)
+
+ /** A converter from binary functions to functions over `~`-pairs
+ */
+ implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
+
+ /** A converter from functions returning Options over pairs to functions returning `~`-pairs.
+ * The converted function will raise a `MatchError` where the original function returned
+ * a `None`. This converter is useful for turning `unapply` methods of case classes
+ * into wrapper methods that can be passed as second argument to `wrap`.
+ */
+ implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } }
+
+ /** Same as `p.labelled(label)`.
+ */
+ def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = {
+ wr.write(quoted(label));
+ wr.write("(")
+ p.pickle(wr, x)
+ wr.write(")")
+ }
+ def unpickle(rd: Lexer): Unpickled[T] =
+ rd.token match {
+ case StringLit(`label`) =>
+ rd.nextToken()
+ rd.accept('(')
+ val result = p.unpickle(rd).requireSuccess
+ rd.accept(')')
+ result
+ case _ =>
+ errorExpected(rd, quoted(label)+"(...)")
+ }
+ }
+
+ /** Same as `p.wrapped(in)(out)`
+ */
+ def wrappedPickler[S, T](p: Pickler[S])(in: S => T)(out: T => S) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, out(x))
+ def unpickle(rd: Lexer) = p.unpickle(rd) map in
+ }
+
+ /** Same as `p.cond(condition)`
+ */
+ def conditionalPickler[T](p: Pickler[T], condition: Any => Boolean) = new CondPickler[T](condition) {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, x)
+ def unpickle(rd: Lexer) = p.unpickle(rd)
+ }
+
+ /** Same as `p ~ q`
+ */
+ def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] {
+ lazy val qq = q
+ def pickle(wr: Writer, x: T ~ U) = {
+ p.pickle(wr, x.fst)
+ wr.write(',')
+ q.pickle(wr, x.snd)
+ }
+ def unpickle(rd: Lexer) =
+ for (x <- p.unpickle(rd); y <- { rd.accept(','); qq.unpickle(rd).requireSuccess })
+ yield x ~ y
+ }
+
+ /** Same as `p | q`
+ */
+ def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) =
+ new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) {
+ lazy val qq = q
+ override def tryPickle(wr: Writer, x: Any): Boolean =
+ p.tryPickle(wr, x) || qq.tryPickle(wr, x)
+ def pickle(wr: Writer, x: T) =
+ require(tryPickle(wr, x),
+ "no pickler found for "+x+" of class "+x.asInstanceOf[AnyRef].getClass.getName)
+ def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
+ }
+
+ /** Same as `p.orNull`
+ */
+ def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] {
+ def pickle(wr: Writer, x: T) =
+ if (x == null) wr.write("null") else p.pickle(wr, x)
+ def unpickle(rd: Lexer): Unpickled[T] =
+ if (rd.token == NullLit) nextSuccess(rd, fromNull(null))
+ else p.unpickle(rd)
+ }
+
+ /** A conditional pickler for singleton objects. It represents these
+ * with the object's underlying class as a label.
+ * Example: the object `scala.None` would be represented as `scala.None$()`.
+ */
+ def singletonPickler[T <: AnyRef](x: T): CondPickler[T] =
+ unitPickler
+ .wrapped { _ => x } { x => () }
+ .labelled (x.getClass.getName)
+ .cond (x eq _.asInstanceOf[AnyRef])
+
+ /** A pickler that handles instances of classes that have an empty constructor.
+ * It represents them as `$new ( <name of class> )`.
+ * When unpickling, a new instance of the class is created using the empty
+ * constructor of the class via `Class.forName(<name of class>).newInstance()`.
+ */
+ def javaInstancePickler[T <: AnyRef]: Pickler[T] =
+ (stringPickler labelled "$new")
+ .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName }
+
+ /** A pickler that handles iterators. It pickles all values
+ * returned by an iterator separated by commas.
+ * When unpickling, it always returns an `UnpickleSuccess` containing an iterator.
+ * This iterator returns 0 or more values that are obtained by unpickling
+ * until a closing parenthesis, bracket or brace or the end of input is encountered.
+ *
+ * This means that iterator picklers should not be directly followed by `~`
+ * because the pickler would also read any values belonging to the second
+ * part of the `~`-pair.
+ *
+ * What's usually done instead is that the iterator pickler is wrapped and labelled
+ * to handle other kinds of sequences.
+ */
+ implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] {
+ lazy val p = pkl[T]
+ def pickle(wr: Writer, xs: Iterator[T]) {
+ var first = true
+ for (x <- xs) {
+ if (first) first = false else wr.write(',')
+ p.pickle(wr, x)
+ }
+ }
+ def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] {
+ var first = true
+ def hasNext = {
+ val t = rd.token
+ t != EOF && t != RParen && t != RBrace && t != RBracket
+ }
+ def next(): T = {
+ if (first) first = false else rd.accept(',')
+ p.unpickle(rd).requireSuccess.result
+ }
+ })
+ }
+
+ /** A pickler that handles values that can be represented as a single token.
+ * @param kind the kind of token representing the value, used in error messages
+ * for unpickling.
+ * @param matcher A partial function from tokens to handled values. Unpickling
+ * succeeds if the matcher function is defined on the current token.
+ */
+ private def tokenPickler[T](kind: String)(matcher: PartialFunction[Token, T]) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = wr.write(x.toString)
+ def unpickle(rd: Lexer) =
+ if (matcher isDefinedAt rd.token) nextSuccess(rd, matcher(rd.token))
+ else errorExpected(rd, kind)
+ }
+
+ /** A pickler for values of type `Long`, represented as integer literals */
+ implicit val longPickler: Pickler[Long] =
+ tokenPickler("integer literal") { case IntLit(s) => s.toLong }
+
+ /** A pickler for values of type `Double`, represented as floating point literals */
+ implicit val doublePickler: Pickler[Double] =
+ tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble }
+
+ /** A pickler for values of type `Byte`, represented as integer literals */
+ implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong }
+
+ /** A pickler for values of type `Short`, represented as integer literals */
+ implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong }
+
+ /** A pickler for values of type `Int`, represented as integer literals */
+ implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
+
+ /** A pickler for values of type `Float`, represented as floating point literals */
+ implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toDouble }
+
+ /** A conditional pickler for the boolean value `true` */
+ private val truePickler =
+ tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
+
+ /** A conditional pickler for the boolean value `false` */
+ private val falsePickler =
+ tokenPickler("boolean literal") { case FalseLit => false } cond { _ == false }
+
+ /** A pickler for values of type `Boolean`, represented as the literals `true` or `false`. */
+ implicit def booleanPickler: Pickler[Boolean] = truePickler | falsePickler
+
+ /** A pickler for values of type `Unit`, represented by the empty character string */
+ implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] {
+ def pickle(wr: Writer, x: Unit) {}
+ def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(())
+ }
+
+ /** A pickler for values of type `String`, represented as string literals */
+ implicit val stringPickler: Pickler[String] = new Pickler[String] {
+ def pickle(wr: Writer, x: String) = wr.write(if (x == null) "null" else quoted(x))
+ def unpickle(rd: Lexer) = rd.token match {
+ case StringLit(s) => nextSuccess(rd, s)
+ case NullLit => nextSuccess(rd, null)
+ case _ => errorExpected(rd, "string literal")
+ }
+ }
+
+ /** A pickler for values of type `Char`, represented as string literals of length 1 */
+ implicit val charPickler: Pickler[Char] =
+ stringPickler
+ .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString }
+
+ /** A pickler for pairs, represented as `~`-pairs */
+ implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
+ (pkl[T1] ~ pkl[T2])
+ .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 }
+ .labelled ("tuple2")
+
+ /** A pickler for 3-tuples, represented as `~`-tuples */
+ implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] =
+ (p1 ~ p2 ~ p3)
+ .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
+ .labelled ("tuple3")
+
+ /** A pickler for 4-tuples, represented as `~`-tuples */
+ implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] =
+ (p1 ~ p2 ~ p3 ~ p4)
+ .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 }
+ .labelled ("tuple4")
+
+ /** A conditional pickler for the `scala.None` object */
+ implicit val nonePickler = singletonPickler(None)
+
+ /** A conditional pickler for instances of class `scala.Some` */
+ implicit def somePickler[T: Pickler]: CondPickler[Some[T]] =
+ pkl[T]
+ .wrapped { Some(_) } { _.get }
+ .asClass (classOf[Some[T]])
+
+ /** A pickler for optional values */
+ implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T]
+
+ /** A pickler for list values */
+ implicit def listPickler[T: Pickler]: Pickler[List[T]] =
+ iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
+
+ /** A pickler for vector values */
+ implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] =
+ iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
+
+ /** A pickler for array values */
+ implicit def array[T : ClassManifest : Pickler]: Pickler[Array[T]] =
+ iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
+}
+
+/** A subclass of `Pickler` that can indicate whether a particular value can be pickled by instances
+ * of this class.
+ * @param canPickle The predicate that indicates whether a given value
+ * can be pickled by instances of this class.
+ */
+abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] {
+ import Pickler._
+
+ /** Pickles given value `x` if possible, as indicated by `canPickle(x)`.
+ */
+ def tryPickle(wr: Writer, x: Any): Boolean = {
+ val result = canPickle(x)
+ if (result) pickle(wr, x.asInstanceOf[T])
+ result
+ }
+
+ /** A pickler obtained from this pickler and an alternative pickler.
+ * To pickle a value, this pickler is tried first. If it cannot handle
+ * the object (as indicated by its `canPickle` test), then the
+ * alternative pickler is tried.
+ * To unpickle a value, this unpickler is tried first. If it cannot read
+ * the input (as indicated by an `UnpickleFailure` result), then the
+ * alternative pickler is tried.
+ * @param V The handled type of the returned pickler.
+ * @param U The handled type of the alternative pickler.
+ * @param that The alternative pickler.
+ */
+ def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
+ eitherPickler[V, T, U](this, that)
+}
+
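To make the combinator set concrete, here is a hedged sketch of a pickler for a hypothetical case class, built only from the pieces defined above, followed by a round trip through a Lexer:

    import java.io.{ StringReader, StringWriter }
    import scala.tools.nsc.io.{ Lexer, Pickler }
    import Pickler._

    case class Point(x: Int, y: Int)       // invented example type

    implicit val pointPickler: Pickler[Point] =
      (pkl[Int] ~ pkl[Int])
        .wrapped { case x ~ y => Point(x, y) } { p => p.x ~ p.y }
        .labelled ("Point")

    val wr = new StringWriter
    pkl[Point].pickle(wr, Point(3, 4))
    wr.toString                                                   // "Point"(3,4)

    pkl[Point].unpickle(new Lexer(new StringReader(wr.toString))) // UnpickleSuccess(Point(3,4))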
diff --git a/src/compiler/scala/tools/nsc/io/PlainFile.scala b/src/compiler/scala/tools/nsc/io/PlainFile.scala
index 926f5ee042..9346e88bb2 100644
--- a/src/compiler/scala/tools/nsc/io/PlainFile.scala
+++ b/src/compiler/scala/tools/nsc/io/PlainFile.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -11,8 +10,7 @@ package io
import java.io.{ File => JFile, FileInputStream, FileOutputStream, IOException }
import PartialFunction._
-object PlainFile
-{
+object PlainFile {
/**
* If the specified File exists, returns an abstract file backed
* by it. Otherwise, returns null.
@@ -27,6 +25,8 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
assert(path ne null)
val file = givenPath.jfile
+ override def underlyingSource = Some(this)
+
private val fpath = try givenPath.normalize catch { case _: IOException => givenPath.toAbsolute }
/** Returns the name of this abstract file. */
@@ -38,7 +38,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
/** The absolute file. */
def absolute = new PlainFile(givenPath.normalize)
- override def container: AbstractFile = new PlainFile(givenPath.parent.get)
+ override def container: AbstractFile = new PlainFile(givenPath.parent)
override def input = givenPath.toFile.inputStream()
override def output = givenPath.toFile.outputStream()
override def sizeOption = Some(givenPath.length.toInt)
@@ -55,8 +55,8 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
- assert(isDirectory, "not a directory '%s'" format this)
- givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
+ if (!isDirectory) Iterator.empty
+ else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
}
/**
diff --git a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
new file mode 100644
index 0000000000..acd4847469
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
@@ -0,0 +1,41 @@
+package scala.tools.nsc.io
+
+import java.io.Writer
+
+class PrettyWriter(wr: Writer) extends Writer {
+ protected val indentStep = " "
+ private var indent = 0
+ private def newLine() {
+ wr.write('\n')
+ wr.write(indentStep * indent)
+ }
+ def close() = wr.close()
+ def flush() = wr.flush()
+ def write(str: Array[Char], off: Int, len: Int): Unit = {
+ if (off < str.length && off < len) {
+ str(off) match {
+ case '{' | '[' | '(' =>
+ indent += 1
+ wr.write(str(off))
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case '}' | ']' | ')' =>
+ wr.write(str, off, len)
+ indent -= 1
+ case ',' =>
+ wr.write(',')
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case ':' =>
+ wr.write(':')
+ wr.write(' ')
+ wr.write(str, off + 1, len - 1)
+ case _ =>
+ wr.write(str, off, len)
+ }
+ } else {
+ wr.write(str, off, len)
+ }
+ }
+ override def toString = wr.toString
+}
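PrettyWriter keys its indentation off the first character of each chunk it receives, so feeding it token-sized writes (as the picklers above do) yields an indented layout; a rough illustration:

    import java.io.StringWriter
    import scala.tools.nsc.io.PrettyWriter

    val out = new PrettyWriter(new StringWriter)
    List("{", "\"x\"", ":", "1", ",", "\"y\"", ":", "2", "}") foreach (s => out write s)
    out.flush()
    out.toString   // roughly "{\n  \"x\": 1,\n  \"y\": 2}"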
diff --git a/src/compiler/scala/tools/nsc/io/Process.scala b/src/compiler/scala/tools/nsc/io/Process.scala
index 7b10672699..ebd7937f33 100644
--- a/src/compiler/scala/tools/nsc/io/Process.scala
+++ b/src/compiler/scala/tools/nsc/io/Process.scala
@@ -1,12 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
*/
package scala.tools.nsc
package io
import concurrent.ThreadRunner
-import scala.util.Properties.{ isWin, isMac }
+import scala.annotation.tailrec
+import scala.util.Properties.{ isWin, isMac, lineSeparator }
import scala.util.control.Exception.catching
import java.lang.{ Process => JProcess, ProcessBuilder => JProcessBuilder }
import java.io.{ IOException, InputStream, OutputStream, BufferedReader, InputStreamReader, PrintWriter, File => JFile }
@@ -34,11 +35,40 @@ import java.util.concurrent.LinkedBlockingQueue
object Process
{
- lazy val javaVmArguments = java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments()
+ def javaVmArguments: List[String] = {
+ import collection.JavaConversions._
+
+ java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
+ }
lazy val runtime = Runtime.getRuntime()
- private[Process] class ProcessBuilder(val pb: JProcessBuilder)
- {
+ class Pipe[T](xs: Seq[T], stringify: T => String) {
+ def |(cmd: String): Seq[String] = {
+ val p = Process(cmd)
+ xs foreach (x => p.stdin println stringify(x))
+ p.stdin.close()
+ p.stdout.toList
+ }
+ }
+
+ object Pipe {
+ /* After importing this implicit you can say for instance
+ * xs | "grep foo" | "grep bar"
+ * and it will execute shells and pipe input/output. You can
+ * also implicitly or explicitly supply a function which translates
+ * the opening sequence into Strings; if none is given toString is used.
+ *
+ * Also, once you use :sh in the repl, this is auto-imported.
+ */
+ implicit def seqToPipelinedProcess[T]
+ (xs: Seq[T])
+ (implicit stringify: T => String = (x: T) => x.toString): Pipe[T] =
+ {
+ new Pipe(xs, stringify)
+ }
+ }
+
+ private[Process] class ProcessBuilder(val pb: JProcessBuilder) {
def this(cmd: String*) = this(new JProcessBuilder(cmd: _*))
def start() = new Process(() => pb.start())
@@ -55,7 +85,7 @@ object Process
this
}
- def withCwd(cwd: File): this.type = {
+ def withCwd(cwd: Path): this.type = {
if (cwd != null)
pb directory cwd.jfile
@@ -82,10 +112,10 @@ object Process
def apply(
command: String,
env: Map[String, String] = null,
- cwd: File = null,
+ cwd: Path = null,
redirect: Boolean = false
): Process =
- exec(shell(command), env, cwd)
+ exec(shell(command), env, cwd, redirect)
/** Executes the given command line.
*
@@ -95,15 +125,14 @@ object Process
def exec(
command: Seq[String],
env: Map[String, String] = null,
- cwd: File = null,
+ cwd: Path = null,
redirect: Boolean = false
): Process =
- new ProcessBuilder(command: _*) withEnv env withCwd cwd start
+ new ProcessBuilder(command: _*) withEnv env withCwd cwd withRedirectedErrorStream redirect start
}
import Process._
-class Process(processCreator: () => JProcess) extends Iterable[String]
-{
+class Process(processCreator: () => JProcess) extends Iterable[String] {
lazy val process = processCreator()
def exitValue(): Option[Int] =
@@ -111,32 +140,49 @@ class Process(processCreator: () => JProcess) extends Iterable[String]
def waitFor() = process.waitFor()
def destroy() = process.destroy()
- def rerun() = new Process(processCreator)
+ def rerun() = new Process(processCreator)
+ def slurp() = _out.slurp()
def stdout = iterator
def iterator = _out.iterator
def stderr = _err.iterator
lazy val stdin = new PrintWriter(_in, true)
class StreamedConsumer(in: InputStream) extends Thread with Iterable[String] {
- private val queue = new LinkedBlockingQueue[String]
- private val reader = new BufferedReader(new InputStreamReader(in))
+ private val queue = new LinkedBlockingQueue[String]
+ private val reader = new BufferedReader(new InputStreamReader(in))
+
+ private def finish() {
+ // make sure this thread is complete
+ join()
+ }
+
+ def slurp(): String = {
+ finish()
+ queue.toArray map (_ + lineSeparator) mkString
+ }
def iterator = {
- join() // make sure this thread is complete
+ finish()
new Iterator[String] {
val it = queue.iterator()
def hasNext = it.hasNext
def next = it.next
}
}
- override def run() {
- reader.readLine match {
- case null =>
- case x =>
- queue put x
- run()
+ override final def run() {
+ @tailrec def loop() {
+ reader.readLine match {
+ case null =>
+ reader.close()
+ case x =>
+ queue put x
+ loop()
+ }
}
+
+ try loop()
+ catch { case _: IOException => () }
}
}
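
A hedged sketch of the pipelining implicit described in the Pipe comment above (the same import is done automatically once :sh is used in the repl). It shells out, so it assumes a Unix-like environment with grep on the PATH; the driver object is illustrative.

    import scala.tools.nsc.io.Process
    import Process.Pipe._                  // brings seqToPipelinedProcess into scope

    object PipeDemo {
      def main(args: Array[String]): Unit = {
        // Each element is printed to grep's stdin (via toString by default);
        // the result is grep's stdout, one String per line.
        val hits: Seq[String] = List("foo", "bar", "foobar") | "grep foo"
        hits foreach println               // foo, foobar
      }
    }
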
diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/compiler/scala/tools/nsc/io/Replayer.scala
new file mode 100644
index 0000000000..5cb61b6cb1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Replayer.scala
@@ -0,0 +1,74 @@
+package scala.tools.nsc.io
+
+import java.io.{Reader, Writer}
+
+import Pickler._
+import Lexer.{Token, EOF}
+
+abstract class LogReplay {
+ def logreplay(event: String, x: => Boolean): Boolean
+ def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T]
+ def close()
+ def flush()
+}
+
+class Logger(wr0: Writer) extends LogReplay {
+ val wr = new PrettyWriter(wr0)
+ private var first = true
+ private def insertComma() = if (first) first = false else wr.write(",")
+
+ def logreplay(event: String, x: => Boolean) = {
+ val xx = x
+ if (xx) { insertComma(); pkl[Unit].labelled(event).pickle(wr, ()) }
+ xx
+ }
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = {
+ val xx = x
+ xx match {
+ case Some(y) => insertComma(); pkl[T].labelled(event).pickle(wr, y)
+ case None =>
+ }
+ xx
+ }
+ def close() { wr.close() }
+ def flush() { wr.flush() }
+}
+
+object NullLogger extends LogReplay {
+ def logreplay(event: String, x: => Boolean) = x
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = x
+ def close() {}
+ def flush() {}
+}
+
+class Replayer(raw: Reader) extends LogReplay {
+ private val rd = new Lexer(raw)
+ private var nextComma = false
+
+ private def eatComma() =
+ if (nextComma) { rd.accept(','); nextComma = false }
+
+ def logreplay(event: String, x: => Boolean) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[Unit].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(_) => nextComma = true; true
+ case _ => false
+ }
+ }
+
+ def logreplay[T: Pickler](event: String, x: => Option[T]) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[T].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(y) => nextComma = true; Some(y)
+ case _ => None
+ }
+ }
+
+ def close() { raw.close() }
+ def flush() {}
+}
+
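
One plausible way to wire up the three LogReplay implementations above; the command-line flags are made up for illustration.

    import java.io.{ FileReader, FileWriter }
    import scala.tools.nsc.io.{ LogReplay, Logger, NullLogger, Replayer }

    object LogReplayWiring {
      // Record events to a file, replay a previously recorded session, or do neither.
      def chooseLog(args: List[String]): LogReplay = args match {
        case "-record" :: path :: _ => new Logger(new FileWriter(path))
        case "-replay" :: path :: _ => new Replayer(new FileReader(path))
        case _                      => NullLogger
      }

      def main(args: Array[String]): Unit = {
        val log = chooseLog(args.toList)
        // The Logger writes an entry only when the decision is true; the Replayer
        // answers from the recorded log, falling back to evaluation once it is exhausted.
        val sawIt = log.logreplay("saw-config-file", util.Random.nextBoolean())
        println("decision: " + sawIt)
        log.flush()
        log.close()
      }
    }
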
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
new file mode 100644
index 0000000000..e883c71b8e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -0,0 +1,46 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package io
+
+import java.io.{ IOException, InputStreamReader, BufferedReader, PrintWriter }
+import java.net.{ URL, MalformedURLException }
+import java.net.{ InetAddress, Socket => JSocket }
+import scala.util.control.Exception._
+
+/** A skeletal only-as-much-as-I-need Socket wrapper.
+ */
+object Socket
+{
+ private val socketExceptions = List(classOf[IOException], classOf[SecurityException])
+
+ class SocketBox(f: () => Socket) {
+ def either: Either[Throwable, Socket] = catching(socketExceptions: _*) either f()
+ def opt: Option[Socket] = catching(socketExceptions: _*) opt f()
+ }
+
+ def apply(host: InetAddress, port: Int) = new SocketBox(() => new Socket(new JSocket(host, port)))
+ def apply(host: String, port: Int) = new SocketBox(() => new Socket(new JSocket(host, port)))
+}
+
+class Socket(jsocket: JSocket) {
+ def getOutputStream() = jsocket.getOutputStream()
+ def getInputStream() = jsocket.getInputStream()
+ def getPort() = jsocket.getPort()
+ def close() = jsocket.close()
+
+ /** Creates a BufferedReader and a PrintWriter and applies the closure, automatically closing both on completion.
+ */
+ def applyReaderAndWriter[T](f: (BufferedReader, PrintWriter) => T): T = {
+ val out = new PrintWriter(getOutputStream(), true)
+ val in = new BufferedReader(new InputStreamReader(getInputStream()))
+ try f(in, out)
+ finally {
+ in.close()
+ out.close()
+ }
+ }
+} \ No newline at end of file
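
A hypothetical client for the new Socket wrapper; the host, port, and one-line protocol are illustrative, and a listening server is assumed.

    import scala.tools.nsc.io.Socket

    object SocketDemo {
      def main(args: Array[String]): Unit =
        Socket("127.0.0.1", 9999).opt match {        // IOException/SecurityException become None
          case Some(sock) =>
            val reply = sock.applyReaderAndWriter { (in, out) =>
              out println "ping"                     // PrintWriter is created with autoflush on
              in.readLine()                          // both streams are closed for us afterwards
            }
            println("server said: " + reply)
          case None =>
            println("could not connect")
        }
    }
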
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index f2b3373619..cc69c238b3 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -65,7 +64,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
case p:PlainFile =>
read(p.file) // bq: (!!!)
case z:ZipArchive#FileEntry =>
- val c = Channels.newChannel(z.getArchive.getInputStream(z.entry))
+ val c = Channels.newChannel(z.archive.getInputStream(z.entry))
read(c)
case _ =>
val b = ByteBuffer.wrap(file.toByteArray)
diff --git a/src/compiler/scala/tools/nsc/io/Streamable.scala b/src/compiler/scala/tools/nsc/io/Streamable.scala
index 49a26e436b..16a867ade8 100644
--- a/src/compiler/scala/tools/nsc/io/Streamable.scala
+++ b/src/compiler/scala/tools/nsc/io/Streamable.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
*/
package scala.tools.nsc
@@ -8,7 +8,7 @@ package io
import java.net.{ URI, URL }
import java.io.{ BufferedInputStream, InputStream, PrintStream, File => JFile }
import java.io.{ BufferedReader, InputStreamReader }
-import scala.io.{ Codec, Source }
+import scala.io.{ Codec, BufferedSource, Source }
import collection.mutable.ArrayBuffer
import Path.fail
@@ -66,43 +66,44 @@ object Streamable
}
}
- /** For objects which can be viewed as Chars. The abstract creationCodec
- * can safely be defined as null and will subsequently be ignored.
+ /** For objects which can be viewed as Chars.
*/
trait Chars extends Bytes {
- def creationCodec: Codec
- private def failNoCodec() = fail("This method requires a Codec to be chosen explicitly.")
-
- /** The general algorithm for any call to a method involving byte<->char
- * transformations is: if a codec is supplied (explicitly or implicitly),
- * use that; otherwise if a codec was defined when the object was created,
- * use that; otherwise, use Codec.default.
- *
- * Note that getCodec takes a codec argument rather than having methods
- * always default to getCodec() and use the argument otherwise, so that
- * method implementations can, where desired, identify the case where no
- * codec was ever explicitly supplied. If allowDefault = false, an
- * exception will be thrown rather than falling back on Codec.default.
+ /** Calls to methods requiring byte<->char transformations should be offered
+ * in a form which allows specifying the codec. When it is not specified,
+ * the one discovered at creation time will be used, which will always find the
+ * one in scala.io.Codec if no other is available. This can be overridden
+ * to use a different default.
*/
- def getCodec(givenCodec: Codec = null, allowDefault: Boolean = true) =
- if (givenCodec != null) givenCodec
- else if (creationCodec != null) creationCodec
- else if (allowDefault) Codec.default
- else failNoCodec()
+ def creationCodec: Codec = implicitly[Codec]
- def chars(codec: Codec = getCodec()): Source = (Source fromInputStream inputStream())(codec)
- def lines(codec: Codec = getCodec()): Iterator[String] = chars(codec).getLines()
+ def chars(): BufferedSource = chars(creationCodec)
+ def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec)
+
+ def lines(): Iterator[String] = lines(creationCodec)
+ def lines(codec: Codec): Iterator[String] = chars(codec).getLines()
/** Obtains an InputStreamReader wrapped around a FileInputStream.
*/
- def reader(codec: Codec = getCodec()) = new InputStreamReader(inputStream, codec.charSet)
+ def reader(): InputStreamReader = reader(creationCodec)
+ def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet)
/** Wraps a BufferedReader around the result of reader().
*/
- def bufferedReader(codec: Codec = getCodec()) = new BufferedReader(reader(codec))
+ def bufferedReader(): BufferedReader = bufferedReader(creationCodec)
+ def bufferedReader(codec: Codec) = new BufferedReader(reader(codec))
+
+ /** Creates a BufferedReader and applies the closure, automatically closing it on completion.
+ */
+ def applyReader[T](f: BufferedReader => T): T = {
+ val in = bufferedReader()
+ try f(in)
+ finally in.close()
+ }
/** Convenience function to import entire file into a String.
*/
- def slurp(codec: Codec = getCodec()) = chars(codec).mkString
+ def slurp(): String = slurp(creationCodec)
+ def slurp(codec: Codec) = chars(codec).mkString
}
}
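
From the caller's side, every byte-to-char method in Chars now comes in a no-argument form (using creationCodec) and an explicit-Codec overload. A sketch, assuming scala.tools.nsc.io.File mixes in Streamable.Chars as it does elsewhere in this tree; the path is illustrative.

    import scala.io.Codec
    import scala.tools.nsc.io.File

    object CharsDemo {
      def main(args: Array[String]): Unit = {
        val f = File("/tmp/example.txt")                    // illustrative path
        val whole: String  = f.slurp()                      // uses creationCodec
        val latin1: String = f.slurp(Codec("ISO-8859-1"))   // explicit codec overload
        val firstLine = f.applyReader(_.readLine())         // reader is closed automatically
        println(whole.length + " chars; first line: " + firstLine)
      }
    }
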
diff --git a/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala b/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
index 0d394fdddd..b4b1eca439 100644
--- a/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
+++ b/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
*/
package scala.tools.nsc
@@ -31,26 +31,21 @@ extends AbstractFile {
override def output = error("directories cannot be written")
/** Does this abstract file denote an existing file? */
- def create {
- throw new UnsupportedOperationException
- }
+ def create { unsupported }
/** Delete the underlying file or directory (recursively). */
- def delete {
- throw new UnsupportedOperationException
- }
+ def delete { unsupported }
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
- throw new UnsupportedOperationException()
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported
private val files = mutable.Map.empty[String, AbstractFile]
// the toList is so that the directory may continue to be
// modified while its elements are iterated
- def iterator = files.valuesIterator.toList.iterator
+ def iterator = files.values.toList.iterator
override def lookupName(name: String, directory: Boolean): AbstractFile =
files get name filter (_.isDirectory == directory) orNull
diff --git a/src/compiler/scala/tools/nsc/io/VirtualFile.scala b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
index 90769d7086..421e39195d 100644
--- a/src/compiler/scala/tools/nsc/io/VirtualFile.scala
+++ b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -32,7 +31,7 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
*/
def this(name: String) = this(name, name)
- override def hashCode = name.hashCode
+ override def hashCode = name.##
override def equals(that: Any) = cond(that) { case x: VirtualFile => x.name == name }
//########################################################################
@@ -62,13 +61,13 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
}
}
- def container : AbstractFile = throw new Error("not supported")
+ def container: AbstractFile = unsupported
/** Is this abstract file a directory? */
def isDirectory: Boolean = false
/** Returns the time that this abstract file was last modified. */
- def lastModified: Long = Math.MIN_LONG
+ def lastModified: Long = Long.MinValue
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
@@ -77,14 +76,10 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
}
/** Does this abstract file denote an existing file? */
- def create {
- throw new UnsupportedOperationException
- }
+ def create { unsupported }
/** Delete the underlying file or directory (recursively). */
- def delete {
- throw new UnsupportedOperationException
- }
+ def delete { unsupported }
/**
* Returns the abstract file in this abstract directory with the
@@ -104,8 +99,7 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
- throw new UnsupportedOperationException()
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
//########################################################################
}
diff --git a/src/compiler/scala/tools/nsc/io/ZipArchive.scala b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
index e75f68ca92..4be11fc9a8 100644
--- a/src/compiler/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -14,7 +13,6 @@ import java.io.{ File => JFile, IOException, InputStream, BufferedInputStream, B
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
import PartialFunction._
-import scala.collection.Traversable
import scala.collection.mutable.{ Map, HashMap }
import scala.collection.JavaConversions.asIterator
@@ -157,8 +155,7 @@ private[io] trait ZipContainer extends AbstractFile
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- override def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
- throw new UnsupportedOperationException()
+ override def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
/** Returns all abstract subfiles of this abstract directory. */
override def iterator: Iterator[AbstractFile] = root.iterator
@@ -207,9 +204,9 @@ final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file)
path: String
) extends VirtualFile(name, path)
{
- final override def path = "%s(%s)".format(self, pathInArchive)
- final def getArchive = self.archive
- def pathInArchive = super.path
+ override def underlyingSource = Some(self)
+ final override def path = "%s(%s)".format(self, super.path)
+ final def archive = self.archive
override def hashCode = super.hashCode + container.hashCode
override def equals(that : Any) =
@@ -234,15 +231,13 @@ final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file)
val entry: ZipEntry
) extends Entry(container, name, path) with FileEntryInterface
{
- def archive = self.archive
override def input = archive getInputStream entry
}
private def zipTraversableFromZipFile(z: ZipFile): ZipTrav =
- new Traversable[ZipEntry] {
- def zis: ZipInputStream = null // not valid for this type
- val itStream = asIterator(z.entries()).toStream
- def foreach[U](f: ZipEntry => U) = itStream foreach f
+ new Iterable[ZipEntry] {
+ def zis: ZipInputStream = null // not valid for this type
+ def iterator = asIterator(z.entries())
}
}
@@ -275,13 +270,14 @@ final class URLZipArchive(url: URL) extends AbstractFile with ZipContainer
/** Methods we don't support but have to implement because of the design */
def file: JFile = null
- def create: Unit = throw new UnsupportedOperationException
- def delete: Unit = throw new UnsupportedOperationException
- def output = throw new Error("unsupported")
- def container = throw new Error("unsupported")
+ def create: Unit = unsupported
+ def delete: Unit = unsupported
+ def output = unsupported
+ def container = unsupported
abstract class Entry(name: String, path: String) extends VirtualFile(name, path) {
final override def path = "%s(%s)".format(URLZipArchive.this, super.path)
+ override def container = URLZipArchive.this
}
final class DirEntry(name: String, path: String) extends Entry(name, path) with DirEntryInterface {
def source = input
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 5f5db8015a..0388df7005 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -1,15 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
//todo: allow infix type patterns
package scala.tools.nsc
package javac
-import scala.tools.nsc.util.{Position, OffsetPosition, NoPosition, BatchSourceFile}
+import scala.tools.nsc.util.{OffsetPosition, BatchSourceFile}
import scala.collection.mutable.ListBuffer
import symtab.Flags
import JavaTokens._
@@ -429,7 +428,7 @@ trait JavaParsers extends JavaScanners {
return Modifiers(flags, privateWithin)
}
}
- throw new Error("should not be here")
+ abort("should not be here")
}
def typeParams(): List[TypeDef] =
@@ -541,7 +540,7 @@ trait JavaParsers extends JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(rootId(nme.AnnotationDefaultATTR.toTypeName), List(List()))
+ New(Select(scalaDot(newTermName("runtime")), nme.AnnotationDefaultATTR.toTypeName), List(List()))
}
mods1 = Modifiers(mods1.flags, mods1.privateWithin, annot :: mods1.annotations, mods1.positions)
skipTo(SEMI)
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 2ffea32307..21394595a6 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,7 +7,7 @@ package scala.tools.nsc
package javac
import scala.tools.nsc.util._
-import SourceFile.{LF, FF, CR, SU}
+import Chars._
import JavaTokens._
import scala.annotation.switch
@@ -73,7 +73,7 @@ trait JavaScanners {
def floatVal: Double = floatVal(false)
//def token2string(token : Int) : String = configuration.token2string(token)
/** return recent scala doc, if any */
- def flushDoc: String
+ def flushDoc: DocComment
def currentPos: Position
}
@@ -282,8 +282,8 @@ trait JavaScanners {
*/
var docBuffer: StringBuilder = null
- def flushDoc = {
- val ret = if (docBuffer != null) docBuffer.toString else null
+ def flushDoc: DocComment = {
+ val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null
docBuffer = null
ret
}
@@ -685,24 +685,6 @@ trait JavaScanners {
// Identifiers ---------------------------------------------------------------
- def isIdentStart(c: Char): Boolean = (
- ('A' <= c && c <= 'Z') ||
- ('a' <= c && c <= 'a') ||
- (c == '_') || (c == '$') ||
- Character.isUnicodeIdentifierStart(c)
- )
-
- def isIdentPart(c: Char) = (
- isIdentStart(c) ||
- ('0' <= c && c <= '9') ||
- Character.isUnicodeIdentifierPart(c)
- )
-
- def isSpecial(c: Char) = {
- val chtp = Character.getType(c)
- chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
- }
-
private def getIdentRest {
while (true) {
(in.ch: @switch) match {
@@ -754,13 +736,13 @@ trait JavaScanners {
in.next
if ('0' <= in.ch && in.ch <= '7') {
val leadch: Char = in.ch
- var oct: Int = in.digit2int(in.ch, 8)
+ var oct: Int = digit2int(in.ch, 8)
in.next
if ('0' <= in.ch && in.ch <= '7') {
- oct = oct * 8 + in.digit2int(in.ch, 8)
+ oct = oct * 8 + digit2int(in.ch, 8)
in.next
if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') {
- oct = oct * 8 + in.digit2int(in.ch, 8)
+ oct = oct * 8 + digit2int(in.ch, 8)
in.next
}
}
@@ -836,11 +818,11 @@ trait JavaScanners {
var value: Long = 0
val divider = if (base == 10) 1 else 2
val limit: Long =
- if (token == LONGLIT) Math.MAX_LONG else Math.MAX_INT
+ if (token == LONGLIT) Long.MaxValue else Int.MaxValue
var i = 0
val len = name.length
while (i < len) {
- val d = in.digit2int(name(i), base)
+ val d = digit2int(name(i), base)
if (d < 0) {
syntaxError("malformed integer number")
return 0
@@ -864,7 +846,7 @@ trait JavaScanners {
*/
def floatVal(negated: Boolean): Double = {
val limit: Double =
- if (token == DOUBLELIT) Math.MAX_DOUBLE else Math.MAX_FLOAT
+ if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(name.toString()).doubleValue()
if (value > limit)
@@ -879,7 +861,7 @@ trait JavaScanners {
/** read a number into name and set base
*/
protected def getNumber {
- while (in.digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
+ while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
putChar(in.ch)
in.next
}
@@ -894,7 +876,7 @@ trait JavaScanners {
in.next
return getFraction
case _ =>
- if (!isIdentStart(lookahead.ch)) {
+ if (!isIdentifierStart(lookahead.ch)) {
putChar(in.ch)
in.next
return getFraction
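
The scanner now delegates to the shared helpers in scala.tools.nsc.util.Chars instead of keeping its own copies (the removed isIdentStart's ASCII fast path checked 'a' <= c && c <= 'a', i.e. only 'a', before falling back to the Unicode check). A small check of the two helpers used above; the only assumption is that digit2int signals a non-digit with a negative result, as the call sites require.

    import scala.tools.nsc.util.Chars._

    object CharsHelpersDemo {
      def main(args: Array[String]): Unit = {
        println(digit2int('7', 8))          // 7
        println(digit2int('8', 8) < 0)      // true: '8' is not an octal digit
        println(isIdentifierStart('_'))     // true
        println(isIdentifierStart('9'))     // false: digits may continue but not start an identifier
      }
    }
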
diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
index 8cabfa9f53..eb64ffe062 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
@@ -1,27 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package javac
-object JavaTokens {
+object JavaTokens extends ast.parser.Tokens {
- /** special tokens */
- final val EMPTY = -3
- final val UNDEF = -2
- final val ERROR = -1
- final val EOF = 0
-
- /** literals */
- final val CHARLIT = 1
- final val INTLIT = 2
- final val LONGLIT = 3
- final val FLOATLIT = 4
- final val DOUBLELIT = 5
- final val STRINGLIT = 6
def isLiteral(code : Int) =
code >= CHARLIT && code <= STRINGLIT
@@ -139,20 +125,4 @@ object JavaTokens {
final val RBRACKET = 118
final val LBRACE = 119
final val RBRACE = 120
-
- def isBrace(code : Int) =
- code >= LPAREN && code <= RBRACE
- def isOpenBrace(code : Int) = isBrace(code) && (code % 2 == 0)
- def isCloseBrace(code : Int) = isBrace(code) && (code % 2 == 1)
-
- def isSpace(at : Char) = at match {
- case ' ' | '\t' => true
- case _ => false
- }
- import scala.tools.nsc.util.SourceFile._
-
- def isNewLine(at : Char) = at match {
- case CR | LF | FF => true
- case _ => false
- }
}
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index ddd3c7b71b..2f50435db6 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -13,9 +13,7 @@ import java.io.{ StringWriter, PrintWriter }
/** Ancillary bits of ParallelMatching which are better off
* out of the way.
*/
-trait MatchSupport extends ast.TreeDSL
-{
- self: ParallelMatching =>
+trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
import global.{ typer => _, _ }
import CODE._
@@ -24,28 +22,15 @@ trait MatchSupport extends ast.TreeDSL
private final def trace = settings.Ypmatdebug.value
def impossible: Nothing = abort("this never happens")
- def abort(msg: String): Nothing = Predef.error(msg)
object Types {
import definitions._
implicit def enrichType(x: Type): RichType = new RichType(x)
- // see bug1434.scala for an illustration of why "x <:< y" is insufficient.
- // this code is definitely inadequate at best. Inherited comment:
- //
- // an approximation of _tp1 <:< tp2 that ignores _ types. this code is wrong,
- // ideally there is a better way to do it, and ideally defined in Types.scala
- private[matching] def matches(arg1: Type, arg2: Type) = {
- val List(t1, t2) = List(arg1, arg2) map decodedEqualsType
- def eqSymbols = t1.typeSymbol eq t2.typeSymbol
- // note: writing this as "t1.baseTypeSeq exists (_ =:= t2)" does not lead to 1434 passing.
- def isSubtype = t1.baseTypeSeq exists (_.typeSymbol eq t2.typeSymbol)
-
- (t1 <:< t2) || ((t1, t2) match {
- case (_: TypeRef, _: TypeRef) => !t1.isArray && (t1.prefix =:= t2.prefix) && (eqSymbols || isSubtype)
- case _ => false
- })
- }
+ // A subtype test which creates fresh existentials for type
+ // parameters on the right hand side.
+ private[matching] def matches(arg1: Type, arg2: Type) =
+ decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2)
class RichType(undecodedTpe: Type) {
def tpe = decodedEqualsType(undecodedTpe)
@@ -102,26 +87,13 @@ trait MatchSupport extends ast.TreeDSL
pp(x match {
case s: String => return clean(s)
- case x: Tree => treeToCompactString(x)
+ case x: Tree => asCompactString(x)
case xs: List[_] => pplist(xs map pp)
case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2))
case x => x.toString
})
}
- object compactTreePrinter extends CompactTreePrinter
-
- // def treeChildrenString(t: Tree): String =
- // nodeToString(t)
-
- def treeToCompactString(t: Tree): String = {
- val buffer = new StringWriter()
- val printer = compactTreePrinter.create(new PrintWriter(buffer))
- printer.print(t)
- printer.flush()
- buffer.toString
- }
-
def ifDebug(body: => Unit): Unit = { if (settings.debug.value) body }
def DBG(msg: => String): Unit = { ifDebug(println(msg)) }
@@ -166,121 +138,4 @@ trait MatchSupport extends ast.TreeDSL
*/
def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
(xs(n), dropIndex(xs, n))
-
- /** A tree printer which is stingier about vertical whitespace and unnecessary
- * punctuation than the standard one.
- */
- class CompactTreePrinter extends {
- val trees: global.type = global
- } with TreePrinters {
- import trees._
-
- override def create(writer: PrintWriter): TreePrinter = new TreePrinter(writer) {
- // drill down through Blocks and pull out the real statements.
- def allStatements(t: Tree): List[Tree] = t match {
- case Block(stmts, expr) => (stmts flatMap allStatements) ::: List(expr)
- case _ => List(t)
- }
-
- def printLogicalOr(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
- printLogicalOp(t1, t2, "||")
-
- def printLogicalAnd(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
- printLogicalOp(t1, t2, "&&")
-
- def printLogicalOp(t1: (Tree, Boolean), t2: (Tree, Boolean), op: String) = {
- def maybenot(tvalue: Boolean) = if (tvalue) "" else "!"
-
- printRow(List(t1._1, t2._1),
- " %s(" format maybenot(t1._2),
- ") %s %s(".format(op, maybenot(t2._2)),
- ")"
- )
- }
-
- override def printRaw(tree: Tree): Unit = {
- // routing supercalls through this for debugging ease
- def s() = super.printRaw(tree)
-
- tree match {
- // labels used for jumps - does not map to valid scala code
- case LabelDef(name, params, rhs) =>
- print("labeldef %s(%s) = ".format(name, params mkString ","))
- printRaw(rhs)
-
- // target.method(arg) ==> target method arg
- case Apply(Select(target, method), List(arg)) =>
- (target, arg) match {
- case (_: Ident, _: Literal | _: Ident) =>
- printRaw(target)
- print(" %s " format symName(tree, method))
- printRaw(arg)
- case _ => s()
- }
-
- // target.unary_! ==> !target
- case Select(qualifier, name) =>
- val n = symName(tree, name)
- if (n startsWith "unary_") {
- print(n drop 6)
- print(qualifier)
- }
- else s()
-
- // target.toString() ==> target.toString
- case Apply(fn, Nil) => printRaw(fn)
-
- // if a Block only continues one actual statement, just print it.
- case Block(stats, expr) =>
- allStatements(tree) match {
- case List(x) => printRow(List(x), "", ";", "")
- case _ => s()
- }
-
- // We get a lot of this stuff
- case If( IsTrue(), x, _) => printRaw(x)
- case If(IsFalse(), _, x) => printRaw(x)
-
- case If(cond, IsTrue(), elsep) =>
- printLogicalOr(cond -> true, elsep -> true)
-
- case If(cond, IsFalse(), elsep) =>
- printLogicalAnd(cond -> false, elsep -> true)
-
- case If(cond, thenp, IsTrue()) =>
- printLogicalOr(cond -> false, thenp -> true)
-
- case If(cond, thenp, IsFalse()) =>
- printLogicalAnd(cond -> true, thenp -> true)
-
- // If thenp or elsep has only one statement, it doesn't need more than one line.
- case If(cond, thenp, elsep) =>
- printRow(List(cond), "if (", "", ") ")
-
- def ifIndented(x: Tree) = {
- indent ; println ; printRaw(x) ; undent
- }
-
- indent ; println ;
- allStatements(thenp) match {
- case List(x: If) => ifIndented(x)
- case List(x) => printRaw(x)
- case _ => printRaw(thenp)
- }
- undent ; println ;
- val elseStmts = allStatements(elsep)
- if (!elseStmts.isEmpty) {
- print("else")
- indent ; println
- elseStmts match {
- case List(x) => printRaw(x)
- case xs => printRaw(elsep)
- }
- undent ; println
- }
- case _ => s()
- }
- }
- }
- }
}
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 188a639b97..2c9d974d61 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -7,7 +7,6 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
-import util.Position
import symtab.Flags
trait Matrix extends MatrixAdditions {
@@ -17,7 +16,7 @@ trait Matrix extends MatrixAdditions {
import analyzer.Typer
import CODE._
import Debug._
- import Flags.{ TRANS_FLAG }
+ import Flags.{ TRANS_FLAG, SYNTHETIC }
/** Translation of match expressions.
*
@@ -50,7 +49,7 @@ trait Matrix extends MatrixAdditions {
This is the real work-horse of the algorithm. There is some column whose top-most pattern is a
constructor. (For simplicity, it is depicted above as the left-most column, but any column will do.)
The goal is to build a test state with the variable v and some outgoing arcs (one for each construc-
- tor and possibly a default arc). Foreach constructorcin the selected column, its arc is defined as
+ tor and possibly a default arc). Foreach constructor in the selected column, its arc is defined as
follows:
Let {i1,...,ij} be the rows-indices of the patterns in the column that match c. Since the pat-
@@ -157,7 +156,7 @@ trait Matrix extends MatrixAdditions {
def tpe = valsym.tpe
lazy val ident = ID(lhs)
- lazy val valDef = tracing("typedVal", typer typedValDef (VAL(lhs) === rhs))
+ lazy val valDef = tracing("typedVal", typer typedValDef (VAL(lhs) === rhs) setPos lhs.pos)
override def toString() = "%s: %s = %s".format(lhs, lhs.info, rhs)
}
@@ -202,7 +201,7 @@ trait Matrix extends MatrixAdditions {
{
val n: Name = if (name == null) newName(pos, "temp") else name
// careful: pos has special meaning
- owner.newVariable(pos, n) setInfo tpe setFlag (0L /: flags)(_|_)
+ owner.newVariable(pos, n) setInfo tpe setFlag (SYNTHETIC.toLong /: flags)(_|_)
}
def typedValDef(x: Symbol, rhs: Tree) =
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
index 37ddbbc671..fbfe00d2c1 100644
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -7,6 +7,7 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
+import PartialFunction._
/** Traits which are mixed into MatchMatrix, but separated out as
* (somewhat) independent components to keep them on the sidelines.
@@ -19,22 +20,7 @@ trait MatrixAdditions extends ast.TreeDSL
import symtab.Flags
import CODE._
import Debug._
-
- // Extractors which can spot pure true/false expressions
- // even through the haze of braces
- abstract class SeeThroughBlocks[T] {
- protected def unapplyImpl(x: Tree): T
- def unapply(x: Tree): T = x match {
- case Block(Nil, expr) => unapply(expr)
- case _ => unapplyImpl(x)
- }
- }
- object IsTrue extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x equalsStructure TRUE
- }
- object IsFalse extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x equalsStructure FALSE
- }
+ import treeInfo.{ IsTrue, IsFalse }
/** The Squeezer, responsible for all the squeezing.
*/
@@ -44,9 +30,15 @@ trait MatrixAdditions extends ast.TreeDSL
def squeezedBlockPVs(pvs: List[PatternVar], exp: Tree): Tree =
squeezedBlock(pvs map (_.valDef), exp)
+ /** Compresses multiple Blocks. */
+ def mkBlock(stats: List[Tree], expr: Tree): Tree = expr match {
+ case Block(stats1, expr1) if stats.isEmpty => mkBlock(stats1, expr1)
+ case _ => Block(stats, expr)
+ }
+
def squeezedBlock(vds: List[Tree], exp: Tree): Tree =
- if (settings_squeeze) Block(Nil, squeezedBlock1(vds, exp))
- else Block(vds, exp)
+ if (settings_squeeze) mkBlock(Nil, squeezedBlock1(vds, exp))
+ else mkBlock(vds, exp)
private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = {
class RefTraverser(sym: Symbol) extends Traverser {
@@ -114,7 +106,7 @@ trait MatrixAdditions extends ast.TreeDSL
object lxtt extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case blck @ Block(vdefs, ld @ LabelDef(name, params, body)) =>
- def shouldInline(t: FinalState) = t.isReachedOnce && (t.label eq ld.symbol)
+ def shouldInline(t: FinalState) = t.isReachedOnce && (t.labelSym eq ld.symbol)
if (targets exists shouldInline) squeezedBlock(vdefs, body)
else blck
@@ -143,7 +135,7 @@ trait MatrixAdditions extends ast.TreeDSL
}
}
- returning[Tree](resetTraverser traverse _)(lxtt transform tree)
+ returning(lxtt transform tree)(resetTraverser traverse _)
}
}
@@ -168,16 +160,26 @@ trait MatrixAdditions extends ast.TreeDSL
def cmpSymbols(t1: Type, t2: Type) = t1.typeSymbol eq t2.typeSymbol
def coversSym = {
val tpe = decodedEqualsType(p.tpe)
- lazy val lmoc = sym.linkedModuleOfClass
+ lazy val lmoc = sym.companionModule
val symtpe =
if ((sym hasFlag Flags.MODULE) && (lmoc ne NoSymbol))
singleType(sym.tpe.prefix, lmoc) // e.g. None, Nil
else sym.tpe
+ /** Note to Martin should you come through this way: this
+ * logic looks way overcomplicated for the intention, but a little
+ * experimentation showed that at least most of it is serving
+ * some necessary purpose. It doesn't seem like much more than
+ * "sym.tpe matchesPattern tpe" ought to be necessary though.
+ *
+ * For the time being I tacked the matchesPattern test onto the
+ * end to address #3097.
+ */
(tpe.typeSymbol == sym) ||
(symtpe <:< tpe) ||
(symtpe.parents exists (x => cmpSymbols(x, tpe))) || // e.g. Some[Int] <: Option[&b]
- ((tpe.prefix memberType sym) <:< tpe) // outer, see combinator.lexical.Scanner
+ ((tpe.prefix memberType sym) <:< tpe) || // outer, see combinator.lexical.Scanner
+ (symtpe matchesPattern tpe)
}
cond(p.tree) {
@@ -194,18 +196,13 @@ trait MatrixAdditions extends ast.TreeDSL
private def requiresExhaustive(s: Symbol) =
(s hasFlag MUTABLE) && // indicates that have not yet checked exhaustivity
!(s hasFlag TRANS_FLAG) && // indicates @unchecked
- (s.tpe.typeSymbol hasFlag SEALED) &&
+ (s.tpe.typeSymbol.isSealed) &&
{ s resetFlag MUTABLE ; true } // side effects MUTABLE flag
- private def sealedSymsFor(s: Symbol): Set[Symbol] = {
- val kids = s.children flatMap sealedSymsFor
- if (s hasFlag ABSTRACT) kids else kids + s
- }
-
private lazy val inexhaustives: List[List[Combo]] = {
val collected =
for ((pv, i) <- tvars.zipWithIndex ; val sym = pv.lhs ; if requiresExhaustive(sym)) yield
- i -> sealedSymsFor(sym.tpe.typeSymbol)
+ i -> sym.tpe.typeSymbol.sealedDescendants
val folded =
collected.foldRight(List[List[Combo]]())((c, xs) => {
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 4bc8826908..f9e7a1bdcf 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* Copyright 2007 Google Inc. All Rights Reserved.
* Author: bqe@google.com (Burak Emir)
*/
-// $Id$
package scala.tools.nsc
package matching
@@ -15,7 +14,6 @@ import collection._
import mutable.ListBuffer
import immutable.IntMap
import annotation.elidable
-import Function.tupled
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
@@ -31,6 +29,7 @@ trait ParallelMatching extends ast.TreeDSL
import Types._
import Debug._
import Flags.{ TRANS_FLAG }
+ import PartialFunction._
/** Transition **/
def toPats(xs: List[Tree]): List[Pattern] = xs map Pattern.apply
@@ -52,20 +51,19 @@ trait ParallelMatching extends ast.TreeDSL
-shortCuts.length
}
- // XXX transitional.
- final def requestBody(bx: Int, subst: Bindings): Tree =
- requestBody(bx, PatternVarGroup.fromBindings(subst.get(), targets(bx).freeVars))
-
/** first time bx is requested, a LabelDef is returned. next time, a jump.
* the function takes care of binding
*/
- final def requestBody(bx: Int, pvgroup: PatternVarGroup): Tree = {
+ final def requestBody(bx: Int, subst: Bindings): Tree = {
+ // shortcut
+ if (bx < 0)
+ return Apply(ID(shortCuts(-bx-1)), Nil)
+
+ val pvgroup = PatternVarGroup.fromBindings(subst.get(), targets(bx).freeVars)
val target = targets(bx)
- // shortcut
- if (bx < 0) Apply(ID(shortCuts(-bx-1)), Nil)
// first time this bx is requested - might be bound elsewhere
- else if (target.isNotReached) target.createLabelBody(bx, pvgroup)
+ if (target.isNotReached) target.createLabelBody(bx, pvgroup)
// call label "method" if possible
else target.getLabelBody(pvgroup)
}
@@ -96,7 +94,7 @@ trait ParallelMatching extends ast.TreeDSL
def sym = pv.sym
def tpe = sym.tpe
def pos = sym.pos
- def id = ID(sym) // attributed ident
+ def id = ID(sym) setPos pos // attributed ident
def accessors = if (isCaseClass) sym.caseFieldAccessors else Nil
def accessorTypes = accessors map (x => (tpe memberType x).resultType)
@@ -150,7 +148,7 @@ trait ParallelMatching extends ast.TreeDSL
if (!scrut.isSimple) None
else {
val (_lits, others) = ps span isSwitchableConst
- val lits = _lits partialMap { case x: LiteralPattern => x }
+ val lits = _lits collect { case x: LiteralPattern => x }
condOpt(others) {
case Nil => new PatternSwitch(scrut, lits, None)
@@ -177,7 +175,6 @@ trait ParallelMatching extends ast.TreeDSL
def isCaseHead = head.isCaseClass
private val dummyCount = if (isCaseHead) headType.typeSymbol.caseFieldAccessors.length else 0
def dummies = emptyPatterns(dummyCount)
- // def dummies = head.dummies
def apply(i: Int): Pattern = ps(i)
def pzip() = ps.zipWithIndex
@@ -281,7 +278,9 @@ trait ParallelMatching extends ast.TreeDSL
lazy val pvgroup = PatternVarGroup.fromBindings(subst.get())
- final def tree(): Tree = squeezedBlock(pvgroup.valDefs, codegen)
+ final def tree(): Tree =
+ if (guard.isEmpty) success
+ else squeezedBlock(pvgroup.valDefs, codegen)
}
/** Mixture rule for all literal ints (and chars) i.e. hopefully a switch
@@ -443,32 +442,92 @@ trait ParallelMatching extends ast.TreeDSL
* Note: pivot == head, just better typed.
*/
sealed class MixSequence(val pmatch: PatternMatch, val rest: Rep, pivot: SequencePattern) extends RuleApplication {
+ require(scrut.tpe <:< head.tpe)
+
def hasStar = pivot.hasStar
- private def pivotLen = pivot.nonStarLength
+ private def pivotLen = pivot.nonStarLength
+ private def seqDummies = emptyPatterns(pivot.elems.length + 1)
// one pattern var per sequence element up to elemCount, and one more for the rest of the sequence
lazy val pvs = scrut createSequenceVars pivotLen
- // divide the remaining rows into success/failure branches, expanding subsequences of patterns
- private lazy val rowsplit = {
- require(scrut.tpe <:< head.tpe)
+ // Should the given pattern join the expanded pivot in the success matrix? If so,
+ // this partial function will be defined for the pattern, and the result of the apply
+ // is the expanded sequence of new patterns.
+ lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] {
+ private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match {
+ case (true, true) => true
+ case (true, false) => pivotLen <= x.nonStarLength
+ case (false, true) => pivotLen >= x.nonStarLength
+ case (false, false) => pivotLen == x.nonStarLength
+ }
- List.unzip(
- for ((c, rows) <- pmatch pzip rest.rows) yield {
- def canSkip = pivot canSkipSubsequences c
- def passthrough(skip: Boolean) = if (skip) None else Some(rows insert c)
+ def isDefinedAt(pat: Pattern) = pat match {
+ case x: SequenceLikePattern => seqIsDefinedAt(x)
+ case WildcardPattern() => true
+ case _ => false
+ }
- pivot.subsequences(c, scrut.seqType) match {
- case Some(ps) => (Some(rows insert ps), passthrough(canSkip))
- case None => (None, passthrough(false))
+ def apply(pat: Pattern): List[Pattern] = pat match {
+ case x: SequenceLikePattern =>
+ def isSameLength = pivotLen == x.nonStarLength
+ def rebound = x.nonStarPatterns :+ (x.elemPatterns.last rebindTo WILD(scrut.seqType))
+
+ (pivot.hasStar, x.hasStar, isSameLength) match {
+ case (true, true, true) => rebound :+ NoPattern
+ case (true, true, false) => (seqDummies drop 1) :+ x
+ case (true, false, true) => x.elemPatterns ++ List(NilPattern, NoPattern)
+ case (false, true, true) => rebound
+ case (false, false, true) => x.elemPatterns :+ NoPattern
+ case _ => seqDummies
}
+
+ case _ => seqDummies
+ }
+ }
+
+ // Should the given pattern be in the fail matrix? This is true of any sequences
+ // as long as the result of the length test on the pivot doesn't make it impossible:
+ // for instance if neither sequence is right ignoring and they are of different
+ // lengths, the later one cannot match since its length must be wrong.
+ def failureMatrixFn(c: Pattern) = (pivot ne c) && (c match {
+ case x: SequenceLikePattern =>
+ (hasStar, x.hasStar) match {
+ case (_, true) => true
+ case (true, false) => pivotLen > x.nonStarLength
+ case (false, false) => pivotLen != x.nonStarLength
}
- ) match { case (l1, l2) => (l1.flatten, l2.flatten) }
+ case WildcardPattern() => true
+ case _ => false
+ })
+
+ // divide the remaining rows into success/failure branches, expanding subsequences of patterns
+ val successRows = pmatch pzip rest.rows collect {
+ case (c, row) if successMatrixFn isDefinedAt c => row insert successMatrixFn(c)
+ }
+ val failRows = pmatch pzip rest.rows collect {
+ case (c, row) if failureMatrixFn(c) => row insert c
}
- lazy val cond = (pivot precondition pmatch).get // length check
- lazy val success = squeezedBlockPVs(pvs, remake(rowsplit._1, pvs, hasStar).toTree)
- lazy val failure = remake(rowsplit._2).toTree
+ // the discrimination test for sequences is a call to lengthCompare. Note that
+ // this logic must be fully consistent with successMatrixFn and failureMatrixFn above:
+ // any inconsistency will (and frequently has) manifested as pattern matcher crashes.
+ lazy val cond = {
+ // the method call symbol
+ val methodOp: Symbol = head.tpe member nme.lengthCompare
+
+ // the comparison to perform. If the pivot is right ignoring, then a scrutinee sequence
+ // of >= pivot length could match it; otherwise it must be exactly equal.
+ val compareOp: (Tree, Tree) => Tree = if (hasStar) _ INT_>= _ else _ INT_== _
+
+ // scrutinee.lengthCompare(pivotLength) [== | >=] 0
+ val compareFn: Tree => Tree = (t: Tree) => compareOp((t DOT methodOp)(LIT(pivotLen)), ZERO)
+
+ // wrapping in a null check on the scrutinee
+ nullSafe(compareFn, FALSE)(scrut.id)
+ }
+ lazy val success = squeezedBlockPVs(pvs, remake(successRows, pvs, hasStar).toTree)
+ lazy val failure = remake(failRows).toTree
final def tree(): Tree = codegen
}
@@ -476,7 +535,7 @@ trait ParallelMatching extends ast.TreeDSL
// @todo: equals test for same constant
class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
private lazy val labelBody =
- remake(List.map2(rest.rows.tail, pmatch.tail)(_ insert _)).toTree
+ remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree
private lazy val rhs =
decodedEqualsType(head.tpe) match {
@@ -510,8 +569,11 @@ trait ParallelMatching extends ast.TreeDSL
case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern])
case class No(bx: Int, remaining: Pattern)
- val (yeses, noes) : (List[Yes], List[No]) = List.unzip(
- for ((pattern, j) <- pmatch.pzip()) yield {
+ val (yeses, noes) = {
+ val _ys = new ListBuffer[Yes]
+ val _ns = new ListBuffer[No]
+
+ for ((pattern, j) <- pmatch.pzip()) {
// scrutinee, head of pattern group
val (s, p) = (pattern.tpe, head.necessaryType)
@@ -529,17 +591,23 @@ trait ParallelMatching extends ast.TreeDSL
def typed(pp: Tree) = passl(ifEquiv(Pattern(pp)))
def subs() = passl(ifEquiv(NoPattern), pattern subpatterns pmatch)
- (pattern match {
+ val (oneY, oneN) = pattern match {
case Pattern(LIT(null), _) if !(p =:= s) => (None, passr) // (1)
case x if isObjectTest => (passl(), None) // (2)
case Pattern(Typed(pp, _), _) if sMatchesP => (typed(pp), None) // (4)
- case Pattern(_: UnApply, _) => (passl(), passr)
+ // The next line used to be this which "fixed" 1697 but introduced
+ // numerous regressions including #3136.
+ // case Pattern(_: UnApply, _) => (passl(), passr)
+ case Pattern(_: UnApply, _) => (None, passr)
case x if !x.isDefault && sMatchesP => (subs(), None)
case x if x.isDefault || pMatchesS => (passl(), passr)
case _ => (None, passr)
- }) : (Option[Yes], Option[No])
+ }
+ oneY map (_ys +=)
+ oneN map (_ns +=)
}
- ) match { case (x,y) => (x.flatten, y.flatten) }
+ (_ys.toList, _ns.toList)
+ }
val moreSpecific = yeses map (_.moreSpecific)
val subsumed = yeses map (x => (x.bx, x.subsumed))
@@ -577,7 +645,7 @@ trait ParallelMatching extends ast.TreeDSL
}
lazy val failure =
- mkFail(remaining map tupled((p1, p2) => rest rows p1 insert p2))
+ mkFail(remaining map { case (p1, p2) => rest rows p1 insert p2 })
final def tree(): Tree = codegen
}
@@ -614,7 +682,7 @@ trait ParallelMatching extends ast.TreeDSL
def isNotAlternative(p: Pattern) = !cond(p.tree) { case _: Alternative => true }
// classify all the top level patterns - alternatives come back unaltered
- val newPats: List[Pattern] = pats.zipWithIndex map tupled(classifyPat)
+ val newPats: List[Pattern] = pats.zipWithIndex map classifyPat.tupled
// see if any alternatives were in there
val (ps, others) = newPats span isNotAlternative
// make a new row for each alternative, with it spliced into the original position
@@ -685,34 +753,28 @@ trait ParallelMatching extends ast.TreeDSL
}
def createLabelBody(index: Int, pvgroup: PatternVarGroup) = {
- def args = pvgroup.syms
- def vdefs = pvgroup.valDefs
+ val args = pvgroup.syms
+ val vdefs = pvgroup.valDefs
val name = "body%" + index
require(_labelSym == null)
referenceCount += 1
if (isLabellable) {
- // val mtype = MethodType(freeVars, bodyTpe)
- val mtype = MethodType(args, bodyTpe)
+ val mtype = MethodType(freeVars, bodyTpe)
_labelSym = owner.newLabel(body.pos, name) setInfo mtype
TRACE("Creating index %d: mtype = %s".format(bx, mtype))
- if (freeVars.size != args.size)
- TRACE("We will be hosed! freeVars = %s, args = %s, vdefs = %s".format(freeVars, args, vdefs))
-
- // Labelled expression - the symbols in the array (must be Idents!)
- // are those the label takes as argument
- _label = typer typedLabelDef LabelDef(_labelSym, args, body setType bodyTpe)
- TRACE("[New label] def %s%s: %s = %s".format(name, pp(args), bodyTpe, body))
+ _label = typer typedLabelDef LabelDef(_labelSym, freeVars, body setType bodyTpe)
+ TRACE("[New label] def %s%s: %s = %s".format(name, pp(freeVars), bodyTpe, body))
}
ifLabellable(vdefs, squeezedBlock(vdefs, label))
}
def getLabelBody(pvgroup: PatternVarGroup): Tree = {
- def idents = pvgroup map (_.rhs)
- def vdefs = pvgroup.valDefs
+ val idents = pvgroup map (_.rhs)
+ val vdefs = pvgroup.valDefs
referenceCount += 1
// if (idents.size != labelParamTypes.size)
// consistencyFailure(idents, vdefs)
@@ -738,12 +800,10 @@ trait ParallelMatching extends ast.TreeDSL
/** Cut out the column containing the non-default pattern. */
class Cut(index: Int) {
/** The first two separate out the 'i'th pattern in each row from the remainder. */
- private val (_column, _rows) =
- List.unzip(rows map (_ extractColumn index))
+ private val (_column, _rows) = rows map (_ extractColumn index) unzip
/** Now the 'i'th tvar is separated out and used as a new Scrutinee. */
- private val (_pv, _tvars) =
- tvars extractIndex index
+ private val (_pv, _tvars) = tvars extractIndex index
/** The non-default pattern (others.head) replaces the column head. */
private val (_ncol, _nrep) =
@@ -826,12 +886,18 @@ trait ParallelMatching extends ast.TreeDSL
}
case _: SingletonType if useEqTest =>
val eqTest = REF(tpe.termSymbol) MEMBER_== scrutTree
+
// See ticket #1503 for the motivation behind checking for a binding.
// The upshot is that it is unsound to assume equality means the right
// type, but if the value doesn't appear on the right hand side of the
// match that's unimportant; so we add an instance check only if there
// is a binding.
- if (isBound) eqTest AND (scrutTree IS tpe.widen)
+ if (isBound) {
+ if (settings.Xmigration28.value) {
+ cunit.warning(scrutTree.pos, "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
+ }
+ eqTest AND (scrutTree IS tpe.widen)
+ }
else eqTest
case _ if scrutTree.tpe <:< tpe && tpe.isAnyRef => scrutTree OBJ_!= NULL
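
For reference, the guard that the new cond boils down to for a right-ignoring pattern such as case Seq(a, b, _*): a null check on the scrutinee followed by a single lengthCompare test (>= because the pattern has a star; a fixed-length pattern would use ==). Written out in plain Scala rather than the tree DSL, with illustrative names.

    object SeqGuardSketch {
      def guard(xs: Seq[Int]): Boolean =
        (xs ne null) && xs.lengthCompare(2) >= 0   // pivotLen = 2 here

      def main(args: Array[String]): Unit = {
        println(guard(List(1, 2, 3)))   // true
        println(guard(List(1)))         // false: too short
        println(guard(null))            // false, and no NPE thanks to the null check
      }
    }
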
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index b4689ebc30..83fd3a9608 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -8,6 +8,7 @@ package matching
import transform.ExplicitOuter
import collection.immutable.TreeMap
+import PartialFunction._
trait PatternBindings extends ast.TreeDSL
{
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 8a596f1257..a135655b9d 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -7,8 +7,8 @@ package scala.tools.nsc
package matching
import symtab.Flags
-import util.NoPosition
-import scala.util.NameTransformer.decode
+import scala.reflect.NameTransformer.decode
+import PartialFunction._
/** Patterns are wrappers for Trees with enhanced semantics.
*
@@ -85,7 +85,8 @@ trait Patterns extends ast.TreeDSL {
// 8.1.4 (a)
case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern {
- require (!isVarPattern(fn) && args.isEmpty)
+ // XXX - see bug 3411 for code which violates this assumption
+ // require (!isVarPattern(fn) && args.isEmpty)
val ident @ Ident(name) = fn
override def sufficientType = Pattern(ident).equalsCheck
@@ -130,7 +131,7 @@ trait Patterns extends ast.TreeDSL {
case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern {
require(fn.isType && this.isCaseClass)
def name = tpe.typeSymbol.name
- def cleanName = tpe.typeSymbol.cleanNameString
+ def cleanName = tpe.typeSymbol.decodedName
def hasPrefix = tpe.prefix.prefixString != ""
def prefixedName =
if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName)
@@ -177,9 +178,9 @@ trait Patterns extends ast.TreeDSL {
}
// 8.1.8 (unapplySeq calls)
- case class SequenceExtractorPattern(tree: UnApply) extends UnapplyPattern {
+ case class SequenceExtractorPattern(tree: UnApply) extends UnapplyPattern with SequenceLikePattern {
- private val UnApply(
+ lazy val UnApply(
Apply(TypeApply(Select(_, nme.unapplySeq), List(tptArg)), _),
List(ArrayValue(_, elems))
) = tree
@@ -211,90 +212,24 @@ trait Patterns extends ast.TreeDSL {
override def description = "UnSeq(%s => %s)".format(tptArg, resTypesString)
}
- abstract class SequencePattern extends Pattern {
- val tree: ArrayValue
- def nonStarPatterns: List[Pattern]
- def subsequences(other: Pattern, seqType: Type): Option[List[Pattern]]
- def canSkipSubsequences(second: Pattern): Boolean
-
- lazy val ArrayValue(elemtpt, elems) = tree
- lazy val elemPatterns = toPats(elems)
-
- override def dummies = emptyPatterns(elems.length + 1)
- override def subpatternsForVars: List[Pattern] = elemPatterns
+ trait SequenceLikePattern extends Pattern {
+ def elems: List[Tree]
+ def elemPatterns = toPats(elems)
+ def nonStarPatterns: List[Pattern] = if (hasStar) elemPatterns.init else elemPatterns
def nonStarLength = nonStarPatterns.length
def isAllDefaults = nonStarPatterns forall (_.isDefault)
- def isShorter(other: SequencePattern) = nonStarLength < other.nonStarLength
- def isSameLength(other: SequencePattern) = nonStarLength == other.nonStarLength
-
- protected def lengthCheckOp: (Tree, Tree) => Tree =
- if (hasStar) _ ANY_>= _
- else _ MEMBER_== _
-
- // optimization to avoid trying to match if length makes it impossible
- override def precondition(pm: PatternMatch) = {
- import pm.{ scrut, head }
- val len = nonStarLength
- val compareOp = head.tpe member nme.lengthCompare
-
- def cmpFunction(t1: Tree) = lengthCheckOp((t1 DOT compareOp)(LIT(len)), ZERO)
-
- Some(nullSafe(cmpFunction _, FALSE)(scrut.id))
- }
-
- /** True if 'next' must be checked even if 'first' failed to match after passing its length test
- * (the conditional supplied by getPrecondition.) This is an optimization to avoid checking sequences
- * which cannot match due to a length incompatibility.
- */
- override def description = "Seq(%s)".format(elemPatterns)
+ def isShorter(other: SequenceLikePattern) = nonStarLength < other.nonStarLength
+ def isSameLength(other: SequenceLikePattern) = nonStarLength == other.nonStarLength
}
// 8.1.8 (b) (literal ArrayValues)
- case class SequenceNoStarPattern(tree: ArrayValue) extends SequencePattern {
- require(!hasStar)
- lazy val nonStarPatterns = elemPatterns
-
- // no star
- def subsequences(other: Pattern, seqType: Type): Option[List[Pattern]] =
- condOpt(other) {
- case next: SequenceStarPattern if isSameLength(next) => next rebindStar seqType
- case next: SequenceNoStarPattern if isSameLength(next) => next.elemPatterns ::: List(NoPattern)
- case WildcardPattern() | (_: SequencePattern) => dummies
- }
-
- def canSkipSubsequences(second: Pattern): Boolean =
- (tree eq second.tree) || (cond(second) {
- case x: SequenceNoStarPattern => (x isShorter this) && this.isAllDefaults
- })
- }
-
- // 8.1.8 (b)
- case class SequenceStarPattern(tree: ArrayValue) extends SequencePattern {
- require(hasStar)
- lazy val nonStarPatterns = elemPatterns.init
-
- // yes star
- private def nilPats = List(NilPattern, NoPattern)
- def subsequences(other: Pattern, seqType: Type): Option[List[Pattern]] =
- condOpt(other) {
- case next: SequenceStarPattern if isSameLength(next) => (next rebindStar seqType) ::: List(NoPattern)
- case next: SequenceStarPattern if (next isShorter this) => (dummies drop 1) ::: List(next)
- case next: SequenceNoStarPattern if isSameLength(next) => next.elemPatterns ::: nilPats
- case WildcardPattern() | (_: SequencePattern) => dummies
- }
-
- def rebindStar(seqType: Type): List[Pattern] =
- nonStarPatterns ::: List(elemPatterns.last rebindTo WILD(seqType))
-
- def canSkipSubsequences(second: Pattern): Boolean =
- (tree eq second.tree) || (cond(second) {
- case x: SequenceStarPattern => this isShorter x
- case x: SequenceNoStarPattern => !(x isShorter this)
- })
+ case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern {
+ lazy val ArrayValue(elemtpt, elems) = tree
- override def description = "Seq*(%s)".format(elemPatterns)
+ override def subpatternsForVars: List[Pattern] = elemPatterns
+ override def description = "Seq(%s)".format(elemPatterns mkString ", ")
}
// 8.1.8 (c)
@@ -353,7 +288,7 @@ trait Patterns extends ast.TreeDSL {
case x: Literal => LiteralPattern(x)
case x: UnApply => UnapplyPattern(x)
case x: Ident => if (isVarPattern(x)) VariablePattern(x) else SimpleIdPattern(x)
- case x: ArrayValue => if (isRightIgnoring(x)) SequenceStarPattern(x) else SequenceNoStarPattern(x)
+ case x: ArrayValue => SequencePattern(x)
case x: Select => StableIdPattern(x)
case x: Star => StarPattern(x)
case x: This => ThisPattern(x) // XXX ?
@@ -504,10 +439,6 @@ trait Patterns extends ast.TreeDSL {
// the right number of dummies for this pattern
def dummies: List[Pattern] = Nil
- // given this scrutinee, what if any condition must be satisfied before
- // we even try to match?
- def precondition(scrut: PatternMatch): Option[Tree] = None
-
// 8.1.13
// A pattern p is irrefutable for type T if any of the following applies:
// 1) p is a variable pattern
diff --git a/src/compiler/scala/tools/nsc/matching/TransMatcher.scala b/src/compiler/scala/tools/nsc/matching/TransMatcher.scala
index b47ebccfa9..be455b00b0 100644
--- a/src/compiler/scala/tools/nsc/matching/TransMatcher.scala
+++ b/src/compiler/scala/tools/nsc/matching/TransMatcher.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* Copyright 2007 Google Inc. All Rights Reserved.
* Author: bqe@google.com (Burak Emir)
*/
@@ -7,12 +7,11 @@
package scala.tools.nsc
package matching
-import util.Position
-import ast.{ TreePrinters, Trees }
import symtab.SymbolTable
import transform.ExplicitOuter
import java.io.{ StringWriter, PrintWriter }
-import scala.util.NameTransformer.decode
+import scala.reflect.NameTransformer.decode
+import PartialFunction._
/** Translation of pattern matching
*
@@ -45,7 +44,7 @@ trait TransMatcher extends ast.TreeDSL {
{
import context._
- def matchError(obj: Tree) = atPos(selector.pos)(THROW(MatchErrorClass, obj))
+ def matchError(obj: Tree) = atPos(selector.pos)(MATCHERROR(obj))
def caseIsOk(c: CaseDef) = cond(c.pat) { case _: Apply | Ident(nme.WILDCARD) => true }
def rootTypes = selector.tpe.typeArgs
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 50bd52d3fa..1b7e208334 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
package plugins
@@ -131,13 +130,13 @@ object Plugin {
dirs: List[Path],
ignoring: List[String]): List[AnyClass] =
{
- val alljars = jars ::: (for {
+ val alljars = (jars ::: (for {
dir <- dirs if dir.isDirectory
- entry <- dir.toDirectory.files.toList sortWith (_.name <= _.name)
- if entry.name.toLowerCase endsWith ".jar"
+ entry <- dir.toDirectory.files.toList sortBy (_.name)
+ if entry.extension == "jar"
pdesc <- loadDescription(entry)
if !(ignoring contains pdesc.name)
- } yield entry)
+ } yield entry)).distinct
val loader = loaderFor(alljars)
alljars map (loadFrom(_, loader)) flatten
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
index 1163ab9195..4234d49545 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Lex Spoon
* Updated by Anders Bach Nielsen
*/
-// $Id$
package scala.tools.nsc
package plugins
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
index 27355d061a..2498d84338 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
package plugins
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala b/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
index ce322122eb..f0402809d7 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
package plugins
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 93a3f46ac2..5823a09995 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Lex Spoon
* Updated by Anders Bach Nielsen
*/
-// $Id$
package scala.tools.nsc
package plugins
@@ -30,10 +29,10 @@ trait Plugins
val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
- classes foreach (c => Plugin.instantiate(c, this))
-
- for (plugClass <- Plugin.loadAllFrom(jars, dirs, settings.disable.value))
- yield Plugin.instantiate(plugClass, this)
+ // Each plugin must only be instantiated once. A common pattern
+ // is to register annotation checkers during object construction, so
+ // creating multiple plugin instances will leave behind stale checkers.
+ classes map (Plugin.instantiate(_, this))
}
protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList
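The rationale above (annotation checkers registered as a construction side effect) is why the loop now instantiates each plugin class exactly once. A minimal, self-contained sketch of the failure mode, using a hypothetical registry instead of the real compiler API:

    import scala.collection.mutable.ListBuffer

    // Hypothetical stand-in for global compiler state that a plugin mutates on construction.
    object CompilerState {
      val annotationCheckers = ListBuffer[String]()
    }

    // A plugin-like class whose constructor registers a checker.
    class NoisyPlugin {
      CompilerState.annotationCheckers += "noisy-checker"
    }

    object InstantiateOnce {
      def main(args: Array[String]): Unit = {
        new NoisyPlugin   // the one intended instantiation
        new NoisyPlugin   // a second instantiation, as the old double-loading code effectively did
        // Two registrations now exist; one of them is stale.
        println(CompilerState.annotationCheckers)   // ListBuffer(noisy-checker, noisy-checker)
      }
    }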
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index f8d40a13f8..11ef7ffb68 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2009 LAMP/EPFL
+ * Copyright 2002-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package reporters
@@ -27,15 +26,19 @@ abstract class AbstractReporter extends Reporter {
def display(pos: Position, msg: String, severity: Severity): Unit
def displayPrompt: Unit
- protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
+ protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) {
+ val severity =
+ if (settings.Xwarnfatal.value && _severity == WARNING) ERROR
+ else _severity
+
severity match {
case INFO =>
if (force || settings.verbose.value) display(pos, msg, severity)
case WARNING =>
val hidden = testAndLog(pos, severity)
if (!settings.nowarnings.value) {
- if (!hidden || settings.prompt.value) display(pos, msg, severity)
- if (settings.prompt.value) displayPrompt
+ if (!hidden || settings.prompt.value) display(pos, msg, severity)
+ if (settings.prompt.value) displayPrompt
}
case ERROR =>
val hidden = testAndLog(pos, severity)
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index ca7b264d22..1f01087dad 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2009 LAMP/EPFL
+ * Copyright 2002-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package reporters
@@ -103,18 +102,21 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
writer.flush()
var line = reader.readLine()
if (line ne null) {
- line = line.toLowerCase()
- if ("abort" startsWith line)
- throw new Error("user abort")
- if ("resume" startsWith line) continue = false
+ line = line.toLowerCase()
+ if ("abort" startsWith line)
+ abort("user abort")
+ if ("resume" startsWith line)
+ continue = false
}
}
- } catch {
+ }
+ catch {
case ex: IOException => {
ex.printStackTrace()
- throw new Error("input read error")
+ abort("input read error")
}
}
+ private def abort(msg: String) = throw new Error(msg)
override def flush() { writer.flush() }
}
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 0d8939c168..e253a8f5f0 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2009 LAMP/EPFL
+ * Copyright 2002-2011 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package reporters
@@ -15,15 +14,14 @@ import scala.tools.nsc.util._
*/
abstract class Reporter {
object severity extends Enumeration
- class Severity(_id: Int) extends severity.Value {
+ class Severity(val id: Int) extends severity.Value {
var count: Int = 0
- def id = _id
}
val INFO = new Severity(0)
val WARNING = new Severity(1)
val ERROR = new Severity(2)
- def reset {
+ def reset() {
INFO.count = 0
ERROR.count = 0
WARNING.count = 0
@@ -31,7 +29,8 @@ abstract class Reporter {
}
var cancelled: Boolean = false
- def hasErrors: Boolean = ERROR.count != 0 || cancelled
+ def hasErrors: Boolean = ERROR.count > 0 || cancelled
+ def hasWarnings: Boolean = WARNING.count > 0
/** Flush all output */
def flush() { }
@@ -41,6 +40,28 @@ abstract class Reporter {
private var source: SourceFile = _
def setSource(source: SourceFile) { this.source = source }
def getSource: SourceFile = source
+ def withSource[A](src: SourceFile)(op: => A) = {
+ val oldSource = source
+ try {
+ source = src
+ op
+ } finally {
+ source = oldSource
+ }
+ }
+
+ /** Whether very long lines can be truncated. This exists so important
+ * debugging information (like printing the classpath) is not rendered
+ * invisible due to the max message length.
+ */
+ private var _truncationOK: Boolean = true
+ def truncationOK = _truncationOK
+ def withoutTruncating[T](body: => T): T = {
+ val saved = _truncationOK
+ _truncationOK = false
+ try body
+ finally _truncationOK = saved
+ }
def info(pos: Position, msg: String, force: Boolean) { info0(pos, msg, INFO, force) }
def warning(pos: Position, msg: String ) { info0(pos, msg, WARNING, false) }
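Both withSource and withoutTruncating added above follow the same save/restore-in-finally idiom. A minimal, self-contained sketch of that idiom (the names below are made up; only the pattern mirrors the patch):

    // Minimal sketch of the save/restore idiom shared by withSource and withoutTruncating.
    object TruncationDemo {
      private var truncationOK = true

      def withoutTruncating[T](body: => T): T = {
        val saved = truncationOK
        truncationOK = false
        try body
        finally truncationOK = saved        // restored even if the body throws
      }

      def main(args: Array[String]): Unit = {
        println(truncationOK)                        // true
        withoutTruncating { println(truncationOK) }  // false inside the body
        println(truncationOK)                        // true again afterwards
      }
    }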
diff --git a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
index 3bce66df0a..56f904a8bc 100644
--- a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2009 LAMP/EPFL
+ * Copyright 2002-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package reporters
diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
index 5239dabd60..e4736297e9 100644
--- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2009 LAMP/EPFL
+ * Copyright 2002-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package reporters
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
new file mode 100644
index 0000000000..790f4cce00
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -0,0 +1,40 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+trait AbsScalaSettings {
+ self: AbsSettings =>
+
+ type BooleanSetting <: AbsSetting { type T = Boolean }
+ type ChoiceSetting <: AbsSetting { type T = String }
+ type DefinesSetting <: AbsSetting { type T = List[(String, String)] }
+ type IntSetting <: AbsSetting { type T = Int }
+ type MultiStringSetting <: AbsSetting { type T = List[String] }
+ type PathSetting <: AbsSetting { type T = String }
+ type PhasesSetting <: AbsSetting { type T = List[String] }
+ type StringSetting <: AbsSetting { type T = String }
+
+ type OutputDirs
+ type OutputSetting <: AbsSetting
+
+ def BooleanSetting(name: String, descr: String): BooleanSetting
+ def ChoiceSetting(name: String, descr: String, choices: List[String], default: String): ChoiceSetting
+ def DefinesSetting(): DefinesSetting
+ def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]): IntSetting
+ def MultiStringSetting(name: String, arg: String, descr: String): MultiStringSetting
+ def OutputSetting(outputDirs: OutputDirs, default: String): OutputSetting
+ def PathSetting(name: String, arg: String, descr: String, default: String): PathSetting
+ def PhasesSetting(name: String, descr: String): PhasesSetting
+ def StringSetting(name: String, arg: String, descr: String, default: String): StringSetting
+
+ /** **/
+ abstract class SettingGroup(val prefix: String) extends AbsSetting {
+ def name = prefix
+ def helpDescription: String = error("todo")
+ def unparse: List[String] = List(name)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
new file mode 100644
index 0000000000..21608f7f05
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -0,0 +1,137 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+import io.AbstractFile
+
+/** A Settings abstraction boiled out of the original highly mutable Settings
+ * class with the intention of creating an ImmutableSettings which can be used
+ * interchangeably. Except of course without the mutants.
+ */
+
+trait AbsSettings {
+ type Setting <: AbsSetting // Fix to the concrete Setting type
+ type ResultOfTryToSet // List[String] in mutable, (Settings, List[String]) in immutable
+ def errorFn: String => Unit
+ protected def allSettings: collection.Set[Setting]
+
+ // settings minus internal usage settings
+ def visibleSettings = allSettings filterNot (_.isInternalOnly)
+
+ // only settings which differ from default
+ def userSetSettings = visibleSettings filterNot (_.isDefault)
+
+ // an argument list which (should) be usable to recreate the Settings
+ def recreateArgs = userSetSettings.toList flatMap (_.unparse)
+
+ // checks both name and any available abbreviations
+ def lookupSetting(cmd: String): Option[Setting] = allSettings find (_ respondsTo cmd)
+
+ // two AbsSettings objects are equal if their visible settings are equal.
+ override def hashCode() = visibleSettings.hashCode
+ override def equals(that: Any) = that match {
+ case s: AbsSettings => this.visibleSettings == s.visibleSettings
+ case _ => false
+ }
+ override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n") mkString)
+ def toConciseString = userSetSettings.mkString("(", " ", ")")
+
+ def checkDependencies =
+ visibleSettings filterNot (_.isDefault) forall (setting => setting.dependencies forall {
+ case (dep, value) =>
+ (Option(dep.value) exists (_.toString == value)) || {
+ errorFn("incomplete option %s (requires %s)".format(setting.name, dep.name))
+ false
+ }
+ })
+
+ implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered
+
+ trait AbsSettingValue {
+ type T <: Any
+ def value: T
+ def isDefault: Boolean
+ }
+
+ trait AbsSetting extends Ordered[Setting] with AbsSettingValue {
+ def name: String
+ def helpDescription: String
+ def unparse: List[String] // A list of Strings which can recreate this setting.
+
+ /* For tools which need to populate lists of available choices */
+ def choices : List[String] = Nil
+
+ /** In mutable Settings, these return the same object with a var set.
+ * In immutable, of course they will return a new object, which means
+ * we can't use "this.type", at least not in a non-casty manner, which
+ * is unfortunate because we lose type information without it.
+ *
+ * ...but now they're this.type because of #3462. The immutable
+ * side doesn't exist yet anyway.
+ */
+ def withAbbreviation(name: String): this.type
+ def withHelpSyntax(help: String): this.type
+
+ def helpSyntax: String = name
+ def abbreviations: List[String] = Nil
+ def dependencies: List[(Setting, String)] = Nil
+ def respondsTo(label: String) = (name == label) || (abbreviations contains label)
+
+ /** If the setting should not appear in help output, etc. */
+ def isInternalOnly = false
+
+ /** Issue error and return */
+ def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
+
+ /** After correct Setting has been selected, tryToSet is called with the
+ * remainder of the command line. It consumes any applicable arguments and
+ * returns the unconsumed ones.
+ */
+ protected[nsc] def tryToSet(args: List[String]): Option[ResultOfTryToSet]
+
+ /** Commands which can take lists of arguments in form -Xfoo:bar,baz override
+ * this method and accept them as a list. It returns List[String] for
+ * consistency with tryToSet, and should return its incoming arguments
+ * unmodified on failure, and Nil on success.
+ */
+ protected[nsc] def tryToSetColon(args: List[String]): Option[ResultOfTryToSet] =
+ errorAndValue("'%s' does not accept multiple arguments" format name, None)
+
+ /** Commands which take properties in form -Dfoo=bar or -Dfoo
+ */
+ protected[nsc] def tryToSetProperty(args: List[String]): Option[ResultOfTryToSet] =
+ errorAndValue("'%s' does not accept property style arguments" format name, None)
+
+ /** Attempt to set from a properties file style property value.
+ */
+ def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil)
+
+ /** These categorizations are so the help output shows -X and -P among
+ * the standard options and -Y among the advanced options.
+ */
+ def isAdvanced = name match { case "-Y" => true ; case "-X" => false ; case _ => name startsWith "-X" }
+ def isPrivate = name match { case "-Y" => false ; case _ => name startsWith "-Y" }
+ def isStandard = !isAdvanced && !isPrivate
+
+ def compare(that: Setting): Int = name compare that.name
+
+ /** Equality tries to sidestep all the drama and define it simply and
+ * in one place: two AbsSetting objects are equal if their names and
+ * values compare equal.
+ */
+ override def equals(that: Any) = that match {
+ case x: AbsSettings#AbsSetting => (name == x.name) && (value == x.value)
+ case _ => false
+ }
+ override def hashCode() = (name, value).hashCode
+ override def toString() = "%s = %s".format(name, value)
+ }
+
+ trait InternalSetting extends AbsSetting {
+ override def isInternalOnly = true
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
new file mode 100644
index 0000000000..830370a3e7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
@@ -0,0 +1,76 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+trait AdvancedScalaSettings {
+ self: AbsScalaSettings =>
+
+ abstract class X extends SettingGroup("-X") {
+ val assemextdirs: StringSetting
+ val assemname: StringSetting
+ val assempath: StringSetting
+ val checkinit: BooleanSetting
+ val disableassertions: BooleanSetting
+ val elidebelow: IntSetting
+ val experimental: BooleanSetting
+ val future: BooleanSetting
+ val generatephasegraph: StringSetting
+ val logimplicits: BooleanSetting
+ val migration: BooleanSetting
+ val noforwarders: BooleanSetting
+ val nojline: BooleanSetting
+ val nouescape: BooleanSetting
+ val plugin: MultiStringSetting
+ val plugindisable: MultiStringSetting
+ val pluginlist: BooleanSetting
+ val pluginrequire: MultiStringSetting
+ val pluginsdir: StringSetting
+ val print: PhasesSetting
+ val printicode: BooleanSetting
+ val printpos: BooleanSetting
+ val printtypes: BooleanSetting
+ val prompt: BooleanSetting
+ val resident: BooleanSetting
+ val script: StringSetting
+ val showclass: StringSetting
+ val showobject: StringSetting
+ val showphases: BooleanSetting
+ val sourcedir: StringSetting
+ val sourcereader: StringSetting
+ }
+ // def Xexperimental = X.experimental
+ // def Xmigration28 = X.migration
+ // def Xnojline = X.nojline
+ // def Xprint = X.print
+ // def Xprintpos = X.printpos
+ // def Xshowcls = X.showclass
+ // def Xshowobj = X.showobject
+ // def assemextdirs = X.assemextdirs
+ // def assemname = X.assemname
+ // def assemrefs = X.assempath
+ // def checkInit = X.checkinit
+ // def disable = X.plugindisable
+ // def elideLevel = X.elidelevel
+ // def future = X.future
+ // def genPhaseGraph = X.generatephasegraph
+ // def logimplicits = X.logimplicits
+ // def noForwarders = X.noforwarders
+ // def noassertions = X.disableassertions
+ // def nouescape = X.nouescape
+ // def plugin = X.plugin
+ // def pluginsDir = X.pluginsdir
+ // def printtypes = X.printtypes
+ // def prompt = X.prompt
+ // def require = X.require
+ // def resident = X.resident
+ // def script = X.script
+ // def showPhases = X.showphases
+ // def showPlugins = X.pluginlist
+ // def sourceReader = X.sourcereader
+ // def sourcedir = X.sourcedir
+ // def writeICode = X.printicode
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala b/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
new file mode 100644
index 0000000000..a673860417
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
@@ -0,0 +1,11 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+/** TODO.
+ */
+class ImmutableSettings
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
new file mode 100644
index 0000000000..4c9da87229
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -0,0 +1,604 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id$
+
+package scala.tools
+package nsc
+package settings
+
+import io.{AbstractFile, VirtualDirectory}
+import scala.tools.util.StringOps
+import scala.collection.mutable.ListBuffer
+import scala.io.Source
+
+/** A mutable Settings object.
+ */
+class MutableSettings(val errorFn: String => Unit) extends AbsSettings with ScalaSettings with Mutable {
+ type ResultOfTryToSet = List[String]
+
+ /** Iterates over the arguments applying them to settings where applicable.
+ * Then verifies setting dependencies are met.
+ *
+ * This temporarily takes a boolean indicating whether to keep
+ * processing if an argument is seen which is not a command line option.
+ * This is an expedience for the moment so that you can say
+ *
+ * scalac -d /tmp foo.scala -optimise
+ *
+ * while also allowing
+ *
+ * scala Program opt opt
+ *
+ * to get their arguments.
+ *
+ * Returns (success, List of unprocessed arguments)
+ */
+ def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
+ var args = arguments
+ val residualArgs = new ListBuffer[String]
+
+ while (args.nonEmpty) {
+ if (args.head startsWith "-") {
+ val args0 = args
+ args = this parseParams args
+ if (args eq args0) {
+ errorFn("bad option: '" + args.head + "'")
+ return ((false, args))
+ }
+ }
+ else if (args.head == "") { // discard empties, sometimes they appear because of ant or etc.
+ args = args.tail
+ }
+ else {
+ if (!processAll)
+ return ((checkDependencies, args))
+
+ residualArgs += args.head
+ args = args.tail
+ }
+ }
+
+ ((checkDependencies, residualArgs.toList))
+ }
+ def processArgumentString(params: String) = processArguments(splitParams(params), true)
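A hedged usage sketch of the two entry points above. The error handler, file name and options are made up, and "." is used for -d so the output directory certainly exists:

    // Sketch: driving option parsing programmatically.
    val settings = new scala.tools.nsc.settings.MutableSettings(msg => Console.err.println(msg))

    // processAll = true: keep parsing options even after a non-option argument appears.
    val (ok, residual) = settings.processArguments(List("-d", ".", "Foo.scala", "-verbose"), true)
    // ok mirrors checkDependencies; residual == List("Foo.scala"); settings.verbose.value == true

    // The same thing from one string, tokenized by splitParams:
    settings.processArgumentString("-d . -verbose")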
+
+ /** Create a new Settings object, copying all user-set values.
+ */
+ def copy(): Settings = {
+ val s = new Settings()
+ val xs = userSetSettings flatMap (_.unparse)
+ s.processArguments(xs.toList, true)
+ s
+ }
+
+ /** A list pairing source directories with their output directory.
+ * This option is not available on the command line, but can be set by
+ * other tools (IDEs especially). The command line specifies a single
+ * output directory that is used for all source files, denoted by a
+ * '*' in this list.
+ */
+ lazy val outputDirs = new OutputDirs
+
+ /** Split the given line into parameters.
+ */
+ def splitParams(line: String) = cmd.Parser.tokenize(line, errorFn)
+
+ /** Returns any unprocessed arguments.
+ */
+ def parseParams(args: List[String]): List[String] = {
+ // verify command exists and call setter
+ def tryToSetIfExists(
+ cmd: String,
+ args: List[String],
+ setter: (Setting) => (List[String] => Option[List[String]])
+ ): Option[List[String]] =
+ lookupSetting(cmd) match {
+ case None => errorFn("Parameter '" + cmd + "' is not recognised by Scalac.") ; None
+ case Some(cmd) => setter(cmd)(args)
+ }
+
+ // if arg is of form -Xfoo:bar,baz,quux
+ def parseColonArg(s: String): Option[List[String]] = {
+ val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
+
+ // any non-Nil return value means failure and we return s unmodified
+ tryToSetIfExists(p, args split "," toList, (s: Setting) => s.tryToSetColon _)
+ }
+ // if arg is of form -Dfoo=bar or -Dfoo (name = "-D")
+ def isPropertyArg(s: String) = lookupSetting(s take 2) match {
+ case Some(x: DefinesSetting) => true
+ case _ => false
+ }
+ def parsePropertyArg(s: String): Option[List[String]] = {
+ val (p, args) = (s take 2, s drop 2)
+
+ tryToSetIfExists(p, List(args), (s: Setting) => s.tryToSetProperty _)
+ }
+
+ // if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
+ def parseNormalArg(p: String, args: List[String]): Option[List[String]] =
+ tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _)
+
+ def doArgs(args: List[String]): List[String] = {
+ if (args.isEmpty) return Nil
+ val arg :: rest = args
+ if (arg == "") {
+ // it looks like Ant passes "" sometimes
+ rest
+ }
+ else if (!arg.startsWith("-")) {
+ errorFn("Argument '" + arg + "' does not start with '-'.")
+ args
+ }
+ else if (arg == "-") {
+ errorFn("'-' is not a valid argument.")
+ args
+ }
+ else
+ // we dispatch differently based on the appearance of p:
+ // 1) If it has a : it is presumed to be -Xfoo:bar,baz
+ // 2) If the first two chars are the name of a command, -Dfoo=bar
+ // 3) Otherwise, the whole string should be a command name
+ //
+ // Internally we use Option[List[String]] to discover error,
+ // but the outside expects our arguments back unchanged on failure
+ if (isPropertyArg(arg)) parsePropertyArg(arg) match {
+ case Some(_) => rest
+ case None => args
+ }
+ else if (arg contains ":") parseColonArg(arg) match {
+ case Some(_) => rest
+ case None => args
+ }
+ else parseNormalArg(arg, rest) match {
+ case Some(xs) => xs
+ case None => args
+ }
+ }
+
+ doArgs(args)
+ }
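The dispatch comment inside doArgs distinguishes colon arguments, -D property arguments, and plain arguments. A hedged sketch of each shape, reusing the settings instance from the earlier sketch (the option values are arbitrary):

    // 1) Colon form: one option name carrying a comma-separated list.
    settings.processArgumentString("-Xprint:parser,typer")
    // settings.Xprint.value == List("parser", "typer")

    // 2) Property form: the first two characters name the DefinesSetting "-D".
    settings.processArgumentString("-Dfile.encoding=UTF-8")
    // settings.defines.value == List(("file.encoding", "UTF-8"))

    // 3) Plain form: the whole token is the option name; tryToSet consumes what follows.
    settings.processArgumentString("-encoding UTF-8")
    // settings.encoding.value == "UTF-8"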
+
+ /** Initializes these settings for embedded use by type `T`.
+ * The class loader defining `T` should provide resources `app.class.path`
+ * and `boot.class.path`. These resources should contain the application
+ * and boot classpaths in the same form as would be passed on the command line.*/
+ def embeddedDefaults[T: Manifest]: Unit =
+ embeddedDefaults(implicitly[Manifest[T]].erasure.getClassLoader)
+
+ /** Initializes these settings for embedded use by a class from the given class loader.
+ * The class loader for `T` should provide resources `app.class.path`
+ * and `boot.class.path`. These resources should contain the application
+ * and boot classpaths in the same form as would be passed on the command line.*/
+ def embeddedDefaults(loader: ClassLoader) {
+ explicitParentLoader = Option(loader) // for the Interpreter parentClassLoader
+ getClasspath("app", loader) foreach { classpath.value = _ }
+ getClasspath("boot", loader) foreach { bootclasspath append _ }
+ }
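A hedged sketch of the embedded scenario described in the comments above; MyEmbeddingApp is hypothetical, and its defining class loader is assumed to expose the app.class.path and boot.class.path resources:

    // Sketch: configuring Settings when the compiler runs inside another application.
    settings.embeddedDefaults[MyEmbeddingApp]               // uses MyEmbeddingApp's class loader

    // Equivalent explicit form when the loader is obtained some other way:
    settings.embeddedDefaults(Thread.currentThread.getContextClassLoader)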
+
+ /** The parent loader to use for the interpreter.*/
+ private[nsc] var explicitParentLoader: Option[ClassLoader] = None
+
+ /** Retrieves the contents of resource "${id}.class.path" from `loader`
+ * (wrapped in Some) or None if the resource does not exist.*/
+ private def getClasspath(id: String, loader: ClassLoader): Option[String] =
+ Option(loader).flatMap(ld => Option(ld.getResource(id + ".class.path"))).map { cp =>
+ Source.fromURL(cp).mkString
+ }
+
+ // a wrapper for all Setting creators to keep our list up to date
+ private def add[T <: Setting](s: T): T = {
+ allSettings += s
+ s
+ }
+
+ def BooleanSetting(name: String, descr: String) = add(new BooleanSetting(name, descr))
+ /** Compatibility layer, so that the IDE can call the 2.9 method. */
+ def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String): ChoiceSetting =
+ ChoiceSetting(name, descr, choices, default) // helpArg is ignored
+
+ def ChoiceSetting(name: String, descr: String, choices: List[String], default: String): ChoiceSetting =
+ add(new ChoiceSetting(name, descr, choices, default))
+ def DefinesSetting() = add(new DefinesSetting())
+ def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) = add(new IntSetting(name, descr, default, range, parser))
+ def MultiStringSetting(name: String, arg: String, descr: String) = add(new MultiStringSetting(name, arg, descr))
+ def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
+ def PhasesSetting(name: String, descr: String) = add(new PhasesSetting(name, descr))
+ def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def PathSetting(name: String, arg: String, descr: String, default: String): PathSetting = {
+ val prepend = new StringSetting(name + "/p", "", "", "") with InternalSetting
+ val append = new StringSetting(name + "/a", "", "", "") with InternalSetting
+
+ add[StringSetting](prepend)
+ add[StringSetting](append)
+ add(new PathSetting(name, arg, descr, default, prepend, append))
+ }
+
+ // basically this is a value which remembers if it's been modified
+ trait SettingValue extends AbsSettingValue {
+ protected var v: T
+ protected var setByUser: Boolean = false
+ def postSetHook(): Unit
+
+ def isDefault: Boolean = !setByUser
+ def value: T = v
+ def value_=(arg: T) = {
+ setByUser = true
+ v = arg
+ postSetHook()
+ }
+ }
+
+ /** A class for holding mappings from source directories to
+ * their output location. This functionality can be accessed
+ * only programmatically. The command line compiler uses a
+ * single output location, but tools may use this functionality
+ * to set output location per source directory.
+ */
+ class OutputDirs {
+ /** Pairs of source directory - destination directory. */
+ private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil
+
+ /** If this is not None, the output location where all
+ * classes should go.
+ */
+ private var singleOutDir: Option[AbstractFile] = None
+
+ /** Add a destination directory for sources found under srcdir.
+ * Both directories should exist.
+ */
+ def add(srcDir: String, outDir: String): Unit =
+ add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
+ checkDir(AbstractFile.getDirectory(outDir), outDir))
+
+ /** Check that dir exists and is a directory. */
+ private def checkDir(dir: AbstractFile, name: String): AbstractFile = {
+ if ((dir eq null) || !dir.isDirectory)
+ throw new FatalError(name + " does not exist or is not a directory")
+ dir
+ }
+
+ /** Set the single output directory. From now on, all files will
+ * be dumped in there, regardless of previous calls to 'add'.
+ */
+ def setSingleOutput(outDir: String) {
+ val dst = AbstractFile.getDirectory(outDir)
+ setSingleOutput(checkDir(dst, outDir))
+ }
+
+ /** Set the single output directory. From now on, all files will
+ * be dumped in there, regardless of previous calls to 'add'.
+ */
+ def setSingleOutput(dir: AbstractFile) {
+ singleOutDir = Some(dir)
+ }
+
+ def add(src: AbstractFile, dst: AbstractFile) {
+ singleOutDir = None
+ outputDirs ::= (src, dst)
+ }
+
+ /** Return the list of source-destination directory pairs. */
+ def outputs: List[(AbstractFile, AbstractFile)] = outputDirs
+
+ /** Return the output directory for the given file.
+ */
+ def outputDirFor(src: AbstractFile): AbstractFile = {
+ def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
+ src.path.startsWith(srcDir.path)
+
+ singleOutDir match {
+ case Some(d) => d
+ case None =>
+ (outputs find (isBelow _).tupled) match {
+ case Some((_, d)) => d
+ case _ =>
+ throw new FatalError("Could not find an output directory for "
+ + src.path + " in " + outputs)
+ }
+ }
+ }
+
+ /** Return the source file path(s) which correspond to the given
+ * classfile path and SourceFile attribute value, subject to the
+ * condition that source files are arranged in the filesystem
+ * according to Java package layout conventions.
+ *
+ * The given classfile path must be contained in at least one of
+ * the specified output directories. If it does not then this
+ * method returns Nil.
+ *
+ * Note that the source file is not required to exist, so assuming
+ * a valid classfile path this method will always return a list
+ * containing at least one element.
+ *
+ * Note also that if two or more source path elements target the same
+ * output directory there will be two or more candidate source file
+ * paths.
+ */
+ def srcFilesFor(classFile : AbstractFile, srcPath : String) : List[AbstractFile] = {
+ def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
+ classFile.path.startsWith(outDir.path)
+
+ singleOutDir match {
+ case Some(d) =>
+ d match {
+ case _: VirtualDirectory => Nil
+ case _ => List(d.lookupPathUnchecked(srcPath, false))
+ }
+ case None =>
+ (outputs filter (isBelow _).tupled) match {
+ case Nil => Nil
+ case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
+ }
+ }
+ }
+ }
+
+ /** A base class for settings of all types.
+ * Subclasses each define a `value' field of the appropriate type.
+ */
+ abstract class Setting(val name: String, val helpDescription: String) extends AbsSetting with SettingValue with Mutable {
+ /** Will be called after this Setting is set for any extra work. */
+ private var _postSetHook: this.type => Unit = (x: this.type) => ()
+ def postSetHook(): Unit = _postSetHook(this)
+ def withPostSetHook(f: this.type => Unit): this.type = { _postSetHook = f ; this }
+
+ /** The syntax defining this setting in a help string */
+ private var _helpSyntax = name
+ override def helpSyntax: String = _helpSyntax
+ def withHelpSyntax(s: String): this.type = { _helpSyntax = s ; this }
+
+ /** Abbreviations for this setting */
+ private var _abbreviations: List[String] = Nil
+ override def abbreviations = _abbreviations
+ def withAbbreviation(s: String): this.type = { _abbreviations ++= List(s) ; this }
+
+ /** Optional dependency on another setting */
+ private var dependency: Option[(Setting, String)] = None
+ override def dependencies = dependency.toList
+ def dependsOn(s: Setting, value: String): this.type = { dependency = Some((s, value)); this }
+ }
+
+ /** A setting represented by an integer */
+ class IntSetting private[nsc](
+ name: String,
+ descr: String,
+ val default: Int,
+ val range: Option[(Int, Int)],
+ parser: String => Option[Int])
+ extends Setting(name, descr) {
+ type T = Int
+ protected var v = default
+
+ // not stable values!
+ val IntMin = Int.MinValue
+ val IntMax = Int.MaxValue
+ def min = range map (_._1) getOrElse IntMin
+ def max = range map (_._2) getOrElse IntMax
+
+ override def value_=(s: Int) =
+ if (isInputValid(s)) super.value_=(s) else errorMsg
+
+ // Validate that min and max are consistent
+ assert(min <= max)
+
+ // Helper to validate an input
+ private def isInputValid(k: Int): Boolean = (min <= k) && (k <= max)
+
+ // Helper to generate a textual explanation of valid inputs
+ private def getValidText: String = (min, max) match {
+ case (IntMin, IntMax) => "can be any integer"
+ case (IntMin, x) => "must be less than or equal to "+x
+ case (x, IntMax) => "must be greater than or equal to "+x
+ case _ => "must be between %d and %d".format(min, max)
+ }
+
+ // Ensure that the default value is actually valid
+ assert(isInputValid(default))
+
+ def parseArgument(x: String): Option[Int] = {
+ parser(x) orElse {
+ try { Some(x.toInt) }
+ catch { case _: NumberFormatException => None }
+ }
+ }
+
+ def errorMsg = errorFn("invalid setting for -"+name+" "+getValidText)
+
+ def tryToSet(args: List[String]) =
+ if (args.isEmpty) errorAndValue("missing argument", None)
+ else parseArgument(args.head) match {
+ case Some(i) => value = i ; Some(args.tail)
+ case None => errorMsg ; None
+ }
+
+ def unparse: List[String] =
+ if (value == default) Nil
+ else List(name, value.toString)
+ }
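A hedged sketch of the range checking, using the -Yrecursion setting defined later in this patch (range 0 to Int.MaxValue); the exact error wording comes from errorMsg above:

    settings.processArgumentString("-Yrecursion 10")
    // settings.Yrecursion.value == 10

    settings.processArgumentString("-Yrecursion -5")
    // rejected by isInputValid: errorFn is called and the previous value is kept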
+
+ /** A setting represented by a boolean flag (false, unless set) */
+ class BooleanSetting private[nsc](
+ name: String,
+ descr: String)
+ extends Setting(name, descr) {
+ type T = Boolean
+ protected var v = false
+
+ def tryToSet(args: List[String]) = { value = true ; Some(args) }
+ def unparse: List[String] = if (value) List(name) else Nil
+ override def tryToSetFromPropertyValue(s : String) {
+ value = s.equalsIgnoreCase("true")
+ }
+ }
+
+ /** A setting represented by a string, (`default' unless set) */
+ class StringSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ val default: String)
+ extends Setting(name, descr) {
+ type T = String
+ protected var v = default
+
+ def tryToSet(args: List[String]) = args match {
+ case Nil => errorAndValue("missing argument", None)
+ case x :: xs => value = x ; Some(xs)
+ }
+ def unparse: List[String] = if (value == default) Nil else List(name, value)
+
+ withHelpSyntax(name + " <" + arg + ">")
+ }
+
+ class PathSetting private[nsc](
+ name: String,
+ arg: String,
+ descr: String,
+ default: String,
+ prependPath: StringSetting,
+ appendPath: StringSetting)
+ extends StringSetting(name, arg, descr, default) {
+ import util.ClassPath.join
+ def prepend(s: String) = prependPath.value = join(s, prependPath.value)
+ def append(s: String) = appendPath.value = join(appendPath.value, s)
+
+ override def value = join(
+ prependPath.value,
+ super.value,
+ appendPath.value
+ )
+ }
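A hedged sketch of how the hidden prepend/append components combine; the jar paths are made up and ClassPath.join is assumed to concatenate its non-empty arguments with the platform path separator:

    settings.classpath.value = "lib/main"          // the user-facing component
    settings.classpath.prepend("lib/first.jar")    // stored in the hidden "-classpath/p" setting
    settings.classpath.append("lib/extra.jar")     // stored in the hidden "-classpath/a" setting

    settings.classpath.value
    // == join("lib/first.jar", "lib/main", "lib/extra.jar")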
+
+ /** Set the output directory. */
+ class OutputSetting private[nsc](
+ outputDirs: OutputDirs,
+ default: String)
+ extends StringSetting("-d", "directory", "Specify where to place generated class files", default) {
+ value = default
+ override def value_=(str: String) {
+ super.value_=(str)
+ outputDirs.setSingleOutput(str)
+ }
+ }
+
+ /** A setting that accumulates all strings supplied to it,
+ * until it encounters one starting with a '-'. */
+ class MultiStringSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String)
+ extends Setting(name, descr) {
+ type T = List[String]
+ protected var v: List[String] = Nil
+ def appendToValue(str: String) { value ++= List(str) }
+
+ def tryToSet(args: List[String]) = {
+ val (strings, rest) = args span (x => !x.startsWith("-"))
+ strings foreach appendToValue
+
+ Some(rest)
+ }
+ override def tryToSetColon(args: List[String]) = tryToSet(args)
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(" +").toList)
+ def unparse: List[String] = value map { name + ":" + _ }
+
+ withHelpSyntax(name + ":<" + arg + ">")
+ }
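A hedged sketch of the accumulation behaviour, using the -Xplugin option defined later in this patch (the jar names are made up):

    // Plain form: strings accumulate until an argument starting with '-' appears.
    settings.processArgumentString("-Xplugin continuations.jar avian.jar -verbose")
    // settings.plugin.value == List("continuations.jar", "avian.jar"); -verbose is parsed as usual

    settings.processArgumentString("-Xplugin:more.jar")     // colon form appends one more entry
    settings.plugin.unparse
    // == List("-Xplugin:continuations.jar", "-Xplugin:avian.jar", "-Xplugin:more.jar")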
+
+ /** A setting represented by a string in a given set of <code>choices</code>,
+ * (<code>default</code> unless set).
+ */
+ class ChoiceSetting private[nsc](
+ name: String,
+ descr: String,
+ override val choices: List[String],
+ val default: String)
+ extends Setting(name, descr + choices.mkString(" (", ",", ")")) {
+ type T = String
+ protected var v: String = default
+ protected def argument: String = name drop 1
+ def indexOfChoice: Int = choices indexOf value
+
+ def tryToSet(args: List[String]) = { value = default ; Some(args) }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => errorAndValue("missing " + argument, None)
+ case List(x) if choices contains x => value = x ; Some(Nil)
+ case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None)
+ case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None)
+ }
+ def unparse: List[String] =
+ if (value == default) Nil else List(name + ":" + value)
+ override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil)
+
+ withHelpSyntax(name + ":<" + argument + ">")
+ }
+
+ /** A setting represented by a list of strings which should be prefixes of
+ * phase names. This is not checked here, however. Alternatively the string
+ * "all" can be used to represent all phases.
+ * (the empty list, unless set)
+ */
+ class PhasesSetting private[nsc](
+ name: String,
+ descr: String)
+ extends Setting(name, descr + " <phase> or \"all\"") {
+ type T = List[String]
+ protected var v: List[String] = Nil
+ override def value = if (v contains "all") List("all") else super.value
+
+ def tryToSet(args: List[String]) = errorAndValue("missing phase", None)
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => errorAndValue("missing phase", None)
+ case xs => value = (value ++ xs).distinct.sorted ; Some(Nil)
+ }
+ // we slightly abuse the usual meaning of "contains" here by returning
+ // true if our phase list contains "all", regardless of the incoming argument
+ def contains(phasename: String): Boolean =
+ doAllPhases || (value exists { phasename startsWith _ } )
+
+ def doAllPhases() = value contains "all"
+ def unparse: List[String] = value map { name + ":" + _ }
+
+ withHelpSyntax(name + ":<phase>")
+ }
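A hedged sketch of the prefix matching and the "all" shortcut described in the comments above, using the -Xprint phases setting from this patch:

    settings.processArgumentString("-Xprint:parser,typ")
    settings.Xprint contains "typer"     // true: "typer" starts with the stored prefix "typ"
    settings.Xprint contains "erasure"   // false

    settings.processArgumentString("-Xprint:all")
    settings.Xprint contains "erasure"   // true: doAllPhases short-circuits the prefix test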
+
+ /** A setting for a -D style property definition */
+ class DefinesSetting private[nsc] extends Setting("-D", "set a Java property") {
+ type T = List[(String, String)]
+ protected var v: T = Nil
+ withHelpSyntax(name + "<prop>")
+
+ // given foo=bar returns Some(foo, bar), or None if parse fails
+ def parseArg(s: String): Option[(String, String)] = {
+ if (s == "") return None
+ val idx = s indexOf '='
+
+ if (idx < 0) Some(s, "")
+ else Some(s take idx, s drop (idx + 1))
+ }
+
+ protected[nsc] override def tryToSetProperty(args: List[String]): Option[List[String]] =
+ tryToSet(args)
+
+ def tryToSet(args: List[String]) =
+ if (args.isEmpty) None
+ else parseArg(args.head) match {
+ case None => None
+ case Some((a, b)) => value = value ++ List((a, b)) ; Some(args.tail)
+ }
+
+ def unparse: List[String] =
+ value map { case (k,v) => "-D" + k + (if (v == "") "" else "=" + v) }
+
+ /** Apply the specified properties to the current JVM and return them. */
+ def applyToJVM() = {
+ value foreach { case (k, v) => System.getProperties.setProperty(k, v) }
+ value
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
new file mode 100644
index 0000000000..19b9c3c737
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -0,0 +1,169 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id$
+
+package scala.tools
+package nsc
+package settings
+
+import annotation.elidable
+import scala.tools.util.PathResolver.Defaults
+import scala.collection.mutable.HashSet
+
+trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
+ self: MutableSettings =>
+
+ import Defaults.scalaUserClassPath
+
+ /** Set of settings */
+ protected lazy val allSettings = HashSet[Setting]()
+
+ /** Disable a setting */
+ def disable(s: Setting) = allSettings -= s
+
+ /**
+ * Temporary Settings
+ */
+ val suppressVTWarn = BooleanSetting ("-Ysuppress-vt-typer-warnings", "Suppress warnings from the typer when testing the virtual class encoding, NOT FOR FINAL!")
+
+ /**
+ * Standard settings
+ */
+ // argfiles is only for the help message
+ val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
+ val classpath = PathSetting ("-classpath", "path", "Specify where to find user class files", scalaUserClassPath) .
+ withAbbreviation ("-cp")
+ val d = OutputSetting (outputDirs, ".")
+ val defines = DefinesSetting()
+ val optimise = BooleanSetting ("-optimise", "Generates faster bytecode by applying optimisations to the program") .
+ withAbbreviation("-optimize") .
+ withPostSetHook(_ => List(inline, Xcloselim, Xdce) foreach (_.value = true))
+ val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
+
+
+ /**
+ * -X "Advanced" settings
+ */
+ val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options")
+ val assemname = StringSetting ("-Xassem-name", "file", "Name of the output assembly (only relevant with -target:msil)", "").dependsOn(target, "msil")
+ val assemrefs = StringSetting ("-Xassem-path", "path", "List of assemblies referenced by the program (only relevant with -target:msil)", ".").dependsOn(target, "msil")
+ val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "List of directories containing assemblies, defaults to `lib'", Defaults.scalaLibDir.path).dependsOn(target, "msil")
+ val sourcedir = StringSetting ("-Xsourcedir", "directory", "When -target:msil, the source folder structure is mirrored in output directory.", ".").dependsOn(target, "msil")
+ val checkInit = BooleanSetting ("-Xcheckinit", "Add runtime checks on field accessors. Uninitialized accesses result in an exception being thrown.")
+ val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions and assumptions")
+ val elidebelow = IntSetting ("-Xelide-below", "Generate calls to @elidable-marked methods only if method priority is greater than argument.",
+ elidable.ASSERTION, None, elidable.byName.get(_))
+ val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes")
+ val future = BooleanSetting ("-Xfuture", "Turn on future language features")
+ val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot", "")
+ val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more info on why some implicits are not applicable")
+ val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8")
+ val nouescape = BooleanSetting ("-Xno-uescape", "Disables handling of \\u unicode escapes")
+ val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing")
+ val plugin = MultiStringSetting("-Xplugin", "file", "Load a plugin from a file")
+ val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable a plugin")
+ val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins")
+ val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless a plugin is available")
+ val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search compiler plugins", Defaults.scalaPluginPath)
+ val Xprint = PhasesSetting ("-Xprint", "Print out program after")
+ val writeICode = BooleanSetting ("-Xprint-icode", "Log internal icode to *.icode files")
+ val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions (as offsets)")
+ val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option)")
+ val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option)")
+ val resident = BooleanSetting ("-Xresident", "Compiler stays resident, files to compile are read from standard input")
+ val script = StringSetting ("-Xscript", "object", "Compile as a script, wrapping the code into object.main()", "")
+ val Xshowcls = StringSetting ("-Xshow-class", "class", "Show class info", "")
+ val Xshowobj = StringSetting ("-Xshow-object", "object", "Show object info", "")
+ val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases")
+ val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files", "scala.tools.nsc.io.SourceReader")
+
+ val Xwarnfatal = BooleanSetting ("-Xfatal-warnings", "Fail the compilation if there are any warnings.")
+ val Xwarninit = BooleanSetting ("-Xwarninit", "Warn about possible changes in initialization semantics")
+ val Xchecknull = BooleanSetting ("-Xcheck-null", "Emit warning on selection of nullable reference")
+
+ // Experimental Extensions
+ val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions") .
+ withPostSetHook(_ => List(YdepMethTpes, YmethodInfer) foreach (_.value = true)) //YvirtClasses,
+ val YdepMethTpes = BooleanSetting ("-Ydependent-method-types", "Allow dependent method types")
+ val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods")
+ val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
+
+ /** Compatibility stubs for options whose value name did
+ * not previously match the option name.
+ */
+ def XO = optimise
+ def debuginfo = g
+ def dependenciesFile = dependencyfile
+ def nowarnings = nowarn
+ def outdir = d
+ def printLate = print
+
+ /**
+ * -Y "Private" settings
+ */
+ val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options")
+ val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
+ val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
+ val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination")
+ val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees")
+ val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL")
+ val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination")
+ val debug = BooleanSetting ("-Ydebug", "Output debugging messages")
+ val Xdetach = BooleanSetting ("-Ydetach", "Perform detaching of remote closures")
+ // val doc = BooleanSetting ("-Ydoc", "Generate documentation")
+ val inline = BooleanSetting ("-Yinline", "Perform inlining when possible")
+ val Xlinearizer = ChoiceSetting ("-Ylinearizer", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo") .
+ withHelpSyntax("-Ylinearizer:<which>")
+ val log = PhasesSetting ("-Ylog", "Log operations in")
+ val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
+ val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java")
+ val noimports = BooleanSetting ("-Yno-imports", "Compile without any implicit imports")
+ val nopredefs = BooleanSetting ("-Yno-predefs", "Compile without any implicit predefined values")
+ val Yrecursion = IntSetting ("-Yrecursion", "Recursion depth used when locking symbols", 0, Some(0, Int.MaxValue), (_: String) => None)
+ val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations")
+ val Xshowtrees = BooleanSetting ("-Yshow-trees", "Show detailed trees when used in connection with -print:phase")
+ val skip = PhasesSetting ("-Yskip", "Skip")
+ val Xsqueeze = ChoiceSetting ("-Ysqueeze", "if on, creates compact code in matching", List("on","off"), "on") .
+ withHelpSyntax("-Ysqueeze:<enabled>")
+ val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics")
+ val stop = PhasesSetting ("-Ystop", "Stop after phase")
+ val refinementMethodDispatch =
+ ChoiceSetting ("-Ystruct-dispatch", "Selects dispatch method for structural refinement method calls",
+ List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache") .
+ withHelpSyntax("-Ystruct-dispatch:<method>")
+ val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
+ val Yidedebug = BooleanSetting ("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
+ val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "Compile using the specified build manager", List("none", "refined", "simple"), "none") .
+ withHelpSyntax("-Ybuilder-debug:<method>")
+ val Ybuildmanagerdebug =
+ BooleanSetting ("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
+ val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignments")
+ val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
+ val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.")
+ val Ycompletion = BooleanSetting ("-Ycompletion-debug", "Trace all tab completion activity.")
+ val Ypmatnaive = BooleanSetting ("-Ypmat-naive", "Desugar matches as naively as possible.")
+ // val Yjenkins = BooleanSetting ("-Yjenkins-hashCodes", "Use jenkins hash algorithm for case class generated hashCodes.")
+ val Ynotnull = BooleanSetting ("-Ynotnull", "Enable the experimental and incomplete scala.NotNull")
+
+ // Warnings
+ val Ywarndeadcode = BooleanSetting ("-Ywarn-dead-code", "Emit warnings for dead code")
+
+ /**
+ * IDE-specific settings
+ */
+ val YpresentationVerbose = BooleanSetting("-Ypresentation-verbose", "Print information about presentation compiler tasks.")
+ val YpresentationDebug = BooleanSetting("-Ypresentation-debug", "Enable debugging output for the presentation compiler.")
+ val YpresentationStrict = BooleanSetting("-Ypresentation-strict", "Do not report type errors in sources with syntax errors.")
+
+ val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
+ val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
+ val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, Some(0, 999), str => Some(str.toInt))
+
+ /**
+ * -P "Plugin" settings
+ */
+ val pluginOptions = MultiStringSetting("-P", "plugin:opt", "Pass an option to a plugin") .
+ withHelpSyntax("-P:<plugin>:<opt>")
+}
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
new file mode 100644
index 0000000000..12ae5c9d0e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -0,0 +1,54 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+import scala.tools.util.PathResolver.Defaults
+
+/** Settings which aren't behind a -X, -Y, or -P option.
+ * When possible, the val and the option have identical names.
+ * The abstract settings are commented as to why they are as yet
+ * implemented in MutableSettings rather than mutation-generically.
+ */
+trait StandardScalaSettings {
+ self: AbsScalaSettings =>
+
+ /** Path related settings.
+ */
+ val bootclasspath = PathSetting ("-bootclasspath", "path", "Override location of bootstrap class files", Defaults.scalaBootClassPath)
+ val classpath: PathSetting // is mutated directly in various places (thus inspiring this very effort)
+ val d: OutputSetting // depends on mutable OutputDirs class
+ val extdirs = PathSetting ("-extdirs", "dirs", "Override location of installed extensions", Defaults.scalaExtDirs)
+ val javabootclasspath = PathSetting ("-javabootclasspath", "path", "Override java boot classpath.", Defaults.javaBootClassPath)
+ val javaextdirs = PathSetting ("-javaextdirs", "path", "Override java extdirs classpath.", Defaults.javaExtDirs)
+ val sourcepath = StringSetting ("-sourcepath", "path", "Specify where to find input source files", "")
+
+ /** Other settings.
+ */
+ val dependencyfile = StringSetting ("-dependencyfile", "file", "Specify the file in which dependencies are tracked", ".scala_dependencies")
+ val deprecation = BooleanSetting ("-deprecation", "Output source locations where deprecated APIs are used")
+ val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files", Properties.sourceEncoding)
+ val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail")
+ val g = ChoiceSetting ("-g", "Specify level of generated debugging info", List("none", "source", "line", "vars", "notailcalls"), "vars")
+ val help = BooleanSetting ("-help", "Print a synopsis of standard options")
+ val make = ChoiceSetting ("-make", "Specify recompilation detection strategy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all") .
+ withHelpSyntax("-make:<strategy>")
+ val nowarn = BooleanSetting ("-nowarn", "Generate no warnings")
+ val optimise: BooleanSetting // depends on post hook which mutates other settings
+ val print = BooleanSetting ("-print", "Print program with all Scala-specific features removed")
+ val target = ChoiceSetting ("-target", "Specify for which target object files should be built", List("jvm-1.5", "msil"), "jvm-1.5")
+ val unchecked = BooleanSetting ("-unchecked", "Enable detailed unchecked warnings")
+ val uniqid = BooleanSetting ("-uniqid", "Print identifiers with unique names for debugging")
+ val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
+ val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing")
+ val version = BooleanSetting ("-version", "Print product version and exit")
+
+ /** These are @<file> and -Dkey=val style settings, which don't
+ * nicely map to identifiers.
+ */
+ val argfiles: BooleanSetting // exists only to echo help message, should be done differently
+ val defines: DefinesSetting // not entirely clear that DefinesSetting makes sense as a Setting
+}
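
The abstract members above (classpath, d, optimise, argfiles, defines) lean on the self-type to AbsScalaSettings: the trait declares settings through abstract factory methods and a concrete, mutable implementation supplies them. Below is a minimal sketch of that pattern with invented names (AbsSettings, Standard, Mutable); it is not the compiler's actual hierarchy.

    trait AbsSettings {
      type BoolSetting
      def BooleanSetting(name: String, descr: String): BoolSetting
    }

    trait Standard { self: AbsSettings =>
      // concrete: created through the abstract factory
      val deprecation = BooleanSetting("-deprecation", "Output source locations where deprecated APIs are used")
      // abstract: the implementation wires it up itself (cf. classpath/optimise above)
      val verbose: BoolSetting
    }

    class Mutable extends AbsSettings with Standard {
      class BoolSettingImpl(val name: String, val descr: String) { var value = false }
      type BoolSetting = BoolSettingImpl
      def BooleanSetting(name: String, descr: String) = new BoolSettingImpl(name, descr)
      val verbose = BooleanSetting("-verbose", "Output messages about what the compiler is doing")
    }
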
diff --git a/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala b/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
index cc07d7896e..388e68497e 100644
--- a/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -21,6 +20,18 @@ trait AnnotationCheckers {
/** Check the annotations on two types conform. */
def annotationsConform(tpe1: Type, tpe2: Type): Boolean
+ /** Refine the computed least upper bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsLub(tp: Type, ts: List[Type]): Type = tp
+
+ /** Refine the computed greatest lower bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsGlb(tp: Type, ts: List[Type]): Type = tp
+
+ /** Refine the bounds on type parameters to the given type arguments. */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds],
+ tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = bounds
+
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
def addAnnotations(tree: Tree, tpe: Type): Type = tpe
@@ -64,6 +75,27 @@ trait AnnotationCheckers {
_.annotationsConform(tp1,tp2))
}
+ /** Refine the computed least upper bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsLub(tpe: Type, ts: List[Type]): Type = {
+ annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ checker.annotationsLub(tpe, ts))
+ }
+
+ /** Refine the computed greatest lower bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsGlb(tpe: Type, ts: List[Type]): Type = {
+ annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ checker.annotationsGlb(tpe, ts))
+ }
+
+ /** Refine the bounds on type parameters to the given type arguments. */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds],
+ tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
+ annotationCheckers.foldLeft(bounds)((bounds, checker) =>
+ checker.adaptBoundsToAnnotations(bounds, tparams, targs))
+ }
+
/** Let all annotations checkers add extra annotations
* to this tree's type. */
def addAnnotations(tree: Tree, tpe: Type): Type = {
@@ -74,8 +106,7 @@ trait AnnotationCheckers {
/** Find out whether any annotation checker can adapt a tree
* to a given type. Called by Typers.adapt. */
def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
- annotationCheckers.foldLeft(false)((res, checker) =>
- res || checker.canAdaptAnnotations(tree, mode, pt))
+ annotationCheckers.exists(_.canAdaptAnnotations(tree, mode, pt))
}
/** Let registered annotation checkers adapt a tree
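
The hunks above add lub/glb refinement hooks and replace a foldLeft(false)(_ || _) with exists. Here is a standalone sketch of the same dispatch pattern over a list of registered checkers; Checker, refineLub and canAdapt are invented names for illustration, not the compiler's API.

    object CheckerDemo extends App {
      trait Checker {
        def refineLub(tp: String, ts: List[String]): String = tp  // may only add information
        def canAdapt(tree: String): Boolean = false
      }

      private var checkers: List[Checker] = Nil
      def addChecker(c: Checker): Unit = checkers ::= c

      // The result of one checker is threaded into the next, as in annotationsLub above.
      def refineLub(tp: String, ts: List[String]): String =
        checkers.foldLeft(tp)((acc, c) => c.refineLub(acc, ts))

      // exists short-circuits, which is why it replaces foldLeft(false)(_ || _) above.
      def canAdapt(tree: String): Boolean = checkers.exists(_.canAdapt(tree))

      addChecker(new Checker { override def refineLub(tp: String, ts: List[String]) = tp + " @refined" })
      println(refineLub("Int", List("Int", "Int")))  // Int @refined
      println(canAdapt("someTree"))                  // false
    }
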
diff --git a/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala b/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
index dd0c32cee2..2429f53aa1 100644
--- a/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
+++ b/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -11,8 +10,7 @@ import scala.tools.nsc.transform.Reifiers
import util._
/** AnnotationInfo and its helpers */
-trait AnnotationInfos {
- self: SymbolTable =>
+trait AnnotationInfos extends reflect.generic.AnnotationInfos { self: SymbolTable =>
/** Arguments to classfile annotations (which are written to
* bytecode as java annotations) are either:
@@ -33,12 +31,31 @@ trait AnnotationInfos {
override def toString = const.escapedStringValue
}
+ object LiteralAnnotArg extends LiteralAnnotArgExtractor
+
/** Represents an array of classfile annotation arguments */
case class ArrayAnnotArg(args: Array[ClassfileAnnotArg])
extends ClassfileAnnotArg {
override def toString = args.mkString("[", ", ", "]")
}
+ object ArrayAnnotArg extends ArrayAnnotArgExtractor
+
+ /** A specific annotation argument that encodes an array of bytes as an array of `Long`. The type of the argument
+ * declared in the annotation must be `String`. This specialised class is used to encode scala signatures for
+ * reasons of efficiency, both in terms of class-file size and compiler performance. */
+ case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg {
+ override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
+ lazy val encodedBytes =
+ reflect.generic.ByteCodecs.encode(bytes)
+ def isLong: Boolean = (encodedBytes.length > 65535)
+ def sigAnnot: Type =
+ if (this.isLong)
+ definitions.ScalaLongSignatureAnnotation.tpe
+ else
+ definitions.ScalaSignatureAnnotation.tpe
+ }
+
/** Represents a nested classfile annotation */
case class NestedAnnotArg(annInfo: AnnotationInfo)
extends ClassfileAnnotArg {
@@ -47,6 +64,8 @@ trait AnnotationInfos {
override def toString = annInfo.toString
}
+ object NestedAnnotArg extends NestedAnnotArgExtractor
+
class AnnotationInfoBase
/** <p>
@@ -60,7 +79,7 @@ trait AnnotationInfos {
* class).
* </p>
* <p>
- * Annotations are pickled (written to scala symbtab attribute
+ * Annotations are pickled (written to scala symtab attribute
 * in the classfile) if <code>atp</code> inherits from
* <code>StaticAnnotation</code>.
* </p>
@@ -104,8 +123,25 @@ trait AnnotationInfos {
val subs = new TreeSymSubstituter(List(from), List(to))
AnnotationInfo(atp, args.map(subs(_)), assocs).setPos(pos)
}
+
+ // !!! when annotation arguments are not literal strings, but any sort of
+ // assembly of strings, there is a fair chance they will turn up here not as
+ // Literal(const) but some arbitrary AST.
+ def stringArg(index: Int): Option[String] = if(args.size > index) Some(args(index) match {
+ case Literal(const) => const.stringValue
+ case x => x.toString // should not be necessary, but better than silently ignoring an issue
+ }) else None
+
+ def intArg(index: Int): Option[Int] = if(args.size > index) Some(args(index)) collect {
+ case Literal(Constant(x: Int)) => x
+ } else None
}
+ object AnnotationInfo extends AnnotationInfoExtractor
+
+ lazy val classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] =
+ reflect.ClassManifest.classType(classOf[ClassfileAnnotArg])
+
/** Symbol annotations parsed in Namer (typeCompleter of
* definitions) have to be lazy (#1782)
*/
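
The stringArg/intArg helpers added above pull an optional literal out of an annotation's argument list by index. A standalone model of that extraction over an invented mini-AST (Arg, Lit, Other are hypothetical, not the compiler's Literal/Constant):

    object AnnotArgDemo extends App {
      sealed trait Arg
      case class Lit(value: Any) extends Arg
      case class Other(repr: String) extends Arg

      def stringArg(args: List[Arg], index: Int): Option[String] =
        args.lift(index) map {
          case Lit(v) => v.toString
          case x      => x.toString // fall back rather than silently dropping the argument
        }

      def intArg(args: List[Arg], index: Int): Option[Int] =
        args.lift(index) collect { case Lit(i: Int) => i }

      val args = List(Lit("msg"), Lit(3), Other("<expr>"))
      println(stringArg(args, 0)) // Some(msg)
      println(intArg(args, 1))    // Some(3)
      println(intArg(args, 2))    // None
    }
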
diff --git a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala b/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
index 6c29ab5cf3..c230533765 100644
--- a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
@@ -1,14 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package symtab
// todo implement in terms of BitSet
-import scala.collection.mutable.ListBuffer
-import scala.collection.immutable.Map
-import Math.max
+import scala.collection.mutable.{ListBuffer, BitSet}
+import math.max
+import util.Statistics._
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
 * of a type. It is characterized by the following two laws:
@@ -31,42 +31,48 @@ trait BaseTypeSeqs {
class BaseTypeSeq(parents: List[Type], elems: Array[Type]) {
self =>
+ incCounter(baseTypeSeqCount)
+ incCounter(baseTypeSeqLenTotal, elems.length)
/** The number of types in the sequence */
def length: Int = elems.length
- var pending: Map[Int, Type] = Map()
+ // #3676 shows why we can't store NoType in elems to mark cycles
+ // (while NoType is in there to indicate a cycle in this BTS, during the execution of
+ // the mergePrefixAndArgs below, the elems get copied without the pending map,
+ // so that NoType's are seen instead of the original type --> spurious compile error)
+ val pending = new BitSet(length)
/** The type at i'th position in this sequence; lazy types are returned evaluated. */
- def apply(i: Int): Type = elems(i) match {
- case NoType =>
- pending = Map()
- elems(i) = AnyClass.tpe
+ def apply(i: Int): Type =
+ if(pending contains i) {
+ pending.clear()
throw CyclicInheritance
- case rtp @ RefinedType(variants, decls) =>
- // can't assert decls.isEmpty; see t0764
- //if (!decls.isEmpty) assert(false, "computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j))
- //Console.println("compute closure of "+this+" => glb("+variants+")")
- pending += (i -> rtp)
- elems(i) = NoType
- try {
- mergePrefixAndArgs(variants, -1, lubDepth(variants)) match {
- case Some(tp0) =>
- pending -= i
- elems(i) = tp0
- tp0
- case None =>
- typeError(
- "no common type instance of base types "+(variants mkString ", and ")+" exists.")
- }
- } catch {
- case CyclicInheritance =>
- typeError(
- "computing the common type instance of base types "+(variants mkString ", and ")+" leads to a cycle.")
+ } else
+ elems(i) match {
+ case rtp @ RefinedType(variants, decls) =>
+ // can't assert decls.isEmpty; see t0764
+ //if (!decls.isEmpty) assert(false, "computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j))
+ //Console.println("compute closure of "+this+" => glb("+variants+")")
+ pending += i
+ try {
+ mergePrefixAndArgs(variants, -1, lubDepth(variants)) match {
+ case Some(tp0) =>
+ pending(i) = false
+ elems(i) = tp0
+ tp0
+ case None =>
+ typeError(
+ "no common type instance of base types "+(variants mkString ", and ")+" exists.")
+ }
+ } catch {
+ case CyclicInheritance =>
+ typeError(
+ "computing the common type instance of base types "+(variants mkString ", and ")+" leads to a cycle.")
+ }
+ case tp =>
+ tp
}
- case tp =>
- tp
- }
def rawElem(i: Int) = elems(i)
@@ -74,17 +80,9 @@ trait BaseTypeSeqs {
* no evaluation needed.
*/
def typeSymbol(i: Int): Symbol = {
- def tsym(tp: Type) = tp match {
- case RefinedType(v :: vs, _) => v.typeSymbol
- case _ => tp.typeSymbol
- }
elems(i) match {
- case NoType =>
- pending get i match {
- case Some(tp) => tsym(tp)
- case _ => NoType.typeSymbol
- }
- case tp => tsym(tp)
+ case RefinedType(v :: vs, _) => v.typeSymbol
+ case tp => tp.typeSymbol
}
}
@@ -137,7 +135,7 @@ trait BaseTypeSeqs {
protected def maxDepthOfElems = {
var d = 0
- for (i <- 0 until length) d = Math.max(d, maxDpth(elems(i)))
+ for (i <- 0 until length) d = max(d, maxDpth(elems(i)))
d
}
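
The BitSet change above replaces the old trick of writing NoType into elems (which leaked into copies of the array, see #3676) with an explicit set of in-progress indices. Below is a simplified, self-contained model of that cycle detection; LazySeq and its members are invented for illustration and only mimic the shape of the real merge logic.

    import scala.collection.mutable.BitSet

    // Evaluate elements lazily; indices currently being evaluated live in `pending`,
    // so the backing array is never polluted with a sentinel value.
    class LazySeq(thunks: Array[() => Int]) {
      private val pending = new BitSet(thunks.length)
      private val cache   = new Array[Option[Int]](thunks.length)

      def apply(i: Int): Int = {
        if (pending contains i) { pending.clear(); sys.error("cyclic element at index " + i) }
        if (cache(i) == null) {
          pending += i
          try cache(i) = Some(thunks(i)())   // may recursively call apply
          finally pending -= i               // mirrors `pending(i) = false` above
        }
        cache(i).get
      }
    }

    object LazySeqDemo extends App {
      val seq = new LazySeq(Array(() => 1, () => 2))
      println(seq(0) + seq(1)) // 3
    }
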
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
new file mode 100644
index 0000000000..3594e48323
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -0,0 +1,118 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package symtab
+
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.BatchSourceFile
+
+/** A subclass of SymbolLoaders that implements browsing behavior.
+ * This class should be used whenever file dependencies and recompile sets
+ * are managed automatically.
+ */
+abstract class BrowsingLoaders extends SymbolLoaders {
+ import global._
+
+ import syntaxAnalyzer.{OutlineParser, MalformedInput}
+
+ /** In browse mode, it can happen that an encountered symbol is already
+ * present. For instance, if the source file has a name different from
+ * the classes and objects it contains, the symbol loader will always
+ * reparse the source file. The symbols it encounters might already be loaded
+ * as class files. In this case we return the one which has a sourcefile
+ * (and the other does not), and issue an error if both have sourcefiles.
+ */
+ override protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
+ completer.sourcefile match {
+ case Some(src) =>
+ (if (member.isModule) member.moduleClass else member).sourceFile = src
+ case _ =>
+ }
+ val decls = owner.info.decls
+ val existing = decls.lookup(member.name)
+ if (existing == NoSymbol) {
+ decls enter member
+ member
+ } else if (existing.sourceFile == null) {
+ decls unlink existing
+ decls enter member
+ member
+ } else {
+ if (member.sourceFile != null) {
+ if (existing.sourceFile != member.sourceFile)
+ error(member+" is defined twice,"+
+ "\n in "+existing.sourceFile+
+ "\n and also in "+member.sourceFile)
+ }
+ existing
+ }
+ }
+
+ /** Browse the top-level of given abstract file `src` and enter
+ * any encountered top-level classes and modules in `root`
+ */
+ def browseTopLevel(root: Symbol, src: AbstractFile) {
+
+ class BrowserTraverser extends Traverser {
+ var packagePrefix = ""
+ var entered = 0
+ def addPackagePrefix(pkg: Tree): Unit = pkg match {
+ case Select(pre, name) =>
+ addPackagePrefix(pre)
+ packagePrefix += ("." + name)
+ case Ident(name) =>
+ if (packagePrefix.length != 0) packagePrefix += "."
+ packagePrefix += name
+ case _ =>
+ throw new MalformedInput(pkg.pos.point, "illegal tree node in package prefix: "+pkg)
+ }
+ override def traverse(tree: Tree): Unit = tree match {
+ case PackageDef(pkg, body) =>
+ addPackagePrefix(pkg)
+ body foreach traverse
+ case ClassDef(_, name, _, _) =>
+ if (packagePrefix == root.fullName) {
+ enterClass(root, name.toString, new SourcefileLoader(src))
+ entered += 1
+ } else println("prefixes differ: "+packagePrefix+","+root.fullName)
+ case ModuleDef(_, name, _) =>
+ if (packagePrefix == root.fullName) {
+ val module = enterModule(root, name.toString, new SourcefileLoader(src))
+ entered += 1
+ if (name == nme.PACKAGEkw) {
+ println("open package module: "+module)
+ loaders.openPackageModule(module)()
+ }
+ } else println("prefixes differ: "+packagePrefix+","+root.fullName)
+ case _ =>
+ }
+ }
+
+// System.out.println("Browsing "+src)
+ val source = getSourceFile(src) // this uses the current encoding
+ val body = new OutlineParser(source).parse()
+// System.out.println(body)
+ val browser = new BrowserTraverser
+ browser.traverse(body)
+ if (browser.entered == 0)
+ warning("No classes or objects found in "+source+" that go in "+root)
+ }
+
+ /** Enter top-level symbols from a source file
+ */
+ override def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
+ try {
+ if (root == definitions.RootClass || root == definitions.EmptyPackageClass)
+ super.enterToplevelsFromSource(root, name, src)
+ else
+ browseTopLevel(root, src)
+ } catch {
+ case ex: syntaxAnalyzer.MalformedInput =>
+ println("[%s] caught malformed input exception at offset %d: %s".format(src, ex.offset, ex.msg))
+ super.enterToplevelsFromSource(root, name, src)
+ }
+ }
+}
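
BrowserTraverser above only descends into package, class and object nodes, accumulating the package prefix and comparing it with the root symbol's full name. A toy model of that outline walk over an invented mini-AST (not the compiler's Trees):

    object BrowseDemo extends App {
      sealed trait Tree
      case class PackageDef(pkg: String, body: List[Tree]) extends Tree
      case class ClassDef(name: String) extends Tree
      case class ModuleDef(name: String) extends Tree

      // Collect top-level definitions whose package prefix matches `root`.
      def topLevel(root: String, tree: Tree, prefix: String = ""): List[String] = tree match {
        case PackageDef(pkg, body) =>
          val p = if (prefix.isEmpty) pkg else prefix + "." + pkg
          body.flatMap(topLevel(root, _, p))
        case ClassDef(name)  if prefix == root => List(name)
        case ModuleDef(name) if prefix == root => List(name + "$")
        case _ => Nil
      }

      val src = PackageDef("scala", List(PackageDef("tools", List(ClassDef("Main"), ModuleDef("Main")))))
      println(topLevel("scala.tools", src)) // List(Main, Main$)
    }
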
diff --git a/src/compiler/scala/tools/nsc/symtab/Definitions.scala b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
index 25c60c32b8..5f7a1e332b 100644
--- a/src/compiler/scala/tools/nsc/symtab/Definitions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
@@ -1,28 +1,22 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.util.{Position, NoPosition}
+import scala.tools.nsc.util.NoPosition
import Flags._
+import PartialFunction._
-trait Definitions {
+trait Definitions extends reflect.generic.StandardDefinitions {
self: SymbolTable =>
- object definitions {
+ object definitions extends AbsDefinitions {
def isDefinitionsInitialized = isInitialized
- // Working around bug #2133
- private object definitionHelpers {
- def cond[T](x: T)(f: PartialFunction[T, Boolean]) = (f isDefinedAt x) && f(x)
- }
- import definitionHelpers._
-
// symbols related to packages
var emptypackagescope: Scope = null //debug
@@ -30,7 +24,7 @@ trait Definitions {
val rp=NoSymbol.newValue(NoPosition, nme.ROOTPKG)
.setFlag(FINAL | MODULE | PACKAGE | JAVA)
.setInfo(PolyType(List(), RootClass.tpe))
- RootClass.setSourceModule(rp)
+ RootClass.sourceModule = rp
rp
}
lazy val RootClass: ModuleClassSymbol = NoSymbol.newModuleClass(NoPosition, nme.ROOT.toTypeName)
@@ -51,6 +45,25 @@ trait Definitions {
lazy val anyvalparam = List(AnyValClass.typeConstructor)
lazy val anyrefparam = List(AnyRefClass.typeConstructor)
+ // private parameter conveniences
+ private def booltype = BooleanClass.typeConstructor
+ private def boolparam = List(booltype)
+ private def bytetype = ByteClass.typeConstructor
+ private def byteparam = List(bytetype)
+ private def shorttype = ShortClass.typeConstructor
+ private def shortparam = List(shorttype)
+ private def inttype = IntClass.typeConstructor
+ private def intparam = List(inttype)
+ private def longtype = LongClass.typeConstructor
+ private def longparam = List(longtype)
+ private def floattype = FloatClass.typeConstructor
+ private def floatparam = List(floattype)
+ private def doubletype = DoubleClass.typeConstructor
+ private def doubleparam = List(doubletype)
+ private def chartype = CharClass.typeConstructor
+ private def charparam = List(chartype)
+ private def stringtype = StringClass.typeConstructor
+
// top types
lazy val AnyClass = newClass(ScalaPackageClass, nme.Any, Nil) setFlag (ABSTRACT)
lazy val AnyValClass = newClass(ScalaPackageClass, nme.AnyVal, anyparam) setFlag (ABSTRACT | SEALED)
@@ -63,23 +76,34 @@ trait Definitions {
lazy val RuntimeNothingClass = getClass("scala.runtime.Nothing$")
lazy val RuntimeNullClass = getClass("scala.runtime.Null$")
+ lazy val AnyValCompanionClass = getClass("scala.runtime.AnyValCompanion").setFlag(SEALED | ABSTRACT | TRAIT)
+
// the scala value classes
- lazy val UnitClass = newClass(ScalaPackageClass, nme.Unit, anyvalparam).setFlag(ABSTRACT | FINAL)
- lazy val ByteClass = newValueClass(nme.Byte, 'B', 1)
- lazy val ShortClass = newValueClass(nme.Short, 'S', 2)
- lazy val CharClass = newValueClass(nme.Char, 'C', 2)
- lazy val IntClass = newValueClass(nme.Int, 'I', 3)
- lazy val LongClass = newValueClass(nme.Long, 'L', 4)
- lazy val FloatClass = newValueClass(nme.Float, 'F', 5)
- lazy val DoubleClass = newValueClass(nme.Double, 'D', 6)
- lazy val BooleanClass = newValueClass(nme.Boolean, 'Z', -1)
+ lazy val UnitClass =
+ newClass(ScalaPackageClass, nme.Unit, anyvalparam).setFlag(ABSTRACT | FINAL)
+
+ import classfile.ClassfileConstants._
+
+ lazy val ByteClass = newValueClass(nme.Byte, BYTE_TAG, 2)
+ lazy val ShortClass = newValueClass(nme.Short, SHORT_TAG, 4)
+ lazy val CharClass = newValueClass(nme.Char, CHAR_TAG, 3)
+ lazy val IntClass = newValueClass(nme.Int, INT_TAG, 12)
+ lazy val LongClass = newValueClass(nme.Long, LONG_TAG, 24)
+ lazy val FloatClass = newValueClass(nme.Float, FLOAT_TAG, 48)
+ lazy val DoubleClass = newValueClass(nme.Double, DOUBLE_TAG, 96)
+ lazy val BooleanClass = newValueClass(nme.Boolean, BOOL_TAG, 0)
def Boolean_and = getMember(BooleanClass, nme.ZAND)
def Boolean_or = getMember(BooleanClass, nme.ZOR)
+ def ScalaValueClasses = List(
+ UnitClass, ByteClass, ShortClass, IntClass, LongClass,
+ CharClass, FloatClass, DoubleClass, BooleanClass
+ )
+
// exceptions and other throwables
lazy val ThrowableClass = getClass(sn.Throwable)
lazy val NullPointerExceptionClass = getClass(sn.NPException)
- lazy val NonLocalReturnExceptionClass = getClass(sn.NLRException)
+ lazy val NonLocalReturnControlClass = getClass(sn.NLRControl)
lazy val IndexOutOfBoundsExceptionClass = getClass(sn.IOOBException)
lazy val UninitializedErrorClass = getClass("scala.UninitializedFieldError")
lazy val MatchErrorClass = getClass("scala.MatchError")
@@ -94,18 +118,24 @@ trait Definitions {
lazy val uncheckedStableClass = getClass("scala.annotation.unchecked.uncheckedStable")
lazy val uncheckedVarianceClass = getClass("scala.annotation.unchecked.uncheckedVariance")
lazy val UncheckedClass = getClass("scala.unchecked")
+ lazy val ThrowsClass = getClass("scala.throws")
lazy val TailrecClass = getClass("scala.annotation.tailrec")
lazy val SwitchClass = getClass("scala.annotation.switch")
lazy val ElidableMethodClass = getClass("scala.annotation.elidable")
- lazy val FieldClass = getClass("scala.annotation.target.field")
- lazy val GetterClass = getClass("scala.annotation.target.getter")
- lazy val SetterClass = getClass("scala.annotation.target.setter")
- lazy val BeanGetterClass = getClass("scala.annotation.target.beanGetter")
- lazy val BeanSetterClass = getClass("scala.annotation.target.beanSetter")
+ lazy val ImplicitNotFoundClass = getClass("scala.annotation.implicitNotFound")
+ lazy val FieldTargetClass = getClass("scala.annotation.target.field")
+ lazy val GetterTargetClass = getClass("scala.annotation.target.getter")
+ lazy val SetterTargetClass = getClass("scala.annotation.target.setter")
+ lazy val BeanGetterTargetClass = getClass("scala.annotation.target.beanGetter")
+ lazy val BeanSetterTargetClass = getClass("scala.annotation.target.beanSetter")
+ lazy val ParamTargetClass = getClass("scala.annotation.target.param")
+ lazy val SpecializedClass = definitions.getClass("scala.specialized")
+
// fundamental reference classes
lazy val ScalaObjectClass = getClass("scala.ScalaObject")
lazy val PartialFunctionClass = getClass("scala.PartialFunction")
+ lazy val SymbolClass = getClass("scala.Symbol")
lazy val StringClass = getClass(sn.String)
lazy val ClassClass = getClass(sn.Class)
def Class_getMethod = getMember(ClassClass, nme.getMethod_)
@@ -113,21 +143,19 @@ trait Definitions {
// fundamental modules
lazy val PredefModule: Symbol = getModule("scala.Predef")
def Predef_classOf = getMember(PredefModule, nme.classOf)
- def Predef_classOfType(classType: Type): Type =
- if (!ClassClass.unsafeTypeParams.isEmpty && !phase.erasedTypes)
- appliedType(ClassClass.tpe, List(classType))
- else ClassClass.tpe
def Predef_error = getMember(PredefModule, nme.error)
def Predef_identity = getMember(PredefModule, nme.identity)
def Predef_conforms = getMember(PredefModule, nme.conforms)
lazy val ConsoleModule: Symbol = getModule("scala.Console")
lazy val ScalaRunTimeModule: Symbol = getModule("scala.runtime.ScalaRunTime")
+ lazy val SymbolModule: Symbol = getModule("scala.Symbol")
def SeqFactory = getMember(ScalaRunTimeModule, nme.Seq)
- def checkDefinedMethod = getMember(ScalaRunTimeModule, "checkDefined")
- def isArrayMethod = getMember(ScalaRunTimeModule, "isArray")
def arrayApplyMethod = getMember(ScalaRunTimeModule, "array_apply")
def arrayUpdateMethod = getMember(ScalaRunTimeModule, "array_update")
def arrayLengthMethod = getMember(ScalaRunTimeModule, "array_length")
+ def arrayCloneMethod = getMember(ScalaRunTimeModule, "array_clone")
+ def scalaRuntimeHash = getMember(ScalaRunTimeModule, "hash")
+ def scalaRuntimeSameElements = getMember(ScalaRunTimeModule, nme.sameElements)
// classes with special meanings
lazy val NotNullClass = getClass("scala.NotNull")
@@ -189,11 +217,14 @@ trait Definitions {
def Array_apply = getMember(ArrayClass, nme.apply)
def Array_update = getMember(ArrayClass, nme.update)
def Array_length = getMember(ArrayClass, nme.length)
+ lazy val Array_clone = getMember(ArrayClass, nme.clone_)
lazy val ArrayModule = getModule("scala.Array")
- def ArrayModule_apply = getMember(ArrayModule, nme.apply)
// reflection / structural types
+ lazy val SoftReferenceClass = getClass("java.lang.ref.SoftReference")
+ lazy val WeakReferenceClass = getClass("java.lang.ref.WeakReference")
lazy val MethodClass = getClass(sn.MethodAsObject)
+ def methodClass_setAccessible = getMember(MethodClass, nme.setAccessible)
lazy val EmptyMethodCacheClass = getClass("scala.runtime.EmptyMethodCache")
lazy val MethodCacheClass = getClass("scala.runtime.MethodCache")
def methodCache_find = getMember(MethodCacheClass, nme.find_)
@@ -210,6 +241,9 @@ trait Definitions {
lazy val CodeModule = getModule(sn.Code)
def Code_lift = getMember(CodeModule, nme.lift_)
+ lazy val ScalaSignatureAnnotation = getClass("scala.reflect.ScalaSignature")
+ lazy val ScalaLongSignatureAnnotation = getClass("scala.reflect.ScalaLongSignature")
+
// invoke dynamic support
lazy val LinkageModule = getModule("java.dyn.Linkage")
lazy val Linkage_invalidateCallerClass = getMember(LinkageModule, "invalidateCallerClass")
@@ -219,14 +253,16 @@ trait Definitions {
// Option classes
lazy val OptionClass: Symbol = getClass("scala.Option")
lazy val SomeClass: Symbol = getClass("scala.Some")
- lazy val NoneClass: Symbol = getModule("scala.None")
+ lazy val NoneModule: Symbol = getModule("scala.None")
def isOptionType(tp: Type) = cond(tp.normalize) { case TypeRef(_, OptionClass, List(_)) => true }
def isSomeType(tp: Type) = cond(tp.normalize) { case TypeRef(_, SomeClass, List(_)) => true }
- def isNoneType(tp: Type) = cond(tp.normalize) { case TypeRef(_, NoneClass, List(_)) => true }
+ def isNoneType(tp: Type) = cond(tp.normalize) { case TypeRef(_, NoneModule, List(_)) => true }
def optionType(tp: Type) = typeRef(OptionClass.typeConstructor.prefix, OptionClass, List(tp))
def someType(tp: Type) = typeRef(SomeClass.typeConstructor.prefix, SomeClass, List(tp))
+ def symbolType = typeRef(SymbolClass.typeConstructor.prefix, SymbolClass, List())
+ def longType = typeRef(LongClass.typeConstructor.prefix, LongClass, List())
// Product, Tuple, Function
private def mkArityArray(name: String, arity: Int, countFrom: Int = 1) = {
@@ -244,6 +280,7 @@ trait Definitions {
lazy val TupleClass = mkArityArray("Tuple", MaxTupleArity)
lazy val ProductClass = mkArityArray("Product", MaxProductArity)
lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
+ lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
def tupleField(n: Int, j: Int) = getMember(TupleClass(n), "_" + j)
def isTupleType(tp: Type): Boolean = cond(tp.normalize) {
@@ -258,6 +295,7 @@ trait Definitions {
lazy val ProductRootClass: Symbol = getClass("scala.Product")
def Product_productArity = getMember(ProductRootClass, nme.productArity)
def Product_productElement = getMember(ProductRootClass, nme.productElement)
+ // def Product_productElementName = getMember(ProductRootClass, nme.productElementName)
def Product_productPrefix = getMember(ProductRootClass, nme.productPrefix)
def Product_canEqual = getMember(ProductRootClass, nme.canEqual_)
@@ -297,6 +335,14 @@ trait Definitions {
typeRef(sym.typeConstructor.prefix, sym, formals ::: List(restpe))
} else NoType
+ def abstractFunctionForFunctionType(tp: Type) = tp.normalize match {
+ case tr @ TypeRef(_, _, args) if isFunctionType(tr) =>
+ val sym = AbstractFunctionClass(args.length - 1)
+ typeRef(sym.typeConstructor.prefix, sym, args)
+ case _ =>
+ NoType
+ }
+
def isFunctionType(tp: Type): Boolean = tp.normalize match {
case TypeRef(_, sym, args) =>
(args.length > 0) && (args.length - 1 <= MaxFunctionArity) &&
@@ -308,9 +354,15 @@ trait Definitions {
def seqType(arg: Type) = typeRef(SeqClass.typeConstructor.prefix, SeqClass, List(arg))
def arrayType(arg: Type) = typeRef(ArrayClass.typeConstructor.prefix, ArrayClass, List(arg))
+ def ClassType(arg: Type) =
+ if (phase.erasedTypes || forMSIL) ClassClass.tpe
+ else appliedType(ClassClass.tpe, List(arg))
+
//
// .NET backend
//
+
+ lazy val ComparatorClass = getClass("scala.runtime.Comparator")
// System.ValueType
lazy val ValueTypeClass: Symbol = getClass(sn.ValueType)
// System.MulticastDelegate
@@ -344,15 +396,22 @@ trait Definitions {
var Any_toString : Symbol = _
var Any_isInstanceOf: Symbol = _
var Any_asInstanceOf: Symbol = _
+ var Any_## : Symbol = _
// members of class java.lang.{Object, String}
var Object_eq : Symbol = _
var Object_ne : Symbol = _
var Object_== : Symbol = _
var Object_!= : Symbol = _
+ var Object_## : Symbol = _
var Object_synchronized: Symbol = _
- var Object_isInstanceOf: Symbol = _
- var Object_asInstanceOf: Symbol = _
+ lazy val Object_isInstanceOf = newPolyMethod(
+ ObjectClass, "$isInstanceOf",
+ tparam => MethodType(List(), booltype)) setFlag FINAL
+ lazy val Object_asInstanceOf = newPolyMethod(
+ ObjectClass, "$asInstanceOf",
+ tparam => MethodType(List(), tparam.typeConstructor)) setFlag FINAL
+
def Object_getClass = getMember(ObjectClass, nme.getClass_)
def Object_clone = getMember(ObjectClass, nme.clone_)
def Object_finalize = getMember(ObjectClass, nme.finalize_)
@@ -366,10 +425,8 @@ trait Definitions {
// boxed classes
lazy val ObjectRefClass = getClass("scala.runtime.ObjectRef")
+ lazy val VolatileObjectRefClass = getClass("scala.runtime.VolatileObjectRef")
lazy val BoxesRunTimeClass = getModule("scala.runtime.BoxesRunTime")
- lazy val BoxedArrayClass = getClass("scala.runtime.BoxedArray")
- lazy val BoxedAnyArrayClass = getClass("scala.runtime.BoxedAnyArray")
- lazy val BoxedObjectArrayClass = getClass("scala.runtime.BoxedObjectArray")
lazy val BoxedNumberClass = getClass(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClass(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClass(sn.BoxedBoolean)
@@ -388,11 +445,14 @@ trait Definitions {
// special attributes
lazy val SerializableAttr: Symbol = getClass("scala.serializable")
lazy val DeprecatedAttr: Symbol = getClass("scala.deprecated")
+ lazy val MigrationAnnotationClass: Symbol = getClass("scala.annotation.migration")
+ lazy val TraitSetterAnnotationClass: Symbol = getClass("scala.runtime.TraitSetter")
lazy val BeanPropertyAttr: Symbol = getClass(sn.BeanProperty)
lazy val BooleanBeanPropertyAttr: Symbol = getClass(sn.BooleanBeanProperty)
lazy val AnnotationDefaultAttr: Symbol = {
- val attr = newClass(RootClass, nme.AnnotationDefaultATTR, List(AnnotationClass.typeConstructor))
+ val RuntimePackageClass = getModule("scala.runtime").tpe.typeSymbol
+ val attr = newClass(RuntimePackageClass, nme.AnnotationDefaultATTR, List(AnnotationClass.typeConstructor))
// This attribute needs a constructor so that modifiers in parsed Java code make sense
attr.info.decls enter (attr newConstructor NoPosition setInfo MethodType(Nil, attr.tpe))
attr
@@ -455,7 +515,7 @@ trait Definitions {
else sym.info.member(fullname.subName(i, j).toTypeName)
if (result == NoSymbol) {
if (settings.debug.value)
- { Console.println(sym.info); Console.println(sym.info.members) }//debug
+ { log(sym.info); log(sym.info.members) }//debug
throw new MissingRequirementError((if (module) "object " else "class ") + fullname)
}
result
@@ -519,47 +579,60 @@ trait Definitions {
private def newTypeParam(owner: Symbol, index: Int): Symbol =
owner.newTypeParameter(NoPosition, "T" + index)
- .setInfo(mkTypeBounds(NothingClass.typeConstructor, AnyClass.typeConstructor))
+ .setInfo(TypeBounds(NothingClass.typeConstructor, AnyClass.typeConstructor))
val boxedClass = new HashMap[Symbol, Symbol]
val boxedModule = new HashMap[Symbol, Symbol]
val unboxMethod = new HashMap[Symbol, Symbol] // Type -> Method
val boxMethod = new HashMap[Symbol, Symbol] // Type -> Method
- val boxedArrayClass = new HashMap[Symbol, Symbol]
+ val primitiveCompanions = new HashSet[Symbol]
- def isUnbox(m: Symbol) = (m.name == nme.unbox) && cond(m.tpe) {
- case MethodType(_, restpe) => cond(unboxMethod get restpe.typeSymbol) {
- case Some(`m`) => true
- }
- }
-
- /** Test whether a method symbol is that of a boxing method. */
- def isBox(m: Symbol) = (boxMethod.valuesIterator contains m) && cond(m.tpe) {
- case MethodType(List(arg), _) => cond(boxMethod get arg.tpe.typeSymbol) {
- case Some(`m`) => true
- }
- }
+ def isUnbox(m: Symbol) = unboxMethod.valuesIterator contains m
+ def isBox(m: Symbol) = boxMethod.valuesIterator contains m
val refClass = new HashMap[Symbol, Symbol]
+ val volatileRefClass = new HashMap[Symbol, Symbol]
val abbrvTag = new HashMap[Symbol, Char]
- val numericWidth = new HashMap[Symbol, Int]
+ private val numericWeight = new HashMap[Symbol, Int]
+
+ def isNumericSubClass(sub: Symbol, sup: Symbol) =
+ numericWeight get sub match {
+ case Some(w1) =>
+ numericWeight get sup match {
+ case Some(w2) => w2 % w1 == 0
+ case None => false
+ }
+ case None => false
+ }
+
+ /** Create a companion object for scala.Unit.
+ */
+ private def initUnitCompanionObject() {
+ val module = ScalaPackageClass.newModule(NoPosition, "Unit")
+ ScalaPackageClass.info.decls.enter(module)
+ val mclass = module.moduleClass
+ mclass.setInfo(ClassInfoType(List(AnyRefClass.tpe, AnyValCompanionClass.tpe), new Scope, mclass))
+ module.setInfo(mclass.tpe)
+ primitiveCompanions += module
+ }
- private def newValueClass(name: Name, tag: Char, width: Int): Symbol = {
+ private[symtab] def newValueClass(name: Name, tag: Char, weight: Int): Symbol = {
val boxedName = sn.Boxed(name)
val clazz = newClass(ScalaPackageClass, name, anyvalparam) setFlag (ABSTRACT | FINAL)
boxedClass(clazz) = getClass(boxedName)
boxedModule(clazz) = getModule(boxedName)
- boxedArrayClass(clazz) = getClass("scala.runtime.Boxed" + name + "Array")
refClass(clazz) = getClass("scala.runtime." + name + "Ref")
+ volatileRefClass(clazz) = getClass("scala.runtime.Volatile" + name + "Ref")
abbrvTag(clazz) = tag
- if (width > 0) numericWidth(clazz) = width
+ if (weight > 0) numericWeight(clazz) = weight
val module = ScalaPackageClass.newModule(NoPosition, name)
ScalaPackageClass.info.decls.enter(module)
val mclass = module.moduleClass
- mclass.setInfo(ClassInfoType(List(), new Scope, mclass))
+ mclass.setInfo(ClassInfoType(List(AnyRefClass.tpe, AnyValCompanionClass.tpe), new Scope, mclass))
module.setInfo(mclass.tpe)
+ primitiveCompanions += module
val box = newMethod(mclass, nme.box, List(clazz.typeConstructor), boxedClass(clazz).tpe)
boxMethod(clazz) = box
@@ -572,26 +645,6 @@ trait Definitions {
/** Sets-up symbols etc. for value classes, and their boxed versions. This
* method is called once from within the body of init. */
private def initValueClasses() {
- val booltype = BooleanClass.typeConstructor
- val boolparam = List(booltype)
- val bytetype = ByteClass.typeConstructor
- val byteparam = List(bytetype)
- val chartype = CharClass.typeConstructor
- val charparam = List(chartype)
- val shorttype = ShortClass.typeConstructor
- val shortparam = List(shorttype)
- val inttype = IntClass.typeConstructor
- val intparam = List(inttype)
- val longtype = LongClass.typeConstructor
- val longparam = List(longtype)
-
- val floattype = FloatClass.typeConstructor
- val floatparam = List(floattype)
- val doubletype = DoubleClass.typeConstructor
- val doubleparam = List(doubletype)
-
- val stringtype = StringClass.typeConstructor
-
// init scala.Boolean
newParameterlessMethod(BooleanClass, nme.UNARY_!, booltype)
List(nme.EQ, nme.NE, nme.ZOR, nme.ZAND, nme.OR, nme.AND, nme.XOR) foreach {
@@ -619,7 +672,6 @@ trait Definitions {
newParameterlessMethod(clazz, nme.toChar, chartype)
newParameterlessMethod(clazz, nme.toInt, inttype)
newParameterlessMethod(clazz, nme.toLong, longtype)
-
newParameterlessMethod(clazz, nme.toFloat, floattype)
newParameterlessMethod(clazz, nme.toDouble, doubletype)
@@ -659,7 +711,7 @@ trait Definitions {
def addModuleMethod(clazz: Symbol, name: Name, value: Any) {
val owner = clazz.linkedClassOfClass
- newParameterlessMethod(owner, name, mkConstantType(Constant(value)))
+ newParameterlessMethod(owner, name, ConstantType(Constant(value)))
}
addModuleMethod(ByteClass, "MinValue", java.lang.Byte.MIN_VALUE)
addModuleMethod(ByteClass, "MaxValue", java.lang.Byte.MAX_VALUE)
@@ -697,7 +749,7 @@ trait Definitions {
/** Is symbol a numeric value class? */
def isNumericValueClass(sym: Symbol): Boolean =
- numericWidth contains sym
+ numericWeight contains sym
/** Is symbol a numeric value class? */
def isNumericValueType(tp: Type): Boolean = tp match {
@@ -705,6 +757,7 @@ trait Definitions {
case _ => false
}
+ // todo: reconcile with javaSignature!!!
def signature(tp: Type): String = {
def erasure(tp: Type): Type = tp match {
case st: SubType => erasure(st.supertype)
@@ -712,7 +765,7 @@ trait Definitions {
case _ => tp
}
def flatNameString(sym: Symbol, separator: Char): String =
- if (sym.owner.isPackageClass) sym.fullNameString('.') + (if (sym.isModuleClass) "$" else "")
+ if (sym.owner.isPackageClass) sym.fullName('.') + (if (sym.isModuleClass) "$" else "")
else flatNameString(sym.owner, separator) + "$" + sym.simpleName;
def signature1(etp: Type): String = {
if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
@@ -730,7 +783,7 @@ trait Definitions {
if (isInitialized) return
isInitialized = true
- EmptyPackageClass.setInfo(ClassInfoType(List(), new Scope, EmptyPackageClass))
+ EmptyPackageClass.setInfo(ClassInfoType(Nil, new Scope, EmptyPackageClass))
EmptyPackage.setInfo(EmptyPackageClass.tpe)
RootClass.info.decls.enter(EmptyPackage)
RootClass.info.decls.enter(RootPackage)
@@ -738,23 +791,23 @@ trait Definitions {
abbrvTag(UnitClass) = 'V'
initValueClasses()
- val booltype = BooleanClass.typeConstructor
+ initUnitCompanionObject()
// members of class scala.Any
Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype) setFlag FINAL
Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype) setFlag FINAL
Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
- Any_hashCode = newMethod(
- AnyClass, nme.hashCode_, List(), IntClass.typeConstructor)
- Any_toString = newMethod(
- AnyClass, nme.toString_, List(), StringClass.typeConstructor)
+ Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype)
+ Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype)
+ Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
Any_isInstanceOf = newPolyMethod(
AnyClass, nme.isInstanceOf_, tparam => booltype) setFlag FINAL
Any_asInstanceOf = newPolyMethod(
AnyClass, nme.asInstanceOf_, tparam => tparam.typeConstructor) setFlag FINAL
- // members of class java.lang.{Object, String}
+ // members of class java.lang.{ Object, String }
+ Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
Object_== = newMethod(ObjectClass, nme.EQ, anyrefparam, booltype) setFlag FINAL
Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype) setFlag FINAL
Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype) setFlag FINAL
@@ -762,17 +815,12 @@ trait Definitions {
Object_synchronized = newPolyMethodCon(
ObjectClass, nme.synchronized_,
tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL
- Object_isInstanceOf = newPolyMethod(
- ObjectClass, "$isInstanceOf",
- tparam => MethodType(List(), booltype)) setFlag FINAL
- Object_asInstanceOf = newPolyMethod(
- ObjectClass, "$asInstanceOf",
- tparam => MethodType(List(), tparam.typeConstructor)) setFlag FINAL
+
String_+ = newMethod(
- StringClass, "+", anyparam, StringClass.typeConstructor) setFlag FINAL
+ StringClass, "+", anyparam, stringtype) setFlag FINAL
- val forced = List( // force initialization of every symbol that is enetred as a side effect
- AnnotationDefaultAttr,
+ val forced = List( // force initialization of every symbol that is entered as a side effect
+ AnnotationDefaultAttr, // #2264
RepeatedParamClass,
JavaRepeatedParamClass,
ByNameParamClass,
@@ -791,12 +839,11 @@ trait Definitions {
NullClass,
NothingClass,
SingletonClass,
- EqualsPatternClass
+ EqualsPatternClass,
+ Object_isInstanceOf,
+ Object_asInstanceOf
)
- // #2264
- var tmp = AnnotationDefaultAttr
- tmp = RepeatedParamClass // force initalization
if (forMSIL) {
val intType = IntClass.typeConstructor
val intParam = List(intType)
@@ -808,9 +855,10 @@ trait Definitions {
// additional methods of Object
newMethod(ObjectClass, "clone", List(), AnyRefClass.typeConstructor)
- newMethod(ObjectClass, "wait", List(), unitType)
- newMethod(ObjectClass, "wait", List(longType), unitType)
- newMethod(ObjectClass, "wait", List(longType, intType), unitType)
+ // wait in Java returns void; on .NET, Wait returns boolean. By putting
+ // `booltype` the compiler adds a `drop` after calling wait.
+ newMethod(ObjectClass, "wait", List(), booltype)
+ newMethod(ObjectClass, "wait", List(longType), booltype)
newMethod(ObjectClass, "notify", List(), unitType)
newMethod(ObjectClass, "notifyAll", List(), unitType)
@@ -843,7 +891,7 @@ trait Definitions {
var nbScalaCallers: Int = 0
def newScalaCaller(delegateType: Type): Symbol = {
assert(forMSIL, "scalaCallers can only be created if target is .NET")
- // object: reference to object on which to call (scala-)metod
+ // object: reference to object on which to call (scala-)method
val paramTypes: List[Type] = List(ObjectClass.tpe)
val name: String = "$scalaCaller$$" + nbScalaCallers
// tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
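
The numericWidth-to-numericWeight change above encodes the numeric conversion lattice arithmetically: Byte=2, Short=4, Char=3, Int=12, Long=24, Float=48, Double=96, and one value class is a numeric subclass of another iff the larger weight is divisible by the smaller. A standalone check of that rule using the weights from the patch (Boolean's weight of 0 keeps it out of the map, as the `if (weight > 0)` guard above does):

    object NumericWeightDemo extends App {
      val weight = Map(
        "Byte" -> 2, "Short" -> 4, "Char" -> 3, "Int" -> 12,
        "Long" -> 24, "Float" -> 48, "Double" -> 96
      )

      // sub is a numeric subclass of sup iff sup's weight is divisible by sub's.
      def isNumericSubClass(sub: String, sup: String): Boolean =
        (weight.get(sub), weight.get(sup)) match {
          case (Some(w1), Some(w2)) => w2 % w1 == 0
          case _                    => false
        }

      println(isNumericSubClass("Byte", "Int"))   // true  (12 % 2 == 0)
      println(isNumericSubClass("Char", "Short")) // false (4 % 3 != 0)
      println(isNumericSubClass("Int", "Float"))  // true  (48 % 12 == 0)
    }
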
diff --git a/src/compiler/scala/tools/nsc/symtab/Flags.scala b/src/compiler/scala/tools/nsc/symtab/Flags.scala
index 0c7415e58a..de8f92fdf4 100644
--- a/src/compiler/scala/tools/nsc/symtab/Flags.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Flags.scala
@@ -1,89 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
-object Flags {
-
- // modifiers
- final val IMPLICIT = 0x00000200
- final val FINAL = 0x00000020
- final val PRIVATE = 0x00000004
- final val PROTECTED = 0x00000001
-
- final val SEALED = 0x00000400
- final val OVERRIDE = 0x00000002
- final val CASE = 0x00000800
- final val ABSTRACT = 0x00000008 // abstract class, or used in conjunction
- // with abstract override.
- // Note difference to DEFERRED!
-
- final val DEFERRED = 0x00000010 // was `abstract' for members | trait is virtual
- final val METHOD = 0x00000040 // a method
- final val MODULE = 0x00000100 // symbol is module or class implementing a module
- final val INTERFACE = 0x00000080 // symbol is an interface (i.e. a trait which defines only abstract methods)
-
-
- final val MUTABLE = 0x00001000 // symbol is a mutable variable.
- final val PARAM = 0x00002000 // symbol is a (value or type) parameter to a method
- final val PACKAGE = 0x00004000 // symbol is a java package
- final val DEPRECATED = 0x00008000 // symbol is deprecated.
-
- final val COVARIANT = 0x00010000 // symbol is a covariant type variable
- final val CAPTURED = 0x00010000 // variable is accessed from nested function.
- // Set by LambdaLift
- final val BYNAMEPARAM = 0x00010000 // parameter is by name
- final val CONTRAVARIANT = 0x00020000 // symbol is a contravariant type variable
- final val LABEL = 0x00020000 // method symbol is a label. Set by TailCall
- final val INCONSTRUCTOR = 0x00020000 // class symbol is defined in this/superclass
- // constructor.
- final val ABSOVERRIDE = 0x00040000 // combination of abstract & override
- final val LOCAL = 0x00080000 // symbol is local to current class (i.e. private[this] or protected[this]
- // pre: PRIVATE or PROTECTED are also set
- final val JAVA = 0x00100000 // symbol was defined by a Java class
- final val SYNTHETIC = 0x00200000 // symbol is compiler-generated
- final val STABLE = 0x00400000 // functions that are assumed to be stable
- // (typically, access methods for valdefs)
- // or classes that do not contain abstract types.
- final val STATIC = 0x00800000 // static field, method or class
-
- final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor)
- final val TRAIT = 0x02000000 // symbol is a trait
- final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
- final val BRIDGE = 0x04000000 // function is a bridge method. Set by Erasure
- final val ACCESSOR = 0x08000000 // a value or variable accessor (getter or setter)
-
- final val SUPERACCESSOR = 0x10000000 // a super accessor
- final val PARAMACCESSOR = 0x20000000 // for value definitions: is an access method
- // for a final val parameter
- // for parameters: is a val parameter
- final val MODULEVAR = 0x40000000 // for variables: is the variable caching a module value
- final val SYNTHETICMETH = 0x40000000 // for methods: synthetic method, but without SYNTHETIC flag
- final val MONOMORPHIC = 0x40000000 // for type symbols: does not have type parameters
- final val LAZY = 0x80000000L // symbol is a lazy val. can't have MUTABLE unless transformed by typer
-
- final val IS_ERROR = 0x100000000L // symbol is an error symbol
- final val OVERLOADED = 0x200000000L // symbol is overloaded
- final val LIFTED = 0x400000000L // class has been lifted out to package level
- // local value has been lifted out to class level
- // todo: make LIFTED = latePRIVATE?
- final val MIXEDIN = 0x800000000L // term member has been mixed in
- final val EXISTENTIAL = 0x800000000L // type is an existential parameter or skolem
-
- final val EXPANDEDNAME = 0x1000000000L // name has been expanded with class suffix
- final val IMPLCLASS = 0x2000000000L // symbol is an implementation class
- final val PRESUPER = 0x2000000000L // value is evaluated before super call
- final val TRANS_FLAG = 0x4000000000L // transient flag guaranteed to be reset
- // after each phase.
-
- final val LOCKED = 0x8000000000L // temporary flag to catch cyclic dependencies
- final val SPECIALIZED = 0x10000000000L// symbol is a generated specialized member
- final val DEFAULTINIT = 0x20000000000L// symbol is a generated specialized member
- final val VBRIDGE = 0x40000000000L// symbol is a varargs bridge
+object Flags extends reflect.generic.Flags {
final val InitialFlags = 0x0001FFFFFFFFFFFFL // flags that are enabled from phase 1.
final val LateFlags = 0x00FE000000000000L // flags that override flags in 0x1FC.
@@ -108,118 +31,6 @@ object Flags {
final val notOVERRIDE = (OVERRIDE: Long) << AntiShift
final val notMETHOD = (METHOD: Long) << AntiShift
- // The flags from 0x001 to 0x800 are different in the raw flags
- // and in the pickled format.
-
- private final val IMPLICIT_PKL = 0x00000001
- private final val FINAL_PKL = 0x00000002
- private final val PRIVATE_PKL = 0x00000004
- private final val PROTECTED_PKL = 0x00000008
-
- private final val SEALED_PKL = 0x00000010
- private final val OVERRIDE_PKL = 0x00000020
- private final val CASE_PKL = 0x00000040
- private final val ABSTRACT_PKL = 0x00000080
-
- private final val DEFERRED_PKL = 0x00000100
- private final val METHOD_PKL = 0x00000200
- private final val MODULE_PKL = 0x00000400
- private final val INTERFACE_PKL = 0x00000800
-
- private final val PKL_MASK = 0x00000FFF
-
-
- private val r2p = {
- def rawFlagsToPickledAux(flags:Int) = {
- var pflags=0
- if ((flags & IMPLICIT )!=0) pflags|=IMPLICIT_PKL
- if ((flags & FINAL )!=0) pflags|=FINAL_PKL
- if ((flags & PRIVATE )!=0) pflags|=PRIVATE_PKL
- if ((flags & PROTECTED)!=0) pflags|=PROTECTED_PKL
- if ((flags & SEALED )!=0) pflags|=SEALED_PKL
- if ((flags & OVERRIDE )!=0) pflags|=OVERRIDE_PKL
- if ((flags & CASE )!=0) pflags|=CASE_PKL
- if ((flags & ABSTRACT )!=0) pflags|=ABSTRACT_PKL
- if ((flags & DEFERRED )!=0) pflags|=DEFERRED_PKL
- if ((flags & METHOD )!=0) pflags|=METHOD_PKL
- if ((flags & MODULE )!=0) pflags|=MODULE_PKL
- if ((flags & INTERFACE)!=0) pflags|=INTERFACE_PKL
- pflags
- }
- val v=new Array[Int](PKL_MASK+1)
- var i=0
- while (i<=PKL_MASK) {
- v(i)=rawFlagsToPickledAux(i)
- i+=1
- }
- v
- }
-
- private val p2r = {
- def pickledToRawFlagsAux(pflags:Int) = {
- var flags=0
- if ((pflags & IMPLICIT_PKL )!=0) flags|=IMPLICIT
- if ((pflags & FINAL_PKL )!=0) flags|=FINAL
- if ((pflags & PRIVATE_PKL )!=0) flags|=PRIVATE
- if ((pflags & PROTECTED_PKL)!=0) flags|=PROTECTED
- if ((pflags & SEALED_PKL )!=0) flags|=SEALED
- if ((pflags & OVERRIDE_PKL )!=0) flags|=OVERRIDE
- if ((pflags & CASE_PKL )!=0) flags|=CASE
- if ((pflags & ABSTRACT_PKL )!=0) flags|=ABSTRACT
- if ((pflags & DEFERRED_PKL )!=0) flags|=DEFERRED
- if ((pflags & METHOD_PKL )!=0) flags|=METHOD
- if ((pflags & MODULE_PKL )!=0) flags|=MODULE
- if ((pflags & INTERFACE_PKL)!=0) flags|=INTERFACE
- flags
- }
- val v=new Array[Int](PKL_MASK+1)
- var i=0
- while (i<=PKL_MASK) {
- v(i)=pickledToRawFlagsAux(i)
- i+=1
- }
- v
- }
-
- def rawFlagsToPickled(flags:Long):Long =
- (flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
-
- def pickledToRawFlags(pflags:Long):Long =
- (pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
-
- // List of the raw flags, in pickled order
- private val pickledListOrder = {
- def findBit(m:Long):Int = {
- var mask=m
- var i=0
- while (i <= 62) {
- if ((mask&1) == 1L) return i
- mask >>= 1
- i += 1
- }
- throw new FatalError("Internal error: mask is zero")
- }
- val v=new Array[Long](63)
- v(findBit(IMPLICIT_PKL ))=IMPLICIT
- v(findBit(FINAL_PKL ))=FINAL
- v(findBit(PRIVATE_PKL ))=PRIVATE
- v(findBit(PROTECTED_PKL))=PROTECTED
- v(findBit(SEALED_PKL ))=SEALED
- v(findBit(OVERRIDE_PKL ))=OVERRIDE
- v(findBit(CASE_PKL ))=CASE
- v(findBit(ABSTRACT_PKL ))=ABSTRACT
- v(findBit(DEFERRED_PKL ))=DEFERRED
- v(findBit(METHOD_PKL ))=METHOD
- v(findBit(MODULE_PKL ))=MODULE
- v(findBit(INTERFACE_PKL))=INTERFACE
- var i=findBit(PKL_MASK+1)
- while (i <= 62) {
- v(i)=1L << i
- i += 1
- }
- v.toList
- }
-
// masks
/** This flags can be set when class or module symbol is first created. */
final val TopLevelCreationFlags: Long =
@@ -233,7 +44,7 @@ object Flags {
/** These modifiers appear in TreePrinter output. */
final val PrintableFlags: Long =
ExplicitFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR |
- ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | BRIDGE | STATIC | VBRIDGE
+ ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | BRIDGE | STATIC | VBRIDGE | SPECIALIZED
/** The two bridge flags */
final val BRIDGES = BRIDGE | VBRIDGE
@@ -241,14 +52,20 @@ object Flags {
final val FieldFlags: Long =
MUTABLE | CASEACCESSOR | PARAMACCESSOR | STATIC | FINAL | PRESUPER | LAZY
- final val AccessFlags: Long = PRIVATE | PROTECTED
+ final val AccessFlags: Long = PRIVATE | PROTECTED | LOCAL
final val VARIANCES = COVARIANT | CONTRAVARIANT
final val ConstrFlags: Long = JAVA
- final val PickledFlags: Long = 0xFFFFFFFFL
/** Module flags inherited by their module-class */
final val ModuleToClassFlags: Long = AccessFlags | MODULE | PACKAGE | CASE | SYNTHETIC | JAVA
+ def getterFlags(fieldFlags: Long): Long =
+ ACCESSOR +
+ (if ((fieldFlags & MUTABLE) != 0) fieldFlags & ~MUTABLE & ~PRESUPER else fieldFlags & ~PRESUPER | STABLE)
+
+ def setterFlags(fieldFlags: Long): Long =
+ getterFlags(fieldFlags) & ~STABLE & ~CASEACCESSOR
+
private def listToString(ss: List[String]): String =
ss.filter("" !=).mkString("", " ", "")
@@ -287,6 +104,7 @@ object Flags {
else if (flag == TRANS_FLAG ) "<trans-flag>"
else if (flag == LOCKED ) "<locked>"
else if (flag == LAZY ) "lazy"
+ else if (flag == SPECIALIZED ) "<specialized>"
else flag.toInt match {
case IMPLICIT => "implicit"
case FINAL => "final"
@@ -306,7 +124,6 @@ object Flags {
case MUTABLE => "<mutable>"
case PARAM => "<param>"
case PACKAGE => "<package>"
- case DEPRECATED => "<deprecated>"
case COVARIANT => "<covariant/captured/byname>"
case CONTRAVARIANT => "<contravariant/label/inconstr/defaultinit>"
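
The getterFlags/setterFlags helpers added above derive accessor flags from a field's flags: getters of vals become STABLE, setters drop STABLE and CASEACCESSOR, and PRESUPER never transfers. A self-contained check of that derivation using a tiny private flag set; the real bit values live in reflect.generic.Flags and differ from these.

    object FlagDemo extends App {
      final val MUTABLE      = 0x001L
      final val PRESUPER     = 0x002L
      final val STABLE       = 0x004L
      final val CASEACCESSOR = 0x008L
      final val ACCESSOR     = 0x010L

      def getterFlags(fieldFlags: Long): Long =
        ACCESSOR +
          (if ((fieldFlags & MUTABLE) != 0) fieldFlags & ~MUTABLE & ~PRESUPER
           else fieldFlags & ~PRESUPER | STABLE)

      def setterFlags(fieldFlags: Long): Long =
        getterFlags(fieldFlags) & ~STABLE & ~CASEACCESSOR

      // A val-backed field becomes a stable accessor; a var-backed one does not.
      println((getterFlags(0L) & STABLE) != 0)       // true
      println((getterFlags(MUTABLE) & STABLE) != 0)  // false
      println((setterFlags(0L) & STABLE) != 0)       // false
    }
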
diff --git a/src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala b/src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala
index d02063ca60..95dcb07c7c 100644
--- a/src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
diff --git a/src/compiler/scala/tools/nsc/symtab/Names.scala b/src/compiler/scala/tools/nsc/symtab/Names.scala
index f6101d61d5..9ab5914843 100644
--- a/src/compiler/scala/tools/nsc/symtab/Names.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Names.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
import scala.io.Codec
import java.security.MessageDigest
@@ -16,7 +15,7 @@ import java.security.MessageDigest
* @author Martin Odersky
* @version 1.0, 05/02/2005
*/
-class Names {
+trait Names extends reflect.generic.Names {
// Operations -------------------------------------------------------------
@@ -36,11 +35,11 @@ class Names {
/** hashtable for finding term names quickly
*/
- private val termHashtable = new Array[Name](HASH_SIZE)
+ private val termHashtable = new Array[TermName](HASH_SIZE)
/** hashtable for finding type names quickly
*/
- private val typeHashtable = new Array[Name](HASH_SIZE)
+ private val typeHashtable = new Array[TypeName](HASH_SIZE)
/** the hashcode of a name
*/
@@ -112,7 +111,7 @@ class Names {
* @param len ...
* @return the created term name
*/
- def newTermName(cs: Array[Char], offset: Int, len: Int): Name = {
+ def newTermName(cs: Array[Char], offset: Int, len: Int): TermName = {
val h = hashValue(cs, offset, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
@@ -126,7 +125,7 @@ class Names {
/** create a term name from string
*/
- def newTermName(s: String): Name =
+ def newTermName(s: String): TermName =
newTermName(s.toCharArray(), 0, s.length())
/** Create a term name from the UTF8 encoded bytes in <code>bs[offset..offset+len-1]</code>.
@@ -136,7 +135,7 @@ class Names {
* @param len ...
* @return the created term name
*/
- def newTermName(bs: Array[Byte], offset: Int, len: Int): Name =
+ def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName =
newTermName(Codec toUTF8 bs.slice(offset, offset + len) mkString)
/** Create a type name from the characters in <code>cs[offset..offset+len-1]</code>.
@@ -146,12 +145,12 @@ class Names {
* @param len ...
* @return the created type name
*/
- def newTypeName(cs: Array[Char], offset: Int, len: Int): Name =
+ def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
newTermName(cs, offset, len).toTypeName
/** create a type name from string
*/
- def newTypeName(s: String): Name =
+ def newTypeName(s: String): TypeName =
newTermName(s).toTypeName
/** Create a type name from the UTF8 encoded bytes in <code>bs[offset..offset+len-1]</code>.
@@ -161,9 +160,12 @@ class Names {
* @param len ...
* @return the create type name
*/
- def newTypeName(bs: Array[Byte], offset: Int, len: Int): Name =
+ def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
newTermName(bs, offset, len).toTypeName
+ def mkTermName(name: Name) = name.toTermName
+ def mkTypeName(name: Name) = name.toTypeName
+
def nameChars: Array[Char] = chrs
implicit def view(s: String): Name = newTermName(s)
@@ -178,7 +180,7 @@ class Names {
/** next name in the same hash bucket
*/
- var next: Name = null
+ def next: Name
/** return the length of this name
*/
@@ -390,10 +392,15 @@ class Names {
def decode: String = (
NameTransformer.decode(toString()) +
(if (nameDebug && isTypeName) "!" else ""))//debug
+
+ def append(suffix: String): Name
+ def append(suffix: Name): Name
+
+ def isOperatorName: Boolean = decode != toString
}
- private class TermName(index: Int, len: Int, hash: Int) extends Name(index, len) {
- next = termHashtable(hash)
+ final class TermName(index: Int, len: Int, hash: Int) extends Name(index, len) {
+ var next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
def isTypeName: Boolean = false
@@ -409,10 +416,14 @@ class Names {
}
def subName(from: Int, to: Int): Name =
newTermName(chrs, start + from, to - from)
+
+ def append(suffix: String): TermName = newTermName(this + suffix)
+ def append(suffix: Name): TermName = append(suffix.toString)
+
}
- private class TypeName(index: Int, len: Int, hash: Int) extends Name(index, len) {
- next = typeHashtable(hash)
+ final class TypeName(index: Int, len: Int, hash: Int) extends Name(index, len) {
+ var next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
def isTermName: Boolean = false
def isTypeName: Boolean = true
@@ -428,5 +439,9 @@ class Names {
def toTypeName: Name = this
def subName(from: Int, to: Int): Name =
newTypeName(chrs, start + from, to - from)
+
+ def append(suffix: String): TypeName = newTypeName(this + suffix)
+ def append(suffix: Name): TypeName = append(suffix.toString)
+
}
}
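The TermName/TypeName split above keeps two interning tables whose buckets are chained through each name's `next` field. A minimal, self-contained sketch of the same interning idea (a hypothetical SimpleNames object using plain strings rather than the shared `chrs` array):

    object SimpleNames {
      private val HASH_SIZE = 0x8000
      private val HASH_MASK = 0x7FFF
      // one bucket array; names hashing to the same bucket are chained through `next`
      private val table = new Array[SimpleName](HASH_SIZE)

      final class SimpleName(val value: String, val next: SimpleName)

      private def hash(s: String): Int = s.hashCode & HASH_MASK

      /** Return the unique SimpleName for `s`, creating and entering it on first use. */
      def intern(s: String): SimpleName = {
        val h = hash(s)
        var n = table(h)
        while ((n ne null) && n.value != s) n = n.next
        if (n ne null) n
        else {
          val fresh = new SimpleName(s, table(h))   // new head of the bucket chain
          table(h) = fresh
          fresh
        }
      }

      def main(args: Array[String]): Unit = {
        val a = intern("foo")
        val b = intern("foo")
        println(a eq b)   // true: interned names are reference-equal
      }
    }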
diff --git a/src/compiler/scala/tools/nsc/symtab/Positions.scala b/src/compiler/scala/tools/nsc/symtab/Positions.scala
index e096d3b5e3..58b9164988 100644
--- a/src/compiler/scala/tools/nsc/symtab/Positions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Positions.scala
@@ -27,10 +27,13 @@ self: scala.tools.nsc.symtab.SymbolTable =>
/** Ensure that given tree has no positions that overlap with
* any of the positions of `others`. This is done by
- * shortening the range or assinging TransparentPositions
+ * shortening the range or assigning TransparentPositions
* to some of the nodes in `tree`.
*/
def ensureNonOverlapping(tree: Tree, others: List[Tree]) {}
def validatePositions(tree: Tree) {}
+
+ type Position = scala.tools.nsc.util.Position
+ val NoPosition = scala.tools.nsc.util.NoPosition
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Scopes.scala b/src/compiler/scala/tools/nsc/symtab/Scopes.scala
index a33e0c3f54..b5e23d61f0 100644
--- a/src/compiler/scala/tools/nsc/symtab/Scopes.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Scopes.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -44,11 +43,11 @@ trait Scopes {
e
}
- class Scope(initElems: ScopeEntry) extends Iterable[Symbol] {
+ class Scope(initElems: ScopeEntry) extends AbsScope {
var elems: ScopeEntry = initElems
- /** The number of times this scope is neted in another
+ /** The number of times this scope is nested in another
*/
private var nestinglevel = 0
@@ -218,7 +217,7 @@ trait Scopes {
if (e eq null) NoSymbol else e.sym
}
- /** Returns an iterator eidling every symbol with given name in this scope.
+ /** Returns an iterator yielding every symbol with given name in this scope.
*/
def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] {
var e = lookupEntry(name)
@@ -275,10 +274,39 @@ trait Scopes {
elemsCache
}
- /** Return all symbols as an interator in the order they were entered in this scope.
+ /** Return the nesting level of this scope, i.e. the number of times this scope
+ * was nested in another */
+ def nestingLevel = nestinglevel
+
+ /** Return all symbols as an iterator in the order they were entered in this scope.
*/
def iterator: Iterator[Symbol] = toList.iterator
+/*
+ /** Does this scope contain an entry for `sym`?
+ */
+ def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym
+
+ /** A scope that contains all symbols of this scope and that also contains `sym`.
+ */
+ def +(sym: Symbol): Scope =
+ if (contains(sym)) this
+ else {
+ val result = cloneScope
+ result enter sym
+ result
+ }
+
+ /** A scope that contains all symbols of this scope except `sym`.
+ */
+ def -(sym: Symbol): Scope =
+ if (!contains(sym)) this
+ else {
+ val result = cloneScope
+ result unlink sym
+ result
+ }
+*/
override def foreach[U](p: Symbol => U): Unit = toList foreach p
override def filter(p: Symbol => Boolean): Scope =
@@ -287,18 +315,17 @@ trait Scopes {
override def mkString(start: String, sep: String, end: String) =
toList.map(_.defString).mkString(start, sep, end)
- override def toString(): String = mkString("{\n ", ";\n ", "\n}")
+ override def toString(): String = mkString("Scope{\n ", ";\n ", "\n}")
- /** Return the nesting level of this scope, i.e. the number of times this scope
- * was nested in another */
- def nestingLevel = nestinglevel
}
+ def newScope: Scope = new Scope
+
/** The empty scope (immutable).
*/
object EmptyScope extends Scope {
override def enter(e: ScopeEntry) {
- throw new Error("EmptyScope.enter")
+ abort("EmptyScope.enter")
}
}
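For reference, a toy rendering of the scope behaviour touched above (insertion-order iteration, `lookupAll`, a nesting level); `Sym` here is a hypothetical stand-in for a compiler Symbol, not the real thing:

    object ScopeDemo {
      import scala.collection.mutable.ListBuffer

      // Hypothetical stand-in for a compiler Symbol: a name plus what it is.
      final case class Sym(name: String, kind: String)

      class SimpleScope(val nestingLevel: Int = 0) {
        private val elems = ListBuffer.empty[Sym]

        def enter(sym: Sym): Unit = elems += sym

        /** First entered symbol with the given name, if any. */
        def lookup(name: String): Option[Sym] = elems.find(_.name == name)

        /** Every symbol with the given name, in the order they were entered. */
        def lookupAll(name: String): Iterator[Sym] = elems.iterator.filter(_.name == name)

        /** A fresh scope nested one level deeper. */
        def nest: SimpleScope = new SimpleScope(nestingLevel + 1)

        override def toString = elems.mkString("Scope{\n  ", ";\n  ", "\n}")
      }

      def main(args: Array[String]): Unit = {
        val s = new SimpleScope
        s.enter(Sym("Foo", "class"))
        s.enter(Sym("Foo", "object"))
        println(s.lookupAll("Foo").size)   // 2: class and companion object share a name
        println(s.nest.nestingLevel)       // 1
      }
    }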
diff --git a/src/compiler/scala/tools/nsc/symtab/StdNames.scala b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
index 3c5b866ba8..9c7952aa28 100644
--- a/src/compiler/scala/tools/nsc/symtab/StdNames.scala
+++ b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
@@ -1,18 +1,17 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
+import util.Chars.isOperatorPart
-trait StdNames {
- self: SymbolTable =>
+trait StdNames extends reflect.generic.StdNames { self: SymbolTable =>
- object nme {
+ object nme extends StandardNames {
// Scala keywords; enter them first to minimize scanner.maxKey
val ABSTRACTkw = newTermName("abstract")
@@ -73,7 +72,6 @@ trait StdNames {
val LOCALDUMMY_PREFIX_STRING = "<local "
val SUPER_PREFIX_STRING = "super$"
- val EXPAND_SEPARATOR_STRING = "$$"
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
val TUPLE_FIELD_PREFIX_STRING = "_"
val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
@@ -88,14 +86,14 @@ trait StdNames {
def LOCAL(clazz: Symbol) = newTermName(LOCALDUMMY_PREFIX_STRING + clazz.name+">")
def TUPLE_FIELD(index: Int) = newTermName(TUPLE_FIELD_PREFIX_STRING + index)
- val LOCAL_SUFFIX = newTermName(" ")
+ val LOCAL_SUFFIX = newTermName(LOCAL_SUFFIX_STRING)
val SETTER_SUFFIX = encode("_=")
val IMPL_CLASS_SUFFIX = newTermName("$class")
- val MODULE_SUFFIX = newTermName("$module")
val LOCALDUMMY_PREFIX = newTermName(LOCALDUMMY_PREFIX_STRING)
val SELECTOR_DUMMY = newTermName("<unapply-selector>")
val MODULE_INSTANCE_FIELD = newTermName("MODULE$")
+ val SPECIALIZED_INSTANCE = newTermName("specInstance$")
def isLocalName(name: Name) = name.endsWith(LOCAL_SUFFIX)
def isSetterName(name: Name) = name.endsWith(SETTER_SUFFIX)
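SETTER_SUFFIX is the encoded form of `_=`, and the encode/decode round trip is what `isOperatorName` relies on. A quick runnable check with scala.reflect.NameTransformer on plain strings (not compiler Names):

    object NameTransformerDemo {
      import scala.reflect.NameTransformer

      def main(args: Array[String]): Unit = {
        println(NameTransformer.encode("_="))                // _$eq  (the SETTER_SUFFIX form)
        println(NameTransformer.encode("+"))                 // $plus
        println(NameTransformer.decode("$plus"))             // +
        // isOperatorName-style test: a name is operator-like if decoding changes it
        println(NameTransformer.decode("$plus") != "$plus")  // true
      }
    }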
@@ -103,22 +101,16 @@ trait StdNames {
def isTraitSetterName(name: Name) = isSetterName(name) && name.pos(TRAIT_SETTER_SEPARATOR_STRING) < name.length
def isOpAssignmentName(name: Name) =
name(name.length - 1) == '=' &&
- isOperatorCharacter(name(0)) &&
+ isOperatorPart(name(0)) &&
name(0) != '=' && name != NEraw && name != LEraw && name != GEraw
- def isOperatorCharacter(c: Char) = c match {
- case '~' | '!' | '@' | '#' | '%' |
- '^' | '*' | '+' | '-' | '<' |
- '>' | '?' | ':' | '=' | '&' |
- '|' | '\\'| '/' => true
- case _ =>
- val chtp = Character.getType(c)
- chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
- }
+ /** The expanded setter name of `name' relative to this class `base`
+ */
+ def expandedSetterName(name: Name, base: Symbol): Name =
+ expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
/** If `name' is an expandedName name, the original name.
* Otherwise `name' itself.
- * @see Symbol.expandedName
*/
def originalName(name: Name): Name = {
var i = name.length
@@ -129,8 +121,27 @@ trait StdNames {
} else name
}
+ /** Return the original name and the types on which this name
+ * is specialized. For example,
+ * {{{
+ * splitSpecializedName("foo$mIcD$sp") == ('foo', "I", "D")
+ * }}}
+ * `foo$mIcD$sp` is the name of a method specialized on two type
+ * parameters, the first one belonging to the method itself, on Int,
+ * and another one belonging to the enclosing class, on Double.
+ */
+ def splitSpecializedName(name: Name): (Name, String, String) =
+ if (name.endsWith("$sp")) {
+ val name1 = name.subName(0, name.length - 3)
+ val idxC = name1.lastPos('c')
+ val idxM = name1.lastPos('m', idxC)
+ (name1.subName(0, idxM - 1).toString,
+ name1.subName(idxC + 1, name1.length).toString,
+ name1.subName(idxM + 1, idxC).toString)
+ } else
+ (name, "", "")
+
def localToGetter(name: Name): Name = {
- assert(isLocalName(name))//debug
name.subName(0, name.length - LOCAL_SUFFIX.length)
}
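The splitSpecializedName logic above can be mirrored with ordinary string operations. A hypothetical standalone sketch that performs the same index arithmetic on "foo$mIcD$sp" and prints the tuple it yields:

    object SpecializedNameDemo {
      /** Decode "foo$mIcD$sp" with plain string ops, mirroring the index arithmetic above. */
      def split(name: String): (String, String, String) =
        if (name.endsWith("$sp")) {
          val base = name.stripSuffix("$sp")         // "foo$mIcD"
          val idxC = base.lastIndexOf('c')           // position of the 'c' marker
          val idxM = base.lastIndexOf('m', idxC)     // position of the 'm' marker before it
          (base.substring(0, idxM - 1),              // "foo"
           base.substring(idxC + 1),                 // what follows the 'c' marker: "D"
           base.substring(idxM + 1, idxC))           // what follows the 'm' marker: "I"
        } else (name, "", "")

      def main(args: Array[String]): Unit =
        println(split("foo$mIcD$sp"))                // (foo,D,I)
    }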
@@ -153,6 +164,9 @@ trait StdNames {
def getterName(name: Name): Name =
if (isLocalName(name)) localToGetter(name) else name;
+ def isConstructorName(name: Name) =
+ name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
+
def isImplClassName(name: Name): Boolean =
name endsWith IMPL_CLASS_SUFFIX;
@@ -162,9 +176,6 @@ trait StdNames {
def interfaceName(implname: Name): Name =
implname.subName(0, implname.length - IMPL_CLASS_SUFFIX.length)
- def moduleVarName(name: Name): Name =
- newTermName(name.toString() + MODULE_SUFFIX)
-
def superName(name: Name) = newTermName("super$" + name)
val PROTECTED_PREFIX = "protected$"
@@ -183,19 +194,11 @@ trait StdNames {
val LOCALCHILD = newTypeName("<local child>")
val NOSYMBOL = newTermName("<none>")
- val EMPTY = newTermName("")
val ANYNAME = newTermName("<anyname>")
val WILDCARD = newTermName("_")
val WILDCARD_STAR = newTermName("_*")
- val ANON_CLASS_NAME = newTermName("$anon")
- val ANON_FUN_NAME = newTermName("$anonfun")
- val REFINE_CLASS_NAME = newTermName("<refinement>")
- val EMPTY_PACKAGE_NAME = newTermName("<empty>")
- val IMPORT = newTermName("<import>")
val STAR = newTermName("*")
- val ROOT = newTermName("<root>")
- val ROOTPKG = newTermName("_root_")
val REPEATED_PARAM_CLASS_NAME = newTermName("<repeated>")
val JAVA_REPEATED_PARAM_CLASS_NAME = newTermName("<repeated...>")
val BYNAME_PARAM_CLASS_NAME = newTermName("<byname>")
@@ -220,6 +223,7 @@ trait StdNames {
val PERCENT = encode("%")
val EQL = encode("=")
val USCOREEQL = encode("_=")
+ val HASHHASH = encode("##")
val Nothing = newTermName("Nothing")
val Null = newTermName("Null")
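HASHHASH names the `##` method, whose hash agrees across boxed numeric types where plain hashCode does not. For example:

    object HashHashDemo {
      def main(args: Array[String]): Unit = {
        println((1).## == (1L).## && (1L).## == (1.0).##)  // true: ## unifies numeric hashing
        println((1: Any).hashCode == (1.0: Any).hashCode)  // false: boxed hashCodes disagree
      }
    }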
@@ -268,8 +272,7 @@ trait StdNames {
val assume_ = newTermName("assume")
val asInstanceOf_ = newTermName("asInstanceOf")
val box = newTermName("box")
- val boxArray = newTermName("boxArray")
- val forceBoxedArray = newTermName("forceBoxedArray")
+ val bytes = newTermName("bytes")
val canEqual_ = newTermName("canEqual")
val checkInitialized = newTermName("checkInitialized")
val classOf = newTermName("classOf")
@@ -282,7 +285,6 @@ trait StdNames {
val eq = newTermName("eq")
val equals_ = newTermName("equals")
val _equals = newTermName("_equals")
- val _equalsWithVarArgs = newTermName("_equalsWithVarArgs")
val inlinedEquals = newTermName("inlinedEquals")
val error = newTermName("error")
val ex = newTermName("ex")
@@ -298,10 +300,12 @@ trait StdNames {
val getCause = newTermName("getCause")
val getClass_ = newTermName("getClass")
val getMethod_ = newTermName("getMethod")
+ val hash_ = newTermName("hash")
val hashCode_ = newTermName("hashCode")
val hasNext = newTermName("hasNext")
val head = newTermName("head")
val invoke_ = newTermName("invoke")
+ val isArray = newTermName("isArray")
val isInstanceOf_ = newTermName("isInstanceOf")
val isDefinedAt = newTermName("isDefinedAt")
val isEmpty = newTermName("isEmpty")
@@ -323,11 +327,13 @@ trait StdNames {
val print = newTermName("print")
val productArity = newTermName("productArity")
val productElement = newTermName("productElement")
+ // val productElementName = newTermName("productElementName")
val productPrefix = newTermName("productPrefix")
val readResolve = newTermName("readResolve")
val sameElements = newTermName("sameElements")
val scala_ = newTermName("scala")
val self = newTermName("self")
+ val setAccessible = newTermName("setAccessible")
val synchronized_ = newTermName("synchronized")
val tail = newTermName("tail")
val toArray = newTermName("toArray")
@@ -379,6 +385,7 @@ trait StdNames {
val NEraw = newTermName("!=")
val LEraw = newTermName("<=")
val GEraw = newTermName(">=")
+ val DOLLARraw = newTermName("$")
// value-conversion methods
val toByte = newTermName("toByte")
@@ -421,7 +428,7 @@ trait StdNames {
val String : Name
val Throwable : Name
val NPException : Name // NullPointerException
- val NLRException : Name = newTermName("scala.runtime.NonLocalReturnException")
+ val NLRControl : Name = newTermName("scala.runtime.NonLocalReturnControl")
val ValueType : Name
val Serializable : Name
val BeanProperty : Name
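The switch from NLRException to NLRControl reflects how a `return` inside a closure is compiled: the closure throws scala.runtime.NonLocalReturnControl, which the enclosing method catches at its own boundary. A small runnable illustration:

    object NonLocalReturnDemo {
      // The `return` inside the function literal is compiled to a throw of
      // scala.runtime.NonLocalReturnControl, caught at the boundary of firstNegative.
      def firstNegative(xs: List[Int]): Option[Int] = {
        xs.foreach { x => if (x < 0) return Some(x) }
        None
      }

      def main(args: Array[String]): Unit =
        println(firstNegative(List(3, 1, -4, 1, -5)))  // Some(-4)
    }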
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index d55b0bf957..741aaa4718 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -1,22 +1,20 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
-import java.io.{File, IOException}
+import java.io.IOException
+import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Attribute => MSILAttribute }
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute}
-
-import scala.collection.mutable.{HashMap, HashSet}
import scala.compat.Platform.currentTime
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{Position, NoPosition, ClassPath, ClassRep, JavaClassPath, MsilClassPath}
+import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
import Flags._
+import util.Statistics._
/** This class ...
*
@@ -27,6 +25,59 @@ abstract class SymbolLoaders {
val global: Global
import global._
+ protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
+ assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name)
+ owner.info.decls enter member
+ member
+ }
+
+ private def realOwner(root: Symbol): Symbol = {
+ if (root.isRoot) definitions.EmptyPackageClass else root
+ }
+
+ /** Enter class with given `name` into scope of `root`
+ * and give them `completer` as type.
+ */
+ def enterClass(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val owner = realOwner(root)
+ val clazz = owner.newClass(NoPosition, newTypeName(name))
+ clazz setInfo completer
+ enterIfNew(owner, clazz, completer)
+ }
+
+ /** Enter module with given `name` into scope of `root`
+ * and give them `completer` as type.
+ */
+ def enterModule(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val owner = realOwner(root)
+ val module = owner.newModule(NoPosition, newTermName(name))
+ module setInfo completer
+ module.moduleClass setInfo moduleClassLoader
+ enterIfNew(owner, module, completer)
+ }
+
+ /** Enter class and module with given `name` into scope of `root`
+ * and give them `completer` as type.
+ */
+ def enterClassAndModule(root: Symbol, name: String, completer: SymbolLoader) {
+ val clazz = enterClass(root, name, completer)
+ val module = enterModule(root, name, completer)
+ if (!clazz.isAnonymousClass) {
+ assert(clazz.companionModule == module, module)
+ assert(module.companionClass == clazz, clazz)
+ }
+ }
+
+ /** In batch mode: Enter class and module with given `name` into scope of `root`
+ * and give them a source completer for given `src` as type.
+ * In IDE mode: Find all toplevel definitions in `src` and enter them into scope of `root`
+ * with source completer for given `src` as type.
+ * (overridden in interactive.Global).
+ */
+ def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
+ enterClassAndModule(root, name, new SourcefileLoader(src))
+ }
+
/**
* A lazy type that completes itself by calling parameter doComplete.
* Any linked modules/classes or module classes are also initialized.
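The `completer` handed to enterClass/enterModule above is such a lazy type: the symbol's real info is produced only when first demanded. A toy sketch of that pattern, with a hypothetical Sym and no compiler types:

    object CompleterDemo {
      // A toy "symbol" whose info is produced on demand by a completer function.
      final class Sym(val name: String, completer: Sym => String) {
        private var _info: String = null
        def info: String = {
          if (_info == null) _info = completer(this)   // complete lazily, once
          _info
        }
      }

      // A completer that pretends to parse a class file when first asked.
      def classfileCompleter(path: String): Sym => String = { sym =>
        println(s"[symloader] loading $path for ${sym.name}")
        s"info of ${sym.name} from $path"
      }

      def main(args: Array[String]): Unit = {
        val sym = new Sym("Foo", classfileCompleter("Foo.class"))
        println(sym.info)   // triggers the completer
        println(sym.info)   // cached: no second load
      }
    }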
@@ -36,7 +87,7 @@ abstract class SymbolLoaders {
/** Load source or class file for `root', return */
protected def doComplete(root: Symbol): Unit
- protected def sourcefile: Option[AbstractFile] = None
+ def sourcefile: Option[AbstractFile] = None
/**
* Description of the resource (ClassPath, AbstractFile, MSILType)
@@ -62,7 +113,7 @@ abstract class SymbolLoaders {
informTime("loaded " + description, start)
ok = true
setSource(root)
- setSource(root.linkedSym) // module -> class, class -> module
+ setSource(root.companionSymbol) // module -> class, class -> module
} catch {
case ex: IOException =>
ok = false
@@ -73,18 +124,22 @@ abstract class SymbolLoaders {
else "error while loading " + root.name + ", " + msg);
}
initRoot(root)
- if (!root.isPackageClass) initRoot(root.linkedSym)
+ if (!root.isPackageClass) initRoot(root.companionSymbol)
}
override def load(root: Symbol) { complete(root) }
+ private def markAbsent(sym: Symbol): Unit = {
+ val tpe: Type = if (ok) NoType else ErrorType
+
+ if (sym != NoSymbol)
+ sym setInfo tpe
+ }
private def initRoot(root: Symbol) {
- if (root.rawInfo == this) {
- def markAbsent(sym: Symbol) =
- if (sym != NoSymbol) sym.setInfo(if (ok) NoType else ErrorType);
- markAbsent(root)
- markAbsent(root.moduleClass)
- } else if (root.isClass && !root.isModuleClass) root.rawInfo.load(root)
+ if (root.rawInfo == this)
+ List(root, root.moduleClass) foreach markAbsent
+ else if (root.isClass && !root.isModuleClass)
+ root.rawInfo.load(root)
}
}
@@ -105,23 +160,8 @@ abstract class SymbolLoaders {
root.info.decls.enter(pkg)
}
- def enterClassAndModule(root: Symbol, name: String, completer: SymbolLoader) {
- val owner = if (root.isRoot) definitions.EmptyPackageClass else root
- val className = newTermName(name)
- assert(owner.info.decls.lookup(name) == NoSymbol, owner.fullNameString + "." + name)
- val clazz = owner.newClass(NoPosition, name.toTypeName)
- val module = owner.newModule(NoPosition, name)
- clazz setInfo completer
- module setInfo completer
- module.moduleClass setInfo moduleClassLoader
- owner.info.decls enter clazz
- owner.info.decls enter module
- assert(clazz.linkedModuleOfClass == module, module)
- assert(module.linkedClassOfModule == clazz, clazz)
- }
-
/**
- * Tells wether a class with both a binary and a source representation
+ * Tells whether a class with both a binary and a source representation
* (found in classpath and in sourcepath) should be re-compiled. Behaves
* similar to javac, i.e. if the source file is newer than the classfile,
* a re-compile is triggered.
@@ -129,12 +169,12 @@ abstract class SymbolLoaders {
protected def needCompile(bin: T, src: AbstractFile): Boolean
/**
- * Tells wether a class should be loaded and entered into the package
+ * Tells whether a class should be loaded and entered into the package
* scope. On .NET, this method returns `false' for all synthetic classes
* (anonymous classes, implementation classes, module classes), their
* symtab is encoded in the pickle of another class.
*/
- protected def doLoad(cls: ClassRep[T]): Boolean
+ protected def doLoad(cls: classpath.AnyClassRep): Boolean
protected def newClassLoader(bin: T): SymbolLoader
@@ -146,15 +186,15 @@ abstract class SymbolLoaders {
val sourcepaths = classpath.sourcepaths
for (classRep <- classpath.classes if doLoad(classRep)) {
- if (classRep.binary.isDefined && classRep.source.isDefined) {
- val (bin, src) = (classRep.binary.get, classRep.source.get)
- val loader = if (needCompile(bin, src)) new SourcefileLoader(src)
- else newClassLoader(bin)
- enterClassAndModule(root, classRep.name, loader)
- } else if (classRep.binary.isDefined) {
- enterClassAndModule(root, classRep.name, newClassLoader(classRep.binary.get))
- } else if (classRep.source.isDefined) {
- enterClassAndModule(root, classRep.name, new SourcefileLoader(classRep.source.get))
+ ((classRep.binary, classRep.source) : @unchecked) match {
+ case (Some(bin), Some(src)) if needCompile(bin, src) =>
+ if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path)
+ enterToplevelsFromSource(root, classRep.name, src)
+ case (None, Some(src)) =>
+ if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path)
+ enterToplevelsFromSource(root, classRep.name, src)
+ case (Some(bin), _) =>
+ enterClassAndModule(root, classRep.name, newClassLoader(bin))
}
}
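The (binary, source) match above encodes the javac-style rule stated earlier: recompile from source only when it is at least as new as the classfile. A hedged sketch of the same decision using plain timestamps:

    object LoaderChoiceDemo {
      sealed trait Loader
      case object FromClassfile extends Loader
      case object FromSource    extends Loader

      // Recompile from source when it is at least as new as the classfile.
      def pickLoader(binMtime: Option[Long], srcMtime: Option[Long]): Option[Loader] =
        (binMtime, srcMtime) match {
          case (Some(bin), Some(src)) if src >= bin => Some(FromSource)
          case (None, Some(_))                      => Some(FromSource)
          case (Some(_), _)                         => Some(FromClassfile)
          case (None, None)                         => None
        }

      def main(args: Array[String]): Unit = {
        println(pickLoader(Some(100L), Some(200L)))  // Some(FromSource)
        println(pickLoader(Some(200L), Some(100L)))  // Some(FromClassfile)
      }
    }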
@@ -166,21 +206,32 @@ abstract class SymbolLoaders {
val pkgModule = root.info.decl(nme.PACKAGEkw)
if (pkgModule.isModule && !pkgModule.rawInfo.isInstanceOf[SourcefileLoader]) {
//println("open "+pkgModule)//DEBUG
- openPackageModule(pkgModule)
+ openPackageModule(pkgModule)()
}
}
}
- def openPackageModule(m: Symbol) = {
- val owner = m.owner
- for (member <- m.info.decls.iterator) {
- // todo: handle overlapping definitions in some way: mark as errors
- // or treat as abstractions. For now the symbol in the package module takes precedence.
- for (existing <- owner.info.decl(member.name).alternatives)
- owner.info.decls.unlink(existing)
+ def openPackageModule(module: Symbol)(packageClass: Symbol = module.owner): Unit = {
+ // unlink existing symbols in the package
+ for (member <- module.info.decls.iterator) {
+ if (!member.hasFlag(PRIVATE) && !member.isConstructor) {
+ // todo: handle overlapping definitions in some way: mark as errors
+ // or treat as abstractions. For now the symbol in the package module takes precedence.
+ for (existing <- packageClass.info.decl(member.name).alternatives)
+ packageClass.info.decls.unlink(existing)
+ }
}
- for (member <- m.info.decls.iterator) {
- owner.info.decls.enter(member)
+ // enter non-private decls into the class
+ for (member <- module.info.decls.iterator) {
+ if (!member.hasFlag(PRIVATE) && !member.isConstructor) {
+ packageClass.info.decls.enter(member)
+ }
+ }
+ // enter decls of parent classes
+ for (pt <- module.info.parents; val p = pt.typeSymbol) {
+ if (p != definitions.ObjectClass && p != definitions.ScalaObjectClass) {
+ openPackageModule(p)(packageClass)
+ }
}
}
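openPackageModule copies the package object's non-private, non-constructor members into the package class, unlinking whatever they shadow. A toy sketch with maps standing in for scopes (the "private$" prefix is a hypothetical stand-in for the PRIVATE flag):

    object OpenPackageDemo {
      import scala.collection.mutable

      // "Scopes" are maps from member name to a description; a member whose name starts
      // with "private$" plays the role of a PRIVATE-flagged symbol.
      def openPackageModule(moduleDecls: Map[String, String],
                            packageScope: mutable.Map[String, String]): Unit = {
        val visible = moduleDecls.filterNot { case (name, _) => name.startsWith("private$") }
        visible.keys.foreach(packageScope.remove)   // unlink whatever the package object shadows
        packageScope ++= visible                    // then enter its members
      }

      def main(args: Array[String]): Unit = {
        val pkg = mutable.Map("A" -> "class A (from classfile)")
        openPackageModule(Map("A" -> "class A (from package object)", "f" -> "def f"), pkg)
        println(pkg)   // the package object's definitions win
      }
    }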
@@ -188,7 +239,7 @@ abstract class SymbolLoaders {
protected def needCompile(bin: AbstractFile, src: AbstractFile) =
(src.lastModified >= bin.lastModified)
- protected def doLoad(cls: ClassRep[AbstractFile]) = true
+ protected def doLoad(cls: classpath.AnyClassRep) = true
protected def newClassLoader(bin: AbstractFile) =
new ClassfileLoader(bin)
@@ -201,7 +252,7 @@ abstract class SymbolLoaders {
protected def needCompile(bin: MSILType, src: AbstractFile) =
false // always use compiled file on .net
- protected def doLoad(cls: ClassRep[MSILType]) = {
+ protected def doLoad(cls: classpath.AnyClassRep) = {
if (cls.binary.isDefined) {
val typ = cls.binary.get
if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
@@ -224,15 +275,6 @@ abstract class SymbolLoaders {
}
class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
-
- /**
- * @FIXME: iulian,
- * there should not be a new ClassfileParser for every loaded classfile, this object
- * should be outside the class ClassfileLoader! This was changed by Sean in r5494.
- *
- * However, when pulling it out, loading "java.lang.Object" breaks with:
- * "illegal class file dependency between java.lang.Object and java.lang.Class"
- */
private object classfileParser extends ClassfileParser {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
}
@@ -240,18 +282,25 @@ abstract class SymbolLoaders {
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
+ val start = startTimer(classReadNanos)
classfileParser.parse(classfile, root)
+ stopTimer(classReadNanos, start)
}
+ override def sourcefile = classfileParser.srcfile
}
class MSILTypeLoader(typ: MSILType) extends SymbolLoader {
+ private object typeParser extends clr.TypeParser {
+ val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
+ }
+
protected def description = "MSILType "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
}
class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
protected def description = "source file "+ srcfile.toString
- override protected def sourcefile = Some(srcfile)
+ override def sourcefile = Some(srcfile)
protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile)
}
@@ -260,12 +309,12 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
- private object typeParser extends clr.TypeParser {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- }
-
object clrTypes extends clr.CLRTypes {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
if (global.forMSIL) init()
}
+
+ /** used from classfile parser to avoid cycles */
+ var parentsLevel = 0
+ var pendingLoadActions: List[() => Unit] = Nil
}
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index 32af1cbe8a..0fdbeae98f 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -1,32 +1,37 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
-import ast.Trees
+
+import ast.{Trees, TreePrinters, DocComments}
import util._
-abstract class SymbolTable extends Names
+abstract class SymbolTable extends reflect.generic.Universe
+ with Names
with Symbols
with Types
with Scopes
with Definitions
- with Constants
+ with reflect.generic.Constants
with BaseTypeSeqs
with InfoTransformers
with StdNames
with AnnotationInfos
with AnnotationCheckers
with Trees
+ with TreePrinters
with Positions
+ with DocComments
{
def settings: Settings
def rootLoader: LazyType
def log(msg: AnyRef)
+ def abort(msg: String) = throw new Error(msg)
+ def abort() = throw new Error()
/** Are we compiling for Java SE ? */
def forJVM: Boolean
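SymbolTable assembles Names, Symbols, Types, Scopes and the rest as one cake of traits; each layer can require the others through a self type. A minimal sketch of that layering with toy traits:

    object CakeDemo {
      trait NamesApi {
        type Name = String
        def newTermName(s: String): Name = s
      }

      trait SymbolsApi { self: NamesApi =>          // this layer requires NamesApi in the same cake
        class Sym(val name: Name)
        def newSym(s: String): Sym = new Sym(newTermName(s))
      }

      // The "symbol table": one object assembling all the layers.
      object Table extends NamesApi with SymbolsApi

      def main(args: Array[String]): Unit =
        println(Table.newSym("Foo").name)           // Foo
    }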
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index 3b9ffa4f58..359aabdf95 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
@@ -10,28 +9,26 @@ package symtab
import scala.collection.mutable.ListBuffer
import scala.collection.immutable.Map
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{Position, NoPosition, BatchSourceFile}
+import io.AbstractFile
+import util.{Position, NoPosition, BatchSourceFile}
+import util.Statistics._
import Flags._
//todo: get rid of MONOMORPHIC flag
-trait Symbols {
- self: SymbolTable =>
+trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
import definitions._
private var ids = 0
-
- //for statistics:
- def symbolCount = ids
- var typeSymbolCount = 0
- var classSymbolCount = 0
+ def symbolCount = ids // statistics
val emptySymbolArray = new Array[Symbol](0)
- val emptySymbolSet = Set.empty[Symbol]
/** Used for deciding in the IDE whether we can interrupt the compiler */
- protected var activeLocks = 0
+ //protected var activeLocks = 0
+
+ /** Used for debugging only */
+ //protected var lockedSyms = collection.immutable.Set[Symbol]()
/** Used to keep track of the recursion depth on locked symbols */
private var recursionTable = Map.empty[Symbol, Int]
@@ -54,7 +51,7 @@ trait Symbols {
}
*/
/** The class for all symbols */
- abstract class Symbol(initOwner: Symbol, initPos: Position, initName: Name) {
+ abstract class Symbol(initOwner: Symbol, initPos: Position, initName: Name) extends AbsSymbol {
var rawowner = initOwner
var rawname = initName
@@ -105,7 +102,7 @@ trait Symbols {
private var rawannots: List[AnnotationInfoBase] = Nil
- /* Used in namer to check wether annotations were already assigned or not */
+ /* Used in namer to check whether annotations were already assigned or not */
def rawAnnotations:List[AnnotationInfoBase] = rawannots
/** After the typer phase (before, look at the definition's Modifiers), contains
@@ -127,8 +124,9 @@ trait Symbols {
this
}
- def addAnnotation(annot: AnnotationInfo): this.type =
+ override def addAnnotation(annot: AnnotationInfo) {
setAnnotations(annot :: this.rawannots)
+ }
/** Does this symbol have an annotation of the given class? */
def hasAnnotation(cls: Symbol) =
@@ -160,12 +158,16 @@ trait Symbols {
* Java protected: PROTECTED flag set, privateWithin == enclosing package
* Java public: no flag set, privateWithin == NoSymbol
*/
- var privateWithin: Symbol = _
+ private[this] var _privateWithin: Symbol = _
+ def privateWithin = _privateWithin
+ override def privateWithin_=(sym: Symbol) { _privateWithin = sym }
// Creators -------------------------------------------------------------------
final def newValue(pos: Position, name: Name) =
new TermSymbol(this, pos, name)
+ final def newValue(name: Name, pos: Position = NoPosition) =
+ new TermSymbol(this, pos, name)
final def newVariable(pos: Position, name: Name) =
newValue(pos, name).setFlag(MUTABLE)
final def newValueParameter(pos: Position, name: Name) =
@@ -174,7 +176,9 @@ trait Symbols {
final def newLocalDummy(pos: Position) =
newValue(pos, nme.LOCAL(this)).setInfo(NoType)
final def newMethod(pos: Position, name: Name) =
- newValue(pos, name).setFlag(METHOD)
+ new MethodSymbol(this, pos, name).setFlag(METHOD)
+ final def newMethod(name: Name, pos: Position = NoPosition) =
+ new MethodSymbol(this, pos, name).setFlag(METHOD)
final def newLabel(pos: Position, name: Name) =
newMethod(pos, name).setFlag(LABEL)
final def newConstructor(pos: Position) =
@@ -182,6 +186,9 @@ trait Symbols {
final def newModule(pos: Position, name: Name, clazz: ClassSymbol) =
new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
.setModuleClass(clazz)
+ final def newModule(name: Name, clazz: Symbol, pos: Position = NoPosition) =
+ new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
+ .setModuleClass(clazz.asInstanceOf[ClassSymbol])
final def newModule(pos: Position, name: Name) = {
val m = new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
m.setModuleClass(new ModuleClassSymbol(m))
@@ -237,11 +244,15 @@ trait Symbols {
*/
final def newAliasType(pos: Position, name: Name) =
new TypeSymbol(this, pos, name)
+ final def newAliasType(name: Name, pos: Position = NoPosition) =
+ new TypeSymbol(this, pos, name)
/** Symbol of an abstract type type T >: ... <: ...
*/
final def newAbstractType(pos: Position, name: Name) =
new TypeSymbol(this, pos, name).setFlag(DEFERRED)
+ final def newAbstractType(name: Name, pos: Position = NoPosition) =
+ new TypeSymbol(this, pos, name).setFlag(DEFERRED)
/** Symbol of a type parameter
*/
@@ -284,9 +295,9 @@ trait Symbols {
newSyntheticValueParams(List(argtype)).head
/** Type skolems are type parameters ``seen from the inside''
- * Given a class C[T]
- * Then the class has a TypeParameter with name `T' in its typeParams list
- * While type checking the class, there's a local copy of `T' which is a TypeSkolem
+ * Assuming a polymorphic method m[T], its type is a PolyType which has a TypeParameter
+ * with name `T' in its typeParams list. While type checking the parameters, result type and
+ * body of the method, there's a local copy of `T' which is a TypeSkolem.
*/
final def newTypeSkolem: Symbol =
new TypeSkolem(owner, pos, name, this)
@@ -294,13 +305,16 @@ trait Symbols {
final def newClass(pos: Position, name: Name) =
new ClassSymbol(this, pos, name)
+ final def newClass(name: Name, pos: Position = NoPosition) =
+ new ClassSymbol(this, pos, name)
final def newModuleClass(pos: Position, name: Name) =
new ModuleClassSymbol(this, pos, name)
+ final def newModuleClass(name: Name, pos: Position = NoPosition) =
+ new ModuleClassSymbol(this, pos, name)
final def newAnonymousClass(pos: Position) =
newClass(pos, nme.ANON_CLASS_NAME.toTypeName)
-
final def newAnonymousFunctionClass(pos: Position) =
newClass(pos, nme.ANON_FUN_NAME.toTypeName)
@@ -310,6 +324,14 @@ trait Symbols {
final def newRefinementClass(pos: Position) =
newClass(pos, nme.REFINE_CLASS_NAME.toTypeName)
+ /** Create a new getter for current symbol (which must be a field)
+ */
+ final def newGetter: Symbol = {
+ val getter = owner.newMethod(pos.focus, nme.getterName(name)).setFlag(getterFlags(flags))
+ getter.privateWithin = privateWithin
+ getter.setInfo(MethodType(List(), tpe))
+ }
+
final def newErrorClass(name: Name) = {
val clazz = newClass(pos, name).setFlag(SYNTHETIC | IS_ERROR)
clazz.setInfo(ClassInfoType(List(), new ErrorScope(this), clazz))
@@ -349,14 +371,16 @@ trait Symbols {
} else { handler }
} else {
rawflags |= LOCKED
- activeLocks += 1
+// activeLocks += 1
+// lockedSyms += this
}
}
// Unlock a symbol
def unlock() = {
if ((rawflags & LOCKED) != 0L) {
- activeLocks -= 1
+// activeLocks -= 1
+// lockedSyms -= this
rawflags = rawflags & ~LOCKED
if (settings.Yrecursion.value != 0)
recursionTable -= this
@@ -365,17 +389,11 @@ trait Symbols {
// Tests ----------------------------------------------------------------------
- def isTerm = false //to be overridden
- def isType = false //to be overridden
- def isClass = false //to be overridden
- def isTypeMember = false //to be overridden todo: rename, it's something
- // whose definition starts with `type', i.e. a type
- // which is not a class.
- def isAliasType = false //to be overridden
- def isAbstractType = false //to be overridden
- def isSkolem = false //to be overridden
-
- /** Term symbols with the exception of static parts of Java classes and packages */
+ /** Is this symbol a type but not a class? */
+ def isNonClassType = false
+
+ /** Term symbols with the exception of static parts of Java classes and packages.
+ */
final def isValue = isTerm && !(isModule && hasFlag(PACKAGE | JAVA))
final def isVariable = isTerm && hasFlag(MUTABLE) && !isMethod
@@ -391,29 +409,24 @@ trait Symbols {
final def isValueParameter = isTerm && hasFlag(PARAM)
final def isLocalDummy = isTerm && nme.isLocalDummyName(name)
- final def isMethod = isTerm && hasFlag(METHOD)
- final def isSourceMethod = isTerm && (flags & (METHOD | STABLE)) == METHOD.toLong // ???
final def isLabel = isMethod && !hasFlag(ACCESSOR) && hasFlag(LABEL)
final def isInitializedToDefault = !isType && (getFlag(DEFAULTINIT | ACCESSOR) == (DEFAULTINIT | ACCESSOR))
final def isClassConstructor = isTerm && (name == nme.CONSTRUCTOR)
final def isMixinConstructor = isTerm && (name == nme.MIXIN_CONSTRUCTOR)
final def isConstructor = isTerm && (name == nme.CONSTRUCTOR) || (name == nme.MIXIN_CONSTRUCTOR)
- final def isModule = isTerm && hasFlag(MODULE)
final def isStaticModule = isModule && isStatic && !isMethod
- final def isPackage = isModule && hasFlag(PACKAGE)
final def isThisSym = isTerm && owner.thisSym == this
//final def isMonomorphicType = isType && hasFlag(MONOMORPHIC)
final def isError = hasFlag(IS_ERROR)
final def isErroneous = isError || isInitialized && tpe.isErroneous
- final def isTrait = isClass && hasFlag(TRAIT | notDEFERRED) // A virtual class becomes a trait (part of DEVIRTUALIZE)
+ override final def isTrait: Boolean = isClass && hasFlag(TRAIT | notDEFERRED) // A virtual class becomes a trait (part of DEVIRTUALIZE)
final def isTypeParameterOrSkolem = isType && hasFlag(PARAM)
+ final def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
final def isTypeSkolem = isSkolem && hasFlag(PARAM)
- final def isTypeParameter = isTypeParameterOrSkolem && !isSkolem
// a type symbol bound by an existential type, for instance the T in
// List[T] forSome { type T }
- final def isExistential = isType && hasFlag(EXISTENTIAL)
- final def isExistentialSkolem = isSkolem && hasFlag(EXISTENTIAL)
- final def isExistentialQuantified = isExistential && !isSkolem
+ final def isExistentialSkolem = isExistentiallyBound && isSkolem
+ final def isExistentialQuantified = isExistentiallyBound && !isSkolem
// class C extends D( { class E { ... } ... } ). Here, E is a class local to a constructor
final def isClassLocalToConstructor = isClass && hasFlag(INCONSTRUCTOR)
@@ -421,23 +434,19 @@ trait Symbols {
final def isAnonymousClass = isClass && (originalName startsWith nme.ANON_CLASS_NAME) // todo: find out why we can't use containsName here.
final def isAnonymousFunction = hasFlag(SYNTHETIC) && (name containsName nme.ANON_FUN_NAME)
- final def isRefinementClass = isClass && name == nme.REFINE_CLASS_NAME.toTypeName; // no lifting for refinement classes
- final def isModuleClass = isClass && hasFlag(MODULE)
- final def isPackageClass = isClass && hasFlag(PACKAGE)
+ final def isClassOfModule = isModuleClass || isClass && nme.isLocalName(name)
final def isPackageObject = isModule && name == nme.PACKAGEkw && owner.isPackageClass
final def isPackageObjectClass = isModuleClass && name.toTermName == nme.PACKAGEkw && owner.isPackageClass
final def definedInPackage = owner.isPackageClass || owner.isPackageObjectClass
- final def isRoot = isPackageClass && name == nme.ROOT.toTypeName
- final def isRootPackage = isPackage && name == nme.ROOTPKG
- final def isEmptyPackage = isPackage && name == nme.EMPTY_PACKAGE_NAME
- final def isEmptyPackageClass = isPackageClass && name == nme.EMPTY_PACKAGE_NAME.toTypeName
+ final def isJavaInterface = hasFlag(JAVA) && isTrait
+
final def isPredefModule = isModule && name == nme.Predef && owner.isScalaPackageClass // not printed as a prefix
final def isScalaPackage = isPackage && name == nme.scala_ && owner.isRoot || // not printed as a prefix
isPackageObject && owner.isScalaPackageClass
final def isScalaPackageClass: Boolean = isPackageClass && owner.isRoot && name == nme.scala_.toTypeName ||
isPackageObjectClass && owner.isScalaPackageClass // not printed as a prefix
- /** Is symbol a monomophic type?
+ /** Is symbol a monomorphic type?
* assumption: if a type starts out as monomorphic, it will not acquire
* type parameters in later phases.
*/
@@ -450,26 +459,17 @@ trait Symbols {
}
}
- def isDeprecated = hasAnnotation(DeprecatedAttr)
- def deprecationMessage: Option[String] =
- annotations find (_.atp.typeSymbol == DeprecatedAttr) flatMap { annot =>
- annot.args match {
- case Literal(const) :: Nil =>
- Some(const.stringValue)
- case _ =>
- None
- }
- }
- def elisionLevel: Option[Int] = {
- if (!hasAnnotation(ElidableMethodClass)) None
- else annotations find (_.atp.typeSymbol == ElidableMethodClass) flatMap { annot =>
- // since we default to enabled by default, only look hard for falsity
- annot.args match {
- case Literal(Constant(x: Int)) :: Nil => Some(x)
- case _ => None
- }
- }
- }
+ def isDeprecated = hasAnnotation(DeprecatedAttr)
+ def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap { _.stringArg(0) }
+ // !!! when annotation arguments are not literal strings, but any sort of
+ // assembly of strings, there is a fair chance they will turn up here not as
+ // Literal(const) but some arbitrary AST. However nothing in the compiler
+ // prevents someone from writing a @migration annotation with a calculated
+ // string. So this needs attention. For now the fact that migration is
+ // private[scala] ought to provide enough protection.
+ def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(2) }
+ def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
+ def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
/** Does this symbol denote a wrapper object of the interpreter or its class? */
final def isInterpreterWrapper =
@@ -478,6 +478,8 @@ trait Symbols {
name.toString.startsWith(nme.INTERPRETER_LINE_PREFIX) &&
name.toString.endsWith(nme.INTERPRETER_WRAPPER_SUFFIX)
+ override def isEffectiveRoot = super.isEffectiveRoot || isInterpreterWrapper
+
/** Is this symbol an accessor method for outer? */
final def isOuterAccessor = {
hasFlag(STABLE | SYNTHETIC) &&
@@ -497,9 +499,6 @@ trait Symbols {
(!hasFlag(METHOD | BYNAMEPARAM) || hasFlag(STABLE)) &&
!(tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
- def isDeferred =
- hasFlag(DEFERRED) && !isClass
-
def isVirtualClass =
hasFlag(DEFERRED) && isClass
@@ -507,16 +506,6 @@ trait Symbols {
hasFlag(DEFERRED) && isTrait
/** Is this symbol a public */
- final def isPublic: Boolean =
- !hasFlag(PRIVATE | PROTECTED) && privateWithin == NoSymbol
-
- /** Is this symbol a private local */
- final def isPrivateLocal =
- hasFlag(PRIVATE) && hasFlag(LOCAL)
-
- /** Is this symbol a protected local */
- final def isProtectedLocal =
- hasFlag(PROTECTED) && hasFlag(LOCAL)
/** Does this symbol denote the primary constructor of its enclosing class? */
final def isPrimaryConstructor =
@@ -530,12 +519,6 @@ trait Symbols {
final def isCaseApplyOrUnapply =
isMethod && hasFlag(CASE) && hasFlag(SYNTHETIC)
- /** Is this symbol an implementation class for a mixin? */
- final def isImplClass: Boolean = isClass && hasFlag(IMPLCLASS)
-
- /** Is thhis symbol early initialized */
- final def isEarly: Boolean = isTerm && hasFlag(PRESUPER)
-
/** Is this symbol a trait which needs an implementation class? */
final def needsImplClass: Boolean =
isTrait && (!hasFlag(INTERFACE) || hasFlag(lateINTERFACE)) && !isImplClass
@@ -562,16 +545,9 @@ trait Symbols {
final def isStaticOwner: Boolean =
isPackageClass || isModuleClass && isStatic
- /** Is this symbol final?*/
- final def isFinal: Boolean = (
- hasFlag(FINAL) ||
- isTerm && (
- hasFlag(PRIVATE) || isLocal || owner.isClass && owner.hasFlag(FINAL | MODULE))
- )
-
- /** Is this symbol a sealed class?*/
- final def isSealed: Boolean =
- isClass && (hasFlag(SEALED) || isValueClass(this))
+ /** Is this symbol effectively final? I.e., it cannot be overridden */
+ final def isEffectivelyFinal: Boolean = isFinal || isTerm && (
+ hasFlag(PRIVATE) || isLocal || owner.isClass && owner.hasFlag(FINAL | MODULE))
/** Is this symbol locally defined? I.e. not accessed from outside `this' instance */
final def isLocal: Boolean = owner.isTerm
@@ -613,7 +589,7 @@ trait Symbols {
/** A a member of class `base' is incomplete if
* (1) it is declared deferred or
* (2) it is abstract override and its super symbol in `base' is
- * nonexistent or inclomplete.
+ * nonexistent or incomplete.
*
* @param base ...
* @return ...
@@ -625,6 +601,8 @@ trait Symbols {
supersym == NoSymbol || supersym.isIncompleteIn(base)
}
+ // Does not always work if the rawInfo is a SourcefileLoader, see comment
+ // in "def coreClassesFirst" in Global.
final def exists: Boolean =
this != NoSymbol && (!owner.isPackageClass || { rawInfo.load(this); rawInfo != NoType })
@@ -644,9 +622,6 @@ trait Symbols {
isClass && (hasFlag(STABLE) || checkStable())
}
- final def isCovariant: Boolean = isType && hasFlag(COVARIANT)
-
- final def isContravariant: Boolean = isType && hasFlag(CONTRAVARIANT)
/** The variance of this symbol as an integer */
final def variance: Int =
@@ -657,9 +632,14 @@ trait Symbols {
// Flags, owner, and name attributes --------------------------------------------------------------
def owner: Symbol = rawowner
- final def owner_=(owner: Symbol) { rawowner = owner }
+ override final def owner_=(owner: Symbol) { rawowner = owner }
def ownerChain: List[Symbol] = this :: owner.ownerChain
+ def enclClassChain: List[Symbol] = {
+ if (this eq NoSymbol) Nil
+ else if (isClass && !isPackageClass) this :: owner.enclClassChain
+ else owner.enclClassChain
+ }
def ownersIterator: Iterator[Symbol] = new Iterator[Symbol] {
private var current = Symbol.this
@@ -698,18 +678,19 @@ trait Symbols {
val fs = rawflags & phase.flagMask
(fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
}
- final def flags_=(fs: Long) = rawflags = fs
+ override final def flags_=(fs: Long) = rawflags = fs
final def setFlag(mask: Long): this.type = { rawflags = rawflags | mask; this }
final def resetFlag(mask: Long): this.type = { rawflags = rawflags & ~mask; this }
final def getFlag(mask: Long): Long = flags & mask
- final def hasFlag(mask: Long): Boolean = (flags & mask) != 0L
final def resetFlags { rawflags = rawflags & TopLevelCreationFlags }
+ final def hasAccessBoundary = (privateWithin != null) && (privateWithin != NoSymbol)
+
/** The class or term up to which this symbol is accessible,
- * or RootClass if it is public
+ * or RootClass if it is public.
*/
def accessBoundary(base: Symbol): Symbol = {
- if (hasFlag(PRIVATE) || owner.isTerm) owner
+ if (hasFlag(PRIVATE) || isLocal) owner
else if (privateWithin != NoSymbol && !phase.erasedTypes) privateWithin
else if (hasFlag(PROTECTED)) base
else RootClass
@@ -737,12 +718,12 @@ trait Symbols {
* to generate a type of kind *
* for a term symbol, its usual type
*/
- def tpe: Type = info
+ override def tpe: Type = info
/** Get type info associated with symbol at current phase, after
* ensuring that symbol is initialized (i.e. type is completed).
*/
- def info: Type = try {
+ override def info: Type = try {
var cnt = 0
while (validTo == NoPeriod) {
//if (settings.debug.value) System.out.println("completing " + this);//DEBUG
@@ -750,9 +731,15 @@ trait Symbols {
assert(infos.prev eq null, this.name)
val tp = infos.info
//if (settings.debug.value) System.out.println("completing " + this.rawname + tp.getClass());//debug
- lock {
- setInfo(ErrorType)
- throw CyclicReference(this, tp)
+ if ((rawflags & LOCKED) != 0L) { // rolled out once for performance
+ lock {
+ setInfo(ErrorType)
+ throw CyclicReference(this, tp)
+ }
+ } else {
+ rawflags |= LOCKED
+// activeLocks += 1
+ // lockedSyms += this
}
val current = phase
try {
@@ -766,7 +753,7 @@ trait Symbols {
cnt += 1
// allow for two completions:
// one: sourceCompleter to LazyType, two: LazyType to completed type
- if (cnt == 3) throw new Error("no progress in completing " + this + ":" + tp)
+ if (cnt == 3) abort("no progress in completing " + this + ":" + tp)
}
val result = rawInfo
result
@@ -776,20 +763,24 @@ trait Symbols {
throw ex
}
- /** Set initial info. */
- def setInfo(info: Type): this.type = {
+ override def info_=(info: Type) {
assert(info ne null)
infos = TypeHistory(currentPeriod, info, null)
unlock()
validTo = if (info.isComplete) currentPeriod else NoPeriod
- this
}
+ /** Set initial info. */
+ def setInfo(info: Type): this.type = { info_=(info); this }
+
+ def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info.atOwner(this))
+
/** Set new info valid from start of this phase. */
final def updateInfo(info: Type): Symbol = {
assert(phaseId(infos.validFrom) <= phase.id)
if (phaseId(infos.validFrom) == phase.id) infos = infos.prev
infos = TypeHistory(currentPeriod, info, infos)
+ validTo = if (info.isComplete) currentPeriod else NoPeriod
this
}
@@ -884,13 +875,28 @@ trait Symbols {
infos ne null
}
+ /** Modify term symbol's type so that a raw type C is converted to an existential C[_]
+ *
+ * This is done in checkAccessible and overriding checks in refchecks
+ * We can't do this on class loading because it would result in infinite cycles.
+ */
+ private var triedCooking: Boolean = false
+ final def cookJavaRawInfo() {
+ // println("cookJavaRawInfo: "+(rawname, triedCooking))
+ if(triedCooking) return else triedCooking = true // only try once...
+ doCookJavaRawInfo()
+ }
+
+ protected def doCookJavaRawInfo(): Unit
+
+
/** The type constructor of a symbol is:
* For a type symbol, the type corresponding to the symbol itself,
* excluding parameters.
* Not applicable for term symbols.
*/
def typeConstructor: Type =
- throw new Error("typeConstructor inapplicable for " + this)
+ abort("typeConstructor inapplicable for " + this)
/** @M -- tpe vs tpeHK:
* Symbol::tpe creates a TypeRef that has dummy type arguments to get a type of kind *
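cookJavaRawInfo above turns a Java raw type C into the existential C[_]. There is no Java source in this sketch to trigger that conversion, but writing the existential explicitly shows the shape the cooked info takes:

    object RawTypeDemo {
      // A raw java.util.List coming from Java source or bytecode is seen from Scala
      // as the existential java.util.List[_]; spelling it out shows that shape.
      val cooked: java.util.List[_] = new java.util.ArrayList[String]()

      def main(args: Array[String]): Unit =
        println(cooked.size)   // 0
    }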
@@ -910,7 +916,11 @@ trait Symbols {
else {
val current = phase
try {
- while (phase.keepsTypeParams) phase = phase.prev
+ while ((phase.prev ne NoPhase) && phase.prev.keepsTypeParams) phase = phase.prev
+// while (phase.keepsTypeParams && (phase.prev ne NoPhase)) phase = phase.prev
+ if (phase ne current) phase = phase.next
+ if (settings.debug.value && (phase ne current))
+ log("checking unsafeTypeParams(" + this + ") at: " + current + " reading at: " + phase)
rawInfo.typeParams
} finally {
phase = current
@@ -922,7 +932,20 @@ trait Symbols {
* type parameters later.
*/
def typeParams: List[Symbol] =
- if (isMonomorphicType) List() else { rawInfo.load(this); rawInfo.typeParams }
+ if (isMonomorphicType)
+ List()
+ else {
+ if (validTo == NoPeriod) {
+ val current = phase
+ try {
+ phase = phaseOf(infos.validFrom)
+ rawInfo.load(this)
+ } finally {
+ phase = current
+ }
+ }
+ rawInfo.typeParams
+ }
/** The value parameter sections of this symbol.
*/
@@ -965,13 +988,13 @@ trait Symbols {
*/
def existentialBound: Type =
if (this.isClass)
- polyType(this.typeParams, mkTypeBounds(NothingClass.tpe, this.classBound))
+ polyType(this.typeParams, TypeBounds(NothingClass.tpe, this.classBound))
else if (this.isAbstractType)
this.info
else if (this.isTerm)
- mkTypeBounds(NothingClass.tpe, intersectionType(List(this.tpe, SingletonClass.tpe)))
+ TypeBounds(NothingClass.tpe, intersectionType(List(this.tpe, SingletonClass.tpe)))
else
- throw new Error("unexpected alias type: "+this)
+ abort("unexpected alias type: "+this)
/** Reset symbol to initial state
*/
@@ -1036,8 +1059,9 @@ trait Symbols {
else if (alts1.isEmpty) NoSymbol
else if (alts1.tail.isEmpty) alts1.head
else owner.newOverloaded(info.prefix, alts1)
- } else if (cond(this)) this
- else NoSymbol
+ } else if (this == NoSymbol || cond(this)) {
+ this
+ } else NoSymbol
def suchThat(cond: Symbol => Boolean): Symbol = {
val result = filter(cond)
@@ -1054,6 +1078,7 @@ trait Symbols {
/** A clone of this symbol, but with given owner */
final def cloneSymbol(owner: Symbol): Symbol = {
val newSym = cloneSymbolImpl(owner)
+ newSym.privateWithin = privateWithin
newSym.setInfo(info.cloneInfo(newSym))
.setFlag(this.rawflags).setAnnotations(this.annotations)
}
@@ -1064,12 +1089,6 @@ trait Symbols {
// Access to related symbols --------------------------------------------------
- /** The next enclosing class */
- def enclClass: Symbol = if (isClass) this else owner.enclClass
-
- /** The next enclosing method */
- def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod
-
/** The primary constructor of a class */
def primaryConstructor: Symbol = {
var c = info.decl(
@@ -1088,10 +1107,6 @@ trait Symbols {
/** The type of `this' in a class, or else the type of the symbol itself. */
def typeOfThis = thisSym.tpe
- /** Sets the type of `this' in a class */
- def typeOfThis_=(tp: Type): Unit =
- throw new Error("typeOfThis cannot be set for " + this)
-
/** If symbol is a class, the type <code>this.type</code> in this class,
* otherwise <code>NoPrefix</code>.
* We always have: thisType <:< typeOfThis
@@ -1123,9 +1138,10 @@ trait Symbols {
def renamedGetter = accessors find (_.originalName startsWith (getterName + "$"))
val accessorName = origGetter orElse renamedGetter
- accessorName getOrElse {
- throw new Error("Could not find case accessor for %s in %s".format(field, this))
- }
+ // This fails more gracefully rather than throw an Error as it used to because
+ // as seen in #2625, we can reach this point with an already erroneous tree.
+ accessorName getOrElse NoSymbol
+ // throw new Error("Could not find case accessor for %s in %s".format(field, this))
}
fields map findAccessor
@@ -1158,16 +1174,12 @@ trait Symbols {
*/
def alias: Symbol = NoSymbol
- /** For parameter symbols: the method computing its default value, NoSymbol
- * for all others
- */
- def defaultGetter: Symbol = NoSymbol
- def defaultGetter_=(getter: Symbol): Unit =
- throw new Error("defaultGetter cannot be set for " + this)
-
/** For a lazy value, its lazy accessor. NoSymbol for all others */
def lazyAccessor: Symbol = NoSymbol
+ /** If this is a lazy value, the lazy accessor; otherwise this symbol. */
+ def lazyAccessorOrSelf: Symbol = if (isLazy) lazyAccessor else this
+
/** For an outer accessor: The class from which the outer originates.
* For all other symbols: NoSymbol
*/
@@ -1178,7 +1190,7 @@ trait Symbols {
/** The directly or indirectly inherited mixins of this class
* except for mixin classes inherited by the superclass. Mixin classes appear
- * in linearlization order.
+ * in linearization order.
*/
def mixinClasses: List[Symbol] = {
val sc = superClass
@@ -1189,19 +1201,25 @@ trait Symbols {
*/
def ancestors: List[Symbol] = info.baseClasses drop 1
- /** The package containing this symbol, or NoSymbol if there
+ /** The package class containing this symbol, or NoSymbol if there
* is not one. */
- def enclosingPackage: Symbol =
+ def enclosingPackageClass: Symbol =
if (this == NoSymbol) this else {
var packSym = this.owner
while ((packSym != NoSymbol)
&& !packSym.isPackageClass)
packSym = packSym.owner
- if (packSym != NoSymbol)
- packSym = packSym.linkedModuleOfClass
packSym
}
+ /** The package containing this symbol, or NoSymbol if there
+ * is not one. */
+ def enclosingPackage: Symbol = {
+ val packSym = enclosingPackageClass
+ if (packSym != NoSymbol) packSym.companionModule
+ else packSym
+ }
+
/** The top-level class containing this symbol */
def toplevelClass: Symbol =
if (owner.isPackageClass) {
@@ -1223,54 +1241,103 @@ trait Symbols {
// appears to succeed but highly opaque errors come later: see bug #1286
if (res == false) {
val (f1, f2) = (this.sourceFile, that.sourceFile)
- if (f1 != null && f2 != null && f1 != f2)
- throw FatalError("Companions '" + this + "' and '" + that + "' must be defined in same file.")
+ if (f1 != null && f2 != null && f1.path != f2.path)
+ throw InvalidCompanions(this, that)
}
-
res
}
+ /** @PP: Added diagram because every time I come through here I end up
+ * losing my train of thought. [Renaming occurs.] This diagram is a
+ * bit less necessary since the renaming, but leaving in place
+ * due to high artistic merit.
+ *
+ * class Foo <
+ * ^ ^ (2) \
+ * | | | \
+ * | (5) | (3)
+ * | | | \
+ * (1) v v \
+ * object Foo (4)-> > class Foo$
+ *
+ * (1) companionClass
+ * (2) companionModule
+ * (3) linkedClassOfClass
+ * (4) moduleClass
+ * (5) companionSymbol
+ */
+
/** The class with the same name in the same package as this module or
- * case class factory. A better name would be companionClassOfModule.
+ * case class factory.
+ * Note: does not work for classes owned by methods, see
+ * Namers.companionClassOf
*/
- final def linkedClassOfModule: Symbol = {
+ final def companionClass: Symbol = {
if (this != NoSymbol)
- owner.rawInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
+ flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
else NoSymbol
}
- /** The module or case class factory with the same name in the same
- * package as this class. A better name would be companionModuleOfClass.
+ /** A helper method that factors the common code used to discover a
+ * companion module of a class. If a companion module exists, its symbol is
+ * returned; otherwise `NoSymbol` is returned. The method assumes that
+ * `this` symbol has already been checked to be a class (using `isClass`).
*/
- final def linkedModuleOfClass: Symbol =
- if (this.isClass && !this.isAnonymousClass && !this.isRefinementClass) {
- owner.rawInfo.decl(name.toTermName).suchThat(
- sym => (sym hasFlag MODULE) && (sym isCoDefinedWith this))
- } else NoSymbol
+ private final def companionModule0: Symbol =
+ flatOwnerInfo.decl(name.toTermName).suchThat(
+ sym => sym.hasFlag(MODULE) && (sym isCoDefinedWith this) && !sym.isMethod)
+
+ /** For a class: the module or case class factory with the same name in the same package.
+ * For all others: NoSymbol
+ * Note: does not work for modules owned by methods, see Namers.companionModuleOf
+ *
+ * class Foo . companionModule --> object Foo
+ */
+ final def companionModule: Symbol =
+ if (this.isClass && !this.isAnonymousClass && !this.isRefinementClass)
+ companionModule0
+ else NoSymbol
/** For a module its linked class, for a class its linked module or case
* factory otherwise.
+ * Note: does not work for modules owned by methods, see
+ * Namers.companionSymbolOf
*/
- final def linkedSym: Symbol =
- if (isTerm) linkedClassOfModule
- else if (isClass) owner.rawInfo.decl(name.toTermName).suchThat(_ isCoDefinedWith this)
+ final def companionSymbol: Symbol =
+ if (isTerm) companionClass
+ else if (isClass)
+ companionModule0
else NoSymbol
- /** For a module class its linked class, for a plain class
- * the module class of its linked module.
- * For instance:
- * object Foo
- * class Foo
+ /** For a module class: its linked class
+ * For a plain class: the module class of its linked module.
*
* Then object Foo has a `moduleClass' (invisible to the user, the backend calls it Foo$).
- * linkedClassOFClass goes from class Foo$ to class Foo, and back.
+ * linkedClassOfClass goes from class Foo$ to class Foo, and back.
*/
final def linkedClassOfClass: Symbol =
- if (isModuleClass) linkedClassOfModule else linkedModuleOfClass.moduleClass
+ if (isModuleClass) companionClass else companionModule.moduleClass
+
+ /**
+ * Returns the rawInfo of the owner. If the current phase has flat classes, it first
+ * applies all pending type maps to this symbol.
+ *
+ * assume this is the ModuleSymbol for B in the following definition:
+ * package p { class A { object B { val x = 1 } } }
+ *
+ * The owner after flatten is "package p" (see "def owner"). The flatten type map enters
+ * symbol B in the decls of p. So to find a linked symbol ("object B" or "class B")
+ * we need to apply flatten to B first. Fixes #2470.
+ */
+ private final def flatOwnerInfo: Type = {
+ if (phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass)
+ info
+ owner.rawInfo
+ }
/** If this symbol is an implementation class, its interface, otherwise the symbol itself
* The method follows two strategies to determine the interface.
- * - during or after erasure, it takes the last parent of the implementatation class
+ * - during or after erasure, it takes the last parent of the implementation class
* (which is always the interface, by convention)
* - before erasure, it looks up the interface name in the scope of the owner of the class.
* This only works for implementation classes owned by other classes or traits.
@@ -1288,11 +1355,6 @@ trait Symbols {
result
} else this
- /** The module corresponding to this module class (note that this
- * is not updated when a module is cloned).
- */
- def sourceModule: Symbol = NoSymbol
-
/** The module class corresponding to this module.
*/
def moduleClass: Symbol = NoSymbol
@@ -1309,8 +1371,8 @@ trait Symbols {
/** The non-private member of `site' whose type and name match the type of this symbol
*/
- final def matchingSymbol(site: Type): Symbol =
- site.nonPrivateMember(name).filter(sym =>
+ final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
+ site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
!sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
/** The symbol overridden by this symbol in given class `ofclazz'.
@@ -1368,7 +1430,7 @@ trait Symbols {
final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
var sname = nme.getterToSetter(nme.getterName(name))
- if (hasExpandedName) sname = base.expandedSetterName(sname)
+ if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
base.info.decl(sname) filter (_.hasFlag(ACCESSOR))
}
@@ -1378,7 +1440,7 @@ trait Symbols {
final def caseModule: Symbol = {
var modname = name.toTermName
if (privateWithin.isClass && !privateWithin.isModuleClass && !hasFlag(EXPANDEDNAME))
- modname = privateWithin.expandedName(modname)
+ modname = nme.expandedName(modname, privateWithin)
initialize.owner.info.decl(modname).suchThat(_.isModule)
}
@@ -1416,47 +1478,47 @@ trait Symbols {
getter(owner).expandName(base)
setter(owner).expandName(base)
}
- name = base.expandedName(name)
+ name = nme.expandedName(name, base)
if (isType) name = name.toTypeName
}
}
- def expandedSetterName(simpleSetterName: Name): Name =
- newTermName(fullNameString('$') + nme.TRAIT_SETTER_SEPARATOR_STRING + simpleSetterName)
-
- /** The expanded name of `name' relative to this class as base
- */
- def expandedName(name: Name): Name = {
- newTermName(fullNameString('$') + nme.EXPAND_SEPARATOR_STRING + name)
- }
-
def sourceFile: AbstractFile =
(if (isModule) moduleClass else toplevelClass).sourceFile
def sourceFile_=(f: AbstractFile) {
- throw new Error("sourceFile_= inapplicable for " + this)
+ abort("sourceFile_= inapplicable for " + this)
}
def isFromClassFile: Boolean =
(if (isModule) moduleClass else toplevelClass).isFromClassFile
/** If this is a sealed class, its known direct subclasses. Otherwise Set.empty */
- def children: Set[Symbol] = emptySymbolSet
+ def children: List[Symbol] = Nil
- /** Declare given subclass `sym' of this sealed class */
- def addChild(sym: Symbol) {
- throw new Error("addChild inapplicable for " + this)
- }
+ /** Recursively finds all sealed descendants and returns a sorted list. */
+ def sealedDescendants: List[Symbol] = {
+ val kids = children flatMap (_.sealedDescendants)
+ val all = if (this hasFlag ABSTRACT) kids else this :: kids
+ all.distinct sortBy (_.sealedSortName)
+ }
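+ /* Illustrative sketch, assuming a hypothetical hierarchy
+ * `sealed abstract class Tree; case class Leaf() extends Tree; case class Node() extends Tree`:
+ * Tree.sealedDescendants would be the sorted list of Leaf and Node -- Tree itself is
+ * dropped because it carries the ABSTRACT flag.
+ */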
// ToString -------------------------------------------------------------------
/** A tag which (in the ideal case) uniquely identifies class symbols */
- final def tag: Int = fullNameString.hashCode()
+ final def tag: Int = fullName.hashCode()
/** The simple name of this Symbol */
final def simpleName: Name = name
+ /** The String used to order otherwise identical sealed symbols.
+ * This uses data which is stable across runs and variable classpaths
+ * (the initial Name) before falling back on id, which varies depending
+ * on exactly when a symbol is loaded.
+ */
+ final def sealedSortName: String = initName.toString + "#" + id
+
/** String representation of symbol's definition key word */
final def keyString: String =
if (isTrait && hasFlag(JAVA)) "interface"
@@ -1466,7 +1528,7 @@ trait Symbols {
else if (isVariable) "var"
else if (isPackage) "package"
else if (isModule) "object"
- else if (isMethod) "def"
+ else if (isSourceMethod) "def"
else if (isTerm && (!hasFlag(PARAM) || hasFlag(PARAMACCESSOR))) "val"
else ""
@@ -1495,32 +1557,11 @@ trait Symbols {
* E.g. $eq => =.
* If settings.uniqid is set, adds id.
*/
- def nameString: String = cleanNameString + idString
+ def nameString: String = decodedName + idString
- /** A nameString that never adds idString, for use in e.g. GenJVM
- * where appending #uniqid breaks the bytecode.
+ /** The name of the symbol before decoding, e.g. `$eq$eq` instead of `==`.
*/
- def cleanNameString: String = {
- val s = simpleName.decode
- if (s endsWith nme.LOCAL_SUFFIX) s.substring(0, s.length - nme.LOCAL_SUFFIX.length)
- else s
- }
-
- /** String representation of symbol's full name with <code>separator</code>
- * between class names.
- * Never translates expansions of operators back to operator symbol.
- * Never adds id.
- */
- final def fullNameString(separator: Char): String = {
- var str =
- if (isRoot || isRootPackage || this == NoSymbol) this.toString
- else if (owner.isRoot || owner.isEmptyPackageClass || owner.isInterpreterWrapper) simpleName.toString
- else owner.enclClass.fullNameString(separator) + separator + simpleName
- if (str.charAt(str.length - 1) == ' ') str = str.substring(0, str.length - 1)
- str
- }
-
- final def fullNameString: String = fullNameString('.')
+ def encodedName: String = name.toString
/** If settings.uniqid is set, the symbol's id, else "" */
final def idString: String =
@@ -1593,6 +1634,11 @@ trait Symbols {
def infosString = infos.toString()
+ def hasFlagsToString(mask: Long): String = flagsToString(
+ flags & mask,
+ if (hasAccessBoundary) privateWithin.toString else ""
+ )
+
/** String representation of symbol's variance */
def varianceString: String =
if (variance == 1) "+"
@@ -1631,13 +1677,13 @@ trait Symbols {
privateWithin = NoSymbol
protected var referenced: Symbol = NoSymbol
- protected var defGetter: Symbol = NoSymbol
- def cloneSymbolImpl(owner: Symbol): Symbol = {
- val clone = new TermSymbol(owner, pos, name)
- clone.referenced = referenced
- clone.defGetter = defGetter
- clone
+ def cloneSymbolImpl(owner: Symbol): Symbol =
+ new TermSymbol(owner, pos, name).copyAttrsFrom(this)
+
+ def copyAttrsFrom(original: TermSymbol): this.type = {
+ referenced = original.referenced
+ this
}
private val validAliasFlags = SUPERACCESSOR | PARAMACCESSOR | MIXEDIN | SPECIALIZED
@@ -1655,10 +1701,6 @@ trait Symbols {
this
}
- override def defaultGetter = defGetter
- override def defaultGetter_=(getter: Symbol): Unit =
- defGetter = getter
-
override def outerSource: Symbol =
if (name endsWith nme.OUTER) initialize.referenced
else NoSymbol
@@ -1682,12 +1724,41 @@ trait Symbols {
assert(hasFlag(LAZY), this)
referenced
}
+
+ protected def doCookJavaRawInfo() {
+ def cook(sym: Symbol) {
+ require(sym hasFlag JAVA)
+ // @M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
+ // object rawToExistentialInJava extends TypeMap {
+ // def apply(tp: Type): Type = tp match {
+ // // any symbol that occurs in a java sig, not just java symbols
+ // // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
+ // case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
+ // val eparams = typeParamsToExistentials(sym, sym.typeParams)
+ // existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+ // case _ =>
+ // mapOver(tp)
+ // }
+ // }
+ val tpe1 = rawToExistential(sym.tpe)
+ // println("cooking: "+ sym +": "+ sym.tpe +" to "+ tpe1)
+ if (tpe1 ne sym.tpe) {
+ sym.setInfo(tpe1)
+ }
+ }
+
+ if (hasFlag(JAVA))
+ cook(this)
+ else if (hasFlag(OVERLOADED))
+ for (sym2 <- alternatives)
+ if (sym2 hasFlag JAVA)
+ cook(sym2)
+ }
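+ /* Illustrative sketch (assumed behaviour of rawToExistential): a Java member whose
+ * signature mentions a raw type such as java.util.List would get its info rewritten
+ * to use an existential, roughly java.util.List[_], so later Scala typechecking sees
+ * an existential rather than the raw type.
+ */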
}
/** A class for module symbols */
class ModuleSymbol(initOwner: Symbol, initPos: Position, initName: Name)
extends TermSymbol(initOwner, initPos, initName) {
-
private var flatname = nme.EMPTY
override def owner: Symbol =
@@ -1705,10 +1776,35 @@ trait Symbols {
flatname
} else rawname
- override def cloneSymbolImpl(owner: Symbol): Symbol = {
- val clone = new ModuleSymbol(owner, pos, name)
- clone.referenced = referenced
- clone
+ override def cloneSymbolImpl(owner: Symbol): Symbol =
+ new ModuleSymbol(owner, pos, name).copyAttrsFrom(this)
+ }
+
+ /** A class for method symbols */
+ class MethodSymbol(initOwner: Symbol, initPos: Position, initName: Name)
+ extends TermSymbol(initOwner, initPos, initName) {
+
+ private var mtpePeriod = NoPeriod
+ private var mtpePre: Type = _
+ private var mtpeResult: Type = _
+ private var mtpeInfo: Type = _
+
+ override def cloneSymbolImpl(owner: Symbol): Symbol =
+ new MethodSymbol(owner, pos, name).copyAttrsFrom(this)
+
+ def typeAsMemberOf(pre: Type): Type = {
+ if (mtpePeriod == currentPeriod) {
+ if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
+ } else if (isValid(mtpePeriod)) {
+ mtpePeriod = currentPeriod
+ if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
+ }
+ val res = pre.computeMemberType(this)
+ mtpePeriod = currentPeriod
+ mtpePre = pre
+ mtpeInfo = info
+ mtpeResult = res
+ res
}
}
@@ -1724,7 +1820,7 @@ trait Symbols {
private var tpePeriod = NoPeriod
override def isType = true
- override def isTypeMember = true
+ override def isNonClassType = true
override def isAbstractType = isDeferred
override def isAliasType = !isDeferred
@@ -1758,7 +1854,7 @@ trait Symbols {
tpeCache
}
- // needed for experimentlal code for early types as type parameters
+ // needed for experimental code for early types as type parameters
// def refreshType() { tpePeriod = NoPeriod }
override def typeConstructor: Type = {
@@ -1771,7 +1867,7 @@ trait Symbols {
tyconCache
}
- override def setInfo(tp: Type): this.type = {
+ override def info_=(tp: Type) {
tpePeriod = NoPeriod
tyconCache = null
if (tp.isComplete)
@@ -1780,8 +1876,7 @@ trait Symbols {
case NoType | AnnotatedType(_, _, _) => ;
case _ => setFlag(MONOMORPHIC)
}
- super.setInfo(tp)
- this
+ super.info_=(tp)
}
override def reset(completer: Type) {
@@ -1790,13 +1885,39 @@ trait Symbols {
tyconRunId = NoRunId
}
+ /*** example:
+ * public class Test3<T> {}
+ * public class Test1<T extends Test3> {}
+ * info for T in Test1 should be >: Nothing <: Test3[_]
+ */
+ protected def doCookJavaRawInfo() {
+ // don't require hasFlag(JAVA), since T in the above example does not have that flag
+ val tpe1 = rawToExistential(info)
+ // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
+ if (tpe1 ne info) {
+ setInfo(tpe1)
+ }
+ }
+
def cloneSymbolImpl(owner: Symbol): Symbol =
new TypeSymbol(owner, pos, name)
- if (util.Statistics.enabled) typeSymbolCount = typeSymbolCount + 1
+ incCounter(typeSymbolCount)
}
- /** A class for type parameters viewed from inside their scopes */
+ /** A class for type parameters viewed from inside their scopes
+ *
+ * @param origin Can be either a tree, or a symbol, or null.
+ * If the skolem was created by newTypeSkolem (called in Namers), origin denotes
+ * the type parameter from which the skolem was created. If it was created by
+ * skolemizeExistential, origin is either null or a Tree. If it is a Tree, it indicates
+ * where the skolem was introduced (this is important for knowing when to pack it
+ * again into an Existential). origin is `null' only in the skolemizeExistential calls
+ * from <:< or isAsSpecific, because there its value does not matter.
+ * I believe the following invariant holds:
+ *
+ * origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
+ */
class TypeSkolem(initOwner: Symbol, initPos: Position,
initName: Name, origin: AnyRef)
extends TypeSymbol(initOwner, initPos, initName) {
@@ -1805,11 +1926,16 @@ trait Symbols {
val level = skolemizationLevel
override def isSkolem = true
+
+ /** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */
override def deSkolemize = origin match {
case s: Symbol => s
case _ => this
}
+
+ /** If type skolem comes from an existential, the tree where it was created */
override def unpackLocation = origin
+
override def typeParams = info.typeParams //@M! (not deSkolemize.typeParams!!), also can't leave superclass definition: use info, not rawInfo
override def cloneSymbolImpl(owner: Symbol): Symbol =
@@ -1843,7 +1969,7 @@ trait Symbols {
private var thissym: Symbol = this
override def isClass: Boolean = true
- override def isTypeMember = false
+ override def isNonClassType = false
override def isAbstractType = false
override def isAliasType = false
@@ -1863,7 +1989,7 @@ trait Symbols {
newTypeName(rawname+"$trait") // (part of DEVIRTUALIZE)
} else if (phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass) {
if (flatname == nme.EMPTY) {
- assert(rawowner.isClass, "fatal: %s has owner %s, but a class owner is required".format(rawname, rawowner))
+ assert(rawowner.isClass, "fatal: %s has owner %s, but a class owner is required".format(rawname+idString, rawowner))
flatname = newTypeName(compactify(rawowner.name.toString() + "$" + rawname))
}
flatname
@@ -1877,7 +2003,7 @@ trait Symbols {
val period = thisTypePeriod
if (period != currentPeriod) {
thisTypePeriod = currentPeriod
- if (!isValid(period)) thisTypeCache = mkThisType(this)
+ if (!isValid(period)) thisTypeCache = ThisType(this)
}
thisTypeCache
}
@@ -1907,13 +2033,13 @@ trait Symbols {
}
override def sourceModule =
- if (isModuleClass) linkedModuleOfClass else NoSymbol
+ if (isModuleClass) companionModule else NoSymbol
- private var childSet: Set[Symbol] = emptySymbolSet
- override def children: Set[Symbol] = childSet
+ private var childSet: Set[Symbol] = Set()
+ override def children: List[Symbol] = childSet.toList sortBy (_.sealedSortName)
override def addChild(sym: Symbol) { childSet = childSet + sym }
- if (util.Statistics.enabled) classSymbolCount = classSymbolCount + 1
+ incCounter(classSymbolCount)
}
/** A class for module class symbols
@@ -1926,28 +2052,39 @@ trait Symbols {
def this(module: TermSymbol) = {
this(module.owner, module.pos, module.name.toTypeName)
setFlag(module.getFlag(ModuleToClassFlags) | MODULE | FINAL)
- setSourceModule(module)
+ sourceModule = module
}
override def sourceModule = module
- def setSourceModule(module: Symbol) { this.module = module }
+ private var implicitMembersCacheValue: List[Symbol] = List()
+ private var implicitMembersCacheKey1: Type = NoType
+ private var implicitMembersCacheKey2: ScopeEntry = null
+ def implicitMembers: List[Symbol] = {
+ val tp = info
+ if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) {
+ implicitMembersCacheKey1 = tp
+ implicitMembersCacheKey2 = tp.decls.elems
+ implicitMembersCacheValue = tp.implicitMembers
+ }
+ implicitMembersCacheValue
+ }
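+ /* Sketch of the cache invalidation: the key is the pair (info, info.decls.elems), so
+ * entering new members into the module class (or an info change) causes the next
+ * implicitMembers call to recompute tp.implicitMembers.
+ */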
+ override def sourceModule_=(module: Symbol) { this.module = module }
}
- /** An object repreesenting a missing symbol */
+ /** An object representing a missing symbol */
object NoSymbol extends Symbol(null, NoPosition, nme.NOSYMBOL) {
setInfo(NoType)
privateWithin = this
- override def setInfo(info: Type): this.type = {
+ override def info_=(info: Type) {
infos = TypeHistory(1, NoType, null)
unlock()
validTo = currentPeriod
- this
}
override def defString: String = toString
override def locationString: String = ""
override def enclClass: Symbol = this
override def toplevelClass: Symbol = this
override def enclMethod: Symbol = this
- override def owner: Symbol = throw new Error("no-symbol does not have owner")
+ override def owner: Symbol = abort("no-symbol does not have owner")
override def sourceFile: AbstractFile = null
override def ownerChain: List[Symbol] = List()
override def ownersIterator: Iterator[Symbol] = Iterator.empty
@@ -1955,8 +2092,9 @@ trait Symbols {
override def reset(completer: Type) {}
override def info: Type = NoType
override def rawInfo: Type = NoType
+ protected def doCookJavaRawInfo() {}
override def accessBoundary(base: Symbol): Symbol = RootClass
- def cloneSymbolImpl(owner: Symbol): Symbol = throw new Error()
+ def cloneSymbolImpl(owner: Symbol): Symbol = abort()
}
@@ -1978,6 +2116,11 @@ trait Symbols {
// printStackTrace() // debug
}
+ case class InvalidCompanions(sym1: Symbol, sym2: Symbol)
+ extends Throwable("Companions '" + sym1 + "' and '" + sym2 + "' must be defined in same file") {
+ override def toString = getMessage
+ }
+
/** A class for type histories */
private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 676f19205a..d0bb7155ad 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
//
@@ -8,10 +8,14 @@ package scala.tools.nsc
package symtab
import scala.collection.immutable
-import scala.collection.mutable.{ListBuffer, HashMap, WeakHashMap}
-import scala.tools.nsc.ast.TreeGen
-import scala.tools.nsc.util.{HashSet, Position, NoPosition}
+import scala.ref.WeakReference
+import scala.collection.mutable
+import scala.collection.mutable.{ListBuffer, HashMap}
+import ast.TreeGen
+import util.{HashSet, Position, NoPosition}
+import util.Statistics._
import Flags._
+import scala.util.control.ControlThrowable
/* A standard type pattern match:
case ErrorType =>
@@ -58,22 +62,12 @@ import Flags._
case DeBruijnIndex(level, index)
*/
-trait Types {
- self: SymbolTable =>
+trait Types extends reflect.generic.Types { self: SymbolTable =>
import definitions._
//statistics
- var singletonBaseTypeSeqCount = 0
- var compoundBaseTypeSeqCount = 0
- var typerefBaseTypeSeqCount = 0
- var findMemberCount = 0
- var noMemberCount = 0
- var multMemberCount = 0
- var findMemberNanos = 0l
- var subtypeCount = 0
- var sametypeCount = 0
- var subtypeNanos = 0l
+ def uniqueTypeCount = if (uniques == null) 0 else uniques.size
private var explainSwitch = false
@@ -81,6 +75,7 @@ trait Types {
private final val LogPendingSubTypesThreshold = 50
private final val LogPendingBaseTypesThreshold = 50
+ private final val LogVolatileThreshold = 50
/** A don't care value for the depth parameter in lubs/glbs and related operations */
private final val AnyDepth = -3
@@ -88,7 +83,7 @@ trait Types {
/** Decrement depth unless it is a don't care */
private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
- private final val printLubs = false //@MDEBUG
+ private final val printLubs = false
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix
@@ -105,7 +100,7 @@ trait Types {
/** Undo all changes to constraints to type variables upto `limit'
*/
private def undoTo(limit: UndoLog) {
- while (log ne limit) {
+ while ((log ne limit) && log.nonEmpty) {
val (tv, constr) = log.head
tv.constr = constr
log = log.tail
@@ -113,33 +108,38 @@ trait Types {
}
private[Types] def record(tv: TypeVar) = {log = (tv, tv.constr.cloneInternal) :: log}
- private[Types] def clear {log = List()}
+ private[nsc] def clear() {log = List()}
// `block` should not affect constraints on typevars
def undo[T](block: => T): T = {
val before = log
- val result = block
- undoTo(before)
+ val result = try {
+ block
+ } finally {
+ undoTo(before)
+ }
result
}
// if `block` evaluates to false, it should not affect constraints on typevars
def undoUnless(block: => Boolean): Boolean = {
val before = log
- val result = block
- if(!result) undoTo(before)
+ var result = false
+ try {
+ result = block
+ } finally {
+ if(!result) undoTo(before)
+ }
result
}
}
-
-
/** A map from lists to compound types that have the given list as parents.
* This is used to avoid duplication in the computation of base type sequences and baseClasses.
* It makes use of the fact that these two operations depend only on the parents,
* not on the refinement.
*/
- var intersectionWitness = new WeakHashMap[List[Type], Type]
+ val intersectionWitness = new mutable.WeakHashMap[List[Type], WeakReference[Type]]
private object gen extends {
val global : Types.this.type = Types.this
@@ -204,7 +204,7 @@ trait Types {
/** A proxy for a type (identified by field `underlying') that forwards most
* operations to it. Every operation that is overridden for some kind of types is
- * forwarded here. Some opererations are rewrapped again.
+ * forwarded here. Some operations are rewrapped again.
*/
trait RewrappingTypeProxy extends SimpleTypeProxy {
protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp)
@@ -229,6 +229,7 @@ trait Types {
override def normalize = maybeRewrap(underlying.normalize)
override def dealias = maybeRewrap(underlying.dealias)
override def cloneInfo(owner: Symbol) = maybeRewrap(underlying.cloneInfo(owner))
+ override def atOwner(owner: Symbol) = maybeRewrap(underlying.atOwner(owner))
override def prefixString = underlying.prefixString
override def isComplete = underlying.isComplete
override def complete(sym: Symbol) = underlying.complete(sym)
@@ -238,7 +239,7 @@ trait Types {
}
/** The base class for all types */
- abstract class Type {
+ abstract class Type extends AbsType {
/** Types for which asSeenFrom always is the identity, no matter what
* prefix or owner.
@@ -265,9 +266,17 @@ trait Types {
/** Is this type a structural refinement type (it 'refines' members that have not been inherited) */
def isStructuralRefinement: Boolean = false
+ /** Does this type depend immediately on an enclosing method parameter?
+ * i.e., is it a singleton type whose termSymbol refers to an argument of the symbol's owner (which is a method)
+ */
+ def isImmediatelyDependent: Boolean = false
+
/** Does this depend on an enclosing method parameter? */
def isDependent: Boolean = IsDependentCollector.collect(this)
+ /** True for WildcardType or BoundedWildcardType */
+ def isWildcard = false
+
/** The term symbol associated with the type
* Note that the symbol of the normalized type is returned (@see normalize)
*/
@@ -304,7 +313,7 @@ trait Types {
def typeOfThis: Type = typeSymbol.typeOfThis
/** Map to a singleton type which is a subtype of this type.
- * todo: change to singleton type of an existentgially defined variable
+ * todo: change to singleton type of an existentially defined variable
* of the right type instead of making this a `this` of a refined type.
*/
def narrow: Type =
@@ -321,7 +330,7 @@ trait Types {
* for a reference denoting an abstract type, its bounds,
* for all other types, a TypeBounds type all of whose bounds are this type.
*/
- def bounds: TypeBounds = mkTypeBounds(this, this)
+ def bounds: TypeBounds = TypeBounds(this, this)
/** For a class or intersection type, its parents.
* For a TypeBounds type, the parents of its hi bound.
@@ -351,7 +360,12 @@ trait Types {
def resultType(actuals: List[Type]) = this
- def resultApprox: Type = ApproximateDeBruijnMap(resultType)
+ /** If this is a TypeRef `clazz`[`T`], return the argument `T`
+ * otherwise return this type
+ */
+ def remove(clazz: Symbol): Type = this
+
+ def resultApprox: Type = if(settings.YdepMethTpes.value) ApproximateDependentMap(resultType) else resultType
/** For a curried method or poly type its non-method result type,
* the type itself for all other types */
@@ -381,18 +395,32 @@ trait Types {
* the empty list for all other types */
def boundSyms: List[Symbol] = List()
- /** Mixin a NotNull trait unless type already has one */
+ /** Mix in a NotNull trait unless the type already has one
+ * ...but only if the -Ynotnull option is given, since the feature is causing typing bugs.
+ */
def notNull: Type =
- if (isNotNull || phase.erasedTypes) this else NotNullType(this)
+ if (!settings.Ynotnull.value || isNotNull || phase.erasedTypes) this
+ else NotNullType(this)
/** Replace formal type parameter symbols with actual type arguments.
*
- * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M (contact adriaan.moors at cs.kuleuven.be)
+ * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
*/
- def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type = this.subst(formals, actuals)
+ def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
+ if(formals.length == actuals.length) this.subst(formals, actuals) else ErrorType
+ /** If this type is an existential, turn all existentially bound variables to type skolems.
+ * @param owner The owner of the created type skolems
+ * @param origin The tree whose type was an existential for which the skolem was created.
+ */
def skolemizeExistential(owner: Symbol, origin: AnyRef): Type = this
+
+ /** A simple version of skolemizeExistential for situations where
+ * owner or unpack location do not matter (typically used in subtype tests)
+ */
+ def skolemizeExistential: Type = skolemizeExistential(NoSymbol, null)
+
/** Reduce to beta eta-long normal form. Expands type aliases and converts higher-kinded TypeRef's to PolyTypes. @M */
def normalize = this // @MAT
@@ -484,48 +512,34 @@ trait Types {
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type =
if (!isTrivial && (!phase.erasedTypes || pre.typeSymbol == ArrayClass)) {
+ incCounter(asSeenFromCount)
+ val start = startTimer(asSeenFromNanos)
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m apply this
- existentialAbstraction(m.capturedParams, tp)
+ val result = existentialAbstraction(m.capturedParams, tp)
+ stopTimer(asSeenFromNanos, start)
+ result
} else this
/** The info of `sym', seen as a member of this type.
*/
- def memberInfo(sym: Symbol): Type =
+ def memberInfo(sym: Symbol): Type = {
sym.info.asSeenFrom(this, sym.owner)
+ }
/** The type of `sym', seen as a member of this type. */
- def memberType(sym: Symbol): Type = {
- trackTypeIDE(sym)
- //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary
- sym.tpeHK match {
- case ov @ OverloadedType(pre, alts) =>
- OverloadedType(this, alts)
-/*
- val pre1 = pre match {
- case ClassInfoType(_, _, clazz) => clazz.tpe
- case _ => pre
- }
- if (this =:= pre1) ov
- else if (this =:= pre1.narrow) OverloadedType(this, alts)
- else {
- Console.println("bad memberType of overloaded symbol: "+this+"/"+pre1+"/"+pre1.narrow)
- assert(false)
- ov
- }
-*/
- case tp =>
- val res = tp.asSeenFrom(this, sym.owner)
-/*
- if (sym.name.toString == "Elem") {
- println("pre = "+this)
- println("pre.normalize = "+this.widen.normalize)
- println("sym = "+sym+" in "+sym.ownerChain)
- println("result = "+res)
- }
-*/
- res
- }
+ def memberType(sym: Symbol): Type = sym match {
+ case meth: MethodSymbol =>
+ meth.typeAsMemberOf(this)
+ case _ =>
+ computeMemberType(sym)
+ }
+
+ def computeMemberType(sym: Symbol): Type = sym.tpeHK match { //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary
+ case OverloadedType(_, alts) =>
+ OverloadedType(this, alts)
+ case tp =>
+ tp.asSeenFrom(this, sym.owner)
}
/** Substitute types `to' for occurrences of references to
@@ -582,31 +596,65 @@ trait Types {
/** Is this type a subtype of that type? */
def <:<(that: Type): Boolean = {
-// val startTime = if (util.Statistics.enabled) System.nanoTime() else 0l
-// val result =
- ((this eq that) ||
- (if (explainSwitch) explain("<:", isSubType, this, that)
- else isSubType(this, that, AnyDepth)))
-// if (util.Statistics.enabled) {
-// subtypeNanos += System.nanoTime() - startTime
-// subtypeCount += 1
-// }
-// result
+ if (util.Statistics.enabled) stat_<:<(that)
+ else {
+ (this eq that) ||
+ (if (explainSwitch) explain("<:", isSubType, this, that)
+ else isSubType(this, that, AnyDepth))
+ }
+ }
+
+ /** Can this type only be subtyped by bottom types?
+ * This is assessed to be the case if the class is final,
+ * and all type parameters (if any) are invariant.
+ */
+ def isFinalType = (
+ typeSymbol.isFinal &&
+ (typeSymbol.typeParams forall (_.variance == 0))
+ )
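+ /* Illustrative: a final class with no type parameters (e.g. a hypothetical
+ * `final class Leaf`) yields isFinalType; a final class with a covariant parameter
+ * does not, since instantiating it at a smaller type argument still produces a
+ * proper (non-bottom) subtype.
+ */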
+
+ /** Is this type a subtype of that type in a pattern context?
+ * Any type arguments on the right hand side are replaced with
+ * fresh existentials, except for Arrays.
+ *
+ * See bug1434.scala for an example of code which would fail
+ * if only a <:< test were applied.
+ */
+ def matchesPattern(that: Type): Boolean = {
+ (this <:< that) || ((this, that) match {
+ case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty =>
+ arg1 matchesPattern arg2
+ case (_, TypeRef(_, _, args)) =>
+ val newtp = existentialAbstraction(args map (_.typeSymbol), that)
+ !(that =:= newtp) && (this <:< newtp)
+ case _ =>
+ false
+ })
+ }
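+ /* Illustrative (hypothetical types): a scrutinee of type List[Int] matches a pattern
+ * of type List[T] because the pattern's argument is replaced by a fresh existential,
+ * i.e. the test becomes List[Int] <:< List[_]; for Arrays whose element type is itself
+ * parameterized, the element types are compared recursively instead.
+ */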
+
+ def stat_<:<(that: Type): Boolean = {
+ incCounter(subtypeCount)
+ val start = startTimer(subtypeNanos)
+ val result =
+ (this eq that) ||
+ (if (explainSwitch) explain("<:", isSubType, this, that)
+ else isSubType(this, that, AnyDepth))
+ stopTimer(subtypeNanos, start)
+ result
}
/** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
*/
- def weak_<:<(that: Type): Boolean =
-// val startTime = if (util.Statistics.enabled) System.nanoTime() else 0l
-// val result =
+ def weak_<:<(that: Type): Boolean = {
+ incCounter(subtypeCount)
+ val start = startTimer(subtypeNanos)
+ val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
-// if (util.Statistics.enabled) {
-// subtypeNanos += System.nanoTime() - startTime
-// subtypeCount += 1
-// }
-// result
+ stopTimer(subtypeNanos, start)
+ result
+ }
/** Is this type equivalent to that type? */
def =:=(that: Type): Boolean = (
@@ -677,7 +725,7 @@ trait Types {
if (sym == btssym) return mid
else if (sym isLess btssym) hi = mid - 1
else if (btssym isLess sym) lo = mid + 1
- else throw new Error()
+ else abort()
}
-1
}
@@ -687,6 +735,10 @@ trait Types {
*/
def cloneInfo(owner: Symbol) = this
+ /** Make sure this type is correct as the info of given owner; clone it if not.
+ */
+ def atOwner(owner: Symbol) = this
+
protected def objectPrefix = "object "
protected def packagePrefix = "package "
@@ -719,17 +771,10 @@ trait Types {
typeVarToOriginMap(this) eq this
}
- /** Is this type completed (i.e. not a lazy type)?
- */
- def isComplete: Boolean = true
-
/** Is this type a varargs parameter?
*/
def isVarargs: Boolean = typeSymbol == RepeatedParamClass
- /** If this is a lazy type, assign a new type to `sym'. */
- def complete(sym: Symbol) {}
-
/** If this is a symbol loader type, load and assign a new type to
* `sym'.
*/
@@ -764,21 +809,29 @@ trait Types {
*/
//TODO: use narrow only for modules? (correct? efficiency gain?)
def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- // if this type contains type variables, get rid of them;
+ val suspension = TypeVar.Suspension
+ // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
+ // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
// without this, the matchesType call would lead to type variables on both sides
// of a subtyping/equality judgement, which can lead to recursive types being constructed.
// See (t0851) for a situation where this happens.
- if (!this.isGround)
- return typeVarToOriginMap(this).findMember(name, excludedFlags, requiredFlags, stableOnly)
- if (util.Statistics.enabled) findMemberCount += 1
-// val startTime = if (util.Statistics.enabled) System.nanoTime() else 0l
+ if (!this.isGround) {
+ // make each type var in this type use its original type for comparisons instead of collecting constraints
+ for(tv@TypeVar(_, _) <- this) {
+ suspension suspend tv
+ }
+ }
+
+ incCounter(findMemberCount)
+ val start = startTimer(findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
var member: Symbol = NoSymbol
var excluded = excludedFlags | DEFERRED
- var self: Type = null
var continue = true
+ var self: Type = null
+ var membertpe: Type = null
while (continue) {
continue = false
val bcs0 = baseClasses
@@ -797,7 +850,8 @@ trait Types {
sym.getFlag(PRIVATE | LOCAL) != (PRIVATE | LOCAL).toLong ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
-// if (util.Statistics.enabled) findMemberNanos += System.nanoTime() - startTime
+ stopTimer(findMemberNanos, start)
+ suspension.resumeAll
return sym
} else if (member == NoSymbol) {
member = sym
@@ -806,8 +860,9 @@ trait Types {
!(member == sym ||
member.owner != sym.owner &&
!sym.hasFlag(PRIVATE) && {
- if (self eq null) self = this.narrow;
- (self.memberType(member) matches self.memberType(sym))
+ if (self eq null) self = this.narrow
+ if (membertpe eq null) membertpe = self.memberType(member)
+ (membertpe matches self.memberType(sym))
})) {
members = new Scope(List(member, sym))
}
@@ -817,8 +872,8 @@ trait Types {
!(prevEntry.sym == sym ||
prevEntry.sym.owner != sym.owner &&
!sym.hasFlag(PRIVATE) && {
- if (self eq null) self = this.narrow;
- (self.memberType(prevEntry.sym) matches self.memberType(sym))
+ if (self eq null) self = this.narrow
+ self.memberType(prevEntry.sym) matches self.memberType(sym)
})) {
prevEntry = members lookupNextEntry prevEntry
}
@@ -837,18 +892,18 @@ trait Types {
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
-// if (util.Statistics.enabled) findMemberNanos += System.nanoTime() - startTime
+ stopTimer(findMemberNanos, start)
+ suspension.resumeAll
if (members eq null) {
- if (util.Statistics.enabled) if (member == NoSymbol) noMemberCount += 1;
+ if (member == NoSymbol) incCounter(noMemberCount)
member
} else {
- if (util.Statistics.enabled) multMemberCount += 1;
- //val pre = if (this.typeSymbol.isClass) this.typeSymbol.thisType else this;
- (baseClasses.head.newOverloaded(this, members.toList))
+ incCounter(multMemberCount)
+ baseClasses.head.newOverloaded(this, members.toList)
}
}
- /** The existential skolems and existentially quantifed variables which are free in this type */
+ /** The existential skolems and existentially quantified variables which are free in this type */
def existentialSkolems: List[Symbol] = {
var boundSyms: List[Symbol] = List()
var skolems: List[Symbol] = List()
@@ -897,24 +952,8 @@ trait Types {
/** The kind of this type; used for debugging */
def kind: String = "unknown type of class "+getClass()
-
- override def toString: String =
- if (tostringRecursions >= maxTostringRecursions)
- "..."
- else
- try {
- tostringRecursions += 1
- safeToString
- } finally {
- tostringRecursions -= 1
- }
-
- def safeToString: String = super.toString
}
- private final val maxTostringRecursions = 50
- private var tostringRecursions = 0
-
// Subclasses ------------------------------------------------------------
trait UniqueType {
@@ -955,13 +994,14 @@ trait Types {
override def isVolatile = underlying.isVolatile
override def widen: Type = underlying.widen
override def baseTypeSeq: BaseTypeSeq = {
- if (util.Statistics.enabled) singletonBaseTypeSeqCount += 1
+ incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
+ override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
override def safeToString: String = prefixString + "type"
/*
override def typeOfThis: Type = typeSymbol.typeOfThis
- override def bounds: TypeBounds = mkTypeBounds(this, this)
+ override def bounds: TypeBounds = TypeBounds(this, this)
override def prefix: Type = NoType
override def typeArgs: List[Type] = List()
override def typeParams: List[Symbol] = List()
@@ -990,12 +1030,14 @@ trait Types {
/** An object representing an unknown type */
case object WildcardType extends Type {
+ override def isWildcard = true
override def safeToString: String = "?"
// override def isNullable: Boolean = true
override def kind = "WildcardType"
}
case class BoundedWildcardType(override val bounds: TypeBounds) extends Type {
+ override def isWildcard = true
override def safeToString: String = "?" + bounds
override def kind = "BoundedWildcardType"
}
@@ -1020,18 +1062,19 @@ trait Types {
/** A class for this-types of the form <sym>.this.type
*/
- case class ThisType(sym: Symbol) extends SingletonType {
+ abstract case class ThisType(sym: Symbol) extends SingletonType {
//assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
override def isTrivial: Boolean = sym.isPackageClass
override def isNotNull = true
override def typeSymbol = sym
override def underlying: Type = sym.typeOfThis
override def isVolatile = false
+ override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded
override def prefixString =
if (settings.debug.value) sym.nameString + ".this."
else if (sym.isRoot || sym.isEmptyPackageClass || sym.isInterpreterWrapper || sym.isScalaPackageClass) ""
else if (sym.isAnonymousClass || sym.isRefinementClass) "this."
- else if (sym.isModuleClass) sym.fullNameString + "."
+ else if (sym.isModuleClass) sym.fullName + "."
else sym.nameString + ".this."
override def safeToString: String =
if (sym.isRoot) "<root>"
@@ -1041,14 +1084,21 @@ trait Types {
override def kind = "ThisType"
}
- case class DeBruijnIndex(level: Int, paramId: Int) extends Type {
- override def isTrivial = true
- override def isStable = true
- override def safeToString = "<param "+level+"."+paramId+">"
- override def kind = "DeBruijnIndex"
- // todo: this should be a subtype, which forwards to underlying
+ object ThisType extends ThisTypeExtractor {
+ def apply(sym: Symbol): Type =
+ if (!phase.erasedTypes) unique(new ThisType(sym) with UniqueType)
+ else if (sym.isImplClass) sym.typeOfThis
+ else sym.tpe
}
+ // case class DeBruijnIndex(level: Int, paramId: Int) extends Type {
+ // override def isTrivial = true
+ // override def isStable = true
+ // override def safeToString = "<param "+level+"."+paramId+">"
+ // override def kind = "DeBruijnIndex"
+ // // todo: this should be a subtype, which forwards to underlying
+ // }
+
/** A class for singleton types of the form &lt;prefix&gt;.&lt;sym.name&gt;.type.
* Cannot be created directly; one should always use
* `singleType' for creation.
@@ -1065,12 +1115,15 @@ trait Types {
underlyingPeriod = currentPeriod
if (!isValid(period)) {
underlyingCache = pre.memberType(sym).resultType;
+ assert(underlyingCache ne this, this)
}
}
- assert(underlyingCache ne this, this)
underlyingCache
}
+ // more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
+ override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
+
override def isVolatile : Boolean = underlying.isVolatile && (!sym.isStable)
/*
override def narrow: Type = {
@@ -1095,7 +1148,9 @@ trait Types {
override def kind = "SingleType"
}
- case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType {
+ object SingleType extends SingleTypeExtractor
+
+ abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType {
override val isTrivial: Boolean = thistpe.isTrivial && supertpe.isTrivial
override def isNotNull = true;
override def typeSymbol = thistpe.typeSymbol
@@ -1109,9 +1164,15 @@ trait Types {
override def kind = "SuperType"
}
+ object SuperType extends SuperTypeExtractor {
+ def apply(thistp: Type, supertp: Type): Type =
+ if (phase.erasedTypes) supertp
+ else unique(new SuperType(thistp, supertp) with UniqueType)
+ }
+
/** A class for the bounds of abstract types and type parameters
*/
- case class TypeBounds(lo: Type, hi: Type) extends SubType {
+ abstract case class TypeBounds(lo: Type, hi: Type) extends SubType {
def supertype = hi
override val isTrivial: Boolean = lo.isTrivial && hi.isTrivial
override def bounds: TypeBounds = this
@@ -1124,6 +1185,11 @@ trait Types {
override def kind = "TypeBoundsType"
}
+ object TypeBounds extends TypeBoundsExtractor {
+ def apply(lo: Type, hi: Type): TypeBounds =
+ unique(new TypeBounds(lo, hi) with UniqueType)
+ }
+
/** A common base class for intersection types and class types
*/
abstract class CompoundType extends Type {
@@ -1140,18 +1206,18 @@ trait Types {
if (!isValidForBaseClasses(period)) {
if (parents.exists(_.exists(_.isInstanceOf[TypeVar]))) {
// rename type vars to fresh type params, take base type sequence of
- // resulting type, and rename back allthe entries in thats sequence
+ // resulting type, and rename back all the entries in that sequence
var tvs = Set[TypeVar]()
for (p <- parents)
for (t <- p) t match {
case tv: TypeVar => tvs += tv
case _ =>
}
- val varToParamMap = (Map[Type, Symbol]() /: tvs)((m, tv) => m + (tv -> tv.origin.typeSymbol.cloneSymbol))
- val paramToVarMap = Map[Symbol, Type]() ++ (varToParamMap map { case (t, tsym) => (tsym -> t) })
+ val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap
+ val paramToVarMap = varToParamMap map (_.swap)
val varToParam = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case tv: TypeVar => varToParamMap(tp).tpe
+ def apply(tp: Type) = varToParamMap get tp match {
+ case Some(sym) => sym.tpe
case _ => mapOver(tp)
}
}
@@ -1164,10 +1230,20 @@ trait Types {
val bts = copyRefinedType(this.asInstanceOf[RefinedType], parents map varToParam, varToParam mapOver decls).baseTypeSeq
baseTypeSeqCache = bts lateMap paramToVar
} else {
- if (util.Statistics.enabled)
- compoundBaseTypeSeqCount += 1
+ incCounter(compoundBaseTypeSeqCount)
baseTypeSeqCache = undetBaseTypeSeq
- baseTypeSeqCache = memo(compoundBaseTypeSeq(this))(_.baseTypeSeq updateHead typeSymbol.tpe)
+ baseTypeSeqCache = if (typeSymbol.isRefinementClass)
+ memo(compoundBaseTypeSeq(this))(_.baseTypeSeq updateHead typeSymbol.tpe)
+ else
+ compoundBaseTypeSeq(this)
+ // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
+ // when compiling with
+ // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
+ // I have not yet figured out precisely why this is the case.
+ // My current assumption is that taking memos forces baseTypeSeqs to be computed
+ // at stale types (i.e. the underlying typeSymbol already has another type).
+ // I do not yet see precisely why this would cause a problem, but it looks
+ // fishy in any case.
}
}
//Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG
@@ -1219,12 +1295,21 @@ trait Types {
baseClassesCache
}
- def memo[A](op1: => A)(op2: Type => A) = intersectionWitness get parents match {
- case Some(w) =>
- if (w eq this) op1 else op2(w)
- case None =>
- intersectionWitness(parents) = this
+ def memo[A](op1: => A)(op2: Type => A): A = {
+ def updateCache(): A = {
+ intersectionWitness(parents) = new WeakReference(this)
op1
+ }
+
+ intersectionWitness get parents match {
+ case Some(ref) =>
+ ref.get match {
+ case Some(w) => if (w eq this) op1 else op2(w)
+ case None => updateCache()
+ }
+ case None => updateCache()
+ }
+
}
override def baseType(sym: Symbol): Type = {
@@ -1273,18 +1358,20 @@ trait Types {
* If they are several higher-kinded parents with different bounds we need
* to take the intersection of their bounds
*/
- override def normalize =
- if (isHigherKinded)
+ override def normalize = {
+ if (isHigherKinded) {
PolyType(
typeParams,
- refinementOfClass(
- typeSymbol,
+ RefinedType(
parents map {
case TypeRef(pre, sym, List()) => TypeRef(pre, sym, dummyArgs)
case p => p
},
- decls))
+ decls,
+ typeSymbol))
+ }
else super.normalize
+ }
/** A refined type P1 with ... with Pn { decls } is volatile if
* one of the parent types Pi is an abstract type, and
@@ -1301,20 +1388,26 @@ trait Types {
def contributesAbstractMembers(p: Type) =
p.deferredMembers exists isVisible
- (parents exists (_.isVolatile)) ||
- (parents dropWhile (! _.typeSymbol.isAbstractType) match {
- case ps @ (_ :: ps1) =>
- (ps ne parents) ||
- (ps1 exists contributesAbstractMembers) ||
- (decls.iterator exists (m => m.isDeferred && isVisible(m)))
- case _ =>
- false
- })
+ ((parents exists (_.isVolatile))
+ ||
+ (parents dropWhile (! _.typeSymbol.isAbstractType) match {
+ case ps @ (_ :: ps1) =>
+ (ps ne parents) ||
+ (ps1 exists contributesAbstractMembers) ||
+ (decls.iterator exists (m => m.isDeferred && isVisible(m)))
+ case _ =>
+ false
+ }))
}
override def kind = "RefinedType"
}
+ object RefinedType extends RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol) =
+ new RefinedType(parents, decls) { override def typeSymbol = clazz }
+ }
+
/** A class representing a class info
*/
case class ClassInfoType(
@@ -1365,7 +1458,7 @@ trait Types {
*/
private def getRefs(which: Int, from: Symbol): Set[Symbol] = refs(which) get from match {
case Some(set) => set
- case None => Set()
+ case none => Set()
}
/** Augment existing refs map with reference <pre>from -> to</pre>
@@ -1455,6 +1548,8 @@ trait Types {
override def kind = "ClassInfoType"
}
+ object ClassInfoType extends ClassInfoTypeExtractor
+
class PackageClassInfoType(decls: Scope, clazz: Symbol)
extends ClassInfoType(List(), decls, clazz)
@@ -1462,7 +1557,7 @@ trait Types {
*
* @param value ...
*/
- case class ConstantType(value: Constant) extends SingletonType {
+ abstract case class ConstantType(value: Constant) extends SingletonType {
override def underlying: Type = value.tpe
assert(underlying.typeSymbol != UnitClass)
override def isTrivial: Boolean = true
@@ -1475,17 +1570,32 @@ trait Types {
override def kind = "ConstantType"
}
+ object ConstantType extends ConstantTypeExtractor {
+ def apply(value: Constant): ConstantType = {
+ class UniqueConstantType extends ConstantType(value) with UniqueType {
+ /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
+ * which might not be found after 'flatten'. */
+ private lazy val _tpe: Type = value.tpe
+ override def underlying: Type = _tpe
+ }
+ unique(new UniqueConstantType)
+ }
+ }
+
+ private var volatileRecursions: Int = 0
+ private val pendingVolatiles = new collection.mutable.HashSet[Symbol]
+
/** A class for named types of the form
* `&lt;prefix&gt;.&lt;sym.name&gt;[args]'
* Cannot be created directly; one should always use `typeRef'
* for creation. (@M: Otherwise hashing breaks)
*
- * @M: Higher-kinded types are represented as TypeRefs with a symbol that has type parameters, but with args==List()
+ * @M: a higher-kinded type is represented as a TypeRef with sym.info.typeParams.nonEmpty, but args.isEmpty
* @param pre ...
* @param sym ...
* @param args ...
*/
- case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type {
+ abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type {
// assert(!sym.isAbstractType || pre.isStable || pre.isError)
// assert(!pre.isInstanceOf[ClassInfoType], this)
// assert(!(sym hasFlag (PARAM | EXISTENTIAL)) || pre == NoPrefix, this)
@@ -1497,17 +1607,41 @@ trait Types {
private var baseTypeSeqPeriod = NoPeriod
override def isStable: Boolean = {
+ sym == NothingClass ||
sym == SingletonClass ||
sym.isAliasType && normalize.isStable ||
sym.isAbstractType && (bounds.hi.typeSymbol isSubClass SingletonClass)
}
- override def isVolatile: Boolean =
+ override def isVolatile: Boolean = {
sym.isAliasType && normalize.isVolatile ||
- sym.isAbstractType && bounds.hi.isVolatile
+ sym.isAbstractType && {
+ // need to be careful not to fall into an infinite recursion here
+ // because volatile checking is done before all cycles are detected.
+ // the case to avoid is an abstract type directly or
+ // indirectly upper-bounded by itself. See #2918
+ try {
+ volatileRecursions += 1
+ if (volatileRecursions < LogVolatileThreshold)
+ bounds.hi.isVolatile
+ else if (pendingVolatiles contains sym)
+ true // we can return true here, because a cycle will be detected
+ // here afterwards and an error will result anyway.
+ else
+ try {
+ pendingVolatiles += sym
+ bounds.hi.isVolatile
+ } finally {
+ pendingVolatiles -= sym
+ }
+ } finally {
+ volatileRecursions -= 1
+ }
+ }
+ }
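+ /* Illustrative of the guard (see #2918): for an abstract type upper-bounded, directly
+ * or indirectly, by itself, bounds.hi.isVolatile would otherwise recurse without end;
+ * past LogVolatileThreshold a symbol already in pendingVolatiles answers true, and the
+ * real cyclic-reference error is reported later.
+ */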
override val isTrivial: Boolean =
- pre.isTrivial && !sym.isTypeParameter && args.forall(_.isTrivial)
+ !sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
override def isNotNull =
sym.isModuleClass || sym == NothingClass || isValueClass(sym) || super.isNotNull
@@ -1525,10 +1659,10 @@ trait Types {
def thisInfo =
if (sym.isAliasType) normalize
- else if (sym.isTypeMember) transformInfo(sym.info)
+ else if (sym.isNonClassType) transformInfo(sym.info)
else sym.info
- def relativeInfo = if (sym.isTypeMember) transformInfo(pre.memberInfo(sym)) else pre.memberInfo(sym)
+ def relativeInfo = if (sym.isNonClassType) transformInfo(pre.memberInfo(sym)) else pre.memberInfo(sym)
override def typeSymbol = if (sym.isAliasType) normalize.typeSymbol else sym
override def termSymbol = if (sym.isAliasType) normalize.termSymbol else super.termSymbol
@@ -1554,6 +1688,8 @@ A type's typeSymbol should never be inspected directly.
parentsPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
parentsCache = thisInfo.parents map transform
+ } else if (parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
+ parentsCache = List(AnyClass.tpe)
}
}
parentsCache
@@ -1588,14 +1724,13 @@ A type's typeSymbol should never be inspected directly.
// (!result.isEmpty) IFF isHigherKinded
override def typeParams: List[Symbol] = if (isHigherKinded) typeParamsDirect else List()
- override def typeConstructor = rawTypeRef(pre, sym, List())
-
- // (args.isEmpty && !typeParamsDirect.isEmpty) && !isRawType(this)
- // check for isRawType: otherwise raw types are considered higher-kinded types during subtyping:
- override def isHigherKinded
- = (args.isEmpty && !typeParamsDirect.isEmpty) && (settings.YhigherKindedRaw.value || !isRaw(sym, args))
- // (args.isEmpty && !typeParamsDirect.isEmpty) && (phase.erasedTypes || !sym.hasFlag(JAVA))
+ override def typeConstructor = TypeRef(pre, sym, List())
+ // a reference (in a Scala program) to a type that has type parameters, but where the reference does not include type arguments
+ // note that it doesn't matter whether the symbol refers to a java or scala symbol,
+ // it does matter whether it occurs in java or scala code
+ // typerefs w/o type params that occur in java signatures/code are considered raw types, and are represented as existential types
+ override def isHigherKinded = (args.isEmpty && !typeParamsDirect.isEmpty)
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
if (isHigherKinded) {
@@ -1603,8 +1738,9 @@ A type's typeSymbol should never be inspected directly.
if (substTps.length == typeParams.length)
typeRef(pre, sym, actuals)
- else // partial application (needed in infer when bunching type arguments from classes and methods together)
+ else if(formals.length == actuals.length) // partial application (needed in infer when bunching type arguments from classes and methods together)
typeRef(pre, sym, dummyArgs).subst(formals, actuals)
+ else ErrorType
}
else
super.instantiateTypeParams(formals, actuals)
@@ -1612,43 +1748,59 @@ A type's typeSymbol should never be inspected directly.
private var normalized: Type = null
+ @inline private def betaReduce: Type = {
+ assert(sym.info.typeParams.length == typeArgs.length, this)
+ // isHKSubType0 introduces synthetic type params so that betaReduce can first apply sym.info to typeArgs before calling asSeenFrom
+ // asSeenFrom then skips synthetic type params, which are used to reduce HO subtyping to first-order subtyping, but which can't be instantiated from the given prefix and class
+ // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner) // this crashes pos/depmet_implicit_tpbetareduce.scala
+ transform(sym.info.resultType)
+ }
+
+ // @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
+ // @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
+ @inline private def etaExpand: Type = {
+ val tpars = sym.info.typeParams // must go through sym.info for typeParams to initialise symbol
+ PolyType(tpars, typeRef(pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
+ }
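+ /* Illustrative (hypothetical alias): given `type Pair[A] = (A, A)`, a bare reference to
+ * Pair is higher-kinded (type params but no args) and normalizes by eta-expansion to
+ * roughly the PolyType [A]((A, A)), while Pair[Int] normalizes by beta-reduction to (Int, Int).
+ */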
+
override def dealias: Type =
if (sym.isAliasType && sym.info.typeParams.length == args.length) {
- val xform = transform(sym.info.resultType)
- assert(xform ne this, this)
- xform.dealias
+ betaReduce.dealias
} else this
def normalize0: Type =
- if (sym.isAliasType) { // beta-reduce
- if (sym.info.typeParams.length == args.length || !isHigherKinded) {
- /* !isHigherKinded && sym.info.typeParams.length != args.length only happens when compiling e.g.,
- `val x: Class' with -Xgenerics, while `type Class = java.lang.Class' had already been compiled without -Xgenerics */
- val xform = transform(sym.info.resultType)
- assert(xform ne this, this)
- xform.normalize // cycles have been checked in typeRef
- } else {
- PolyType(typeParams, transform(sym.info.resultType).normalize) // eta-expand
- // @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
- }
- } else if (isHigherKinded) {
- // @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
- // @M: initialize needed (see test/files/pos/ticket0137.scala)
- PolyType(typeParams, typeRef(pre, sym.initialize, dummyArgs))
- } else if (sym.isRefinementClass) {
- sym.info.normalize // @MO to AM: OK?
- //@M I think this is okay, but changeset 12414 (which fixed #1241) re-introduced another bug (#2208)
- // see typedTypeConstructor in Typers
- } else {
- super.normalize
+ if (pre eq WildcardType) WildcardType // arises when argument-dependent types are approximated (see def depoly in implicits)
+ else if (isHigherKinded) etaExpand // eta-expand, subtyping relies on eta-expansion of higher-kinded types
+ else if (sym.isAliasType && sym.info.typeParams.length == args.length)
+ betaReduce.normalize // beta-reduce, but don't do partial application -- cycles have been checked in typeRef
+ else if (sym.isRefinementClass)
+ sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
+ // else if (args nonEmpty) {
+ // val argsNorm = args mapConserve (_.dealias)
+ // if(argsNorm ne args) TypeRef(pre, sym, argsNorm)
+ // else this
+ // }
+ else {
+ if(sym.isAliasType) ErrorType //println("!!error: "+(pre, sym, sym.info, sym.info.typeParams, args))
+ else super.normalize
}
- override def normalize: Type =
+ // track number of type parameters that we saw when caching normalization,
+ // so we can refresh our cache when the known list of type parameters changes (due to further class file loading)
+ // TODO: this would not be necessary if we could replace the call to sym.unsafeTypeParams in typeParamsDirect
+ // by a call to sym.typeParams, but need to verify that that does not lead to spurious "illegal cycle" errors
+ // the need for refreshing the cache is illustrated by #2278
+ // TODO: test case that is compiled in a specific order and in different runs
+ private var normalizeTyparCount = -1
+
+ override def normalize: Type = {
if (phase.erasedTypes) normalize0
- else {
- if (normalized == null) normalized = normalize0
+ else if (normalized == null || typeParamsDirect.length != normalizeTyparCount) {
+ normalizeTyparCount = typeParamsDirect.length
+ normalized = normalize0
normalized
- }
+ } else normalized
+ }
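The refresh logic above is a version-keyed cache: the memoized normal form is reused only while the observed number of type parameters is unchanged. The pattern in isolation (a generic sketch, not compiler code):

  class VersionedCache[K, V](currentKey: () => K, compute: () => V) {
    private var cached: Option[(K, V)] = None
    def get: V = {
      val k = currentKey()                 // e.g. typeParamsDirect.length above
      cached match {
        case Some((`k`, v)) => v           // key unchanged: reuse the cached value
        case _              => val v = compute(); cached = Some((k, v)); v
      }
    }
  }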
override def decls: Scope = {
sym.info match {
@@ -1685,13 +1837,11 @@ A type's typeSymbol should never be inspected directly.
if (period != currentPeriod) {
baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- if (util.Statistics.enabled)
- typerefBaseTypeSeqCount += 1
+ incCounter(typerefBaseTypeSeqCount)
baseTypeSeqCache = undetBaseTypeSeq
baseTypeSeqCache =
if (sym.isAbstractType) transform(bounds.hi).baseTypeSeq prepend this
else sym.info.baseTypeSeq map transform
-
}
}
if (baseTypeSeqCache == undetBaseTypeSeq)
@@ -1722,7 +1872,7 @@ A type's typeSymbol should never be inspected directly.
}
val monopart =
if (!settings.debug.value &&
- (shorthands contains sym.fullNameString) &&
+ (shorthands contains sym.fullName) &&
(sym.ownerChain forall (_.isClass))) // ensure that symbol is not a local copy with a name coincidence
sym.name.toString
else
@@ -1735,7 +1885,7 @@ A type's typeSymbol should never be inspected directly.
packagePrefix + str
else if (sym.isModuleClass)
objectPrefix + str
- else if (sym.isAnonymousClass && sym.isInitialized && !settings.debug.value)
+ else if (sym.isAnonymousClass && sym.isInitialized && !settings.debug.value && !phase.erasedTypes)
thisInfo.parents.mkString(" with ") + {
if (sym.isStructuralRefinement)
((decls.toList filter { entry =>
@@ -1756,7 +1906,7 @@ A type's typeSymbol should never be inspected directly.
sym.isAnonymousClass || sym.isRefinementClass || sym.isScalaPackageClass)
""
else if (sym.isPackageClass)
- sym.fullNameString + "."
+ sym.fullName + "."
else if (isStable && (sym.name.toString endsWith ".type"))
sym.name.toString.substring(0, sym.name.length - 4)
else
@@ -1765,12 +1915,25 @@ A type's typeSymbol should never be inspected directly.
override def kind = "TypeRef"
}
+ object TypeRef extends TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type = {
+ class rawTypeRef extends TypeRef(pre, sym, args) with UniqueType
+ unique(new rawTypeRef)
+ }
+ }
+
/** A class representing a method type with parameters.
*/
case class MethodType(override val params: List[Symbol],
override val resultType: Type) extends Type {
- override val isTrivial: Boolean =
- paramTypes.forall(_.isTrivial) && resultType.isTrivial
+ override def isTrivial: Boolean = isTrivial0
+ private lazy val isTrivial0 =
+ resultType.isTrivial && params.forall{p => p.tpe.isTrivial && (
+ !settings.YdepMethTpes.value || !(params.exists(_.tpe.contains(p)) || resultType.contains(p)))
+ }
+
+ def isImplicit = params.nonEmpty && params.head.isImplicit
+ def isJava = false // can we do something similar to isImplicit? i.e., do parameterless Java methods need to be recognized?
//assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
override def paramSectionCount: Int = resultType.paramSectionCount + 1
@@ -1781,44 +1944,56 @@ A type's typeSymbol should never be inspected directly.
override def boundSyms = params ::: resultType.boundSyms
- override def resultType(actuals: List[Type]) = {
- val map = new InstantiateDeBruijnMap(actuals)
- val rawResTpe = map.apply(resultType)
-
- if (phase.erasedTypes)
- rawResTpe
- else
- existentialAbstraction(map.existentialsNeeded, rawResTpe)
+ // this is needed for plugins to work correctly, only TypeConstraint annotations are supposed to be carried over
+ // TODO: this should probably be handled in a more structured way in adapt -- remove this map in resultType and watch the continuations tests fail
+ object dropNonContraintAnnotations extends TypeMap {
+ override val dropNonConstraintAnnotations = true
+ def apply(x: Type) = mapOver(x)
}
- override def finalResultType: Type = resultType.finalResultType
-
- private def dependentToString(base: Int): String = {
- val params = for ((pt, n) <- paramTypes.zipWithIndex) yield "x$"+n+":"+pt
- val res = resultType match {
- case mt: MethodType => mt.dependentToString(base + params.length)
- case rt => rt.toString
+ override def resultType(actuals: List[Type]) =
+ if(isTrivial) dropNonContraintAnnotations(resultType)
+ else {
+ if(actuals.length == params.length) {
+ val idm = new InstantiateDependentMap(params, actuals)
+ val res = idm(resultType)
+ // println("resultTypeDep "+(params, actuals, resultType, idm.existentialsNeeded, "\n= "+ res))
+ existentialAbstraction(idm.existentialsNeeded, res)
+ } else {
+ // Thread.dumpStack()
+ // println("resultType "+(params, actuals, resultType))
+ if (phase.erasedTypes) resultType
+ else existentialAbstraction(params, resultType)
+ }
}
- params.mkString("(", ",", ")")+res
- }
+
+ // implicit args can only be depended on in the result type -- TODO: this may be generalised so that the only constraint is that the dependencies are acyclic
+ def approximate: MethodType = MethodType(params, resultApprox)
+
+ override def finalResultType: Type = resultType.finalResultType
override def safeToString: String =
- if (resultType.isDependent) dependentToString(0)
- else params.map(_.defString).mkString("(", ",", ")") + resultType
+ params.map(_.defString).mkString("(", ",", ")") + resultType
override def cloneInfo(owner: Symbol) = {
val vparams = cloneSymbols(params, owner)
copyMethodType(this, vparams, resultType.substSym(params, vparams).cloneInfo(owner))
}
+ override def atOwner(owner: Symbol) =
+ if ((params exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
+ cloneInfo(owner)
+ else
+ this
+
override def kind = "MethodType"
}
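The resultType(actuals) logic above is what dependent method types rely on: a stable actual argument is substituted directly into the result type, while an unstable one is hidden behind an existential. A hedged user-level sketch (assumes the dependent-method-types mode guarded by YdepMethTpes above; Graph and Node are invented names):

  class Graph { class Node; def newNode = new Node }
  def firstNode(g: Graph): g.Node = g.newNode

  val g = new Graph
  val n1: g.Node = firstNode(g)      // stable actual: the result type is g.Node
  val n2 = firstNode(new Graph)      // unstable actual: the result is existentially
                                     // abstracted, roughly x.Node forSome { val x: Graph }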
- // todo: this class is no longer needed, a method type is implicit if the first
- // parameter has the IMPLICIT flag
- class ImplicitMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt)
+ object MethodType extends MethodTypeExtractor
- class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt)
+ class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt) {
+ override def isJava = true
+ }
/** A class representing a polymorphic type or, if tparams.length == 0,
* a parameterless method type.
@@ -1831,6 +2006,7 @@ A type's typeSymbol should never be inspected directly.
*/
case class PolyType(override val typeParams: List[Symbol], override val resultType: Type)
extends Type {
+ // assert(!(typeParams contains NoSymbol), this)
override def paramSectionCount: Int = resultType.paramSectionCount
override def paramss: List[List[Symbol]] = resultType.paramss
@@ -1869,9 +2045,17 @@ A type's typeSymbol should never be inspected directly.
PolyType(tparams, resultType.substSym(typeParams, tparams).cloneInfo(owner))
}
+ override def atOwner(owner: Symbol) =
+ if ((typeParams exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
+ cloneInfo(owner)
+ else
+ this
+
override def kind = "PolyType"
}
+ object PolyType extends PolyTypeExtractor
+
case class ExistentialType(quantified: List[Symbol],
override val underlying: Type) extends RewrappingTypeProxy
{
@@ -1884,6 +2068,10 @@ A type's typeSymbol should never be inspected directly.
override def boundSyms: List[Symbol] = quantified
override def prefix = maybeRewrap(underlying.prefix)
override def typeArgs = underlying.typeArgs map maybeRewrap
+ override def params = underlying.params mapConserve { param =>
+ val tpe1 = rewrap(param.tpe)
+ if (tpe1 eq param.tpe) param else param.cloneSymbol.setInfo(tpe1)
+ }
override def paramTypes = underlying.paramTypes map maybeRewrap
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
// maybeRewrap(underlying.instantiateTypeParams(formals, actuals))
@@ -1914,7 +2102,7 @@ A type's typeSymbol should never be inspected directly.
}
private def wildcardArgsString(available: Set[Symbol], args: List[Type]): List[String] = args match {
- case TypeRef(_, sym, _) :: args1 if (quantified contains sym) =>
+ case TypeRef(_, sym, _) :: args1 if (available contains sym) =>
("_"+sym.infoString(sym.info)) :: wildcardArgsString(available - sym, args1)
case arg :: args1 if !(quantified exists (arg contains _)) =>
arg.toString :: wildcardArgsString(available, args1)
@@ -1947,22 +2135,26 @@ A type's typeSymbol should never be inspected directly.
ExistentialType(tparams, underlying.substSym(quantified, tparams))
}
+ override def atOwner(owner: Symbol) =
+ if (quantified exists (_.owner != owner)) cloneInfo(owner) else this
+
override def kind = "ExistentialType"
def withTypeVars(op: Type => Boolean): Boolean = withTypeVars(op, AnyDepth)
def withTypeVars(op: Type => Boolean, depth: Int): Boolean = {
- val tvars = quantified map (tparam => TypeVar(tparam.tpe, new TypeConstraint)) // @M TODO
-//@M should probably change to handle HK type infer properly:
-// val tvars = quantified map (tparam => TypeVar(tparam))
- val underlying1 = underlying.instantiateTypeParams(quantified, tvars)
+ val quantifiedFresh = cloneSymbols(quantified)
+ val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
+ val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
op(underlying1) && {
- solve(tvars, quantified, quantified map (x => 0), false, depth) &&
- isWithinBounds(NoPrefix, NoSymbol, quantified, tvars map (_.constr.inst))
+ solve(tvars, quantifiedFresh, quantifiedFresh map (x => 0), false, depth) &&
+ isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst))
}
}
}
+ object ExistentialType extends ExistentialTypeExtractor
+
/** A class containing the alternatives and type prefix of an overloaded symbol.
* Not used after phase `typer'.
*/
@@ -1980,16 +2172,17 @@ A type's typeSymbol should never be inspected directly.
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
pre.toString + targs.mkString("(with type arguments ", ",", ")");
- override def memberType(sym: Symbol) = pre.memberType(sym) match {
- case PolyType(tparams, restp) =>
- restp.subst(tparams, targs)
-/* I don't think this is needed, as existential types close only over value types
- case ExistentialType(tparams, qtpe) =>
- existentialAbstraction(tparams, qtpe.memberType(sym))
-*/
- case ErrorType =>
- ErrorType
- }
+ override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
+// override def memberType(sym: Symbol) = pre.memberType(sym) match {
+// case PolyType(tparams, restp) =>
+// restp.subst(tparams, targs)
+// /* I don't think this is needed, as existential types close only over value types
+// case ExistentialType(tparams, qtpe) =>
+// existentialAbstraction(tparams, qtpe.memberType(sym))
+// */
+// case ErrorType =>
+// ErrorType
+// }
override def kind = "AntiPolyType"
}
@@ -2000,17 +2193,33 @@ A type's typeSymbol should never be inspected directly.
// then, constr became mutable (to support UndoLog, I guess), but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
+ // encapsulate suspension so we can automatically link the suspension of cloned typevars to their original if this turns out to be necessary
+ def Suspension = new Suspension
+ class Suspension {
+ private val suspended = collection.mutable.HashSet[TypeVar]()
+ def suspend(tv: TypeVar): Unit = {
+ tv.suspended = true
+ suspended += tv
+ }
+ def resumeAll: Unit = {
+ for(tv <- suspended) {
+ tv.suspended = false
+ }
+ suspended.clear
+ }
+ }
+
def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List())
- def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams)
+ def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams) // TODO why not initialise TypeConstraint with bounds of tparam?
def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) = new TypeVar(origin, constr, args, params)
}
/** A class representing a type variable
* Not used after phase `typer'.
- * A higher-kinded type variable has type arguments (a list of Type's) and type paramers (list of Symbols)
+ * A higher-kinded type variable has type arguments (a list of Type's) and type parameters (list of Symbols)
* A TypeVar whose list of args is non-empty can only be instantiated by a higher-kinded type that can be applied to these args
- * NOTE:
+ * a typevar is much like a typeref, except it has special logic for type equality/subtyping
*/
class TypeVar(val origin: Type, val constr0: TypeConstraint, override val typeArgs: List[Type], override val params: List[Symbol]) extends Type {
// params are needed to keep track of variance (see mapOverArgs in SubstMap)
@@ -2021,7 +2230,7 @@ A type's typeSymbol should never be inspected directly.
var constr = constr0
def instValid = constr.instValid
- /** The variable's skolemizatuon level */
+ /** The variable's skolemization level */
val level = skolemizationLevel
/**
@@ -2034,14 +2243,14 @@ A type's typeSymbol should never be inspected directly.
if(newArgs.isEmpty) this // SubstMap relies on this (though this check is redundant when called from appliedType...)
else TypeVar(origin, constr, newArgs, params) // @M TODO: interaction with undoLog??
// newArgs.length may differ from args.length (could've been empty before)
- // OBSOLETE BEHAVIOUR: imperatively update args to new args
- // this initialises a TypeVar's arguments to the arguments of the type
// example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
- // thus, we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
- // OBSOLETE BECAUSE: can't update imperatively because TypeVars do get applied to different arguments over type (in asSeenFrom) -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
- // CONSEQUENCE: make new TypeVar's for every application of a TV to args,
- // inference may generate several TypeVar's for a single type parameter that must be inferred,
- // one of them is in the set of tvars that need to be solved, and they all share the same constr instance
+ // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
+ // TypeVars get applied to different arguments over time (in asSeenFrom)
+ // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
+ // thus: make new TypeVar's for every application of a TV to args,
+ // inference may generate several TypeVar's for a single type parameter that must be inferred,
+ // only one of them is in the set of tvars that need to be solved, but
+ // they share the same TypeConstraint instance
def setInst(tp: Type) {
@@ -2049,17 +2258,20 @@ A type's typeSymbol should never be inspected directly.
constr.inst = tp
}
- /** Can this variable be related in a constraint to type `tp'?
- * This is not the case if `tp' contains type skolems whose
- * skolemization level is higher than the level of this variable.
- */
- def isRelatable(tp: Type): Boolean =
- !tp.exists { t =>
- t.typeSymbol match {
- case ts: TypeSkolem => ts.level > level
- case _ => false
- }
- }
+ def addLoBound(tp: Type, numBound: Boolean = false) {
+ assert(tp != this) // implies there is a cycle somewhere (?)
+ //println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG
+ constr.addLoBound(tp, numBound)
+ }
+
+ def addHiBound(tp: Type, numBound: Boolean = false) {
+ // assert(tp != this)
+ //println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG
+ constr.addHiBound(tp, numBound)
+ }
+
+ // ignore subtyping & equality checks while true -- see findMember
+ private[TypeVar] var suspended = false
/** Called from isSubtype0 when a TypeVar is involved in a subtyping check.
* if isLowerBound is true,
@@ -2084,8 +2296,8 @@ A type's typeSymbol should never be inspected directly.
else tp2 <:< tp1
def addBound(tp: Type) = {
- if (isLowerBound) constr.addLoBound(tp, numBound)
- else constr.addHiBound(tp, numBound)
+ if (isLowerBound) addLoBound(tp, numBound)
+ else addHiBound(tp, numBound)
// println("addedBound: "+(this, tp)) // @MDEBUG
}
@@ -2093,20 +2305,24 @@ A type's typeSymbol should never be inspected directly.
if(isLowerBound) isSubArgs(args1, args2, params)
else isSubArgs(args2, args1, params)
- if (constr.instValid) // type var is already set
+ if (suspended) checkSubtype(tp, origin)
+ else if (constr.instValid) // type var is already set
checkSubtype(tp, constr.inst)
else isRelatable(tp) && {
if(params.isEmpty) { // type var has kind *
addBound(tp)
true
- } else // higher-kinded type var with same arity as tp
- (typeArgs.length == tp.typeArgs.length) && {
- // register type constructor (the type without its type arguments) as bound
- addBound(tp.typeConstructor)
- // check subtyping of higher-order type vars
- // use variances as defined in the type parameter that we're trying to infer (the result is sanity-checked later)
- checkArgs(tp.typeArgs, typeArgs, params)
- }
+ } else { // higher-kinded type var with same arity as tp
+ def unifyHK(tp: Type) =
+ (typeArgs.length == tp.typeArgs.length) && {
+ // register type constructor (the type without its type arguments) as bound
+ addBound(tp.typeConstructor)
+ // check subtyping of higher-order type vars
+ // use variances as defined in the type parameter that we're trying to infer (the result is sanity-checked later)
+ checkArgs(tp.typeArgs, typeArgs, params)
+ }
+ unifyHK(tp) || unifyHK(tp.dealias)
+ }
}
}
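The unifyHK step above is how a higher-kinded type variable picks up a concrete type constructor during inference. A minimal user-level sketch (illustrative only):

  def firstOf[F[_], A](fa: F[A], extract: F[A] => A): A = extract(fa)
  // ?F[?A] is matched against List[Int]: List is registered as a bound for ?F,
  // then the arguments are checked, so F = List and A = Int are inferred.
  val h = firstOf(List(1, 2, 3), (_: List[Int]).head)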
@@ -2115,7 +2331,8 @@ A type's typeSymbol should never be inspected directly.
if(typeVarLHS) constr.inst =:= tp
else tp =:= constr.inst
- if (constr.instValid) checkIsSameType(tp)
+ if (suspended) tp =:= origin
+ else if (constr.instValid) checkIsSameType(tp)
else isRelatable(tp) && {
undoLog record this
@@ -2127,6 +2344,32 @@ A type's typeSymbol should never be inspected directly.
}
}
+ /**
+ * ?A.T =:= tp is rewritten as the constraint ?A <: {type T = tp}
+ *
+ * TODO: make these constraints count (incorporate them into implicit search in applyImplicitArgs)
+ * (T corresponds to @param sym)
+ */
+ def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
+ val bound = refinedType(List(WildcardType), NoSymbol)
+ val bsym = bound.typeSymbol.newAliasType(NoPosition, sym.name)
+ bsym setInfo tp
+ bound.decls enter bsym
+ registerBound(bound, false)
+ }
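The bound built above has the same shape as a user-written structural refinement with a type alias member; for comparison (Coll and Elem are invented names):

  trait Coll { type Elem }
  // a bound of the form Coll { type Elem = Int } is the "{ type T = tp }" shape
  // that registerTypeSelection records against the type variable's prefix
  def intsOnly[A <: Coll { type Elem = Int }](c: A): A = c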
+
+ /** Can this variable be related in a constraint to type `tp'?
+ * This is not the case if `tp' contains type skolems whose
+ * skolemization level is higher than the level of this variable.
+ */
+ def isRelatable(tp: Type): Boolean =
+ !tp.exists { t =>
+ t.typeSymbol match {
+ case ts: TypeSkolem => ts.level > level
+ case _ => false
+ }
+ }
+
override val isHigherKinded = typeArgs.isEmpty && !params.isEmpty
override def normalize: Type =
@@ -2144,7 +2387,7 @@ A type's typeSymbol should never be inspected directly.
origin+
(if(typeArgs.isEmpty) "" else (typeArgs map (_.safeToString)).mkString("[ ", ", ", " ]")) // +"#"+tid //DEBUG
if (constr.inst eq null) "<null " + origin + ">"
- else if (settings.debug.value) varString+"(@"+constr.hashCode+")"+constr.toString
+ else if (settings.debug.value) varString+"(@"+constr.## +")"+constr.toString
else if (constr.inst eq NoType) varString
else constr.inst.toString
}
@@ -2152,7 +2395,10 @@ A type's typeSymbol should never be inspected directly.
override def isVolatile = origin.isVolatile
override def kind = "TypeVar"
- def cloneInternal = TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ def cloneInternal = {
+ assert(!suspended) // cloning a suspended type variable when it's suspended will cause the clone to never be resumed with the current implementation
+ TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ }
}
/** A type carrying some annotations. Created by the typechecker
@@ -2199,7 +2445,7 @@ A type's typeSymbol should never be inspected directly.
override def bounds: TypeBounds = {
val oftp = underlying.bounds
oftp match {
- case TypeBounds(lo, hi) if ((lo eq this) && (hi eq this)) => mkTypeBounds(this,this)
+ case TypeBounds(lo, hi) if ((lo eq this) && (hi eq this)) => TypeBounds(this,this)
case _ => oftp
}
}
@@ -2226,6 +2472,8 @@ A type's typeSymbol should never be inspected directly.
override def kind = "AnnotatedType"
}
+ object AnnotatedType extends AnnotatedTypeExtractor
+
/** A class representing types with a name. When an application uses
* named arguments, the named argument types for calling isApplicable
* are represented as NamedType.
@@ -2236,10 +2484,7 @@ A type's typeSymbol should never be inspected directly.
/** A class representing an as-yet unevaluated type.
*/
- abstract class LazyType extends Type {
- override def isComplete: Boolean = false
- override def complete(sym: Symbol)
- override def safeToString = "<?>"
+ abstract class LazyType extends Type with AbsLazyType {
override def kind = "LazyType"
}
@@ -2250,7 +2495,7 @@ A type's typeSymbol should never be inspected directly.
*/
private def rebind(pre: Type, sym: Symbol): Symbol = {
val owner = sym.owner
- if (owner.isClass && owner != pre.typeSymbol && !sym.isFinal && !sym.isClass) {
+ if (owner.isClass && owner != pre.typeSymbol && !sym.isEffectivelyFinal && !sym.isClass) {
//Console.println("rebind "+pre+" "+sym)//DEBUG
val rebind = pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || sym.isStable)
if (rebind == NoSymbol) sym
@@ -2266,23 +2511,18 @@ A type's typeSymbol should never be inspected directly.
*/
private def removeSuper(tp: Type, sym: Symbol): Type = tp match {
case SuperType(thistp, _) =>
- if (sym.isFinal || sym.isDeferred) thistp
+ if (sym.isEffectivelyFinal || sym.isDeferred) thistp
else tp
case _ =>
tp
}
- /** The canonical creator for this-types */
- def mkThisType(sym: Symbol): Type = {
- if (phase.erasedTypes) sym.tpe else unique(new ThisType(sym) with UniqueType)
- }
-
/** The canonical creator for single-types */
def singleType(pre: Type, sym: Symbol): Type = {
if (phase.erasedTypes)
sym.tpe.resultType
else if (sym.isRootPackage)
- mkThisType(RootClass)
+ ThisType(RootClass)
else {
var sym1 = rebind(pre, sym)
val pre1 = removeSuper(pre, sym1)
@@ -2291,36 +2531,13 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** The canonical creator for super-types */
- def mkSuperType(thistp: Type, supertp: Type): Type =
- if (phase.erasedTypes) supertp
- else {
- unique(new SuperType(thistp, supertp) with UniqueType)
- }
-
- /** The canonical creator for type bounds */
- def mkTypeBounds(lo: Type, hi: Type): TypeBounds = {
- unique(new TypeBounds(lo, hi) with UniqueType)
- }
-
- def refinementOfClass(clazz: Symbol, parents: List[Type], decls: Scope) = {
- class RefinementOfClass extends RefinedType(parents, decls) {
- override def typeSymbol: Symbol = clazz
- }
- new RefinementOfClass
- }
-
-
-
/** the canonical creator for a refined type with a given scope */
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos : Position): Type = {
if (phase.erasedTypes)
if (parents.isEmpty) ObjectClass.tpe else parents.head
else {
- // having $anonfun as owner causes the pickler to break upon unpickling; see ticket #2323
- val nonAnonOwner = (owner.ownerChain dropWhile (_.isAnonymousFunction)).headOption getOrElse NoSymbol
- val clazz = nonAnonOwner.newRefinementClass(NoPosition)
- val result = refinementOfClass(clazz, parents, decls)
+ val clazz = owner.newRefinementClass(NoPosition)
+ val result = RefinedType(parents, decls, clazz)
clazz.setInfo(result)
result
}
@@ -2350,24 +2567,21 @@ A type's typeSymbol should never be inspected directly.
result
}
- /** the canonical creator for a constant type */
- def mkConstantType(value: Constant): ConstantType = {
- class UniqueConstantType extends ConstantType(value) with UniqueType {
- /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
- * which might not be found after 'flatten'. */
- private lazy val _tpe: Type = value.tpe
- override def underlying: Type = _tpe
- }
- unique(new UniqueConstantType)
- }
-
/** The canonical creator for typerefs
* todo: see how we can clean this up a bit
*/
def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = {
- var sym1 = if (sym.isAbstractType) rebind(pre, sym) else sym
+ def rebindTR(pre: Type, sym: Symbol): Symbol = {
+ if(sym.isAbstractType) rebind(pre, sym) else sym
+ // type alias selections are rebound in TypeMap ("coevolved", actually -- see #3731)
+ // e.g., when type parameters that are referenced by the alias are instantiated in the prefix
+ // see pos/depmet_rebind_typealias
+ }
+ val sym1 = rebindTR(pre, sym)
+
def transform(tp: Type): Type =
tp.resultType.asSeenFrom(pre, sym1.owner).instantiateTypeParams(sym1.typeParams, args)
+
if (sym1.isAliasType && sym1.info.typeParams.length == args.length) {
if (!sym1.lockOK)
throw new TypeError("illegal cyclic reference involving " + sym1)
@@ -2380,42 +2594,30 @@ A type's typeSymbol should never be inspected directly.
transform(sym1.info) // check there are no cycles
sym1.unlock()
*/
- rawTypeRef(pre, sym1, args) // don't expand type alias (cycles checked above)
+ TypeRef(pre, sym1, args) // don't expand type alias (cycles checked above)
} else {
val pre1 = removeSuper(pre, sym1)
if (pre1 ne pre) {
- if (sym1.isAbstractType) sym1 = rebind(pre1, sym1)
- typeRef(pre1, sym1, args)
+ typeRef(pre1, rebindTR(pre1, sym1), args)
}
else if (sym1.isClass && pre.isInstanceOf[CompoundType]) {
// sharpen prefix so that it is maximal and still contains the class.
var p = pre.parents.reverse
while (!p.isEmpty && p.head.member(sym1.name) != sym1) p = p.tail
- if (p.isEmpty) rawTypeRef(pre, sym1, args)
+ if (p.isEmpty) TypeRef(pre, sym1, args)
else typeRef(p.head, sym1, args)
} else {
- rawTypeRef(pre, sym1, args)
+ TypeRef(pre, sym1, args)
}
}
}
- /** create a type-ref as found, without checks or rebinds */
- def rawTypeRef(pre: Type, sym: Symbol, args: List[Type]): Type = {
- class rawTypeRef extends TypeRef(pre, sym, args) with UniqueType
- unique(new rawTypeRef)
- }
-
- /** The canonical creator for implicit method types */
- def ImplicitMethodType(params: List[Symbol], resultType: Type): ImplicitMethodType =
- new ImplicitMethodType(params, resultType) // don't unique this!
-
/** The canonical creator for implicit method types */
def JavaMethodType(params: List[Symbol], resultType: Type): JavaMethodType =
new JavaMethodType(params, resultType) // don't unique this!
- /** Create a new MethodType of the same class as tp, i.e. keep Java / ImplicitMethodType */
+ /** Create a new MethodType of the same class as tp, i.e. keep JavaMethodType */
def copyMethodType(tp: Type, params: List[Symbol], restpe: Type): Type = tp match {
- case _: ImplicitMethodType => ImplicitMethodType(params, restpe)
case _: JavaMethodType => JavaMethodType(params, restpe)
case _ => MethodType(params, restpe)
}
@@ -2467,7 +2669,7 @@ A type's typeSymbol should never be inspected directly.
case tv@TypeVar(_, constr) => tv.applyArgs(args)
case ErrorType => tycon
case WildcardType => tycon // needed for neg/t0226
- case _ => throw new Error(debugString(tycon))
+ case _ => abort(debugString(tycon))
}
/** A creator for type parameterizations
@@ -2490,7 +2692,7 @@ A type's typeSymbol should never be inspected directly.
* (minus any SingletonClass markers),
* type variables in `tparams' occurring in contravariant positions are replaced by upper bounds,
* provided the resulting type is legal wrt to stability, and does not contain any
- * type varianble in `tparams'.
+ * type variable in `tparams'.
* The abstraction drops all type parameters that are not directly or indirectly
* referenced by type `tpe1'.
* If there are no remaining type parameters, simply returns result type `tpe'.
@@ -2500,16 +2702,19 @@ A type's typeSymbol should never be inspected directly.
else {
var occurCount = emptySymCount ++ (tparams map (_ -> 0))
val tpe = deAlias(tpe0)
- for (t <- tpe) {
- t match {
- case TypeRef(_, sym, _) =>
- occurCount get sym match {
- case Some(count) => occurCount += (sym -> (count + 1))
- case None =>
- }
- case _ =>
+ def countOccs(tp: Type) =
+ for (t <- tp) {
+ t match {
+ case TypeRef(_, sym, _) =>
+ occurCount get sym match {
+ case Some(count) => occurCount += (sym -> (count + 1))
+ case none =>
+ }
+ case _ =>
+ }
}
- }
+ countOccs(tpe)
+ for (tparam <- tparams) countOccs(tparam.info)
val extrapolate = new TypeMap {
variance = 1
@@ -2601,20 +2806,13 @@ A type's typeSymbol should never be inspected directly.
private var uniques: HashSet[AnyRef] = _
private var uniqueRunId = NoRunId
- def uniqueTypeCount = if (uniques == null) 0 else uniques.size // for statistics
-
private def unique[T <: AnyRef](tp: T): T = {
+ incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = new HashSet("uniques", initialUniquesCapacity)
uniqueRunId = currentRunId
}
- uniques.findEntry(tp) match {
- case null =>
- //println("new unique type: "+tp)
- uniques.addEntry(tp);
- tp
- case tp1 => tp1.asInstanceOf[T]
- }
+ (uniques findEntryOrUpdate tp).asInstanceOf[T]
}
// Helper Classes ---------------------------------------------------------
@@ -2623,7 +2821,6 @@ A type's typeSymbol should never be inspected directly.
* as well as their instantiations.
*/
class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type) {
- //var self: Type = _ //DEBUG
def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
def this() = this(List(), List())
@@ -2644,7 +2841,8 @@ A type's typeSymbol should never be inspected directly.
def addLoBound(tp: Type, numBound: Boolean = false) {
if (numBound && isNumericValueType(tp)) {
- if (!isNumericSubType(tp, numlo)) numlo = tp
+ if (numlo == NoType || isNumericSubType(numlo, tp)) numlo = tp
+ else if (!isNumericSubType(tp, numlo)) numlo = IntClass.tpe
} else {
lobounds = tp :: lobounds
}
@@ -2652,7 +2850,8 @@ A type's typeSymbol should never be inspected directly.
def addHiBound(tp: Type, numBound: Boolean = false) {
if (numBound && isNumericValueType(tp)) {
- if (!isNumericSubType(numhi, tp)) numhi = tp
+ if (numhi == NoType || isNumericSubType(tp, numhi)) numhi = tp
+ else if (!isNumericSubType(numhi, tp)) numhi = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
} else {
hibounds = tp :: hibounds
}
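The numeric special-casing in addLoBound/addHiBound above supports numeric widening when a type variable collects several numeric bounds; roughly, at the user level (a hedged sketch):

  def pick[T](a: T, b: T): T = a
  val d  = pick(1, 1.0)    // lower bounds Int and Double: numlo widens to Double, so T = Double
  val xs = List(1, 2.5)    // the same widening yields List[Double] rather than List[AnyVal]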
@@ -2707,15 +2906,42 @@ A type's typeSymbol should never be inspected directly.
case _ => false
}
+ // #3731: return sym1 such that, where pre bound sym.name to sym, pre1 now binds sym.name to sym1 -- conceptually exactly the same symbol as sym
+ // the selection of sym on pre must be updated to the selection of sym1 on pre1,
+ // since sym's info was probably updated by the TypeMap to yield a new symbol sym1 with transformed info
+ // @return sym1
+ protected def coevolveSym(pre: Type, pre1: Type, sym: Symbol): Symbol =
+ if((pre ne pre1) && sym.isAliasType) // only need to rebind type aliases here, as typeRef already handles abstract types (they are allowed to be rebound more liberally)
+ (pre, pre1) match {
+ case (RefinedType(_, decls), RefinedType(_, decls1)) => // don't look at parents -- it would be an error to override alias types anyway
+ //val sym1 =
+ decls1.lookup(sym.name)
+// assert(decls.lookupAll(sym.name).toList.length == 1)
+// assert(decls1.lookupAll(sym.name).toList.length == 1)
+// assert(sym1.isAliasType)
+// println("coevolved "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
+// sym1
+ case _ => // TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
+// val sym1 = pre1.nonPrivateMember(sym.name).suchThat(sym => sym.isAliasType)
+// println("??coevolve "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
+ sym
+ }
+ else sym
+
/** Map this function over given type */
def mapOver(tp: Type): Type = tp match {
- case ErrorType => tp
- case WildcardType => tp
- case NoType => tp
- case NoPrefix => tp
+ case TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ //val args1 = args mapConserve this(_)
+ val args1 = if (args.isEmpty) args
+ else {
+ val tparams = sym.typeParams
+ if (tparams.isEmpty) args
+ else mapOverArgs(args, tparams)
+ }
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else typeRef(pre1, coevolveSym(pre, pre1, sym), args1)
case ThisType(_) => tp
- case ConstantType(_) => tp
- case DeBruijnIndex(_, _) => tp
case SingleType(pre, sym) =>
if (sym.isPackageClass) tp // short path
else {
@@ -2723,29 +2949,35 @@ A type's typeSymbol should never be inspected directly.
if (pre1 eq pre) tp
else singleType(pre1, sym)
}
+ case MethodType(params, result) =>
+ variance = -variance
+ val params1 = mapOver(params)
+ variance = -variance
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ // for new dependent types: result1.substSym(params, params1)?
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ variance = -variance
+ val tparams1 = mapOver(tparams)
+ variance = -variance
+ var result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case ConstantType(_) => tp
+ // case DeBruijnIndex(_, _) => tp
case SuperType(thistp, supertp) =>
val thistp1 = this(thistp)
val supertp1 = this(supertp)
if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
- else mkSuperType(thistp1, supertp1)
- case TypeRef(pre, sym, args) =>
- val pre1 = this(pre)
- //val args1 = args mapConserve this(_)
- val args1 = if (args.isEmpty) args
- else {
- val tparams = sym.typeParams
- if (tparams.isEmpty) args
- else mapOverArgs(args, tparams)
- }
- if ((pre1 eq pre) && (args1 eq args)) tp
- else typeRef(pre1, sym, args1)
+ else SuperType(thistp1, supertp1)
case TypeBounds(lo, hi) =>
variance = -variance
val lo1 = this(lo)
variance = -variance
val hi1 = this(hi)
if ((lo1 eq lo) && (hi1 eq hi)) tp
- else mkTypeBounds(lo1, hi1)
+ else TypeBounds(lo1, hi1)
case BoundedWildcardType(bounds) =>
val bounds1 = this(bounds)
if (bounds1 eq bounds) tp
@@ -2756,28 +2988,6 @@ A type's typeSymbol should never be inspected directly.
//if ((parents1 eq parents) && (decls1 eq decls)) tp
//else refinementOfClass(tp.typeSymbol, parents1, decls1)
copyRefinedType(rtp, parents1, decls1)
-/*
- case ClassInfoType(parents, decls, clazz) =>
- val parents1 = parents mapConserve (this);
- val decls1 = mapOver(decls);
- if ((parents1 eq parents) && (decls1 eq decls)) tp
- else cloneDecls(ClassInfoType(parents1, new Scope(), clazz), tp, decls1)
-*/
- case MethodType(params, result) =>
- variance = -variance
- val params1 = mapOver(params)
- variance = -variance
- val result1 = this(result)
- if ((params1 eq params) && (result1 eq result)) tp
- // for new dependent types: result1.substSym(params, params1)?
- else copyMethodType(tp, params1, result1.substSym(params, params1))
- case PolyType(tparams, result) =>
- variance = -variance
- val tparams1 = mapOver(tparams)
- variance = -variance
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else PolyType(tparams1, result1.substSym(tparams, tparams1))
case ExistentialType(tparams, result) =>
val tparams1 = mapOver(tparams)
var result1 = this(result)
@@ -2805,6 +3015,12 @@ A type's typeSymbol should never be inspected directly.
if ((annots1 eq annots) && (atp1 eq atp)) tp
else if (annots1.isEmpty) atp1
else AnnotatedType(annots1, atp1, selfsym)
+/*
+ case ErrorType => tp
+ case WildcardType => tp
+ case NoType => tp
+ case NoPrefix => tp
+*/
case _ =>
tp
// throw new Error("mapOver inapplicable for " + tp);
@@ -2842,9 +3058,8 @@ A type's typeSymbol should never be inspected directly.
val clonedSyms = origSyms map (_.cloneSymbol)
val clonedInfos = clonedSyms map (_.info.substSym(origSyms, clonedSyms))
val transformedInfos = clonedInfos mapConserve (this)
- List.map2(clonedSyms, transformedInfos) {
- ((newSym, newInfo) => newSym.setInfo(newInfo))
- }
+ (clonedSyms, transformedInfos).zipped map (_ setInfo _)
+
clonedSyms
}
}
@@ -2948,7 +3163,7 @@ A type's typeSymbol should never be inspected directly.
// note: it's important to write the two tests in this order,
// as only typeParams forces the classfile to be read. See #400
private def isRawIfWithoutArgs(sym: Symbol) =
- !sym.typeParams.isEmpty && sym.hasFlag(JAVA)
+ sym.isClass && !sym.typeParams.isEmpty && sym.hasFlag(JAVA)
def isRaw(sym: Symbol, args: List[Type]) =
!phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
@@ -2968,17 +3183,26 @@ A type's typeSymbol should never be inspected directly.
* in ClassFileparser.sigToType (where it is usually done)
*/
object rawToExistential extends TypeMap {
+ private var expanded = immutable.Set[Symbol]()
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
- val eparams = typeParamsToExistentials(sym, sym.typeParams)
- existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+ if (expanded contains sym) AnyRefClass.tpe
+ else try {
+ expanded += sym
+ val eparams = mapOver(typeParamsToExistentials(sym, sym.typeParams))
+ existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
+ } finally {
+ expanded -= sym
+ }
+ case ExistentialType(_, _) => // stop to avoid infinite expansions
+ tp
case _ =>
mapOver(tp)
}
}
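rawToExistential is what turns a raw Java type into its existential counterpart on the Scala side; the user-visible effect is roughly:

  // a raw java.util.List (no type arguments) is seen from Scala as the existential
  // java.util.List[_], i.e. java.util.List[T] forSome { type T }
  val raw: java.util.List[_] = new java.util.ArrayList[String]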
def singletonBounds(hi: Type) = {
- mkTypeBounds(NothingClass.tpe, intersectionType(List(hi, SingletonClass.tpe)))
+ TypeBounds(NothingClass.tpe, intersectionType(List(hi, SingletonClass.tpe)))
}
/** A map to compute the asSeenFrom method */
@@ -3018,17 +3242,13 @@ A type's typeSymbol should never be inspected directly.
var capturedPre = emptySymMap
- def stabilize(pre: Type, clazz: Symbol): Type = {
- capturedPre get clazz match {
- case None =>
+ def stabilize(pre: Type, clazz: Symbol): Type =
+ capturedPre.getOrElse(clazz, {
val qvar = clazz freshExistential ".type" setInfo singletonBounds(pre)
capturedPre += (clazz -> qvar)
capturedParams = qvar :: capturedParams
qvar
- case Some(qvar) =>
- qvar
- }
- }.tpe
+ }).tpe
/** Return pre.baseType(clazz), or if that's NoType and clazz is a refinement, pre itself.
* See bug397.scala for an example where the second alternative is needed.
@@ -3041,6 +3261,7 @@ A type's typeSymbol should never be inspected directly.
if (b == NoType && clazz.isRefinementClass) pre
else b
}
+
def apply(tp: Type): Type =
if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) tp
else tp match {
@@ -3077,9 +3298,7 @@ A type's typeSymbol should never be inspected directly.
if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) mapOver(tp)
//@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
else {
- def throwError : Nothing = throw new Error(
- "" + tp + sym.locationString + " cannot be instantiated from " + pre.widen
- )
+ def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
def instParam(ps: List[Symbol], as: List[Type]): Type =
if (ps.isEmpty) throwError
@@ -3088,7 +3307,7 @@ A type's typeSymbol should never be inspected directly.
appliedType(as.head, args mapConserve (this)) // @M: was as.head
else instParam(ps.tail, as.tail);
val symclazz = sym.owner
- if (symclazz == clazz && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
+ if (symclazz == clazz && !pre.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
pre.baseType(symclazz) match {
case TypeRef(_, basesym, baseargs) =>
//Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
@@ -3117,6 +3336,7 @@ A type's typeSymbol should never be inspected directly.
/** A base class to compute all substitutions */
abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
+ assert(from.length == to.length, "Unsound substitution from "+ from +" to "+ to)
/** Are `sym' and `sym1' the same.
* Can be tuned by subclasses.
@@ -3124,14 +3344,15 @@ A type's typeSymbol should never be inspected directly.
protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
/** Map target to type, can be tuned by subclasses */
- protected def toType(fromtp: Type, t: T): Type
+ protected def toType(fromtp: Type, tp: T): Type
- def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
- if (from.isEmpty) tp
- else if (matches(from.head, sym)) toType(tp, to.head)
- else subst(tp, sym, from.tail, to.tail)
+ def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
+ if (from.isEmpty) tp
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+ else if (matches(from.head, sym)) toType(tp, to.head)
+ else subst(tp, sym, from.tail, to.tail)
- private def renameBoundSyms(tp: Type): Type = tp match {
+ protected def renameBoundSyms(tp: Type): Type = tp match {
case MethodType(ps, restp) =>
val ps1 = cloneSymbols(ps)
copyMethodType(tp, ps1, renameBoundSyms(restp.substSym(ps, ps1)))
@@ -3183,6 +3404,7 @@ A type's typeSymbol should never be inspected directly.
override def apply(tp: Type): Type = if (from.isEmpty) tp else {
def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
if (from.isEmpty) sym
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
else if (matches(from.head, sym)) to.head
else subst(sym, from.tail, to.tail)
tp match {
@@ -3220,7 +3442,7 @@ A type's typeSymbol should never be inspected directly.
} else {
giveup()
}
- case None => super.transform(tree)
+ case none => super.transform(tree)
}
case tree => super.transform(tree)
}
@@ -3234,7 +3456,6 @@ A type's typeSymbol should never be inspected directly.
extends SubstMap(from, to) {
protected def toType(fromtp: Type, tp: Type) = tp
-
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object trans extends TypeMapTransformer {
override def transform(tree: Tree) =
@@ -3267,8 +3488,10 @@ A type's typeSymbol should never be inspected directly.
class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
def apply(tp: Type): Type = try {
tp match {
- case TypeRef(_, sym, _) if (from contains sym) => WildcardType
- case _ => mapOver(tp)
+ case TypeRef(_, sym, _) if (from contains sym) =>
+ BoundedWildcardType(sym.info.bounds)
+ case _ =>
+ mapOver(tp)
}
} catch {
case ex: MalformedType =>
@@ -3276,83 +3499,94 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** Most of the implementation for MethodType.resultType. The
- * caller also needs to existentially quantify over the
- * variables in existentialsNeeded.
- */
- class InstantiateDeBruijnMap(actuals: List[Type]) extends TypeMap {
+// dependent method types
+ object IsDependentCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if(tp isImmediatelyDependent) result = true
+ else if (!result) mapOver(tp)
+ }
+ }
+
+ object ApproximateDependentMap extends TypeMap {
+ def apply(tp: Type): Type =
+ if(tp isImmediatelyDependent) WildcardType
+ else mapOver(tp)
+ }
+
+ class InstantiateDependentMap(params: List[Symbol], actuals: List[Type]) extends TypeMap {
override val dropNonConstraintAnnotations = true
- private var existSyms = immutable.Map.empty[Int, Symbol]
- def existentialsNeeded: List[Symbol] = existSyms.valuesIterator.toList
+ object ParamWithActual {
+ def unapply(sym: Symbol): Option[Type] = {
+ val pid = params indexOf sym
+ if(pid != -1) Some(actuals(pid)) else None
+ }
+ }
- /* Return the type symbol for referencing a parameter index
- * inside the existential quantifier. */
- def existSymFor(actualIdx: Int, oldSym: Symbol) =
- if (existSyms.isDefinedAt(actualIdx))
- existSyms(actualIdx)
+ def apply(tp: Type): Type =
+ mapOver(tp) match {
+ case SingleType(NoPrefix, ParamWithActual(arg)) if arg isStable => arg // unsound to replace args by unstable actual #3873
+ // (soundly) expand type alias selections on implicit arguments, see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
+ case tp1@TypeRef(SingleType(NoPrefix, param@ParamWithActual(arg)), sym, targs) =>
+ val res = typeRef(arg, sym, targs)
+ if(res.typeSymbolDirect isAliasType) res.dealias
+ else tp1
+ case tp1 => tp1 // don't return the original `tp`, which may be different from `tp1`, due to `dropNonConstraintAnnotations`
+ }
+
+ def existentialsNeeded: List[Symbol] = existSyms.filter(_ ne null).toList
+
+ private val existSyms: Array[Symbol] = new Array(actuals.length)
+ private def haveExistential(i: Int) = { assert((i >= 0) && (i < actuals.length)); existSyms(i) ne null }
+
+ /* Return the type symbol for referencing a parameter inside the existential quantifier.
+ * (Only needed if the actual is unstable.)
+ */
+ def existSymFor(actualIdx: Int) =
+ if (haveExistential(actualIdx)) existSyms(actualIdx)
else {
- val symowner = oldSym.owner // what should be used??
+ val oldSym = params(actualIdx)
+ val symowner = oldSym.owner
val bound = singletonBounds(actuals(actualIdx))
val sym = symowner.newExistential(oldSym.pos, oldSym.name+".type")
sym.setInfo(bound)
sym.setFlag(oldSym.flags)
- existSyms = existSyms + (actualIdx -> sym)
+ existSyms(actualIdx) = sym
sym
}
- def apply(tp: Type): Type = tp match {
- case DeBruijnIndex(level, pid) =>
- if (level == 1)
- if (pid < actuals.length) actuals(pid) else tp
- else DeBruijnIndex(level - 1, pid)
- case _ =>
- mapOver(tp)
- }
-
+ //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
- object treeTrans extends TypeMapTransformer {
- override def transform(tree: Tree): Tree =
+ object treeTrans extends Transformer {
+ override def transform(tree: Tree): Tree = {
tree match {
- case Ident(name) =>
- tree.tpe.withoutAnnotations match {
- case DeBruijnIndex(level, pid) =>
- if (level == 1) {
- if (actuals(pid).isStable)
- mkAttributedQualifier(actuals(pid), tree.symbol)
- else {
- val sym = existSymFor(pid, tree.symbol)
- (Ident(tree.symbol.name)
- copyAttrs tree
- setType typeRef(NoPrefix, sym, Nil))
- }
- } else
- Ident(name)
- .setPos(tree.pos)
- .setSymbol(tree.symbol)
- .setType(DeBruijnIndex(level-1, pid))
- case _ =>
- super.transform(tree)
-
+ case RefParamAt(pid) =>
+ if(actuals(pid) isStable) mkAttributedQualifier(actuals(pid), tree.symbol)
+ else {
+ val sym = existSymFor(pid)
+ (Ident(sym.name)
+ copyAttrs tree
+ setType typeRef(NoPrefix, sym, Nil))
}
case _ => super.transform(tree)
}
+ }
+ object RefParamAt {
+ def unapply(tree: Tree): Option[(Int)] = tree match {
+ case Ident(_) =>
+ val pid = params indexOf tree.symbol
+ if(pid != -1) Some((pid)) else None
+ case _ => None
+ }
+ }
}
treeTrans.transform(arg)
}
}
- object ApproximateDeBruijnMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case DeBruijnIndex(level, pid) =>
- WildcardType
- case _ =>
- mapOver(tp)
- }
- }
object StripAnnotationsMap extends TypeMap {
def apply(tp: Type): Type = tp match {
@@ -3458,15 +3692,6 @@ A type's typeSymbol should never be inspected directly.
}
}
- object IsDependentCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- tp match {
- case DeBruijnIndex(_, _) => result = true
- case _ => if (!result) mapOver(tp)
- }
- }
- }
-
/** A map to compute the most deeply nested owner that contains all the symbols
* of thistype or prefixless typerefs/singletype occurrences in given type.
*/
@@ -3493,45 +3718,64 @@ A type's typeSymbol should never be inspected directly.
}
}
- class MissingAliasException extends Exception
- val missingAliasException = new MissingAliasException
+ class MissingAliasControl extends ControlThrowable
+ val missingAliasException = new MissingAliasControl
+ class MissingTypeControl extends ControlThrowable
object adaptToNewRunMap extends TypeMap {
private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
- if (sym.isModuleClass && !phase.flatClasses) {
+ if (phase.refChecked) {
+ sym
+ } else if (sym.isModuleClass) {
adaptToNewRun(pre, sym.sourceModule).moduleClass
- } else if ((pre eq NoPrefix) || (pre eq NoType) || sym.owner.isPackageClass) {
+ } else if ((pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) {
sym
} else {
var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true)
if (rebind0 == NoSymbol) {
if (sym.isAliasType) throw missingAliasException
- assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
+ if (settings.debug.value) println(pre+"."+sym+" no longer exists, phase = "+phase)
+ throw new MissingTypeControl // For build manager and presentation compiler purposes
+ //assert(false, pre+"."+sym+" no longer exists, phase = "+phase)
}
/** The two symbols have the same fully qualified name */
def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
if (!corresponds(sym.owner, rebind0.owner)) {
- if (settings.debug.value) Console.println("ADAPT1 pre = "+pre+", sym = "+sym+sym.locationString+", rebind = "+rebind0+rebind0.locationString)
+ if (settings.debug.value)
+ log("ADAPT1 pre = "+pre+", sym = "+sym+sym.locationString+", rebind = "+rebind0+rebind0.locationString)
val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner));
if (bcs.isEmpty)
assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
else
rebind0 = pre.baseType(bcs.head).member(sym.name)
- if (settings.debug.value) Console.println("ADAPT2 pre = "+pre+", bcs.head = "+bcs.head+", sym = "+sym+sym.locationString+", rebind = "+rebind0+(if (rebind0 == NoSymbol) "" else rebind0.locationString))
+ if (settings.debug.value) log(
+ "ADAPT2 pre = " + pre +
+ ", bcs.head = " + bcs.head +
+ ", sym = " + sym+sym.locationString +
+ ", rebind = " + rebind0 + (
+ if (rebind0 == NoSymbol) ""
+ else rebind0.locationString
+ )
+ )
}
val rebind = rebind0.suchThat(sym => sym.isType || sym.isStable)
if (rebind == NoSymbol) {
- if (settings.debug.value) Console.println("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
+ if (settings.debug.value) log("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
throw new MalformedType(pre, sym.nameString)
}
rebind
}
}
def apply(tp: Type): Type = tp match {
- case ThisType(sym) if (sym.isModuleClass) =>
- val sym1 = adaptToNewRun(sym.owner.thisType, sym)
- if (sym1 == sym) tp else mkThisType(sym1)
+ case ThisType(sym) =>
+ try {
+ val sym1 = adaptToNewRun(sym.owner.thisType, sym)
+ if (sym1 == sym) tp else ThisType(sym1)
+ } catch {
+ case ex: MissingTypeControl =>
+ tp
+ }
case SingleType(pre, sym) =>
if (sym.isPackage) tp
else {
@@ -3550,8 +3794,10 @@ A type's typeSymbol should never be inspected directly.
if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) tp
else typeRef(pre1, sym1, args1)
} catch {
- case ex: MissingAliasException =>
+ case ex: MissingAliasControl =>
apply(tp.dealias)
+ case _: MissingTypeControl =>
+ tp
}
}
case MethodType(params, restp) =>
@@ -3608,7 +3854,7 @@ A type's typeSymbol should never be inspected directly.
*/
def lubDepth(ts: List[Type]) = {
var d = 0
- for (tp <- ts) d = Math.max(d, tp.baseTypeSeqDepth)
+ for (tp <- ts) d = math.max(d, tp.baseTypeSeqDepth)
d + LubGlbMargin
}
@@ -3637,14 +3883,14 @@ A type's typeSymbol should never be inspected directly.
* let bt1, bt2 be the base types of tp1, tp2 relative to class bc
* Then:
* bt1 and bt2 have the same prefix, and
- * any correspondiong non-variant type arguments of bt1 and bt2 are the same
+ * any corresponding non-variant type arguments of bt1 and bt2 are the same
*/
def isPopulated(tp1: Type, tp2: Type): Boolean = {
def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
assert(sym1 == sym2)
pre1 =:= pre2 &&
- !(List.map3(args1, args2, sym1.typeParams) {
+ ((args1, args2, sym1.typeParams).zipped forall {
(arg1, arg2, tparam) =>
//if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
if (tparam.variance == 0) arg1 =:= arg2
@@ -3655,7 +3901,7 @@ A type's typeSymbol should never be inspected directly.
// also: think what happens if there are embedded typevars?
if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
else true
- } contains false)
+ })
case (et: ExistentialType, _) =>
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
@@ -3714,31 +3960,43 @@ A type's typeSymbol should never be inspected directly.
private def isUnifiable(pre1: Type, pre2: Type) =
(beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
+ /** Returns true iff we are past phase specialize,
+ * sym1 and sym2 are two existential skolems with equal names and bounds,
+ * and pre1 and pre2 are equal prefixes
+ */
+ private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
+ sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
+ sym1.name == sym2.name &&
+ phase.specialized &&
+ sym1.info =:= sym2.info &&
+ pre1 =:= pre2
+ }
+
private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) phase.erasedTypes || pre1 =:= pre2
+ if (sym1 == sym2) sym1.hasFlag(PACKAGE) || phase.erasedTypes || pre1 =:= pre2
else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
/** Do `tp1' and `tp2' denote equivalent types?
*/
def isSameType(tp1: Type, tp2: Type): Boolean = try {
- sametypeCount += 1
+ incCounter(sametypeCount)
subsametypeRecursions += 1
undoLog undoUnless {
- isSameType0(tp1, tp2)
+ isSameType1(tp1, tp2)
}
} finally {
subsametypeRecursions -= 1
- if (subsametypeRecursions == 0) undoLog clear
+ if (subsametypeRecursions == 0) undoLog.clear
}
def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
subsametypeRecursions += 1
undoLog undo { // undo type constraints that arise from operations in this block
- !isSameType0(tp1, tp2)
+ !isSameType1(tp1, tp2)
}
} finally {
subsametypeRecursions -= 1
- if (subsametypeRecursions == 0) undoLog clear
+ if (subsametypeRecursions == 0) undoLog.clear
}
def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
@@ -3764,8 +4022,9 @@ A type's typeSymbol should never be inspected directly.
case _ => tp.normalize
}
*/
-
- private def isSameType0(tp1: Type, tp2: Type): Boolean =
+/*
+ private def isSameType0(tp1: Type, tp2: Type): Boolean = {
+ if (tp1 eq tp2) return true
((tp1, tp2) match {
case (ErrorType, _) => true
case (WildcardType, _) => true
@@ -3781,7 +4040,7 @@ A type's typeSymbol should never be inspected directly.
if (sym1 == sym2) =>
true
case (SingleType(pre1, sym1), SingleType(pre2, sym2))
- if equalSymsAndPrefixes(sym1, pre1, sym2, pre2) =>
+ if (equalSymsAndPrefixes(sym1, pre1, sym2, pre2)) =>
true
/*
case (SingleType(pre1, sym1), ThisType(sym2))
@@ -3822,18 +4081,14 @@ A type's typeSymbol should never be inspected directly.
// new dependent types: probably fix this, use substSym as done for PolyType
(isSameTypes(tp1.paramTypes, tp2.paramTypes) &&
res1 =:= res2 &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
+ tp1.isImplicit == tp2.isImplicit)
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
-// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- (tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
- ((p1, p2) => p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1))
+ // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
+ (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
+ res1 =:= res2.substSym(tparams2, tparams1)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- (tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
- ((p1, p2) => p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1))
+ (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
+ res1 =:= res2.substSym(tparams2, tparams1)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
lo1 =:= lo2 && hi1 =:= hi2
case (BoundedWildcardType(bounds), _) =>
@@ -3867,17 +4122,183 @@ A type's typeSymbol should never be inspected directly.
val tp2n = normalizePlus(tp2)
((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
}
+ }
+*/
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = {
+ if ((tp1 eq tp2) ||
+ (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
+ (tp2 eq ErrorType) || (tp2 eq WildcardType))
+ true
+ else if ((tp1 eq NoType) || (tp2 eq NoType))
+ false
+ else if (tp1 eq NoPrefix)
+ tp2.typeSymbol.isPackageClass
+ else if (tp2 eq NoPrefix)
+ tp1.typeSymbol.isPackageClass
+ else {
+ isSameType2(tp1, tp2) || {
+ val tp1n = normalizePlus(tp1)
+ val tp2n = normalizePlus(tp2)
+ ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
+ }
+ }
+ }
+
+ def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ tp1 match {
+ case tr1: TypeRef =>
+ tp2 match {
+ case tr2: TypeRef =>
+ return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
+ ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
+ isSameTypes(tr1.args, tr2.args))) ||
+ ((tr1.pre, tr2.pre) match {
+ case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
+ case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
+ case _ => false
+ })
+ case _ =>
+ }
+ case tt1: ThisType =>
+ tp2 match {
+ case tt2: ThisType =>
+ if (tt1.sym == tt2.sym) return true
+ case _ =>
+ }
+ case st1: SingleType =>
+ tp2 match {
+ case st2: SingleType =>
+ if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
+ case _ =>
+ }
+ case ct1: ConstantType =>
+ tp2 match {
+ case ct2: ConstantType =>
+ return (ct1.value == ct2.value)
+ case _ =>
+ }
+ case rt1: RefinedType =>
+ tp2 match {
+ case rt2: RefinedType => //
+ def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
+ sym2 =>
+ var e1 = s1.lookupEntry(sym2.name)
+ (e1 ne null) && {
+ val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType)
+ var isEqual = false
+ while (!isEqual && (e1 ne null)) {
+ isEqual = e1.sym.info =:= substSym
+ e1 = s1.lookupNextEntry(e1)
+ }
+ isEqual
+ }
+ }
+ //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
+ return isSameTypes(rt1.parents, rt2.parents) && {
+ val decls1 = rt1.decls
+ val decls2 = rt2.decls
+ isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
+ }
+ case _ =>
+ }
+ case mt1: MethodType =>
+ tp2 match {
+ case mt2: MethodType =>
+ // DEPMETTODO new dependent types: probably fix this, use substSym as done for PolyType
+ return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
+ mt1.resultType =:= mt2.resultType &&
+ mt1.isImplicit == mt2.isImplicit
+ case _ =>
+ }
+ case PolyType(tparams1, res1) =>
+ tp2 match {
+ case PolyType(tparams2, res2) =>
+// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ (tparams1.length == tparams2.length) && // corresponds does not check length of two sequences before checking the predicate
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case ExistentialType(tparams1, res1) =>
+ tp2 match {
+ case ExistentialType(tparams2, res2) =>
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ (tparams1.length == tparams2.length) && // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case TypeBounds(lo1, hi1) =>
+ tp2 match {
+ case TypeBounds(lo2, hi2) =>
+ return lo1 =:= lo2 && hi1 =:= hi2
+ case _ =>
+ }
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp2
+ case _ =>
+ }
+ tp2 match {
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp1
+ case _ =>
+ }
+ tp1 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp2, true)
+ case _ =>
+ }
+ tp2 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp1, false)
+ case _ =>
+ }
+ tp1 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp2 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp1 match {
+ case _: SingletonType =>
+ tp2 match {
+ case _: SingletonType =>
+ var origin1 = tp1
+ while (origin1.underlying.isInstanceOf[SingletonType]) {
+ assert(origin1 ne origin1.underlying, origin1)
+ origin1 = origin1.underlying
+ }
+ var origin2 = tp2
+ while (origin2.underlying.isInstanceOf[SingletonType]) {
+ assert(origin2 ne origin2.underlying, origin2)
+ origin2 = origin2.underlying
+ }
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+ }
/** Are `tps1' and `tps2' lists of pairwise equivalent
* types?
*/
- def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- tps1.length == tps2.length &&
- List.forall2(tps1, tps2)((tp1, tp2) => tp1 =:= tp2)
+ def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
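// Plain-Scala illustration (standard-library semantics assumed, not compiler types):
// `corresponds` already returns false when the two lists differ in length, which is
// why the explicit length check of the old forall2-based version can be dropped here.
// It may still apply the predicate to the common prefix, which is why the
// PolyType/ExistentialType cases above keep an up-front length comparison.
assert((List(1, 2, 3) corresponds List(1, 2, 3))(_ == _))
assert(!((List(1, 2, 3) corresponds List(1, 2))(_ == _)))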
- private var pendingSubTypes = new collection.mutable.HashSet[SubTypePair]
+ private val pendingSubTypes = new collection.mutable.HashSet[SubTypePair]
private var basetypeRecursions: Int = 0
- private var pendingBaseTypes = new collection.mutable.HashSet[Type]
+ private val pendingBaseTypes = new collection.mutable.HashSet[Type]
def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
@@ -3892,12 +4313,12 @@ A type's typeSymbol should never be inspected directly.
else
try {
pendingSubTypes += p
- isSubType0(tp1, tp2, depth)
+ isSubType2(tp1, tp2, depth)
} finally {
pendingSubTypes -= p
}
} else {
- isSubType0(tp1, tp2, depth)
+ isSubType2(tp1, tp2, depth)
}
}
} finally {
@@ -3953,18 +4374,17 @@ A type's typeSymbol should never be inspected directly.
((tp1.normalize, tp2.normalize) match {
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType)
tparams1.length == tparams2.length && {
- if(tparams1.isEmpty) res1 <:< res2 // fast-path: monomorphic nullary method type
- else if(tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
- List.forall2(tparams1, tparams2)((p1, p2) =>
- p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
+ if (tparams1.isEmpty) res1 <:< res2 // fast-path: monomorphic nullary method type
+ else if (tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
+ (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
res1 <:< res2.substSym(tparams2, tparams1)
} else { // normalized higher-kinded type
//@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
- val tpsFresh = cloneSymbols(tparams1) // @M cloneSymbols(tparams2) should be equivalent -- TODO: check
+ val tpsFresh = cloneSymbols(tparams1)
- (List.forall2(tparams1, tparams2)((p1, p2) =>
- p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
- res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh))
+ (tparams1 corresponds tparams2)((p1, p2) =>
+ p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) && // @PP: corresponds
+ res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
//@M the forall in the previous test could be optimised to the following,
// but not worth the extra complexity since it only shaves 1s from quick.comp
@@ -3973,9 +4393,8 @@ A type's typeSymbol should never be inspected directly.
// this optimisation holds because inlining cloneSymbols in `val tpsFresh = cloneSymbols(tparams1)` gives:
// val tpsFresh = tparams1 map (_.cloneSymbol)
// for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh))
- }
}
-
+ }
case (_, _) => false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded
// --> thus, cannot be subtypes (Any/Nothing has already been checked)
}))
@@ -3989,15 +4408,12 @@ A type's typeSymbol should never be inspected directly.
isSubArgs(tps1.tail, tps2.tail, tparams.tail)
)
- def isSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = {
- isSubType2(tp1, tp2, depth)
- }
-
def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
/** Does type `tp1' conform to `tp2'?
*/
private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ if (tp1 eq tp2) return true
if (isErrorOrWildcard(tp1)) return true
if (isErrorOrWildcard(tp2)) return true
if (tp1 eq NoType) return false
@@ -4010,10 +4426,10 @@ A type's typeSymbol should never be inspected directly.
/** First try, on the right:
* - unwrap Annotated types, BoundedWildcardTypes,
- * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
+ * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
* - handle common cases for first-kind TypeRefs on both sides as a fast path.
*/
- def firstTry = tp2 match {
+ def firstTry = { incCounter(ctr1); tp2 match {
// fast path: two typerefs, none of them HK
case tr2: TypeRef =>
tp1 match {
@@ -4023,8 +4439,9 @@ A type's typeSymbol should never be inspected directly.
val pre1 = tr1.pre
val pre2 = tr2.pre
(((if (sym1 == sym2) phase.erasedTypes || pre1 <:< pre2
- else (sym1.name == sym2.name && isUnifiable(pre1, pre2))) &&
- isSubArgs(tr1.args, tr2.args, sym1.typeParams))
+ else (sym1.name == sym2.name &&
+ (isUnifiable(pre1, pre2) || isSameSpecializedSkolem(sym1, sym2, pre1, pre2)))) &&
+ isSubArgs(tr1.args, tr2.args, sym1.typeParams))
||
sym2.isClass && {
val base = tr1 baseType sym2
@@ -4048,14 +4465,14 @@ A type's typeSymbol should never be inspected directly.
}
case _ =>
secondTry
- }
+ }}
/** Second try, on the left:
* - unwrap AnnotatedTypes, BoundedWildcardTypes,
* - bind typevars,
* - handle existential types by skolemization.
*/
- def secondTry = tp1 match {
+ def secondTry = { incCounter(ctr2); tp1 match {
case AnnotatedType(_, _, _) =>
tp1.withoutAnnotations <:< tp2.withoutAnnotations && annotationsConform(tp1, tp2)
case BoundedWildcardType(bounds) =>
@@ -4065,32 +4482,38 @@ A type's typeSymbol should never be inspected directly.
case ExistentialType(_, _) =>
try {
skolemizationLevel += 1
- tp1.skolemizeExistential(NoSymbol, null) <:< tp2
+ tp1.skolemizeExistential <:< tp2
} finally {
skolemizationLevel -= 1
}
case _ =>
thirdTry
- }
+ }}
def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
+ incCounter(ctr3);
val sym2 = tp2.sym
- if (sym2.isAliasType) {
- isSubType(tp1.normalize, tp2.normalize, depth)
- } else if (sym2.isAbstractType) {
- val tp2a = tp2.bounds.lo
-// isDifferentTypeConstructor(tp2a, tp2.pre, sym2) && tp1 <:< tp2a || fourthTry
- isDifferentTypeConstructor(tp2, tp2a) && tp1 <:< tp2a || fourthTry
- } else if (sym2 == NotNullClass) {
- tp1.isNotNull
- } else if (sym2 == SingletonClass) {
- tp1.isStable
- } else if (isRaw(sym2, tp2.args)) {
- isSubType(tp1, rawToExistential(tp2), depth)
- } else if (sym2.isRefinementClass) {
- isSubType(tp1, sym2.info, depth)
- } else {
- fourthTry
+ sym2 match {
+ case _: ClassSymbol =>
+ if (sym2 == NotNullClass)
+ tp1.isNotNull
+ else if (sym2 == SingletonClass)
+ tp1.isStable || fourthTry
+ else if (isRaw(sym2, tp2.args))
+ isSubType(tp1, rawToExistential(tp2), depth)
+ else if (sym2.name == nme.REFINE_CLASS_NAME.toTypeName)
+ isSubType(tp1, sym2.info, depth)
+ else
+ fourthTry
+ case _: TypeSymbol =>
+ if (sym2 hasFlag DEFERRED) {
+ val tp2a = tp2.bounds.lo
+ isDifferentTypeConstructor(tp2, tp2a) && tp1 <:< tp2a || fourthTry
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ fourthTry
}
}
@@ -4099,31 +4522,33 @@ A type's typeSymbol should never be inspected directly.
* - handle typerefs, existentials, and notnull types.
* - handle left+right method types, polytypes, typebounds
*/
- def thirdTry = tp2 match {
+ def thirdTry = { incCounter(ctr3); tp2 match {
case tr2: TypeRef =>
thirdTryRef(tp1, tr2)
- case RefinedType(parents2, ref2) =>
- (parents2 forall (tp1 <:< _)) &&
- (ref2.toList forall tp1.specializes)
- case et: ExistentialType =>
- et.withTypeVars(tp1 <:< _, depth) || fourthTry
- case NotNullType(ntp2) =>
- tp1.isNotNull && tp1 <:< ntp2
- case MethodType(params2, res2) =>
+ case rt2: RefinedType =>
+ (rt2.parents forall (tp1 <:< _)) &&
+ (rt2.decls.toList forall tp1.specializes)
+ case et2: ExistentialType =>
+ et2.withTypeVars(tp1 <:< _, depth) || fourthTry
+ case nn2: NotNullType =>
+ tp1.isNotNull && tp1 <:< nn2.underlying
+ case mt2: MethodType =>
tp1 match {
- case MethodType(params1, res1) =>
+ case mt1 @ MethodType(params1, res1) =>
+ val params2 = mt2.params
+ val res2 = mt2.resultType
(params1.length == params2.length &&
- matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
(res1 <:< res2) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
+ mt1.isImplicit == mt2.isImplicit)
case _ =>
false
}
- case PolyType(List(), res2) =>
+ case pt2 @ PolyType(List(), _) =>
tp1 match {
- case PolyType(List(), res1) =>
+ case pt1 @ PolyType(List(), _) =>
// other polytypes were already checked in isHKSubType
- res1 <:< res2
+ pt1.resultType <:< pt2.resultType
case _ =>
false
}
@@ -4136,42 +4561,47 @@ A type's typeSymbol should never be inspected directly.
}
case _ =>
fourthTry
- }
+ }}
/** Fourth try, on the left:
* - handle typerefs, refined types, notnull and singleton types.
*/
- def fourthTry = tp1 match {
- case TypeRef(pre1, sym1, args1) =>
- if (sym1.isAliasType) {
- isSubType(tp1.normalize, tp2.normalize, depth)
- } else if (sym1.isAbstractType) {
- val tp1a = tp1.bounds.hi
- isDifferentTypeConstructor(tp1, tp1a) && tp1a <:< tp2
- } else if (sym1 == NothingClass) {
- true
- } else if (sym1 == NullClass) {
- tp2 match {
- case TypeRef(_, sym2, _) =>
- (sym2 isNonBottomSubClass ObjectClass) &&
- !(tp2.normalize.typeSymbol isNonBottomSubClass NotNullClass)
- case _ =>
- isSingleType(tp2) && tp1 <:< tp2.widen
- }
- } else if (isRaw(sym1, args1)) {
- isSubType(rawToExistential(tp1), tp2, depth)
- } else if (sym1.isRefinementClass) {
- isSubType(sym1.info, tp2, depth)
- } else {
- false
+ def fourthTry = { incCounter(ctr4); tp1 match {
+ case tr1 @ TypeRef(_, sym1, _) =>
+ sym1 match {
+ case _: ClassSymbol =>
+ if (sym1 == NothingClass)
+ true
+ else if (sym1 == NullClass)
+ tp2 match {
+ case TypeRef(_, sym2, _) =>
+ sym2.isClass && (sym2 isNonBottomSubClass ObjectClass) &&
+ !(tp2.normalize.typeSymbol isNonBottomSubClass NotNullClass)
+ case _ =>
+ isSingleType(tp2) && tp1 <:< tp2.widen
+ }
+ else if (isRaw(sym1, tr1.args))
+ isSubType(rawToExistential(tp1), tp2, depth)
+ else
+ sym1.name == nme.REFINE_CLASS_NAME.toTypeName &&
+ isSubType(sym1.info, tp2, depth)
+ case _: TypeSymbol =>
+ if (sym1 hasFlag DEFERRED) {
+ val tp1a = tp1.bounds.hi
+ isDifferentTypeConstructor(tp1, tp1a) && tp1a <:< tp2
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ false
}
- case RefinedType(parents1, ref1) =>
+ case RefinedType(parents1, _) =>
parents1 exists (_ <:< tp2)
case _: SingletonType | _: NotNullType =>
tp1.underlying <:< tp2
case _ =>
false
- }
+ }}
firstTry
}
@@ -4180,9 +4610,7 @@ A type's typeSymbol should never be inspected directly.
* that all elements of `tps1' conform to corresponding elements
* of `tps2'?
*/
- def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- tps1.length == tps2.length &&
- List.forall2(tps1, tps2)((tp1, tp2) => tp1 <:< tp2)
+ def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _)
/** Does type `tp' implement symbol `sym' with same or
* stronger type? Exact only if `sym' is a member of some
@@ -4202,20 +4630,83 @@ A type's typeSymbol should never be inspected directly.
val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
//System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
sym2.isTerm && (info1 <:< info2) /*&& (!sym2.isStable || sym1.isStable) */ ||
- sym2.isAbstractType && info2.bounds.containsType(tp1.memberType(sym1)) ||
+ sym2.isAbstractType && {
+ val memberTp1 = tp1.memberType(sym1)
+ // println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
+ info2.bounds.containsType(memberTp1) &&
+ kindsConform(List(sym2), List(memberTp1), tp1, sym1.owner)
+ } ||
sym2.isAliasType && tp2.memberType(sym2).substThis(tp2.typeSymbol, tp1) =:= tp1.memberType(sym1) //@MAT ok
}
/** A function implementing `tp1' matches `tp2' */
- def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
+ final def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
+ def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean =
+ tparams1.length == tparams2.length &&
+ matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
+ def lastTry =
+ tp2 match {
+ case ExistentialType(_, res2) if alwaysMatchSimple =>
+ matchesType(tp1, res2, true)
+ case MethodType(_, _) =>
+ false
+ case PolyType(tparams2, res2) =>
+ tparams2.isEmpty && matchesType(tp1, res2, alwaysMatchSimple)
+ case _ =>
+ alwaysMatchSimple || tp1 =:= tp2
+ }
+ tp1 match {
+ case mt1 @ MethodType(params1, res1) =>
+ tp2 match {
+ case mt2 @ MethodType(params2, res2) =>
+ params1.length == params2.length && // useful pre-screening optimization
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
+ matchesType(res1, res2, alwaysMatchSimple) &&
+ mt1.isImplicit == mt2.isImplicit
+ case PolyType(List(), res2) =>
+ if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
+ else matchesType(tp1, res2, alwaysMatchSimple)
+ case ExistentialType(_, res2) =>
+ alwaysMatchSimple && matchesType(tp1, res2, true)
+ case _ =>
+ false
+ }
+ case PolyType(tparams1, res1) =>
+ tp2 match {
+ case PolyType(tparams2, res2) =>
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case MethodType(List(), res2) if (tparams1.isEmpty) =>
+ matchesType(res1, res2, alwaysMatchSimple)
+ case ExistentialType(_, res2) =>
+ alwaysMatchSimple && matchesType(tp1, res2, true)
+ case _ =>
+ tparams1.isEmpty && matchesType(res1, tp2, alwaysMatchSimple)
+ }
+ case ExistentialType(tparams1, res1) =>
+ tp2 match {
+ case ExistentialType(tparams2, res2) =>
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case _ =>
+ if (alwaysMatchSimple) matchesType(res1, tp2, true)
+ else lastTry
+ }
+ case _ =>
+ lastTry
+ }
+ }
+
+/** matchesType above is an optimized version of the following implementation:
+
+ def matchesType2(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean =
tparams1.length == tparams2.length &&
matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
(tp1, tp2) match {
case (MethodType(params1, res1), MethodType(params2, res2)) =>
- matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ params1.length == params2.length && // useful pre-screening optimization
+ matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
matchesType(res1, res2, alwaysMatchSimple) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType]
+ tp1.isImplicit == tp2.isImplicit
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
matchesQuantified(tparams1, tparams2, res1, res2)
case (PolyType(List(), rtp1), MethodType(List(), rtp2)) =>
@@ -4240,14 +4731,25 @@ A type's typeSymbol should never be inspected directly.
alwaysMatchSimple || tp1 =:= tp2
}
}
+*/
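// Source-level illustration (assumes override checking goes through matchesType;
// the trait and class names are made up): a parameterless def and an empty-paren
// def have method types that are not =:= but do "match", so this compiles.
trait HasSize { def size(): Int }
class Sized extends HasSize { def size: Int = 0 } // PolyType(Nil, Int) matches MethodType(Nil, Int)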
- /** Are `tps1' and `tps2' lists of pairwise equivalent types? */
- private def matchingParams(tps1: List[Type], tps2: List[Type], tps1isJava: Boolean, tps2isJava: Boolean): Boolean =
- tps1.length == tps2.length &&
- List.forall2(tps1, tps2)((tp1, tp2) =>
- (tp1 =:= tp2) ||
- tps1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass ||
- tps2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass)
+ /** Are `syms1' and `syms2' parameter lists with pairwise equivalent types? */
+ private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
+ case Nil =>
+ syms2.isEmpty
+ case sym1 :: rest1 =>
+ syms2 match {
+ case Nil =>
+ false
+ case sym2 :: rest2 =>
+ val tp1 = sym1.tpe
+ val tp2 = sym2.tpe
+ (tp1 =:= tp2 ||
+ syms1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass ||
+ syms2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass) &&
+ matchingParams(rest1, rest2, syms1isJava, syms2isJava)
+ }
+ }
/** like map2, but returns list `xs' itself - instead of a copy - if function
* `f' maps all elements to themselves.
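// Behavioural sketch of the map-conserve contract described above (hypothetical
// helper, not the allocation-conserving compiler implementation): callers can use
// an `eq` check on the result to detect that nothing changed.
def mapConserveSimple[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
  val ys = xs map f
  if ((xs corresponds ys)(_ eq _)) xs else ys
}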
@@ -4282,14 +4784,13 @@ A type's typeSymbol should never be inspected directly.
val up = if (variance != CONTRAVARIANT) upper else !upper
tvar.constr.inst = null
val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
- //Console.println("solveOne0 "+tvar+" "+config+" "+bound);//DEBUG
+ //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
var cyclic = bound contains tparam
for ((tvar2, (tparam2, variance2)) <- config) {
- // Console.println("solveOne0(tp,up,lo,hi,lo=tp,hi=tp)="+(tparam.tpe, up, tparam2.info.bounds.lo, tparam2.info.bounds.hi, (tparam2.info.bounds.lo =:= tparam.tpe), (tparam2.info.bounds.hi =:= tparam.tpe))) //DEBUG
if (tparam2 != tparam &&
((bound contains tparam2) ||
- up && (tparam2.info.bounds.lo =:= tparam.tpe) || //@M TODO: should probably be .tpeHK
- !up && (tparam2.info.bounds.hi =:= tparam.tpe))) { //@M TODO: should probably be .tpeHK
+ up && (tparam2.info.bounds.lo =:= tparam.tpe) ||
+ !up && (tparam2.info.bounds.hi =:= tparam.tpe))) {
if (tvar2.constr.inst eq null) cyclic = true
solveOne(tvar2, tparam2, variance2)
}
@@ -4297,23 +4798,28 @@ A type's typeSymbol should never be inspected directly.
if (!cyclic) {
if (up) {
if (bound.typeSymbol != AnyClass)
- tvar.constr addHiBound bound.instantiateTypeParams(tparams, tvars)
+ tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
for (tparam2 <- tparams)
- if (tparam2.info.bounds.lo =:= tparam.tpe) //@M TODO: should probably be .tpeHK
- tvar.constr addHiBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ tparam2.info.bounds.lo.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ tvar addHiBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
} else {
- if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam)
- tvar.constr addLoBound bound.instantiateTypeParams(tparams, tvars)
+ if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
+ tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
+ }
for (tparam2 <- tparams)
- if (tparam2.info.bounds.hi =:= tparam.tpe) //@M TODO: should probably be .tpeHK
- tvar.constr addLoBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ tparam2.info.bounds.hi.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ tvar addLoBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
}
}
tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
- // println("solveOne(useGlb, glb, lub): "+ (up, //@MDEBUG
- // if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds),
- // if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)))
+ //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
tvar setInst (
if (up) {
@@ -4321,10 +4827,12 @@ A type's typeSymbol should never be inspected directly.
} else {
if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
})
- // Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
+
+ //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
}
}
+ // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
for ((tvar, (tparam, variance)) <- config)
solveOne(tvar, tparam, variance)
@@ -4339,8 +4847,10 @@ A type's typeSymbol should never be inspected directly.
* @return ...
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
- val bounds = instantiatedBounds(pre, owner, tparams, targs)
- !(List.map2(bounds, targs)((bound, targ) => bound containsType targ) contains false)
+ var bounds = instantiatedBounds(pre, owner, tparams, targs)
+ if (targs.exists(_.annotations.nonEmpty))
+ bounds = adaptBoundsToAnnotations(bounds, tparams, targs)
+ (bounds corresponds targs)(_ containsType _) // @PP: corresponds
}
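// Plain-Scala illustration of the check above (Bounds/contains are toy stand-ins
// for TypeBounds/containsType): every argument must lie within the corresponding
// instantiated bound, paired positionally by `corresponds`.
case class Bounds(lo: Int, hi: Int) { def contains(x: Int): Boolean = lo <= x && x <= hi }
def withinBounds(bounds: List[Bounds], args: List[Int]): Boolean =
  (bounds corresponds args)(_ contains _)
assert(withinBounds(List(Bounds(0, 10), Bounds(5, 5)), List(3, 5)))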
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
@@ -4442,7 +4952,7 @@ A type's typeSymbol should never be inspected directly.
res
case TypeVar(_, constr) =>
if (constr.instValid) constr.inst
- else throw new Error("trying to do lub/glb of typevar "+tp)
+ else abort("trying to do lub/glb of typevar "+tp)
case t => t
}
val strippedTypes = ts mapConserve (stripType)
@@ -4450,32 +4960,46 @@ A type's typeSymbol should never be inspected directly.
}
def weakLub(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) numericLub(ts)
- else lub(ts)
-
- def weakGlb(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) numericGlb(ts)
- else glb(ts)
+ if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
+ else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty)))
+ (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
+ else (lub(ts), false)
+
+ def weakGlb(ts: List[Type]) = {
+ if (ts.nonEmpty && (ts forall isNumericValueType)) {
+ val nglb = numericGlb(ts)
+ if (nglb != NoType) (nglb, true)
+ else (glb(ts), false)
+ } else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty))) {
+ (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
+ } else (glb(ts), false)
+ }
def numericLub(ts: List[Type]) =
- (ByteClass.tpe /: ts) ((t1, t2) => if (isNumericSubType(t1, t2)) t2 else t1)
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t2
+ else if (isNumericSubType(t2, t1)) t1
+ else IntClass.tpe)
def numericGlb(ts: List[Type]) =
- (DoubleClass.tpe /: ts) ((t1, t2) => if (isNumericSubType(t1, t2)) t1 else t2)
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t1
+ else if (isNumericSubType(t2, t1)) t2
+ else NoType)
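// Worked sketch in plain Scala (`widens` is a hypothetical stand-in for the weak
// numeric subtyping checked by isNumericSubType): the reduceLeft keeps the wider
// type of each pair and falls back to Int when the two are unrelated, e.g.
// lub(Byte, Long) = Long and lub(Short, Char) = Int.
val widens = Set("Byte" -> "Short", "Short" -> "Int", "Int" -> "Long",
                 "Long" -> "Float", "Float" -> "Double", "Char" -> "Int")
def numSub(a: String, b: String): Boolean =
  a == b || widens.exists { case (x, y) => x == a && numSub(y, b) }
def numLub(ts: List[String]): String =
  ts reduceLeft ((t1, t2) => if (numSub(t1, t2)) t2 else if (numSub(t2, t1)) t1 else "Int")
assert(numLub(List("Byte", "Long")) == "Long")
assert(numLub(List("Short", "Char")) == "Int")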
def isWeakSubType(tp1: Type, tp2: Type) =
- tp1 match {
+ tp1.deconst.normalize match {
case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
- tp2 match {
+ tp2.deconst.normalize match {
case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- sym1 == sym2 || numericWidth(sym1) < numericWidth(sym2)
+ isNumericSubClass(sym1, sym2)
case tv2 @ TypeVar(_, _) =>
tv2.registerBound(tp1, isLowerBound = true, numBound = true)
case _ =>
isSubType(tp1, tp2)
}
case tv1 @ TypeVar(_, _) =>
- tp2 match {
+ tp2.deconst.normalize match {
case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
tv1.registerBound(tp2, isLowerBound = false, numBound = true)
case _ =>
@@ -4487,9 +5011,17 @@ A type's typeSymbol should never be inspected directly.
def isNumericSubType(tp1: Type, tp2: Type) =
isNumericValueType(tp1) && isNumericValueType(tp2) &&
- (tp1.typeSymbol == tp2.typeSymbol || numericWidth(tp1.typeSymbol) < numericWidth(tp2.typeSymbol))
+ isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
- def lub(ts: List[Type]): Type = lub(ts, lubDepth(ts))
+ private val lubResults = new HashMap[(Int, List[Type]), Type]
+ private val glbResults = new HashMap[(Int, List[Type]), Type]
+
+ def lub(ts: List[Type]): Type = try {
+ lub(ts, lubDepth(ts))
+ } finally {
+ lubResults.clear()
+ glbResults.clear()
+ }
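// Sketch of the caching pattern used by lub/glb below (hypothetical simplified
// signature; String stands in for Type): the cache is pre-seeded with a safe
// provisional answer so that a recursive request for the same key terminates
// instead of looping, and is then overwritten with the real result.
import scala.collection.mutable.HashMap
val lubCache = new HashMap[List[String], String]
def cachedLub(ts: List[String])(compute: List[String] => String): String =
  lubCache.getOrElse(ts, {
    lubCache(ts) = "Any"      // provisional answer breaks cycles
    val res = compute(ts)
    lubCache(ts) = res
    res
  })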
/** The least upper bound wrt &lt;:&lt; of a list of types */
def lub(ts: List[Type], depth: Int): Type = {
@@ -4497,81 +5029,90 @@ A type's typeSymbol should never be inspected directly.
case List() => NothingClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- PolyType(
- List.map2(tparams, List.transpose(matchingBounds(ts, tparams)))
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth))),
- lub0(matchingInstTypes(ts, tparams)))
+ val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
+ ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, lub0(matchingRestypes(ts, params map (_.tpe))))
case ts @ TypeBounds(_, _) :: rest =>
- mkTypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
- case ts0 =>
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val bts: List[BaseTypeSeq] = ts map (_.baseTypeSeq)
- val lubBaseTypes: List[Type] = lubBaseTypeSeq(bts, depth)
- val lubParents = spanningTypes(lubBaseTypes)
- val lubOwner = commonOwner(ts)
- val lubBase = intersectionType(lubParents, lubOwner)
- val lubType =
- if (phase.erasedTypes || depth == 0) lubBase
- else {
- val lubRefined = refinedType(lubParents, lubOwner)
- val lubThisType = lubRefined.typeSymbol.thisType
- val narrowts = ts map (_.narrow)
- def lubsym(proto: Symbol): Symbol = {
- val prototp = lubThisType.memberInfo(proto)
- val syms = narrowts map (t =>
- t.nonPrivateMember(proto.name).suchThat(sym =>
- sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
- if (syms contains NoSymbol) NoSymbol
+ TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts =>
+ lubResults get (depth, ts) match {
+ case Some(lubType) =>
+ lubType
+ case None =>
+ lubResults((depth, ts)) = AnyClass.tpe
+ val res = if (depth < 0) AnyClass.tpe else lub1(ts)
+ lubResults((depth, ts)) = res
+ res
+ }
+ }
+ def lub1(ts0: List[Type]): Type = {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val bts: List[BaseTypeSeq] = ts map (_.baseTypeSeq)
+ val lubBaseTypes: List[Type] = lubBaseTypeSeq(bts, depth)
+ val lubParents = spanningTypes(lubBaseTypes)
+ val lubOwner = commonOwner(ts)
+ val lubBase = intersectionType(lubParents, lubOwner)
+ val lubType =
+ if (phase.erasedTypes || depth == 0) lubBase
+ else {
+ val lubRefined = refinedType(lubParents, lubOwner)
+ val lubThisType = lubRefined.typeSymbol.thisType
+ val narrowts = ts map (_.narrow)
+ def lubsym(proto: Symbol): Symbol = {
+ val prototp = lubThisType.memberInfo(proto)
+ val syms = narrowts map (t =>
+ t.nonPrivateMember(proto.name).suchThat(sym =>
+ sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
+ if (syms contains NoSymbol) NoSymbol
+ else {
+ val symtypes =
+ (narrowts, syms).zipped map ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+ if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
+ else if (symtypes.tail forall (symtypes.head =:=))
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
else {
- val symtypes =
- (List.map2(narrowts, syms)
- ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType)));
- if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
- proto.cloneSymbol(lubRefined.typeSymbol).setInfo(lub(symtypes, decr(depth)))
- else if (symtypes.tail forall (symtypes.head =:=))
- proto.cloneSymbol(lubRefined.typeSymbol).setInfo(symtypes.head)
- else {
- def lubBounds(bnds: List[TypeBounds]): TypeBounds =
- mkTypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
- lubRefined.typeSymbol.newAbstractType(proto.pos, proto.name)
- .setInfo(lubBounds(symtypes map (_.bounds)))
- }
+ def lubBounds(bnds: List[TypeBounds]): TypeBounds =
+ TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
+ lubRefined.typeSymbol.newAbstractType(proto.pos, proto.name)
+ .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
}
}
- def refines(tp: Type, sym: Symbol): Boolean = {
- val syms = tp.nonPrivateMember(sym.name).alternatives;
- !syms.isEmpty && (syms forall (alt =>
- // todo alt != sym is strictly speaking not correct, but without it we lose
- // efficiency.
- alt != sym && !specializesSym(lubThisType, sym, tp, alt)))
- }
- for (sym <- lubBase.nonPrivateMembers) {
- // add a refinement symbol for all non-class members of lubBase
- // which are refined by every type in ts.
- if (!sym.isClass && !sym.isConstructor && (narrowts forall (t => refines(t, sym))))
- try {
- val lsym = lubsym(sym)
- if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lubsym(sym))
- } catch {
- case ex: NoCommonType =>
- }
- }
- if (lubRefined.decls.isEmpty) lubBase
- else {
+ }
+ def refines(tp: Type, sym: Symbol): Boolean = {
+ val syms = tp.nonPrivateMember(sym.name).alternatives;
+ !syms.isEmpty && (syms forall (alt =>
+ // todo alt != sym is strictly speaking not correct, but without it we lose
+ // efficiency.
+ alt != sym && !specializesSym(lubThisType, sym, tp, alt)))
+ }
+ for (sym <- lubBase.nonPrivateMembers) {
+ // add a refinement symbol for all non-class members of lubBase
+ // which are refined by every type in ts.
+ if (!sym.isClass && !sym.isConstructor && (narrowts forall (t => refines(t, sym))))
+ try {
+ val lsym = lubsym(sym)
+ if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lubsym(sym))
+ } catch {
+ case ex: NoCommonType =>
+ }
+ }
+ if (lubRefined.decls.isEmpty) lubBase
+ else {
// println("refined lub of "+ts+"/"+narrowts+" is "+lubRefined+", baseclasses = "+(ts map (_.baseTypeSeq) map (_.toList)))
- lubRefined
- }
+ lubRefined
}
- existentialAbstraction(tparams, lubType)
+ }
+ existentialAbstraction(tparams, lubType)
}
if (printLubs) {
println(indent + "lub of " + ts + " at depth "+depth)//debug
indent = indent + " "
assert(indent.length <= 100)
}
- val res = if (depth < 0) AnyClass.tpe else lub0(ts)
+ val res = lub0(ts)
if (printLubs) {
indent = indent.substring(0, indent.length() - 2)
println(indent + "lub of " + ts + " is " + res)//debug
@@ -4582,7 +5123,7 @@ A type's typeSymbol should never be inspected directly.
val GlbFailure = new Throwable
/** A global counter for glb calls in the `specializes' query connected to the `addMembers'
- * call in `glb'. There's a possible inifinite recursion when `specializes' calls
+ * call in `glb'. There's a possible infinite recursion when `specializes' calls
* memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
* The counter breaks this recursion after two calls.
* If the recursion is broken, no member is added to the glb.
@@ -4590,7 +5131,12 @@ A type's typeSymbol should never be inspected directly.
private var globalGlbDepth = 0
private final val globalGlbLimit = 2
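// Sketch of the recursion guard described in the comment above (hypothetical names;
// the real code wraps the member-adding loop in glb): beyond the limit the guarded
// body is skipped and the fallback is used, which bottoms out the mutual recursion.
var guardDepth = 0
val guardLimit = 2
def guarded[A](body: => A)(fallback: => A): A =
  if (guardDepth >= guardLimit) fallback
  else try { guardDepth += 1; body } finally { guardDepth -= 1 }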
- def glb(ts: List[Type]): Type = glb(ts, lubDepth(ts))
+ def glb(ts: List[Type]): Type = try {
+ glb(ts, lubDepth(ts))
+ } finally {
+ lubResults.clear()
+ glbResults.clear()
+ }
/** The greatest lower bound wrt &lt;:&lt; of a list of types */
private def glb(ts: List[Type], depth: Int): Type = {
@@ -4598,99 +5144,106 @@ A type's typeSymbol should never be inspected directly.
case List() => AnyClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- PolyType(
- List.map2(tparams, List.transpose(matchingBounds(ts, tparams)))
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth))),
- glb0(matchingInstTypes(ts, tparams)))
+ val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
+ ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ PolyType(tparams1, glb0(matchingInstTypes(ts, tparams1)))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, glb0(matchingRestypes(ts, params map (_.tpe))))
case ts @ TypeBounds(_, _) :: rest =>
- mkTypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
- case ts0 =>
- try {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val glbOwner = commonOwner(ts)
- def refinedToParents(t: Type): List[Type] = t match {
- case RefinedType(ps, _) => ps flatMap refinedToParents
- case _ => List(t)
- }
- def refinedToDecls(t: Type): List[Scope] = t match {
- case RefinedType(ps, decls) =>
- val dss = ps flatMap refinedToDecls
- if (decls.isEmpty) dss else decls :: dss
- case _ => List()
- }
- val ts1 = ts flatMap refinedToParents
- val glbBase = intersectionType(ts1, glbOwner)
- val glbType =
- if (phase.erasedTypes || depth == 0) glbBase
- else {
- val glbRefined = refinedType(ts1, glbOwner)
- val glbThisType = glbRefined.typeSymbol.thisType
- def glbsym(proto: Symbol): Symbol = {
- val prototp = glbThisType.memberInfo(proto)
- val syms = for (t <- ts;
- alt <- (t.nonPrivateMember(proto.name).alternatives);
- if glbThisType.memberInfo(alt) matches prototp
- ) yield alt
- val symtypes = syms map glbThisType.memberInfo
- assert(!symtypes.isEmpty)
- proto.cloneSymbol(glbRefined.typeSymbol).setInfo(
- if (proto.isTerm) glb(symtypes, decr(depth))
- else {
- def isTypeBound(tp: Type) = tp match {
- case TypeBounds(_, _) => true
- case _ => false
- }
- def glbBounds(bnds: List[Type]): TypeBounds = {
- val lo = lub(bnds map (_.bounds.lo), decr(depth))
- val hi = glb(bnds map (_.bounds.hi), decr(depth))
- if (lo <:< hi) mkTypeBounds(lo, hi)
- else throw GlbFailure
- }
- val symbounds = symtypes filter isTypeBound
- var result: Type =
- if (symbounds.isEmpty)
- mkTypeBounds(NothingClass.tpe, AnyClass.tpe)
- else glbBounds(symbounds)
- for (t <- symtypes if !isTypeBound(t))
- if (result.bounds containsType t) result = t
- else throw GlbFailure
- result
- })
- }
- if (globalGlbDepth < globalGlbLimit)
- try {
- globalGlbDepth += 1
- val dss = ts flatMap refinedToDecls
- for (ds <- dss; val sym <- ds.iterator)
- if (globalGlbDepth < globalGlbLimit && !(glbThisType specializes sym))
- try {
- addMember(glbThisType, glbRefined, glbsym(sym))
- } catch {
- case ex: NoCommonType =>
- }
- } finally {
- globalGlbDepth -= 1
- }
- if (glbRefined.decls.isEmpty) glbBase else glbRefined
- }
- existentialAbstraction(tparams, glbType)
- } catch {
- case GlbFailure =>
- if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
- else NothingClass.tpe
+ TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
+ case ts =>
+ glbResults get (depth, ts) match {
+ case Some(glbType) =>
+ glbType
+ case _ =>
+ glbResults((depth, ts)) = NothingClass.tpe
+ val res = if (depth < 0) NothingClass.tpe else glb1(ts)
+ glbResults((depth, ts)) = res
+ res
}
}
- if (settings.debug.value) {
- println(indent + "glb of " + ts + " at depth "+depth)//debug
- indent = indent + " "
- }
- val res = if (depth < 0) NothingClass.tpe else glb0(ts)
- if (settings.debug.value) {
- indent = indent.substring(0, indent.length() - 2)
- log(indent + "glb of " + ts + " is " + res)//debug
+ def glb1(ts0: List[Type]): Type = {
+ try {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val glbOwner = commonOwner(ts)
+ def refinedToParents(t: Type): List[Type] = t match {
+ case RefinedType(ps, _) => ps flatMap refinedToParents
+ case _ => List(t)
+ }
+ def refinedToDecls(t: Type): List[Scope] = t match {
+ case RefinedType(ps, decls) =>
+ val dss = ps flatMap refinedToDecls
+ if (decls.isEmpty) dss else decls :: dss
+ case _ => List()
+ }
+ val ts1 = ts flatMap refinedToParents
+ val glbBase = intersectionType(ts1, glbOwner)
+ val glbType =
+ if (phase.erasedTypes || depth == 0) glbBase
+ else {
+ val glbRefined = refinedType(ts1, glbOwner)
+ val glbThisType = glbRefined.typeSymbol.thisType
+ def glbsym(proto: Symbol): Symbol = {
+ val prototp = glbThisType.memberInfo(proto)
+ val syms = for (t <- ts;
+ alt <- (t.nonPrivateMember(proto.name).alternatives);
+ if glbThisType.memberInfo(alt) matches prototp
+ ) yield alt
+ val symtypes = syms map glbThisType.memberInfo
+ assert(!symtypes.isEmpty)
+ proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
+ if (proto.isTerm) glb(symtypes, decr(depth))
+ else {
+ def isTypeBound(tp: Type) = tp match {
+ case TypeBounds(_, _) => true
+ case _ => false
+ }
+ def glbBounds(bnds: List[Type]): TypeBounds = {
+ val lo = lub(bnds map (_.bounds.lo), decr(depth))
+ val hi = glb(bnds map (_.bounds.hi), decr(depth))
+ if (lo <:< hi) TypeBounds(lo, hi)
+ else throw GlbFailure
+ }
+ val symbounds = symtypes filter isTypeBound
+ var result: Type =
+ if (symbounds.isEmpty)
+ TypeBounds(NothingClass.tpe, AnyClass.tpe)
+ else glbBounds(symbounds)
+ for (t <- symtypes if !isTypeBound(t))
+ if (result.bounds containsType t) result = t
+ else throw GlbFailure
+ result
+ })
+ }
+ if (globalGlbDepth < globalGlbLimit)
+ try {
+ globalGlbDepth += 1
+ val dss = ts flatMap refinedToDecls
+ for (ds <- dss; val sym <- ds.iterator)
+ if (globalGlbDepth < globalGlbLimit && !(glbThisType specializes sym))
+ try {
+ addMember(glbThisType, glbRefined, glbsym(sym))
+ } catch {
+ case ex: NoCommonType =>
+ }
+ } finally {
+ globalGlbDepth -= 1
+ }
+ if (glbRefined.decls.isEmpty) glbBase else glbRefined
+ }
+ existentialAbstraction(tparams, glbType)
+ } catch {
+ case GlbFailure =>
+ if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
+ else NothingClass.tpe
+ }
}
+ // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
+
+ val res = glb0(ts)
+
+ // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
+
if (ts exists (_.isNotNull)) res.notNull else res
}
@@ -4708,7 +5261,7 @@ A type's typeSymbol should never be inspected directly.
* of types.
*/
private def commonOwner(tps: List[Type]): Symbol = {
- if (settings.debug.value) log("computing common owner of types " + tps)//debug
+ // if (settings.debug.value) log("computing common owner of types " + tps)//DEBUG
commonOwnerMap.init
tps foreach { tp => commonOwnerMap.apply(tp); () }
commonOwnerMap.result
@@ -4718,43 +5271,47 @@ A type's typeSymbol should never be inspected directly.
* of types `tps'. All types in `tps' are typerefs or singletypes
* with the same symbol.
* Return `Some(x)' if the computation succeeds with result `x'.
- * Return `None' if the computuation fails.
+ * Return `None' if the computation fails.
*/
def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
case List(tp) =>
Some(tp)
case TypeRef(_, sym, _) :: rest =>
- val pres = tps map (_.prefix)
+ val pres = tps map (_.prefix) // prefix normalizes automatically
val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
- val argss = tps map (_.typeArgs)
+ val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
val capturedParams = new ListBuffer[Symbol]
- val args = List.map2(sym.typeParams, List.transpose(argss)) {
- (tparam, as) =>
- if (depth == 0)
- if (tparam.variance == variance) AnyClass.tpe
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
- else
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
- else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
- if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
- // just err on the conservative side, i.e. with a bound that is too high.
- // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
- val qvar = commonOwner(as) freshExistential "" setInfo mkTypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
- }
- }
- }
try {
+ val args = (sym.typeParams, argss.transpose).zipped map {
+ (tparam, as) =>
+ if (depth == 0)
+ if (tparam.variance == variance) AnyClass.tpe
+ else if (tparam.variance == -variance) NothingClass.tpe
+ else NoType
+ else
+ if (tparam.variance == variance) lub(as, decr(depth))
+ else if (tparam.variance == -variance) glb(as, decr(depth))
+ else {
+ val l = lub(as, decr(depth))
+ val g = glb(as, decr(depth))
+ if (l <:< g) l
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ // just err on the conservative side, i.e. with a bound that is too high.
+ // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
+ }
+ }
+ }
if (args contains NoType) None
else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
} catch {
case ex: MalformedType => None
+ case ex: IndexOutOfBoundsException => // transpose freaked out because of irregular argss
+ // catching just in case (shouldn't happen, but also doesn't cost us)
+ if (settings.debug.value) log("transposed irregular matrix!?"+ (tps, argss))
+ None
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
@@ -4775,7 +5332,7 @@ A type's typeSymbol should never be inspected directly.
*/
def addMember(thistp: Type, tp: Type, sym: Symbol) {
assert(sym != NoSymbol)
- if (settings.debug.value) log("add member " + sym+":"+sym.info+" to "+thistp)
+ // if (settings.debug.value) log("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG
if (!(thistp specializes sym)) {
if (sym.isTerm)
for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
@@ -4822,17 +5379,124 @@ A type's typeSymbol should never be inspected directly.
throw new NoCommonType(tps)
}
+
+// TODO: this desperately needs to be cleaned up
+// plan: split into kind inference and subkinding
+// every Type has a (cached) Kind
+ def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean = checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+
+ /** Check well-kindedness of type application (assumes arities are already checked) -- @M
+ *
+ * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
+ * (checked one type member at a time -- in that case, prefix is the name of the type alias)
+ *
+ * Type application is just like value application: it's "contravariant" in the sense that
+ * the type parameters of the supplied type arguments must conform to the type parameters of
+ * the required type parameters:
+ * - their bounds must be less strict
+ * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
+ * - @M TODO: are these conditions correct, sufficient & necessary?
+ *
+ * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
+ * List's type parameter is also covariant and its bounds are weaker than <: Int
+ */
+ def checkKindBounds0(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol, explainErrors: Boolean): List[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])] = {
+ var error = false
+
+ def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
+ def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o)
+
+ /** Check whether <arg>sym1</arg>'s variance conforms to <arg>sym2</arg>'s variance
+ *
+ * If <arg>sym2</arg> is invariant, <arg>sym1</arg>'s variance is irrelevant. Otherwise they must be equal.
+ */
+ def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance)
+
+ // check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
+ def checkKindBoundsHK(hkargs: List[Symbol], arg: Symbol, param: Symbol, paramowner: Symbol, underHKParams: List[Symbol], withHKArgs: List[Symbol]): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
+ def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
+ // @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
+ val hkparams = param.typeParams
+
+ if(settings.debug.value) {
+ println("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
+ println("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
+ println("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
+ }
+
+ if(hkargs.length != hkparams.length) {
+ if(arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
+ else {error = true; (List((arg, param)), Nil, Nil)} // shortcut: always set error, whether or not we explain errors
+ } else {
+ val _arityMismatches = if(explainErrors) new ListBuffer[(Symbol, Symbol)] else null
+ val _varianceMismatches = if(explainErrors) new ListBuffer[(Symbol, Symbol)] else null
+ val _stricterBounds = if(explainErrors)new ListBuffer[(Symbol, Symbol)] else null
+ def varianceMismatch(a: Symbol, p: Symbol) { if(explainErrors) _varianceMismatches += ((a, p)) else error = true}
+ def stricterBound(a: Symbol, p: Symbol) { if(explainErrors) _stricterBounds += ((a, p)) else error = true }
+ def arityMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _arityMismatches ++= as }
+ def varianceMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _varianceMismatches ++= as }
+ def stricterBounds(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _stricterBounds ++= as }
+
+ for ((hkarg, hkparam) <- hkargs zip hkparams) {
+ if (hkparam.typeParams.isEmpty) { // base-case: kind *
+ if (!variancesMatch(hkarg, hkparam))
+ varianceMismatch(hkarg, hkparam)
+
+ // instantiateTypeParams(tparams, targs) --> higher-order bounds may contain references to type arguments
+ // substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
+ // --> their arguments use different symbols, but are conceptually the same
+ // (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
+ if (!(bindHKParams(transformedBounds(hkparam, paramowner)) <:< transform(hkarg.info.bounds, owner)))
+ stricterBound(hkarg, hkparam)
+
+ if(settings.debug.value) {
+ println("checkKindBoundsHK base case: "+ hkparam +" declared bounds: "+ transformedBounds(hkparam, paramowner) +" after instantiating earlier hkparams: "+ bindHKParams(transformedBounds(hkparam, paramowner)))
+ println("checkKindBoundsHK base case: "+ hkarg +" has bounds: "+ transform(hkarg.info.bounds, owner))
+ }
+ } else {
+ if(settings.debug.value) println("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
+ val (am, vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkarg, hkparam, paramowner, underHKParams ++ hkparam.typeParams, withHKArgs ++ hkarg.typeParams)
+ arityMismatches(am)
+ varianceMismatches(vm)
+ stricterBounds(sb)
+ }
+ if(!explainErrors && error) return (Nil, Nil, Nil) // stop as soon as we encountered an error
+ }
+ if(!explainErrors) (Nil, Nil, Nil)
+ else (_arityMismatches.toList, _varianceMismatches.toList, _stricterBounds.toList)
+ }
+ }
+
+ val errors = new ListBuffer[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])]
+ (tparams zip targs).foreach{ case (tparam, targ) if (targ.isHigherKinded || !tparam.typeParams.isEmpty) =>
+ // @M must use the typeParams of the type targ, not the typeParams of the symbol of targ!!
+ val tparamsHO = targ.typeParams
+
+ val (arityMismatches, varianceMismatches, stricterBounds) =
+ checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO) // NOTE: *not* targ.typeSymbol, which normalizes
+
+ if(!explainErrors) {if(error) return List((NoType, NoSymbol, Nil, Nil, Nil))}
+ else if (arityMismatches.nonEmpty || varianceMismatches.nonEmpty || stricterBounds.nonEmpty) {
+ errors += ((targ, tparam, arityMismatches, varianceMismatches, stricterBounds))
+ }
+ // case (tparam, targ) => println("no check: "+(tparam, targ, tparam.typeParams.isEmpty))
+ case _ =>
+ }
+
+ errors.toList
+ }
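// Source-level companion to the Iterable[t, m[+x <: t]] example in the doc comment
// above (the object and alias names are made up): List kind-checks because its type
// parameter is covariant and its bound (<: Any) is weaker than the required <: Int,
// whereas an invariant constructor such as Set would be reported as a variance mismatch.
object KindCheckDemo {
  class MyIterable[T, M[+X <: T]]
  type Ok = MyIterable[Int, List]
  // type Bad = MyIterable[Int, Set] // rejected: Set is invariant where +X is required
}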
+
// Errors and Diagnostics -----------------------------------------------------
- /** An exception signalling a type error */
- class TypeError(var pos: Position, val msg: String) extends java.lang.Error(msg) {
+ /** A throwable signalling a type error */
+ class TypeError(var pos: Position, val msg: String) extends Throwable(msg) {
def this(msg: String) = this(NoPosition, msg)
}
- class NoCommonType(tps: List[Type]) extends java.lang.Error(
- "lub/glb of incompatible types: " + tps.mkString("", " and ", ""))
+ class NoCommonType(tps: List[Type]) extends Throwable(
+ "lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable
- /** An exception signalling a malformed type */
+ /** A throwable signalling a malformed type */
class MalformedType(msg: String) extends TypeError(msg) {
def this(pre: Type, tp: String) = this("malformed type: " + pre + "#" + tp)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
index 7a789fcb32..106318a47d 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
index 34cc85b8ef..a3917cf2d2 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -77,7 +76,7 @@ object ClassfileConstants {
final val CONSTANT_INTFMETHODREF = 11
final val CONSTANT_NAMEANDTYPE = 12
- // tags desribing the type of a literal in attribute values
+ // tags describing the type of a literal in attribute values
final val BYTE_TAG = 'B'
final val CHAR_TAG = 'C'
final val DOUBLE_TAG = 'D'
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 940a8e223b..b006b2a30e 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1,21 +1,20 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
package classfile
-import java.io.IOException
+import java.io.{ File, IOException }
import java.lang.Integer.toHexString
import scala.collection.immutable.{Map, ListMap}
import scala.collection.mutable.{ListBuffer, ArrayBuffer}
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{Position, NoPosition, ClassRep}
import scala.annotation.switch
+import reflect.generic.PickleBuffer
/** This abstract class implements a class file parser.
*
@@ -36,11 +35,15 @@ abstract class ClassfileParser {
protected var staticDefs: Scope = _ // the scope of all static definitions
protected var pool: ConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
+ protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
protected var hasMeta: Boolean = _ // does class file contain jaco meta attribute?
protected var busy: Option[Symbol] = None // lock to detect recursive reads
private var externalName: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
+ protected var srcfile0 : Option[AbstractFile] = None
+
+ def srcfile = srcfile0
private object metaParser extends MetaParser {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
@@ -81,11 +84,11 @@ abstract class ClassfileParser {
*/
this.in = new AbstractFileReader(file)
if (root.isModule) {
- this.clazz = root.linkedClassOfModule
+ this.clazz = root.companionClass
this.staticModule = root
} else {
this.clazz = root
- this.staticModule = root.linkedModuleOfClass
+ this.staticModule = root.companionModule
}
this.isScala = false
this.hasMeta = false
@@ -93,7 +96,8 @@ abstract class ClassfileParser {
parseHeader
this.pool = new ConstantPool
parseClass()
- } catch {
+ }
+ catch {
case e: MissingRequirementError => handleMissing(e)
case e: RuntimeException => handleError(e)
}
@@ -216,9 +220,14 @@ abstract class ClassfileParser {
val ownerTpe = getClassOrArrayType(in.getChar(start + 1))
if (settings.debug.value)
log("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
- val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
+ val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe)
if (settings.debug.value)
- log("getMemberSymbol: name and tpe: " + name + ": " + tpe)
+ log("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
+
+ forceMangledName(tpe0.typeSymbol.name, false)
+ val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
+// println("new tpe: " + tpe + " at phase: " + phase)
+
if (name == nme.MODULE_INSTANCE_FIELD) {
val index = in.getChar(start + 1)
val name = getExternalName(in.getChar(starts(index) + 1))
@@ -235,13 +244,14 @@ abstract class ClassfileParser {
f = owner.info.member(newTermName(origName.toString + nme.LOCAL_SUFFIX)).suchThat(_.tpe =:= tpe)
if (f == NoSymbol) {
// if it's an impl class, try to find it's static member inside the class
- if (ownerTpe.typeSymbol.isImplClass)
+ if (ownerTpe.typeSymbol.isImplClass) {
+// println("impl class, member: " + owner.tpe.member(origName) + ": " + owner.tpe.member(origName).tpe)
f = ownerTpe.member(origName).suchThat(_.tpe =:= tpe)
- else {
+ } else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
f = if (tpe.isInstanceOf[MethodType]) owner.newMethod(owner.pos, name).setInfo(tpe)
else owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE)
- log("created fake member " + f.fullNameString)
+ log("created fake member " + f.fullName)
}
// println("\townerTpe.decls: " + ownerTpe.decls)
// println("Looking for: " + name + ": " + tpe + " inside: " + ownerTpe.typeSymbol + "\n\tand found: " + ownerTpe.members)
@@ -350,6 +360,42 @@ abstract class ClassfileParser {
}
}
+ def getBytes(index: Int): Array[Byte] = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var value = values(index).asInstanceOf[Array[Byte]]
+ if (value eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val len = in.getChar(start + 1)
+ val bytes = new Array[Byte](len)
+ Array.copy(in.buf, start + 3, bytes, 0, len)
+ val decodedLength = reflect.generic.ByteCodecs.decode(bytes)
+ value = bytes.take(decodedLength)
+ values(index) = value
+ }
+ value
+ }
+
+ def getBytes(indices: List[Int]): Array[Byte] = {
+ assert(!indices.isEmpty)
+ var value = values(indices.head).asInstanceOf[Array[Byte]]
+ if (value eq null) {
+ val bytesBuffer = ArrayBuffer.empty[Byte]
+ for (index <- indices) {
+ if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val len = in.getChar(start + 1)
+ bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
+ }
+ val bytes = bytesBuffer.toArray
+ val decodedLength = reflect.generic.ByteCodecs.decode(bytes)
+ value = bytes.take(decodedLength)
+ values(indices.head) = value
+ }
+ value
+ }
+
/** Throws an exception signaling a bad constant index. */
private def errorBadIndex(index: Int) =
throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
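
For context on the new getBytes helpers: the pickled signature bytes stored as classfile string data go through reflect.generic.ByteCodecs, whose decode works in place and returns the decoded length, which is why the code above keeps only a prefix of the buffer. A small round-trip sketch (ByteCodecsRoundTrip is illustrative and assumes only the encode/decode entry points used in the hunk):

    import scala.reflect.generic.ByteCodecs

    object ByteCodecsRoundTrip {
      def main(args: Array[String]): Unit = {
        val pickled = Array[Byte](0, 17, 42, -5)
        val encoded = ByteCodecs.encode(pickled)      // re-encoded so the bytes survive as string data
        val buf     = encoded.clone
        val len     = ByteCodecs.decode(buf)          // decodes in place, returns the decoded length
        println(buf.take(len).sameElements(pickled))  // expected: true
      }
    }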
@@ -363,11 +409,14 @@ abstract class ClassfileParser {
* flatten would not lift classes that were not referenced in the source code.
*/
def forceMangledName(name: Name, module: Boolean): Symbol = {
- val parts = name.toString.split(Array('.', '$'))
+ val parts = name.decode.toString.split(Array('.', '$'))
var sym: Symbol = definitions.RootClass
atPhase(currentRun.flattenPhase.prev) {
- for (part0 <- parts; val part = newTermName(part0)) {
- val sym1 = sym.info.decl(part.encode)//.suchThat(module == _.isModule)
+ for (part0 <- parts; if !(part0 == ""); val part = newTermName(part0)) {
+ val sym1 = atPhase(currentRun.icodePhase) {
+ sym.linkedClassOfClass.info
+ sym.info.decl(part.encode)
+ }//.suchThat(module == _.isModule)
if (sym1 == NoSymbol)
sym = sym.info.decl(part.encode.toTypeName)
else
@@ -378,14 +427,50 @@ abstract class ClassfileParser {
sym
}
-
/** Return the class symbol of the given name. */
def classNameToSymbol(name: Name): Symbol = {
- def lookupClass(name: Name) =
+ def loadClassSymbol(name: Name) = {
+ val s = name.toString
+ val file = global.classPath findSourceFile s getOrElse {
+ throw new MissingRequirementError("class " + s)
+ }
+ val completer = new global.loaders.ClassfileLoader(file)
+ var owner: Symbol = definitions.RootClass
+ var sym: Symbol = NoSymbol
+ var ss: String = null
+ var start = 0
+ var end = s indexOf '.'
+ while (end > 0) {
+ ss = s.substring(start, end)
+ sym = owner.info.decls lookup ss
+ if (sym == NoSymbol) {
+ sym = owner.newPackage(NoPosition, ss) setInfo completer
+ sym.moduleClass setInfo completer
+ owner.info.decls enter sym
+ }
+ owner = sym.moduleClass
+ start = end + 1
+ end = s.indexOf('.', start)
+ }
+ ss = s substring start
+ sym = owner.info.decls lookup ss
+ if (sym == NoSymbol) {
+ sym = owner.newClass(NoPosition, ss) setInfo completer
+ owner.info.decls enter sym
+ if (settings.debug.value && settings.verbose.value)
+ println("loaded "+sym+" from file "+file)
+ }
+ sym
+ }
+
+ def lookupClass(name: Name) = try {
if (name.pos('.') == name.length)
definitions.getMember(definitions.EmptyPackageClass, name.toTypeName)
else
- definitions.getClass(name)
+ definitions.getClass(name) // see tickets #2464, #3756
+ } catch {
+ case _: FatalError => loadClassSymbol(name)
+ }
innerClasses.get(name) match {
case Some(entry) =>
@@ -408,6 +493,30 @@ abstract class ClassfileParser {
var nameIdx = in.nextChar
externalName = pool.getClassName(nameIdx)
val c = if (externalName.toString.indexOf('$') < 0) pool.getClassSymbol(nameIdx) else clazz
+
+ /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
+ * Updates the read pointer of 'in'. */
+ def parseParents: List[Type] = {
+ if (isScala) {
+ in.nextChar // skip superclass
+ val ifaces = in.nextChar
+ in.bp += ifaces * 2 // .. and iface count interfaces
+ List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
+ } else {
+ try {
+ loaders.parentsLevel += 1
+ val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
+ else pool.getSuperClass(in.nextChar).tpe
+ val ifaceCount = in.nextChar
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
+ if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+ superType :: ifaces
+ } finally {
+ loaders.parentsLevel -= 1
+ }
+ }
+ }
+
if (c != clazz && externalName.toString.indexOf("$") < 0) {
if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
else throw new IOException("class file '" + in.file + "' contains wrong " + c)
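
The new parseParents only needs to advance past the superclass and interface entries for a Scala classfile, since the pickled signature supplies the real parents. A minimal sketch of the classfile region it walks (ByteReader is a hypothetical stand-in for the compiler's AbstractFileReader, reading big-endian u2 values):

    final class ByteReader(val buf: Array[Byte], var bp: Int) {
      def nextChar: Char = {
        val v = ((buf(bp) & 0xff) << 8) | (buf(bp + 1) & 0xff)
        bp += 2
        v.toChar
      }
    }

    object SkipParents {
      def skipParents(in: ByteReader): Unit = {
        in.nextChar                     // u2 super_class index, ignored
        val ifaceCount = in.nextChar    // u2 interfaces_count
        in.bp += ifaceCount * 2         // one u2 index per interface
      }
    }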
@@ -415,16 +524,11 @@ abstract class ClassfileParser {
addEnclosingTParams(clazz)
parseInnerClasses() // also sets the isScala / isScalaRaw / hasMeta flags, see r15956
- val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe
- val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
- if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
- val parents = superType :: ifaces
// get the class file parser to reuse scopes.
instanceDefs = new Scope
staticDefs = new Scope
- val classInfo = ClassInfoType(parents, instanceDefs, clazz)
+
+ val classInfo = ClassInfoType(parseParents, instanceDefs, clazz)
val staticInfo = ClassInfoType(List(), staticDefs, statics)
if (!isScala && !isScalaRaw) {
@@ -448,22 +552,32 @@ abstract class ClassfileParser {
// attributes now depend on having infos set already
parseAttributes(clazz, classInfo)
- in.bp = curbp
- val fieldCount = in.nextChar
- for (i <- 0 until fieldCount) parseField()
- sawPrivateConstructor = false
- val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod()
- if (!sawPrivateConstructor &&
- (instanceDefs.lookup(nme.CONSTRUCTOR) == NoSymbol &&
- (sflags & INTERFACE) == 0L))
- {
- //Console.println("adding constructor to " + clazz);//DEBUG
- instanceDefs.enter(
- clazz.newConstructor(NoPosition)
- .setFlag(clazz.flags & ConstrFlags)
- .setInfo(MethodType(List(), clazz.tpe)))
+ loaders.pendingLoadActions = { () =>
+ in.bp = curbp
+ val fieldCount = in.nextChar
+ for (i <- 0 until fieldCount) parseField()
+ sawPrivateConstructor = false
+ val methodCount = in.nextChar
+ for (i <- 0 until methodCount) parseMethod()
+ if (!sawPrivateConstructor &&
+ (instanceDefs.lookup(nme.CONSTRUCTOR) == NoSymbol &&
+ (sflags & INTERFACE) == 0L))
+ {
+ //Console.println("adding constructor to " + clazz);//DEBUG
+ instanceDefs.enter(
+ clazz.newConstructor(NoPosition)
+ .setFlag(clazz.flags & ConstrFlags)
+ .setInfo(MethodType(List(), clazz.tpe)))
+ }
+ ()
+ } :: loaders.pendingLoadActions
+ if (loaders.parentsLevel == 0) {
+ while (!loaders.pendingLoadActions.isEmpty) {
+ val item = loaders.pendingLoadActions.head
+ loaders.pendingLoadActions = loaders.pendingLoadActions.tail
+ item()
}
+ }
} else
parseAttributes(clazz, classInfo)
}
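
The pendingLoadActions change defers member parsing while parents of parents are still being loaded, and only drains the queue once the outermost load finishes. A stand-alone sketch of that idea (the Loaders object and its members are illustrative, not the compiler's SymbolLoaders):

    object Loaders {
      var parentsLevel = 0
      var pendingLoadActions: List[() => Unit] = Nil

      // Run `body` while marking that parent types are being loaded.
      def loadingParents[A](body: => A): A = {
        parentsLevel += 1
        try body finally parentsLevel -= 1
      }

      // Queue an action; run the whole queue only at the outermost level.
      def defer(action: () => Unit): Unit = {
        pendingLoadActions = action :: pendingLoadActions
        if (parentsLevel == 0)
          while (pendingLoadActions.nonEmpty) {
            val item = pendingLoadActions.head
            pendingLoadActions = pendingLoadActions.tail
            item()
          }
      }
    }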
@@ -492,7 +606,7 @@ abstract class ClassfileParser {
val info = pool.getType(in.nextChar)
val sym = getOwner(jflags)
.newValue(NoPosition, name).setFlag(sflags)
- sym.setInfo(if ((jflags & JAVA_ACC_ENUM) == 0) info else mkConstantType(Constant(sym)))
+ sym.setInfo(if ((jflags & JAVA_ACC_ENUM) == 0) info else ConstantType(Constant(sym)))
setPrivateWithin(sym, jflags)
parseAttributes(sym, info)
getScope(jflags).enter(sym)
@@ -573,6 +687,8 @@ abstract class ClassfileParser {
while (!isDelimiter(sig(index))) { index += 1 }
sig.subName(start, index)
}
+ def existentialType(tparams: List[Symbol], tp: Type): Type =
+ if (tparams.isEmpty) tp else ExistentialType(tparams, tp)
def sig2type(tparams: Map[Name,Symbol], skiptvs: Boolean): Type = {
val tag = sig(index); index += 1
tag match {
@@ -604,12 +720,12 @@ abstract class ClassfileParser {
case variance @ ('+' | '-' | '*') =>
index += 1
val bounds = variance match {
- case '+' => mkTypeBounds(definitions.NothingClass.tpe,
- sig2type(tparams, skiptvs))
- case '-' => mkTypeBounds(sig2type(tparams, skiptvs),
- definitions.AnyClass.tpe)
- case '*' => mkTypeBounds(definitions.NothingClass.tpe,
- definitions.AnyClass.tpe)
+ case '+' => TypeBounds(definitions.NothingClass.tpe,
+ sig2type(tparams, skiptvs))
+ case '-' => TypeBounds(sig2type(tparams, skiptvs),
+ definitions.AnyClass.tpe)
+ case '*' => TypeBounds(definitions.NothingClass.tpe,
+ definitions.AnyClass.tpe)
}
val newtparam = sym.newExistential(sym.pos, "?"+i) setInfo bounds
existentials += newtparam
@@ -621,15 +737,16 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0)
- existentialAbstraction(existentials.toList, TypeRef(pre, classSym, xs.toList))
+ existentialType(existentials.toList, TypeRef(pre, classSym, xs.toList))
} else if (classSym.isMonomorphicType) {
tp
} else {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
val t = TypeRef(pre, classSym, eparams.map(_.tpe))
- val res = existentialAbstraction(eparams, t)
- if (settings.debug.value && settings.verbose.value) println("raw type " + classSym + " -> " + res)
+ val res = existentialType(eparams, t)
+ if (settings.debug.value && settings.verbose.value)
+ println("raw type " + classSym + " -> " + res)
res
}
case tp =>
@@ -652,7 +769,7 @@ abstract class ClassfileParser {
while ('0' <= sig(index) && sig(index) <= '9') index += 1
var elemtp = sig2type(tparams, skiptvs)
// make unbounded Array[T] where T is a type variable into Array[T with Object]
- // (this is necessary because such arrays have a representation which is incompatibe
+ // (this is necessary because such arrays have a representation which is incompatible
// with arrays of primitive types.
if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
@@ -686,7 +803,7 @@ abstract class ClassfileParser {
if (sig(index) != ':') // guard against empty class bound
ts += objToAny(sig2type(tparams, skiptvs))
}
- mkTypeBounds(definitions.NothingClass.tpe, intersectionType(ts.toList, sym))
+ TypeBounds(definitions.NothingClass.tpe, intersectionType(ts.toList, sym))
}
var tparams = classTParams
@@ -759,17 +876,21 @@ abstract class ClassfileParser {
sym.setFlag(BRIDGE)
in.skip(attrLen)
case nme.DeprecatedATTR =>
- sym.setFlag(DEPRECATED)
+ val arg = Literal(Constant("see corresponding Javadoc for more information."))
+ sym.addAnnotation(AnnotationInfo(definitions.DeprecatedAttr.tpe, List(arg), List()))
in.skip(attrLen)
case nme.ConstantValueATTR =>
val c = pool.getConstant(in.nextChar)
val c1 = convertTo(c, symtype)
- if (c1 ne null) sym.setInfo(mkConstantType(c1))
+ if (c1 ne null) sym.setInfo(ConstantType(c1))
else println("failure to convert " + c + " to " + symtype); //debug
case nme.ScalaSignatureATTR =>
- unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString())
+ if (!isScalaAnnot) {
+ if (settings.debug.value)
+ global.inform("warning: symbol " + sym.fullName + " has pickled signature in attribute")
+ unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString())
+ }
in.skip(attrLen)
- this.isScala = true
case nme.ScalaATTR =>
isScalaRaw = true
case nme.JacoMetaATTR =>
@@ -780,13 +901,21 @@ abstract class ClassfileParser {
case nme.AnnotationDefaultATTR =>
sym.addAnnotation(AnnotationInfo(definitions.AnnotationDefaultAttr.tpe, List(), List()))
in.skip(attrLen)
- // Java annotatinos on classes / methods / fields with RetentionPolicy.RUNTIME
+ // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case nme.RuntimeAnnotationATTR =>
- if (!isScala) {
- // no need to read annotations if isScala, ClassfileAnnotations are pickled
- parseAnnotations(attrLen)
+ if (isScalaAnnot || !isScala) {
+ val scalaSigAnnot = parseAnnotations(attrLen)
+ if (isScalaAnnot)
+ scalaSigAnnot match {
+ case Some(san: AnnotationInfo) =>
+ val bytes =
+ san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.toString())
+ case None =>
+ throw new RuntimeException("Scala class file does not contain Scala annotation")
+ }
if (settings.debug.value)
- global.inform("" + sym + "; annotations = " + sym.annotations)
+ global.inform("" + sym + "; annotations = " + sym.rawAnnotations)
} else
in.skip(attrLen)
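
The DeprecatedATTR change in this hunk replaces the bare DEPRECATED flag with a real annotation carrying a message, so a deprecated Java member read from a classfile behaves roughly as if it had been declared in Scala as (illustrative declaration, not part of the patch):

    object LegacyApi {
      @deprecated("see corresponding Javadoc for more information.")
      def legacyMethod(): Unit = ()
    }

Callers of such a member then get the standard deprecation warning with that message attached.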
@@ -795,6 +924,18 @@ abstract class ClassfileParser {
// TODO 2: also parse RuntimeInvisibleAnnotation / RuntimeInvisibleParamAnnotation,
// i.e. java annotations with RetentionPolicy.CLASS?
+
+ case nme.ExceptionsATTR if (!isScala) =>
+ parseExceptions(attrLen)
+
+ case nme.SourceFileATTR =>
+ val srcfileLeaf = pool.getName(in.nextChar).toString.trim
+ val srcpath = sym.enclosingPackage match {
+ case NoSymbol => srcfileLeaf
+ case definitions.EmptyPackage => srcfileLeaf
+ case pkg => pkg.fullName(File.separatorChar)+File.separator+srcfileLeaf
+ }
+ srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists)
case _ =>
in.skip(attrLen)
}
@@ -814,7 +955,7 @@ abstract class ClassfileParser {
case ENUM_TAG =>
val t = pool.getType(index)
val n = pool.getName(in.nextChar)
- val s = t.typeSymbol.linkedModuleOfClass.info.decls.lookup(n)
+ val s = t.typeSymbol.companionModule.info.decls.lookup(n)
assert(s != NoSymbol, t)
Some(LiteralAnnotArg(Constant(s)))
case ARRAY_TAG =>
@@ -832,6 +973,30 @@ abstract class ClassfileParser {
}
}
+ def parseScalaSigBytes: Option[ScalaSigBytes] = {
+ val tag = in.nextByte.toChar
+ assert(tag == STRING_TAG)
+ Some(ScalaSigBytes(pool.getBytes(in.nextChar)))
+ }
+
+ def parseScalaLongSigBytes: Option[ScalaSigBytes] = try {
+ val tag = in.nextByte.toChar
+ assert(tag == ARRAY_TAG)
+ val stringCount = in.nextChar
+ val entries =
+ for (i <- 0 until stringCount) yield {
+ val stag = in.nextByte.toChar
+ assert(stag == STRING_TAG)
+ in.nextChar.toInt
+ }
+ Some(ScalaSigBytes(pool.getBytes(entries.toList)))
+ }
+ catch {
+ case e: Throwable =>
+ e.printStackTrace
+ throw e
+ }
+
/** Parse and return a single annotation. If it is malformed,
* return None.
*/
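
Why two signature forms exist: a single CONSTANT_Utf8 constant-pool entry is capped at 65535 bytes, so an oversized pickle is split across several string constants (the ScalaLongSignature case) and parseScalaLongSigBytes concatenates the pieces again via pool.getBytes(indices). A schematic sketch of the split and reassembly (the SigChunks helpers are illustrative):

    object SigChunks {
      // Writing side: split an encoded pickle into constant-pool-sized pieces.
      def chunk(encoded: Array[Byte], max: Int = 65535): List[Array[Byte]] =
        encoded.grouped(max).toList

      // Reading side: concatenate the pieces in order before decoding.
      def reassemble(chunks: List[Array[Byte]]): Array[Byte] =
        chunks.foldLeft(Array.empty[Byte])(_ ++ _)
    }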
@@ -842,10 +1007,24 @@ abstract class ClassfileParser {
var hasError = false
for (i <- 0 until nargs) {
val name = pool.getName(in.nextChar)
- parseAnnotArg match {
- case Some(c) => nvpairs += ((name, c))
- case None => hasError = true
- }
+ // The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is
+ // available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature
+ // is encoded as a string because of limitations in the Java class file format.
+ if ((attrType == definitions.ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
+ parseScalaSigBytes match {
+ case Some(c) => nvpairs += ((name, c))
+ case None => hasError = true
+ }
+ else if ((attrType == definitions.ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
+ parseScalaLongSigBytes match {
+ case Some(c) => nvpairs += ((name, c))
+ case None => hasError = true
+ }
+ else
+ parseAnnotArg match {
+ case Some(c) => nvpairs += ((name, c))
+ case None => hasError = true
+ }
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
@@ -858,17 +1037,36 @@ abstract class ClassfileParser {
None // ignore malformed annotations ==> t1135
}
- /** Parse a sequence of annotations and attach them to the
- * current symbol sym.
+ /**
+ * Parse the "Exceptions" attribute which denotes the exceptions
+ * thrown by a method.
*/
- def parseAnnotations(len: Int) {
+ def parseExceptions(len: Int) {
+ val nClasses = in.nextChar
+ for (n <- 0 until nClasses) {
+ val cls = pool.getClassSymbol(in.nextChar.toInt)
+ sym.addAnnotation(AnnotationInfo(definitions.ThrowsClass.tpe,
+ Literal(Constant(cls.tpe)) :: Nil,
+ Nil))
+ }
+ }
+
+    /** Parses a sequence of annotations and attaches them to the
+     *  current symbol sym, except for the ScalaSignature annotation, which is returned instead, if it is available. */
+ def parseAnnotations(len: Int): Option[AnnotationInfo] = {
val nAttr = in.nextChar
+ var scalaSigAnnot: Option[AnnotationInfo] = None
for (n <- 0 until nAttr)
parseAnnotation(in.nextChar) match {
+ case Some(scalaSig) if (scalaSig.atp == definitions.ScalaSignatureAnnotation.tpe) =>
+ scalaSigAnnot = Some(scalaSig)
+ case Some(scalaSig) if (scalaSig.atp == definitions.ScalaLongSignatureAnnotation.tpe) =>
+ scalaSigAnnot = Some(scalaSig)
case Some(annot) =>
sym.addAnnotation(annot)
case None =>
}
+ scalaSigAnnot
}
// begin parseAttributes
@@ -890,7 +1088,7 @@ abstract class ClassfileParser {
val innerClass = getOwner(jflags).newClass(NoPosition, name.toTypeName).setInfo(completer).setFlag(sflags)
val innerModule = getOwner(jflags).newModule(NoPosition, name).setInfo(completer).setFlag(sflags)
- innerClass.moduleClass.setInfo(global.loaders.moduleClassLoader)
+ innerModule.moduleClass.setInfo(global.loaders.moduleClassLoader)
getScope(jflags).enter(innerClass)
getScope(jflags).enter(innerModule)
@@ -908,12 +1106,11 @@ abstract class ClassfileParser {
}
}
- for (entry <- innerClasses.valuesIterator) {
+ for (entry <- innerClasses.values) {
// create a new class member for immediate inner classes
if (entry.outerName == externalName) {
- val file = global.classPath.findClass(entry.externalName.toString) match {
- case Some(ClassRep(Some(binary: AbstractFile), _)) => binary
- case _ => throw new AssertionError(entry.externalName)
+ val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
+ throw new AssertionError(entry.externalName)
}
enterClassAndModule(entry, new global.loaders.ClassfileLoader(file), entry.jflags)
}
@@ -942,6 +1139,10 @@ abstract class ClassfileParser {
in.skip(attrLen)
case nme.ScalaSignatureATTR =>
isScala = true
+ val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
+ pbuf.readNat; pbuf.readNat;
+ if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature
+ isScalaAnnot = true // is in a ScalaSignature annotation.
in.skip(attrLen)
case nme.ScalaATTR =>
isScalaRaw = true
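
The ScalaSignatureATTR branch above peeks at the pickle header, which begins with three Nats: major version, minor version, and the entry count; an entry count of 0 is the marker meaning the real pickle lives in a ScalaSignature annotation. A minimal reader for the same variable-length Nat encoding PickleBuffer uses, 7 payload bits per byte with the high bit set on every byte except the last (the Nats object is illustrative):

    object Nats {
      // Returns the decoded value and the index just past it.
      def readNat(bytes: Array[Byte], start: Int): (Int, Int) = {
        var i = start
        var x = 0
        var b = 0
        do {
          b = bytes(i) & 0xff
          i += 1
          x = (x << 7) | (b & 0x7f)
        } while ((b & 0x80) != 0)
        (x, i)
      }
    }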
@@ -1000,7 +1201,7 @@ abstract class ClassfileParser {
def getMember(sym: Symbol, name: Name): Symbol =
if (static)
if (sym == clazz) staticDefs.lookup(name)
- else sym.linkedModuleOfClass.info.member(name)
+ else sym.companionModule.info.member(name)
else
if (sym == clazz) instanceDefs.lookup(name)
else sym.info.member(name)
@@ -1013,18 +1214,18 @@ abstract class ClassfileParser {
val sym = classSymbol(outerName)
val s =
// if loading during initialization of `definitions' typerPhase is not yet set.
- // in that case we simply load the mmeber at the current phase
+ // in that case we simply load the member at the current phase
if (currentRun.typerPhase != null)
atPhase(currentRun.typerPhase)(getMember(sym, innerName.toTypeName))
else
getMember(sym, innerName.toTypeName)
- assert(s ne NoSymbol, sym + "." + innerName + " linkedModule: " + sym.linkedModuleOfClass + sym.linkedModuleOfClass.info.members)
+ assert(s ne NoSymbol, sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members)
s
case None =>
val cls = classNameToSymbol(externalName)
cls
- //if (static) cls.linkedClassOfModule else cls
+ //if (static) cls.companionClass else cls
}
}
@@ -1101,6 +1302,11 @@ abstract class ClassfileParser {
// apparently occurs when processing v45.3 bytecode.
if (sym.toplevelClass != NoSymbol)
sym.privateWithin = sym.toplevelClass.owner
+
+ // protected in java means package protected. #3946
+ if ((jflags & JAVA_ACC_PROTECTED) != 0)
+ if (sym.toplevelClass != NoSymbol)
+ sym.privateWithin = sym.toplevelClass.owner
}
@inline final private def isPrivate(flags: Int) =
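
For reference on the #3946 fix above, the relevant JVM access_flags bits (values from the JVM specification; the JavaAccessFlags object is a stand-alone sketch, not the compiler's constants):

    object JavaAccessFlags {
      final val JAVA_ACC_PUBLIC    = 0x0001
      final val JAVA_ACC_PRIVATE   = 0x0002
      final val JAVA_ACC_PROTECTED = 0x0004

      // Java `protected` also grants package access, which the parser now models
      // by widening privateWithin to the enclosing package.
      def isProtected(jflags: Int): Boolean = (jflags & JAVA_ACC_PROTECTED) != 0
    }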
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 3e0681ccdf..624b3635bf 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Iulian Dragos
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -14,7 +13,6 @@ import scala.collection.mutable._
import scala.tools.nsc._
import scala.tools.nsc.backend.icode._
import scala.tools.nsc.io._
-import scala.tools.nsc.util.{Position, NoPosition, ClassRep}
import ClassfileConstants._
import Flags._
@@ -34,8 +32,8 @@ abstract class ICodeReader extends ClassfileParser {
var method: IMethod = _ // the current IMethod
val OBJECT: TypeKind = REFERENCE(definitions.ObjectClass)
- val nothingName = newTermName("scala.runtime.Nothing$")
- val nullName = newTermName("scala.runtime.Null$")
+ val nothingName = newTermName(SCALA_NOTHING)
+ val nullName = newTermName(SCALA_NULL)
var isScalaModule = false
/** Read back bytecode for the given class symbol. It returns
@@ -49,19 +47,11 @@ abstract class ICodeReader extends ClassfileParser {
isScalaModule = cls.isModule && !cls.hasFlag(JAVA)
log("Reading class: " + cls + " isScalaModule?: " + isScalaModule)
- val name = cls.fullNameString('.') + (if (sym.hasFlag(MODULE)) "$" else "")
- classPath.findClass(name) match {
- case Some(ClassRep(bin, _)) =>
- assert(bin.isDefined, "No classfile for " + cls)
- classFile = bin.get.asInstanceOf[AbstractFile]
-// if (isScalaModule)
-// sym = cls.linkedClassOfModule
-
-// for (s <- cls.info.members)
-// Console.println("" + s + ": " + s.tpe)
- parse(classFile, sym)
- case _ =>
- log("Could not find: " + cls)
+ val name = cls.fullName('.') + (if (sym.hasFlag(MODULE)) "$" else "")
+
+ classPath.findSourceFile(name) match {
+ case Some(classFile) => parse(classFile, sym)
+ case _ => log("Could not find: " + cls)
}
(staticCode, instanceCode)
@@ -108,26 +98,34 @@ abstract class ICodeReader extends ClassfileParser {
val owner = getOwner(jflags)
val dummySym = owner.newMethod(owner.pos, name).setFlag(javaToScalaFlags(jflags))
- var tpe = pool.getType(dummySym, in.nextChar)
-
- if ("<clinit>" == name.toString)
- (jflags, NoSymbol)
- else {
- val owner = getOwner(jflags)
- var sym = owner.info.member(name).suchThat(old => sameType(old.tpe, tpe));
- if (sym == NoSymbol)
- sym = owner.info.member(newTermName(name.toString + nme.LOCAL_SUFFIX)).suchThat(old => old.tpe =:= tpe);
- if (sym == NoSymbol) {
- log("Could not find symbol for " + name + ": " + tpe + " in " + owner.info.decls)
- log(owner.info.member(name).tpe + " : " + tpe)
- if (field)
- sym = owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE | javaToScalaFlags(jflags))
- else
- sym = dummySym.setInfo(tpe)
- owner.info.decls.enter(sym)
- log("added " + sym + ": " + sym.tpe)
+ try {
+ val ch = in.nextChar
+ var tpe = pool.getType(dummySym, ch)
+
+ if ("<clinit>" == name.toString)
+ (jflags, NoSymbol)
+ else {
+ val owner = getOwner(jflags)
+ var sym = owner.info.member(name).suchThat(old => sameType(old.tpe, tpe));
+ if (sym == NoSymbol)
+ sym = owner.info.member(newTermName(name.toString + nme.LOCAL_SUFFIX)).suchThat(old => old.tpe =:= tpe);
+ if (sym == NoSymbol) {
+ log("Could not find symbol for " + name + ": " + tpe + " in " + owner.info.decls)
+ log(owner.info.member(name).tpe + " : " + tpe)
+ if (name.toString() == "toMap")
+ tpe = pool.getType(dummySym, ch)
+ if (field)
+ sym = owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE | javaToScalaFlags(jflags))
+ else
+ sym = dummySym.setInfo(tpe)
+ owner.info.decls.enter(sym)
+ log("added " + sym + ": " + sym.tpe)
+ }
+ (jflags, sym)
}
- (jflags, sym)
+ } catch {
+ case e: MissingRequirementError =>
+ (jflags, NoSymbol)
}
}
@@ -144,34 +142,39 @@ abstract class ICodeReader extends ClassfileParser {
res
}
- /** Checks if tp1 is the same type as tp2, modulo implict methods.
- * We don't care about the distinction between implcit and explicit
+ /** Checks if tp1 is the same type as tp2, modulo implicit methods.
+ * We don't care about the distinction between implicit and explicit
* methods at this point, and we can't get back the information from
* bytecode anyway.
*/
private def sameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
- case (MethodType(args1, resTpe1), MethodType(args2, resTpe2)) =>
- if (tp1.isInstanceOf[ImplicitMethodType] || tp2.isInstanceOf[ImplicitMethodType]) {
- MethodType(args1, resTpe1) =:= MethodType(args2, resTpe2)
- } else
- tp1 =:= tp2
- case _ => tp1 =:= tp2
+ case (mt1 @ MethodType(args1, resTpe1), mt2 @ MethodType(args2, resTpe2)) if mt1.isImplicit || mt2.isImplicit =>
+ MethodType(args1, resTpe1) =:= MethodType(args2, resTpe2)
+ case _ =>
+ tp1 =:= tp2
}
override def parseMethod() {
val (jflags, sym) = parseMember(false)
- if (sym != NoSymbol) {
- log("Parsing method " + sym.fullNameString + ": " + sym.tpe);
- this.method = new IMethod(sym);
- this.method.returnType = toTypeKind(sym.tpe.resultType)
- getCode(jflags).addMethod(this.method)
- if ((jflags & JAVA_ACC_NATIVE) != 0)
- this.method.native = true
- val attributeCount = in.nextChar
- for (i <- 0 until attributeCount) parseAttribute()
- } else {
- if (settings.debug.value) log("Skipping non-existent method.");
- skipAttributes();
+ var beginning = in.bp
+ try {
+ if (sym != NoSymbol) {
+ log("Parsing method " + sym.fullName + ": " + sym.tpe);
+ this.method = new IMethod(sym);
+ this.method.returnType = toTypeKind(sym.tpe.resultType)
+ getCode(jflags).addMethod(this.method)
+ if ((jflags & JAVA_ACC_NATIVE) != 0)
+ this.method.native = true
+ val attributeCount = in.nextChar
+ for (i <- 0 until attributeCount) parseAttribute()
+ } else {
+ if (settings.debug.value) log("Skipping non-existent method.");
+ skipAttributes();
+ }
+ } catch {
+ case e: MissingRequirementError =>
+ in.bp = beginning; skipAttributes
+ if (settings.debug.value) log("Skipping non-existent method. " + e.msg);
}
}
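
The reworked sameType above ignores the implicit flag when matching members against bytecode because implicitness does not survive compilation: both declarations below erase to the same JVM descriptor (I)Ljava/lang/String;, so the reader cannot tell them apart (illustrative classes, not part of the patch):

    class Plain {
      def f(x: Int): String = x.toString
    }
    class WithImplicit {
      def f(implicit x: Int): String = x.toString
    }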
@@ -198,12 +201,15 @@ abstract class ICodeReader extends ClassfileParser {
definitions.getClass(name)
} else if (name.endsWith("$")) {
val sym = forceMangledName(name.subName(0, name.length -1).decode, true)
+// println("classNameToSymbol: " + name + " sym: " + sym)
+ if (name.toString == "scala.collection.immutable.Stream$$hash$colon$colon$")
+ print("")
if (sym == NoSymbol)
definitions.getModule(name.subName(0, name.length - 1))
else sym
} else {
forceMangledName(name, false)
- definitions.getClass(name)
+ atPhase(currentRun.flattenPhase.next)(definitions.getClass(name))
}
if (sym.isModule)
sym.moduleClass
@@ -607,9 +613,9 @@ abstract class ICodeReader extends ClassfileParser {
// add parameters
var idx = if (method.isStatic) 0 else 1
- for (t <- method.symbol.tpe.paramTypes) {
- this.method.addParam(code.enterParam(idx, toTypeKind(t)))
- idx += 1
+ for (t <- method.symbol.tpe.paramTypes; val kind = toTypeKind(t)) {
+ this.method.addParam(code.enterParam(idx, kind))
+ idx += (if (kind.isWideType) 2 else 1)
}
pc = 0
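
Worked example of the slot-index change above: JVM locals give long and double parameters two slots each, so the running index must advance by 2 for wide kinds. A stand-alone illustration (SlotIndexing is hypothetical; the paramWide flags stand in for kind.isWideType):

    object SlotIndexing {
      def paramSlots(paramWide: List[Boolean], static: Boolean): List[Int] = {
        var idx = if (static) 0 else 1
        paramWide map { wide =>
          val here = idx
          idx += (if (wide) 2 else 1)
          here
        }
      }
      // e.g. an instance method (Int, Long, String): parameter slots 1, 2 and 4
      // paramSlots(List(false, true, false), static = false) == List(1, 2, 4)
    }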
@@ -638,7 +644,12 @@ abstract class ICodeReader extends ClassfileParser {
if (code.containsNEW) code.resolveNEWs
}
- /** TODO: move in Definitions and remove obsolete isBox/isUnbox found there. */
+ /** Note: these methods are different from the methods of the same name found
+ * in Definitions. These test whether a symbol represents one of the boxTo/unboxTo
+ * methods found in BoxesRunTime. The others test whether a symbol represents a
+ * synthetic method from one of the fake companion classes of the primitive types,
+ * such as Int.box(5).
+ */
def isBox(m: Symbol): Boolean =
(m.owner == definitions.BoxesRunTimeClass.moduleClass
&& m.name.startsWith("boxTo"))
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
index c66cd89c41..d1f9280872 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -11,7 +10,6 @@ package classfile
import java.util.{StringTokenizer, NoSuchElementException}
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.{Position,NoPosition}
abstract class MetaParser{
@@ -78,7 +76,7 @@ abstract class MetaParser{
val hi =
if (token == "<") { nextToken(); parseType() }
else definitions.AnyClass.tpe
- sym.setInfo(mkTypeBounds(lo, hi))
+ sym.setInfo(TypeBounds(lo, hi))
locals enter sym;
sym
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 796ff4584a..e3bd67ec81 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -1,17 +1,18 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
package classfile
import java.lang.{Float, Double}
-import scala.tools.nsc.util.{Position, NoPosition, ShowPickled}
-import Flags._
+import util.{ Position, NoPosition, ShowPickled }
+import collection.mutable.Set
+import reflect.generic.{ PickleBuffer, PickleFormat }
import PickleFormat._
+import Flags._
/**
* Serialize a top-level module and/or class.
@@ -49,7 +50,7 @@ abstract class Pickler extends SubComponent {
val sym = tree.symbol
val pickle = new Pickle(sym, sym.name.toTermName, sym.owner)
add(sym, pickle)
- add(sym.linkedSym, pickle)
+ add(sym.companionSymbol, pickle)
pickle.finish
case _ =>
}
@@ -65,26 +66,33 @@ abstract class Pickler extends SubComponent {
private var ep = 0
private val index = new LinkedHashMap[AnyRef, Int]
+ // collect higher-order type params
+ //private var locals: Set[Symbol] = Set()
+
// private var boundSyms: List[Symbol] = Nil
+ private def isRootSym(sym: Symbol) =
+ sym.name.toTermName == rootName && sym.owner == rootOwner
+
/** Returns usually symbol's owner, but picks classfile root instead
* for existentially bound variables that have a non-local owner.
* Question: Should this be done for refinement class symbols as well?
*/
private def localizedOwner(sym: Symbol) =
- if (sym.isAbstractType && sym.hasFlag(EXISTENTIAL) && !isLocal(sym.owner)) root
+ if (isLocal(sym) && !isRootSym(sym) && !isLocal(sym.owner)) root
else sym.owner
/** Is root in symbol.owner*, or should it be treated as a local symbol
- * anyway? This is the case if symbol is a refinement class or
- * an existentially bound variable.
+ * anyway? This is the case if symbol is a refinement class,
+ * an existentially bound variable, or a higher-order type parameter.
*/
private def isLocal(sym: Symbol): Boolean =
- !sym.isPackageClass &&
- (sym.name.toTermName == rootName && sym.owner == rootOwner ||
- sym != NoSymbol && isLocal(sym.owner) ||
+ !sym.isPackageClass && sym != NoSymbol &&
+ (isRootSym(sym) ||
sym.isRefinementClass ||
- sym.isAbstractType && sym.hasFlag(EXISTENTIAL))
+ sym.isAbstractType && sym.hasFlag(EXISTENTIAL) || // existential param
+ (sym hasFlag PARAM) ||
+ isLocal(sym.owner))
private def staticAnnotations(annots: List[AnnotationInfo]) =
annots filter(ann =>
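
Example of the extra symbols the widened isLocal now treats as local to the pickle: in a declaration like the one below, the underscore parameter of F is a higher-order type parameter with no classfile of its own, so (like existentials and refinement classes) it must be pickled together with its owner rather than referenced externally (the Functor trait is just sample source, not compiler code):

    trait Functor[F[_]] {
      def map[A, B](fa: F[A])(f: A => B): F[B]
    }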
@@ -127,7 +135,6 @@ abstract class Pickler extends SubComponent {
if (sym.thisSym.tpeHK != sym.tpeHK)
putType(sym.typeOfThis);
putSymbol(sym.alias)
- putSymbol(sym.defaultGetter)
if (!sym.children.isEmpty) {
val (locals, globals) = sym.children.toList.partition(_.isLocalClass)
val children =
@@ -137,7 +144,7 @@ abstract class Pickler extends SubComponent {
localChildDummy.setInfo(ClassInfoType(List(sym.tpe), EmptyScope, localChildDummy))
localChildDummy :: globals
}
- putChildren(sym, children.sortWith((x, y) => x isLess y))
+ putChildren(sym, children sortBy (_.sealedSortName))
}
for (annot <- staticAnnotations(sym.annotations.reverse))
putAnnotation(sym, annot)
@@ -151,13 +158,13 @@ abstract class Pickler extends SubComponent {
private def putSymbols(syms: List[Symbol]) =
syms foreach putSymbol
- /** Store type and everythig it refers to in map <code>index</code>.
+ /** Store type and everything it refers to in map <code>index</code>.
*
* @param tp ...
*/
private def putType(tp: Type): Unit = if (putEntry(tp)) {
tp match {
- case NoType | NoPrefix | DeBruijnIndex(_, _) =>
+ case NoType | NoPrefix /*| DeBruijnIndex(_, _) */ =>
;
case ThisType(sym) =>
putSymbol(sym)
@@ -185,9 +192,14 @@ abstract class Pickler extends SubComponent {
case MethodType(params, restpe) =>
putType(restpe); putSymbols(params)
case PolyType(tparams, restpe) =>
+ /** no longer needed since all params are now local
+ tparams foreach { tparam =>
+ if (!isLocal(tparam)) locals += tparam // similar to existential types, these tparams are local
+ }
+ */
putType(restpe); putSymbols(tparams)
case ExistentialType(tparams, restpe) =>
-// val savedBoundSyms = boundSyms
+// val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal)
// boundSyms = tparams ::: boundSyms
// try {
putType(restpe);
@@ -260,11 +272,11 @@ abstract class Pickler extends SubComponent {
putEntry(from)
putEntry(to)
}
-
- case DocDef(comment, definition) =>
+/*
+ case DocDef(comment, definition) => should not be needed
putConstant(Constant(comment))
putTree(definition)
-
+*/
case Template(parents, self, body) =>
writeNat(parents.length)
putTrees(parents)
@@ -393,11 +405,8 @@ abstract class Pickler extends SubComponent {
}
}
- private def putTrees(trees: List[Tree]) =
- trees.foreach(putTree _)
-
- private def putTreess(treess: List[List[Tree]]) =
- treess.foreach(putTrees _)
+ private def putTrees(trees: List[Tree]) = trees foreach putTree
+ private def putTreess(treess: List[List[Tree]]) = treess foreach putTrees
/** only used when pickling trees, i.e. in an
* argument of some Annotation */
@@ -477,6 +486,10 @@ abstract class Pickler extends SubComponent {
*/
private def writeRef(ref: AnyRef) { writeNat(index(ref)) }
private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef }
+ private def writeRefsWithLength(refs: List[AnyRef]) {
+ writeNat(refs.length)
+ writeRefs(refs)
+ }
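
The new writeRefsWithLength factors out a length-prefixed encoding: a list of references is written as its length followed by one Nat per entry, so the reader knows how many refs to consume. DefDef entries use it for the type parameters and for each value parameter list. Schematically (LengthPrefixed is illustrative; writeNat stands in for PickleBuffer's writer):

    object LengthPrefixed {
      def writeRefsWithLength(refs: List[Int], writeNat: Int => Unit): Unit = {
        writeNat(refs.length)
        refs foreach writeNat
      }
    }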
/** Write name, owner, flags, and info of a symbol.
*/
@@ -548,8 +561,6 @@ abstract class Pickler extends SubComponent {
writeSymInfo(sym)
if (sym.isAbstractType) TYPEsym else ALIASsym
case sym: TermSymbol =>
- if (!sym.isModule && sym.defaultGetter != NoSymbol)
- writeRef(sym.defaultGetter)
writeSymInfo(sym)
if (sym.alias != NoSymbol) writeRef(sym.alias)
if (sym.isModule) MODULEsym else VALsym
@@ -573,16 +584,16 @@ abstract class Pickler extends SubComponent {
writeRef(tp.typeSymbol); writeRefs(parents); REFINEDtpe
case ClassInfoType(parents, decls, clazz) =>
writeRef(clazz); writeRefs(parents); CLASSINFOtpe
- case MethodType(formals, restpe) =>
+ case mt @ MethodType(formals, restpe) =>
writeRef(restpe); writeRefs(formals)
- if (entry.isInstanceOf[ImplicitMethodType]) IMPLICITMETHODtpe
+ if (mt.isImplicit) IMPLICITMETHODtpe
else METHODtpe
case PolyType(tparams, restpe) =>
writeRef(restpe); writeRefs(tparams); POLYtpe
case ExistentialType(tparams, restpe) =>
writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe
- case DeBruijnIndex(l, i) =>
- writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
+ // case DeBruijnIndex(l, i) =>
+ // writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
case c @ Constant(_) =>
if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
@@ -614,9 +625,9 @@ abstract class Pickler extends SubComponent {
args foreach writeClassfileAnnotArg
ANNOTARGARRAY
- case (target: Symbol, children: List[Symbol]) =>
+ case (target: Symbol, children: List[_]) =>
writeRef(target)
- for (c <- children) writeRef(c.asInstanceOf[Symbol])
+ writeRefs(children.asInstanceOf[List[Symbol]])
CHILDREN
case EmptyTree =>
@@ -667,13 +678,9 @@ abstract class Pickler extends SubComponent {
writeRef(tree.symbol)
writeRef(mods)
writeRef(name)
- writeNat(tparams.length)
- writeRefs(tparams)
+ writeRefsWithLength(tparams)
writeNat(vparamss.length)
- for(vparams <- vparamss) {
- writeNat(vparams.length)
- writeRefs(vparams)
- }
+ vparamss foreach writeRefsWithLength
writeRef(tpt)
writeRef(rhs)
TREE
@@ -719,8 +726,7 @@ abstract class Pickler extends SubComponent {
writeNat(TEMPLATEtree)
writeRef(tree.tpe)
writeRef(tree.symbol)
- writeNat(parents.length)
- writeRefs(parents)
+ writeRefsWithLength(parents)
writeRef(self)
writeRefs(body)
TREE
@@ -774,7 +780,6 @@ abstract class Pickler extends SubComponent {
writeRefs(trees)
TREE
-
case tree@Function(vparams, body) =>
writeNat(FUNCTIONtree)
writeRef(tree.tpe)
@@ -1010,7 +1015,6 @@ abstract class Pickler extends SubComponent {
case sym: TermSymbol =>
print(if (sym.isModule) "MODULEsym " else "VALsym ")
printSymInfo(sym)
- if (!sym.isModule) printRef(sym.defaultGetter)
if (sym.alias != NoSymbol) printRef(sym.alias)
case NoType =>
print("NOtpe")
@@ -1030,15 +1034,16 @@ abstract class Pickler extends SubComponent {
print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents);
case ClassInfoType(parents, decls, clazz) =>
print("CLASSINFOtpe "); printRef(clazz); printRefs(parents);
- case MethodType(formals, restpe) =>
- print(if (entry.isInstanceOf[ImplicitMethodType]) "IMPLICITMETHODtpe " else "METHODtpe ");
+ case mt @ MethodType(formals, restpe) =>
+ print(if (mt.isImplicit) "IMPLICITMETHODtpe " else "METHODtpe ");
printRef(restpe); printRefs(formals)
case PolyType(tparams, restpe) =>
print("POLYtpe "); printRef(restpe); printRefs(tparams);
case ExistentialType(tparams, restpe) =>
print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams);
- case DeBruijnIndex(l, i) =>
- print("DEBRUIJNINDEXtpe "); print(l+" "+i)
+ print("||| "+entry)
+ // case DeBruijnIndex(l, i) =>
+ // print("DEBRUIJNINDEXtpe "); print(l+" "+i)
case c @ Constant(_) =>
print("LITERAL ")
if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0))
@@ -1092,7 +1097,7 @@ abstract class Pickler extends SubComponent {
println("Pickled info for "+rootName+" V"+MajorVersion+"."+MinorVersion)
}
for (i <- 0 until ep) {
- if (showSig) {
+ if (showSig/* || rootName.toString == "StaticCompletion"*/) {
print((i formatted "%3d: ")+(writeIndex formatted "%5d: "))
printEntry(entries(i))
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
index 909ecae77a..94d9bca945 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -11,816 +10,87 @@ package classfile
import java.io.IOException
import java.lang.{Float, Double}
-import scala.tools.nsc.util.{Position, NoPosition}
-
import Flags._
-import PickleFormat._
+import scala.reflect.generic.PickleFormat._
import collection.mutable.{HashMap, ListBuffer}
import annotation.switch
+import java.io.IOException
-/** This abstract class implements ..
- *
- * @author Martin Odersky
+/** @author Martin Odersky
* @version 1.0
*/
-abstract class UnPickler {
+abstract class UnPickler extends reflect.generic.UnPickler {
val global: Global
import global._
- /** Unpickle symbol table information descending from a class and/or module root
- * from an array of bytes.
- * @param bytes bytearray from which we unpickle
- * @param offset offset from which unpickling starts
- * @param classroot the top-level class which is unpickled, or NoSymbol if unapplicable
- * @param moduleroot the top-level module which is unpickled, or NoSymbol if unapplicable
- * @param filename filename associated with bytearray, only used for error messages
- */
- def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
- try {
- val p = if (currentRun.isDefined &&
- currentRun.picklerPhase != NoPhase &&
- phase.id > currentRun.picklerPhase.id) currentRun.picklerPhase
- else phase
- atPhase(p) {
- new UnPickle(bytes, offset, classRoot, moduleRoot, filename)
- }
- } catch {
- case ex: IOException =>
- throw ex
- case ex: Throwable =>
- /*if (settings.debug.value)*/ ex.printStackTrace()
- throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
- }
- }
+ def scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) =
+ new CompileScan(bytes, offset, classRoot, moduleRoot, filename).run()
- private class UnPickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(bytes, offset, -1) {
- if (settings.debug.value) global.log("unpickle " + classRoot + " and " + moduleRoot)
- checkVersion(filename)
+ class CompileScan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String)
+ extends Scan(bytes, offset, classRoot, moduleRoot, filename) {
- /** A map from entry numbers to array offsets */
- private val index = createIndex
+ protected override def debug = settings.debug.value
- /** A map from entry numbers to symbols, types, or annotations */
- private val entries = new Array[AnyRef](index.length)
+ // override def noSuchTypeTag(tag: Int, end: Int): Type = {
+ // tag match {
+ // case DEBRUIJNINDEXtpe =>
+ // DeBruijnIndex(readNat(), readNat())
+ // case _ =>
+ // super.noSuchTypeTag(tag, end)
+ // }
+ // }
- /** A map from symbols to their associated `decls' scopes */
- private val symScopes = new HashMap[Symbol, Scope]
+ override protected def errorMissingRequirement(name: Name, owner: Symbol) =
+ errorMissingRequirement(
+ "reference " + (if (name.isTypeName) "type " else "value ") +
+ name.decode + " of " + owner.tpe.widen + " refers to nonexisting symbol.")
- for (i <- 0 until index.length) {
- if (isSymbolEntry(i))
- at(i, readSymbol)
- else if (isSymbolAnnotationEntry(i))
- at(i, {() => readSymbolAnnotation(); null})
- else if (isChildrenEntry(i))
- at(i, {() => readChildren(); null})
- }
-
- if (settings.debug.value) global.log("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
+ def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) =
+ typer.infer.inferMethodAlternative(fun, List(), argtpes, restpe)
- private def checkVersion(filename: String) {
- val major = readNat()
- val minor = readNat()
- if (major != MajorVersion || minor > MinorVersion)
- throw new IOException("Scala signature " + classRoot.name +
- " has wrong version\n expected: " +
- MajorVersion + "." + MinorVersion +
- "\n found: " + major + "." + minor +
- " in "+filename)
- }
+ def newLazyTypeRef(i: Int): LazyType = new LazyTypeRef(i)
+ def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType = new LazyTypeRefAndAlias(i, j)
- /** The `decls' scope associated with given symbol */
- private def symScope(sym: Symbol) = symScopes.get(sym) match {
- case None => val s = new Scope; symScopes(sym) = s; s
- case Some(s) => s
- }
-
- /** Does entry represent an (internal) symbol */
- private def isSymbolEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- (firstSymTag <= tag && tag <= lastSymTag &&
- (tag != CLASSsym || !isRefinementSymbolEntry(i)))
- }
-
- /** Does entry represent an (internal or external) symbol */
- private def isSymbolRef(i: Int): Boolean = {
- val tag = bytes(index(i))
- (firstSymTag <= tag && tag <= lastExtSymTag)
- }
-
- /** Does entry represent a name? */
- private def isNameEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == TERMname || tag == TYPEname
- }
-
- /** Does entry represent a symbol annotation? */
- private def isSymbolAnnotationEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == SYMANNOT
- }
-
- /** Does the entry represent children of a symbol? */
- private def isChildrenEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == CHILDREN
- }
-
- /** Does entry represent a refinement symbol?
- * pre: Entry is a class symbol
+ /** Convert to a type error, that is printed gracefully instead of crashing.
+ *
+ * Similar in intent to what SymbolLoader does (but here we don't have access to
+ * error reporting, so we rely on the typechecker to report the error).
*/
- private def isRefinementSymbolEntry(i: Int): Boolean = {
- val savedIndex = readIndex
- readIndex = index(i)
- val tag = readByte().toInt
- if (tag != CLASSsym) assert(false)
- readNat(); // read length
- val result = readNameRef() == nme.REFINE_CLASS_NAME.toTypeName
- readIndex = savedIndex
- result
- }
-
- /** If entry at <code>i</code> is undefined, define it by performing
- * operation <code>op</code> with <code>readIndex at start of i'th
- * entry. Restore <code>readIndex</code> afterwards.
+ def toTypeError(e: MissingRequirementError) =
+ new TypeError(e.msg)
+
- private def at[T <: AnyRef](i: Int, op: () => T): T = {
- var r = entries(i)
- if (r eq null) {
- val savedIndex = readIndex
- readIndex = index(i)
- r = op()
- assert(entries(i) eq null, entries(i))
- entries(i) = r
- readIndex = savedIndex
- }
- r.asInstanceOf[T]
- }
-
- /** Read a name */
- private def readName(): Name = {
- val tag = readByte()
- val len = readNat()
- tag match {
- case TERMname => newTermName(bytes, readIndex, len)
- case TYPEname => newTypeName(bytes, readIndex, len)
- case _ => errorBadSignature("bad name tag: " + tag)
- }
- }
-
- /** Read a symbol */
- private def readSymbol(): Symbol = {
- val tag = readByte()
- val end = readNat() + readIndex
- var sym: Symbol = NoSymbol
- tag match {
- case EXTref | EXTMODCLASSref =>
- val name = readNameRef()
- val owner = if (readIndex == end) definitions.RootClass else readSymbolRef()
- def fromName(name: Name) =
- if (name.toTermName == nme.ROOT) definitions.RootClass
- else if (name == nme.ROOTPKG) definitions.RootPackage
- else if (tag == EXTref) owner.info.decl(name)
- else owner.info.decl(name).moduleClass
- sym = fromName(name)
- // If sym not found try with expanded name.
- // This can happen if references to private symbols are
- // read from outside; for instance when checking the children of a class
- // (see t1722)
- if (sym == NoSymbol) {
- sym = fromName(owner.expandedName(name))
- }
-
- // If the owner is overloaded (i.e. a method), it's not possible to select the
- // right member => return NoSymbol. This can only happen when unpickling a tree.
- // the "case Apply" in readTree() takes care of selecting the correct alternative
- // after parsing the arguments.
- if (sym == NoSymbol && !owner.hasFlag(OVERLOADED)) {
- errorMissingRequirement(
- "reference " + (if (name.isTypeName) "type " else "value ") +
- name.decode + " of " + owner + " refers to nonexisting symbol.")
- }
- case NONEsym =>
- sym = NoSymbol
- case _ => // symbols that were pickled with Pickler.writeSymInfo
- var defaultGetter: Symbol = NoSymbol
- var nameref = readNat()
- if (tag == VALsym && isSymbolRef(nameref)) {
- defaultGetter = at(nameref, readSymbol)
- nameref = readNat()
- }
- val name = at(nameref, readName)
- val owner = readSymbolRef()
- val flags = pickledToRawFlags(readLongNat())
- var privateWithin: Symbol = NoSymbol
- var inforef = readNat()
- if (isSymbolRef(inforef)) {
- privateWithin = at(inforef, readSymbol)
- inforef = readNat()
- }
- tag match {
- case TYPEsym =>
- sym = owner.newAbstractType(NoPosition, name)
- case ALIASsym =>
- sym = owner.newAliasType(NoPosition, name)
- case CLASSsym =>
- sym =
- if (name == classRoot.name && owner == classRoot.owner)
- (if ((flags & MODULE) != 0L) moduleRoot.moduleClass
- else classRoot)
- else
- if ((flags & MODULE) != 0L) owner.newModuleClass(NoPosition, name)
- else owner.newClass(NoPosition, name)
- if (readIndex != end) sym.typeOfThis = new LazyTypeRef(readNat())
- case MODULEsym =>
- val clazz = at(inforef, readType).typeSymbol
- sym =
- if (name == moduleRoot.name && owner == moduleRoot.owner) moduleRoot
- else {
- assert(clazz.isInstanceOf[ModuleClassSymbol], clazz)
- val mclazz = clazz.asInstanceOf[ModuleClassSymbol]
- val m = owner.newModule(NoPosition, name, mclazz)
- mclazz.setSourceModule(m)
- m
- }
- case VALsym =>
- sym = if (name == moduleRoot.name && owner == moduleRoot.owner) moduleRoot.resetFlag(MODULE)
- else owner.newValue(NoPosition, name)
- sym.defaultGetter = defaultGetter
- case _ =>
- errorBadSignature("bad symbol tag: " + tag)
- }
- sym.setFlag(flags.toLong & PickledFlags)
- sym.privateWithin = privateWithin
- if (readIndex != end) assert(sym hasFlag (SUPERACCESSOR | PARAMACCESSOR), sym)
- if (sym hasFlag SUPERACCESSOR) assert(readIndex != end)
- sym.setInfo(
- if (readIndex != end) new LazyTypeRefAndAlias(inforef, readNat())
- else new LazyTypeRef(inforef))
- if (sym.owner.isClass && sym != classRoot && sym != moduleRoot &&
- !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistential)
- symScope(sym.owner) enter sym
- }
- sym
- }
-
- /** Read a type */
- private def readType(): Type = {
- val tag = readByte()
- val end = readNat() + readIndex
- tag match {
- case NOtpe =>
- NoType
- case NOPREFIXtpe =>
- NoPrefix
- case THIStpe =>
- mkThisType(readSymbolRef())
- case SINGLEtpe =>
- singleType(readTypeRef(), readSymbolRef())
- case SUPERtpe =>
- val thistpe = readTypeRef()
- val supertpe = readTypeRef()
- SuperType(thistpe, supertpe)
- case CONSTANTtpe =>
- mkConstantType(readConstantRef())
- case TYPEREFtpe =>
- val pre = readTypeRef()
- val sym = readSymbolRef()
- var args = until(end, readTypeRef)
- rawTypeRef(pre, sym, args)
- case TYPEBOUNDStpe =>
- mkTypeBounds(readTypeRef(), readTypeRef())
- case REFINEDtpe =>
- val clazz = readSymbolRef()
-/*
- val ps = until(end, readTypeRef)
- val dcls = symScope(clazz)
- new RefinedType(ps, dcls) { override def symbol = clazz }
-*/
- new RefinedType(until(end, readTypeRef), symScope(clazz)) {
- override def typeSymbol = clazz
- }
- case CLASSINFOtpe =>
- val clazz = readSymbolRef()
- ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
- case METHODtpe =>
- val restpe = readTypeRef()
- val params = until(end, readSymbolRef)
- // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
- // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct
- // alternative after parsing the arguments.
- if (params.contains(NoSymbol) || restpe == NoType) NoType
- else MethodType(params, restpe)
- case IMPLICITMETHODtpe =>
- val restpe = readTypeRef()
- val params = until(end, readSymbolRef)
- ImplicitMethodType(params, restpe)
- case POLYtpe =>
- val restpe = readTypeRef()
- val typeParams = until(end, readSymbolRef)
- // see comment above in "case METHODtpe"
- if (typeParams.contains(NoSymbol) || restpe == NoType) NoType
- else PolyType(typeParams, restpe)
- case EXISTENTIALtpe =>
- val restpe = readTypeRef()
- ExistentialType(until(end, readSymbolRef), restpe)
- case ANNOTATEDtpe =>
- var typeRef = readNat()
- val selfsym = if (isSymbolRef(typeRef)) {
- val s = at(typeRef, readSymbol)
- typeRef = readNat()
- s
- } else NoSymbol
- val tp = at(typeRef, readType)
- val annots = until(end, readAnnotationRef)
- if (settings.selfInAnnots.value || (selfsym == NoSymbol))
- AnnotatedType(annots, tp, selfsym)
- else
- tp // drop annotations with a self symbol unless
- // -Yself-in-annots is on
- case DEBRUIJNINDEXtpe =>
- DeBruijnIndex(readNat(), readNat())
- case _ =>
- errorBadSignature("bad type tag: " + tag)
- }
- }
-
- /** Read a constant */
- private def readConstant(): Constant = {
- val tag = readByte().toInt
- val len = readNat()
- (tag: @switch) match {
- case LITERALunit => Constant(())
- case LITERALboolean => Constant(if (readLong(len) == 0L) false else true)
- case LITERALbyte => Constant(readLong(len).toByte)
- case LITERALshort => Constant(readLong(len).toShort)
- case LITERALchar => Constant(readLong(len).toChar)
- case LITERALint => Constant(readLong(len).toInt)
- case LITERALlong => Constant(readLong(len))
- case LITERALfloat => Constant(Float.intBitsToFloat(readLong(len).toInt))
- case LITERALdouble => Constant(Double.longBitsToDouble(readLong(len)))
- case LITERALstring => Constant(readNameRef().toString())
- case LITERALnull => Constant(null)
- case LITERALclass => Constant(readTypeRef())
- case LITERALenum => Constant(readSymbolRef())
- case _ => errorBadSignature("bad constant tag: " + tag)
- }
- }
-
- /** Read children and store them into the corresponding symbol.
- */
- private def readChildren() {
- val tag = readByte()
- assert(tag == CHILDREN)
- val end = readNat() + readIndex
- val target = readSymbolRef()
- while (readIndex != end) target addChild readSymbolRef()
- }
-
- /** Read an annotation argument, which is pickled either
- * as a Constant or a Tree.
- */
- private def readAnnotArg(): Tree = {
- if (peekByte() == TREE) {
- readTree()
- } else {
- val const = readConstant()
- Literal(const).setType(const.tpe)
- }
- }
-
- /** Read a ClassfileAnnotArg (argument to a classfile annotation)
- */
- private def readClassfileAnnotArg(): ClassfileAnnotArg = {
- val b = peekByte()
- if (peekByte() == ANNOTINFO) {
- NestedAnnotArg(readAnnotation())
- } else if (peekByte() == ANNOTARGARRAY) {
- readByte()
- val end = readNat() + readIndex
- ArrayAnnotArg(until(end, readClassfileAnnotArgRef).toArray)
- } else {
- LiteralAnnotArg(readConstant())
- }
- }
-
- /** Read an AnnotationInfo. Not to be called directly, use
- * readAnnotation or readSymbolAnnotation
- */
- private def readAnnotationInfo(end: Int): AnnotationInfo = {
- val atp = readTypeRef()
- val args = new ListBuffer[Tree]
- val assocs = new ListBuffer[(Name, ClassfileAnnotArg)]
- while (readIndex != end) {
- val argref = readNat()
- if (isNameEntry(argref))
- assocs += ((at(argref, readName), readClassfileAnnotArgRef))
- else
- args += at(argref, readAnnotArg)
- }
- AnnotationInfo(atp, args.toList, assocs.toList)
- }
-
-
- /** Read an annotation and as a side effect store it into
- * the symbol it requests. Called at top-level, for all
- * (symbol, annotInfo) entries. */
- private def readSymbolAnnotation() {
- val tag = readByte()
- if (tag != SYMANNOT)
- errorBadSignature("symbol annotation expected ("+ tag +")")
- val end = readNat() + readIndex
- val target = readSymbolRef()
- target.addAnnotation(readAnnotationInfo(end))
- }
-
- /** Read an annotation and return it. Used when unpickling
- * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */
- private def readAnnotation(): AnnotationInfo = {
- val tag = readByte()
- if (tag != ANNOTINFO)
- errorBadSignature("annotation expected (" + tag + ")")
- val end = readNat() + readIndex
- readAnnotationInfo(end)
- }
-
- /* Read an abstract syntax tree */
- private def readTree(): Tree = {
- val outerTag = readByte()
- if (outerTag != TREE)
- errorBadSignature("tree expected (" + outerTag + ")")
- val end = readNat() + readIndex
- val tag = readByte()
- val tpe =
- if (tag != EMPTYtree)
- readTypeRef()
- else
- NoType
-
- tag match {
- case EMPTYtree =>
- EmptyTree
-
- case PACKAGEtree =>
- val symbol = readSymbolRef()
- val pid = readTreeRef().asInstanceOf[RefTree]
- val stats = until(end, readTreeRef)
- PackageDef(pid, stats) setType tpe
-
- case CLASStree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
- val impl = readTemplateRef()
- val tparams = until(end, readTypeDefRef)
- (ClassDef(mods, name, tparams, impl).
- setSymbol(symbol).
- setType(tpe))
-
- case MODULEtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
- val impl = readTemplateRef()
- (ModuleDef(mods, name, impl).
- setSymbol(symbol).
- setType(tpe))
-
- case VALDEFtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
- val tpt = readTreeRef()
- val rhs = readTreeRef()
-
- (ValDef(mods, name, tpt, rhs).
- setSymbol(symbol).
- setType(tpe))
-
- case DEFDEFtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
- val numTparams = readNat()
- val tparams = times(numTparams, readTypeDefRef)
- val numVparamss = readNat
- val vparamss = times(numVparamss, () => {
- val len = readNat()
- times(len, readValDefRef)})
- val tpt = readTreeRef()
- val rhs = readTreeRef()
-
- (DefDef(mods, name, tparams, vparamss, tpt, rhs).
- setSymbol(symbol).
- setType(tpe))
-
- case TYPEDEFtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
- val rhs = readTreeRef()
- val tparams = until(end, readTypeDefRef)
-
- (TypeDef(mods, name, tparams, rhs).
- setSymbol(symbol).
- setType(tpe))
-
- case LABELtree =>
- val symbol = readSymbolRef()
- val name = readNameRef()
- val rhs = readTreeRef()
- val params = until(end, readIdentRef)
- (LabelDef(name, params, rhs).
- setSymbol(symbol).
- setType(tpe))
-
- case IMPORTtree =>
- val symbol = readSymbolRef()
- val expr = readTreeRef()
- val selectors = until(end, () => {
- val from = readNameRef()
- val to = readNameRef()
- ImportSelector(from, -1, to, -1)
- })
- (Import(expr, selectors).
- setSymbol(symbol).
- setType(tpe))
-
- case DOCDEFtree =>
- val comment = readConstantRef match {
- case Constant(com: String) => com
- case other =>
- errorBadSignature("Document comment not a string (" + other + ")")
- }
- val definition = readTreeRef()
- (DocDef(comment, definition).setType(tpe))
-
- case TEMPLATEtree =>
- val symbol = readSymbolRef()
- val numParents = readNat()
- val parents = times(numParents, readTreeRef)
- val self = readValDefRef()
- val body = until(end, readTreeRef)
-
- (Template(parents, self, body).
- setSymbol(symbol).
- setType(tpe))
-
- case BLOCKtree =>
- val expr = readTreeRef()
- val stats = until(end, readTreeRef)
- Block(stats, expr).setType(tpe)
-
- case CASEtree =>
- val pat = readTreeRef()
- val guard = readTreeRef()
- val body = readTreeRef()
- CaseDef(pat, guard, body).setType(tpe)
-
- case ALTERNATIVEtree =>
- val trees = until(end, readTreeRef)
- Alternative(trees).setType(tpe)
-
- case STARtree =>
- val elem = readTreeRef()
- Star(elem).setType(tpe)
-
- case BINDtree =>
- val symbol = readSymbolRef()
- val name = readNameRef()
- val body = readTreeRef()
- (Bind(name, body).
- setSymbol(symbol).
- setType(tpe))
-
- case UNAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- (UnApply(fun: Tree, args).setType(tpe))
-
- case ARRAYVALUEtree =>
- val elemtpt = readTreeRef()
- val trees = until(end, readTreeRef)
- (ArrayValue(elemtpt, trees).setType(tpe))
-
- case FUNCTIONtree =>
- val symbol = readSymbolRef()
- val body = readTreeRef()
- val vparams = until(end, readValDefRef)
- (Function(vparams, body).
- setSymbol(symbol).
- setType(tpe))
-
- case ASSIGNtree =>
- val lhs = readTreeRef()
- val rhs = readTreeRef()
- Assign(lhs, rhs).setType(tpe)
-
- case IFtree =>
- val cond = readTreeRef()
- val thenp = readTreeRef()
- val elsep = readTreeRef()
- If(cond, thenp, elsep).setType(tpe)
-
- case MATCHtree =>
- val selector = readTreeRef()
- val cases = until(end, readCaseDefRef)
- Match(selector, cases).setType(tpe)
-
- case RETURNtree =>
- val symbol = readSymbolRef()
- val expr = readTreeRef()
- (Return(expr).
- setSymbol(symbol).
- setType(tpe))
-
- case TREtree =>
- val block = readTreeRef()
- val finalizer = readTreeRef()
- val catches = until(end, readCaseDefRef)
- Try(block, catches, finalizer).setType(tpe)
-
- case THROWtree =>
- val expr = readTreeRef()
- Throw(expr).setType(tpe)
-
- case NEWtree =>
- val tpt = readTreeRef()
- New(tpt).setType(tpe)
-
- case TYPEDtree =>
- val expr = readTreeRef()
- val tpt = readTreeRef()
- Typed(expr, tpt).setType(tpe)
-
- case TYPEAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- TypeApply(fun, args).setType(tpe)
-
- case APPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- if (fun.symbol hasFlag OVERLOADED) {
- fun.setType(fun.symbol.info)
- typer.infer.inferMethodAlternative(fun, Nil, args map (_.tpe), tpe)
- }
- Apply(fun, args).setType(tpe)
-
- case APPLYDYNAMICtree =>
- val symbol = readSymbolRef()
- val qual = readTreeRef()
- val args = until(end, readTreeRef)
- ApplyDynamic(qual, args).setSymbol(symbol).setType(tpe)
-
- case SUPERtree =>
- val symbol = readSymbolRef()
- val qual = readNameRef()
- val mix = readNameRef()
- Super(qual, mix).setSymbol(symbol).setType(tpe)
-
- case THIStree =>
- val symbol = readSymbolRef()
- val qual = readNameRef()
- This(qual).setSymbol(symbol).setType(tpe)
-
- case SELECTtree =>
- val symbol = readSymbolRef()
- val qualifier = readTreeRef()
- val selector = readNameRef()
- Select(qualifier, selector).setSymbol(symbol).setType(tpe)
-
- case IDENTtree =>
- val symbol = readSymbolRef()
- val name = readNameRef()
- Ident(name).setSymbol(symbol).setType(tpe)
-
- case LITERALtree =>
- val value = readConstantRef()
- Literal(value).setType(tpe)
-
- case TYPEtree =>
- TypeTree().setType(tpe)
-
- case ANNOTATEDtree =>
- val annot = readTreeRef()
- val arg = readTreeRef()
- Annotated(annot, arg).setType(tpe)
-
- case SINGLETONTYPEtree =>
- val ref = readTreeRef()
- SingletonTypeTree(ref).setType(tpe)
-
- case SELECTFROMTYPEtree =>
- val qualifier = readTreeRef()
- val selector = readNameRef()
- SelectFromTypeTree(qualifier, selector).setType(tpe)
-
- case COMPOUNDTYPEtree =>
- val templ = readTemplateRef()
- CompoundTypeTree(templ: Template).setType(tpe)
-
- case APPLIEDTYPEtree =>
- val tpt = readTreeRef()
- val args = until(end, readTreeRef)
- AppliedTypeTree(tpt, args).setType(tpe)
-
- case TYPEBOUNDStree =>
- val lo = readTreeRef()
- val hi = readTreeRef()
- TypeBoundsTree(lo, hi).setType(tpe)
-
- case EXISTENTIALTYPEtree =>
- val tpt = readTreeRef()
- val whereClauses = until(end, readTreeRef)
- ExistentialTypeTree(tpt, whereClauses).setType(tpe)
-
- case _ =>
- errorBadSignature("unknown tree type (" + tag + ")")
- }
- }
-
- def readModifiers(): Modifiers = {
- val tag = readNat()
- if (tag != MODIFIERS)
- errorBadSignature("expected a modifiers tag (" + tag + ")")
- val end = readNat() + readIndex
- val pflagsHi = readNat()
- val pflagsLo = readNat()
- val pflags = (pflagsHi.toLong << 32) + pflagsLo
- val flags = pickledToRawFlags(pflags)
- val privateWithin = readNameRef()
- Modifiers(flags, privateWithin, Nil, new Map.EmptyMap)
- }
-
- /* Read a reference to a pickled item */
- private def readNameRef(): Name = at(readNat(), readName)
- private def readSymbolRef(): Symbol = at(readNat(), readSymbol)
- private def readTypeRef(): Type = at(readNat(), readType)
- private def readConstantRef(): Constant = at(readNat(), readConstant)
- private def readAnnotArgRef(): Tree =
- at(readNat(), readAnnotArg)
- private def readClassfileAnnotArgRef(): ClassfileAnnotArg =
- at(readNat(), readClassfileAnnotArg)
- private def readAnnotationRef(): AnnotationInfo =
- at(readNat(), readAnnotation)
- private def readModifiersRef(): Modifiers =
- at(readNat(), readModifiers)
- private def readTreeRef(): Tree =
- at(readNat(), readTree)
-
- private def readTemplateRef(): Template =
- readTreeRef() match {
- case templ:Template => templ
- case other =>
- errorBadSignature("expected a template (" + other + ")")
- }
- private def readCaseDefRef(): CaseDef =
- readTreeRef() match {
- case tree:CaseDef => tree
- case other =>
- errorBadSignature("expected a case def (" + other + ")")
- }
- private def readValDefRef(): ValDef =
- readTreeRef() match {
- case tree:ValDef => tree
- case other =>
- errorBadSignature("expected a ValDef (" + other + ")")
- }
- private def readIdentRef(): Ident =
- readTreeRef() match {
- case tree:Ident => tree
- case other =>
- errorBadSignature("expected an Ident (" + other + ")")
- }
- private def readTypeDefRef(): TypeDef =
- readTreeRef() match {
- case tree:TypeDef => tree
- case other =>
- errorBadSignature("expected an TypeDef (" + other + ")")
- }
-
-
- private def errorBadSignature(msg: String) =
- throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
-
- private def errorMissingRequirement(msg: String) =
- if (settings.debug.value) errorBadSignature(msg)
- else throw new IOException("class file needed by "+classRoot.name+" is missing.\n"+msg)
+ def toTypeError(e: IOException) =
+ new TypeError(e.getMessage)
+ /** A lazy type which when completed returns type at index `i`. */
private class LazyTypeRef(i: Int) extends LazyType {
private val definedAtRunId = currentRunId
private val p = phase
- override def complete(sym: Symbol) : Unit = {
+ override def complete(sym: Symbol) : Unit = try {
val tp = at(i, readType)
if (p != phase) atPhase(p) (sym setInfo tp)
else sym setInfo tp
if (currentRunId != definedAtRunId) sym.setInfo(adaptToNewRunMap(tp))
+ } catch {
+ case e: MissingRequirementError => throw toTypeError(e)
+ case e: IOException => throw toTypeError(e)
}
override def load(sym: Symbol) { complete(sym) }
}
+ /** A lazy type which when completed returns type at index `i` and sets alias
+ * of completed symbol to symbol at index `j`.
+ */
private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) {
- override def complete(sym: Symbol) {
+ override def complete(sym: Symbol) = try {
super.complete(sym)
var alias = at(j, readSymbol)
if (alias hasFlag OVERLOADED) {
@@ -829,6 +99,9 @@ abstract class UnPickler {
}
}
sym.asInstanceOf[TermSymbol].setAlias(alias)
+ } catch {
+ case e: MissingRequirementError => throw toTypeError(e)
+ case e: IOException => throw toTypeError(e)
}
}
}
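
The try/catch additions to LazyTypeRef and LazyTypeRefAndAlias above turn low-level unpickling failures into typer-level errors instead of letting IOExceptions escape from lazy completion. As a rough, self-contained sketch of that pattern (hypothetical names, plain Scala, not the compiler's actual classes):

    import java.io.IOException

    // stands in for scala.tools.nsc's TypeError
    class TypeError(msg: String) extends Exception(msg)

    trait LazyCompleter {
      // stands in for at(i, readType): may fail while reading the pickle
      protected def readInfo(): String
      private def toTypeError(e: Exception) = new TypeError(e.getMessage)

      // failures raised during lazy completion are reported as TypeError
      def complete(): String =
        try readInfo()
        catch { case e: IOException => throw toTypeError(e) }
    }

    object LazyCompleterDemo extends App {
      val ok  = new LazyCompleter { def readInfo() = "Int" }
      val bad = new LazyCompleter { def readInfo() = throw new IOException("missing classfile") }
      println(ok.complete())                                              // Int
      try bad.complete()
      catch { case e: TypeError => println("reported: " + e.getMessage) } // reported: missing classfile
    }
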
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
index 79bb8e7bbc..f62a42375a 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
@@ -1,8 +1,7 @@
/* NSC -- new scala compiler
- * Copyright 2004-2009 LAMP/EPFL
+ * Copyright 2004-2010 LAMP/EPFL
*/
-// $Id$
package scala.tools.nsc
package symtab
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index d12ade1ab0..a90fb8b66c 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -1,8 +1,7 @@
/* NSC -- new scala compiler
- * Copyright 2004-2009 LAMP/EPFL
+ * Copyright 2004-2010 LAMP/EPFL
*/
-// $Id$
package scala.tools.nsc
package symtab
@@ -13,7 +12,6 @@ import java.io.IOException
import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _}
import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.util.{Position, NoPosition}
import classfile.UnPickler
/**
@@ -43,7 +41,7 @@ abstract class TypeParser {
def parse(typ: MSILType, root: Symbol) {
- def handleError(e: Exception) = {
+ def handleError(e: Throwable) = {
if (settings.debug.value) e.printStackTrace() //debug
throw new IOException("type '" + typ.FullName + "' is broken\n(" + e.getMessage() + ")")
}
@@ -51,11 +49,11 @@ abstract class TypeParser {
busy = true
if (root.isModule) {
- this.clazz = root.linkedClassOfModule
+ this.clazz = root.companionClass
this.staticModule = root
} else {
this.clazz = root
- this.staticModule = root.linkedModuleOfClass
+ this.staticModule = root.companionModule
}
try {
parseClass(typ)
@@ -120,8 +118,8 @@ abstract class TypeParser {
staticDefs.enter(nclazz)
staticDefs.enter(nmodule)
- assert(nclazz.linkedModuleOfClass == nmodule, nmodule)
- assert(nmodule.linkedClassOfModule == nclazz, nclazz)
+ assert(nclazz.companionModule == nmodule, nmodule)
+ assert(nmodule.companionClass == nclazz, nclazz)
}
val fields = typ.getFields()
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 359ff9e1a9..f1ed9f43a0 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -116,7 +115,7 @@ abstract class AddInterfaces extends InfoTransform {
* </p>
* <ul>
* <li>
- * for every interface member of <code>iface</code> its implemention
+ * for every interface member of <code>iface</code> its implementation
* method, if one is needed.
* </li>
* <li>
@@ -141,7 +140,7 @@ abstract class AddInterfaces extends InfoTransform {
for (sym <- ifaceDecls.iterator) {
if (isInterfaceMember(sym)) {
if (needsImplMethod(sym)) {
- val impl = sym.cloneSymbol(implClass).setInfo(sym.info).resetFlag(lateDEFERRED)
+ val impl = sym.cloneSymbol(implClass).resetFlag(lateDEFERRED)
if (currentRun.compiles(implClass)) implMethodMap(sym) = impl
decls enter impl
sym setFlag lateDEFERRED
@@ -162,7 +161,8 @@ abstract class AddInterfaces extends InfoTransform {
case ClassInfoType(parents, decls, _) =>
assert(phase == implClassPhase)
ClassInfoType(
- ObjectClass.tpe :: (parents.tail map mixinToImplClass) ::: List(iface.tpe),
+ ObjectClass.tpe :: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass))
+ ::: List(iface.tpe),
implDecls(sym, decls),
sym)
case PolyType(tparams, restpe) =>
@@ -243,7 +243,7 @@ abstract class AddInterfaces extends InfoTransform {
tree.symbol = implMethod
new ChangeOwnerAndReturnTraverser(ifaceMethod, implMethod)(tree)
case None =>
- throw new Error("implMethod missing for " + ifaceMethod)
+ abort("implMethod missing for " + ifaceMethod)
}
private def implMemberDef(tree: Tree): Tree =
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index dafec5e1c3..9b569fa45e 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -1,16 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
import symtab._
import Flags._
-import scala.tools.nsc.util.Position
-import scala.collection.mutable.{ListBuffer, HashMap}
+import scala.collection._
abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
@@ -24,10 +22,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newDefs = new ListBuffer[Tree]
- private val newInits = new ListBuffer[Tree]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
- private val classConstantMeth = new HashMap[String, Symbol]
+ //private val classConstantMeth = new HashMap[String, Symbol]
+ //private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
private var localTyper: analyzer.Typer = null
@@ -51,35 +51,18 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
private def typedWithPos(pos: Position)(tree: Tree) =
localTyper typed { atPos(pos)(tree) }
- private def classConstantMethod(pos: Position, sig: String): Symbol =
- (classConstantMeth get sig) getOrElse {
- val forName = getMember(ClassClass.linkedModuleOfClass, nme.forName)
- val owner = currentOwner.enclClass
-
- val cvar = owner.newVariable(pos, unit.fresh.newName(pos, "class$Cache"))
- .setFlag(PRIVATE | STATIC | MUTABLE | SYNTHETIC).setInfo(ClassClass.tpe)
- owner.info.decls enter cvar
- val cdef = typedWithPos(pos) { VAL(cvar) === NULL }
-
- val meth = owner.newMethod(pos, unit.fresh.newName(pos, "class$Method"))
- .setFlag(PRIVATE | STATIC | SYNTHETIC).setInfo(MethodType(List(), ClassClass.tpe))
- owner.info.decls enter meth
- val mdef = typedWithPos(pos)(DEF(meth) ===
- gen.mkCached(cvar, Apply(REF(forName), List(Literal(sig))))
- )
-
- newDefs.append(cdef, mdef)
- classConstantMeth.update(sig, meth)
- meth
- }
-
override def transformUnit(unit: CompilationUnit) =
unit.body = transform(unit.body)
/** A value class is defined to be only Java-compatible values: unit is
* not part of it, as opposed to isValueClass in definitions. scala.Int is
* a value class, java.lang.Integer is not. */
- def isValueClass(sym: Symbol) = boxedClass contains sym
+ def isJavaValueClass(sym: Symbol) = boxedClass contains sym
+ def isJavaValueType(tp: Type) = isJavaValueClass(tp.typeSymbol)
+
+ /** The boxed type if it's a primitive; identity otherwise.
+ */
+ def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
override def transform(tree: Tree): Tree = tree match {
@@ -110,7 +93,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* - The type-checker has prevented dynamic applies on methods which
* parameter's erased types are not statically known at the call site.
* This is necessary to allow dispatching the call to the correct
- * method (dispatching on paramters is static in Scala). In practice,
+ * method (dispatching on parameters is static in Scala). In practice,
* this limitation only arises when the called method is defined as a
* refinement, where the refinement defines a parameter based on a
* type variable. */
@@ -134,10 +117,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
currentClass.info.decls enter varSym
val varDef = typedPos( VAL(varSym) === forInit )
- newDefs append transform(varDef)
+ newStaticMembers append transform(varDef)
val varInit = typedPos( REF(varSym) === forInit )
- newInits append transform(varInit)
+ newStaticInits append transform(varInit)
varSym
}
@@ -151,7 +134,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
currentClass.info.decls enter methSym
val methDef = typedPos( DefDef(methSym, { forBody(Pair(methSym, methSym.paramss(0))) }) )
- newDefs append transform(methDef)
+ newStaticMembers append transform(methDef)
methSym
}
@@ -185,7 +168,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case MONO_CACHE =>
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)":
+ /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
+ (but with a SoftReference wrapping reflClass$Cache, similarly in the poly Cache) :
var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
@@ -210,16 +194,19 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
addStaticVariableToClass("reflMethod$Cache", MethodClass.tpe, NULL, false)
val reflClassCacheSym: Symbol =
- addStaticVariableToClass("reflClass$Cache", ClassClass.tpe, NULL, false)
+ addStaticVariableToClass("reflClass$Cache", SoftReferenceClass.tpe, NULL, false)
def getMethodSym = ClassClass.tpe member nme.getMethod_
+ def isCacheEmpty(receiver: Symbol): Tree =
+ reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() ANY_NE REF(receiver))
+
addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe) {
case Pair(reflMethodSym, List(forReceiverSym)) =>
BLOCK(
- IF (REF(reflClassCacheSym) ANY_NE REF(forReceiverSym)) THEN BLOCK(
+ IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
REF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT getMethodSym)(LIT(method), REF(reflParamsCacheSym))) ,
- REF(reflClassCacheSym) === REF(forReceiverSym),
+ REF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
UNIT
) ENDIF,
REF(reflMethodCacheSym)
@@ -228,11 +215,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case POLY_CACHE =>
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)":
+ /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
+ (SoftReference so that it does not interfere with classloader garbage collection, see ticket
+ #2365 for details):
var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
- var reflPoly$Cache: scala.runtime.MethodCache = new EmptyMethodCache()
+ var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
var method: JMethod = reflPoly$Cache.find(forReceiver)
@@ -240,7 +229,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
return method
else {
method = forReceiver.getMethod("xyz", reflParams$Cache)
- reflPoly$Cache = reflPoly$Cache.add(forReceiver, method)
+ method.setAccessible(true) // issue #2381
+ reflPoly$Cache = new SoftReference(reflPoly$Cache.get.add(forReceiver, method))
return method
}
}
@@ -250,23 +240,26 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val reflParamsCacheSym: Symbol =
addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
- val reflPolyCacheSym: Symbol =
- addStaticVariableToClass("reflPoly$Cache", MethodCacheClass.tpe, NEW(TypeTree(EmptyMethodCacheClass.tpe)), false)
+ def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
+ val reflPolyCacheSym: Symbol = addStaticVariableToClass("reflPoly$Cache", SoftReferenceClass.tpe, mkNewPolyCache, false)
+ def getPolyCache = fn(REF(reflPolyCacheSym), nme.get) AS_ATTR MethodCacheClass.tpe
addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe)
{ case Pair(reflMethodSym, List(forReceiverSym)) =>
val methodSym = reflMethodSym.newVariable(ad.pos, mkTerm("method")) setInfo MethodClass.tpe
BLOCK(
- VAL(methodSym) === ((REF(reflPolyCacheSym) DOT methodCache_find)(REF(forReceiverSym))) ,
+ IF (getPolyCache ANY_EQ NULL) THEN (REF(reflPolyCacheSym) === mkNewPolyCache) ENDIF,
+ VAL(methodSym) === ((getPolyCache DOT methodCache_find)(REF(forReceiverSym))) ,
IF (REF(methodSym) OBJ_!= NULL) .
THEN (Return(REF(methodSym)))
ELSE {
def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
- def cacheRHS = ((REF(reflPolyCacheSym) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
+ def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
REF(methodSym) === methodSymRHS,
- REF(reflPolyCacheSym) === cacheRHS,
+ (REF(methodSym) DOT methodClass_setAccessible)(LIT(true)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
Return(REF(methodSym))
)
}
@@ -339,23 +332,68 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* a dynamic call will box them as a side-effect. */
/* ### CALLING THE APPLY ### */
- def callAsReflective(paramTypes: List[Type], resType: Type, structResType: Type): Tree = localTyper typed {
- def fixResult(tree: Tree): Tree = localTyper typed {
- structResType.typeSymbol match {
- case UnitClass => BLOCK(tree, REF(BoxedUnit_UNIT))
- case ObjectClass => tree
- case _ => tree AS_ATTR structResType
+ def callAsReflective(paramTypes: List[Type], resType: Type): Tree = {
+ /* Some info about the type of the method being called. */
+ val methSym = ad.symbol
+ val boxedResType = toBoxedType(resType) // Int -> Integer
+ val resultSym = boxedResType.typeSymbol
+ // If this is a primitive method type (like '+' in 5+5=10) then the
+ // parameter types and the (unboxed) result type should all be primitive types,
+ // and the method name should be in the primitive->structural map.
+ def isJavaValueMethod = (
+ (resType :: paramTypes forall isJavaValueType) && // issue #1110
+ (getPrimitiveReplacementForStructuralCall isDefinedAt methSym.name)
+ )
+ // Erasure lets Unit through as Unit, but a method returning Any will have an
+ // erased return type of Object and should also allow Unit.
+ def isDefinitelyUnit = (resultSym == UnitClass)
+ def isMaybeUnit = (resultSym == ObjectClass) || isDefinitelyUnit
+ // If there's any chance this signature could be met by an Array.
+ val isArrayMethodSignature = {
+ def typesMatchApply = paramTypes match {
+ case List(tp) => tp <:< IntClass.tpe
+ case _ => false
+ }
+ def typesMatchUpdate = paramTypes match {
+ case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
+ case _ => false
}
+
+ (methSym.name == nme.length && params.isEmpty) ||
+ (methSym.name == nme.clone_ && params.isEmpty) ||
+ (methSym.name == nme.apply && typesMatchApply) ||
+ (methSym.name == nme.update && typesMatchUpdate)
}
- val qualSym = qual.tpe.typeSymbol
- val methSym = ad.symbol
- def defaultCall = {
+
+ /* Some info about the argument at the call site. */
+ val qualSym = qual.tpe.typeSymbol
+ val args = qual :: params
+ def isDefinitelyArray = (qualSym == ArrayClass)
+ def isMaybeArray = (qualSym == ObjectClass) || isDefinitelyArray
+ def isMaybeBoxed = platform isMaybeBoxed qualSym
+
+ // This is complicated a bit by trying to handle Arrays correctly.
+ // Under normal circumstances if the erased return type is Object then
+ // we're not going to box it to Unit, but that is the situation with
+ // a signature like def f(x: { def update(x: Int, y: Long): Any })
+ //
+ // However we only want to do that boxing if it has been determined
+ // to be an Array and a method returning Unit. But for this fixResult
+ // could be called in one place: instead it is called separately from the
+ // unconditional outcomes (genValueCall, genArrayCall, genDefaultCall.)
+ def fixResult(tree: Tree, mustBeUnit: Boolean = false) =
+ if (mustBeUnit || resultSym == UnitClass) BLOCK(tree, REF(BoxedUnit_UNIT)) // boxed unit
+ else if (resultSym == ObjectClass) tree // no cast necessary
+ else tree AS_ATTR boxedResType // cast to expected type
+
+ /** Normal non-Array call */
+ def genDefaultCall = {
// reflective method call machinery
- val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
- def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
- def lookup = Apply(cache, List(qual GETCLASS)) // get Method object from cache
- def args = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
- def invocation = (lookup DOT invokeName)(qual, args) // .invoke(qual, ...)
+ val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
+ def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
+ def lookup = Apply(cache, List(qual GETCLASS)) // get Method object from cache
+ def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
+ def invocation = (lookup DOT invokeName)(qual, invokeArgs) // .invoke(qual, ...)
// exception catching machinery
val invokeExc = currentOwner.newValue(ad.pos, mkTerm()) setInfo InvocationTargetExceptionClass.tpe
@@ -363,50 +401,41 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def catchBody = Throw(Apply(Select(Ident(invokeExc), nme.getCause), Nil))
// try { method.invoke } catch { case e: InvocationTargetExceptionClass => throw e.getCause() }
- TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY
+ fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY)
}
- def useValueOperator = {
- def isBoxed(qualSym: Symbol): Boolean =
- (qualSym isNonBottomSubClass BoxedNumberClass) ||
- (!forMSIL && (qualSym isNonBottomSubClass BoxedCharacterClass))
- ((qualSym == definitions.ObjectClass) || isBoxed(qualSym)) && // may be a boxed value class
- (getPrimitiveReplacementForStructuralCall isDefinedAt methSym.name) &&
- ((resType :: paramTypes) forall (x => isValueClass(x.typeSymbol))) // issue #1110
- }
- def useArrayOperator =
- ((qualSym == definitions.ObjectClass) || (qualSym == definitions.ArrayClass)) &&
- ((methSym.name == nme.length) || (methSym.name == nme.update) || (methSym.name == nme.apply))
- val callCode = if (useValueOperator) {
+
+ /** A possible primitive method call, represented by methods in BoxesRunTime. */
+ def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args)
+ def genValueCallWithTest = {
val (operator, test) = getPrimitiveReplacementForStructuralCall(methSym.name)
- def args = qual :: params
- fixResult((IF (test) THEN (REF(operator) APPLY args) ELSE defaultCall))
+ IF (test) THEN genValueCall(operator) ELSE genDefaultCall
}
- else if (useArrayOperator) {
- val args = qual :: params
- val operatorCall = // what follows is incredibly ugly. this dirty fix should be deal with at the next cleanup of cleanup.
- if (methSym.name == nme.length)
- (REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args))
- else if (methSym.name == nme.update)
- (REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2)))
- else
- (REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1))))
- (IF (qual IS_OBJ arrayType(ObjectClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(ByteClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(ShortClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(IntClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(LongClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(FloatClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(DoubleClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(CharClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(BooleanClass.tpe)) THEN operatorCall
- ELSE fixResult(defaultCall)
- )))))))))
- }
- else fixResult(defaultCall)
- localTyper.typed(callCode)
- }
- def getClass(q: Tree): Tree = (q DOT nme.getClass_)()
+ /** A native Array call. */
+ def genArrayCall = fixResult(
+ methSym.name match {
+ case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
+ case nme.update => REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2))
+ case nme.apply => REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)))
+ case nme.clone_ => REF(arrayCloneMethod) APPLY List(args(0))
+ },
+ mustBeUnit = methSym.name == nme.update
+ )
+
+ /** A conditional Array call, when we can't determine statically if the argument is
+ * an Array, but the structural type method signature is consistent with an Array method
+ * so we have to generate both kinds of code.
+ */
+ def genArrayCallWithTest =
+ IF ((qual GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall
+
+ localTyper typed (
+ if (isMaybeBoxed && isJavaValueMethod) genValueCallWithTest
+ else if (isArrayMethodSignature && isDefinitelyArray) genArrayCall
+ else if (isArrayMethodSignature && isMaybeArray) genArrayCallWithTest
+ else genDefaultCall
+ )
+ }
if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
/* val guardCallSite: Tree = {
@@ -429,8 +458,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* This creates the tree that does the reflective call (see general comment
* on the apply-dynamic tree for its format). This tree is simply composed
- * of three succesive calls, first to getClass on the callee, then to
- * getMethod on the classs, then to invoke on the method.
+ * of three successive calls, first to getClass on the callee, then to
+ * getMethod on the class, then to invoke on the method.
* - getMethod needs an array of classes for choosing one amongst many
* overloaded versions of the method. This is provided by paramTypeClasses
* and must be done on the static type as Scala's dispatching is static on
@@ -452,10 +481,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val sym = currentOwner.newValue(ad.pos, mkTerm("qual")) setInfo qual0.tpe
qual = REF(sym)
- def structResType = if (isValueClass(resType.typeSymbol)) boxedClass(resType.typeSymbol).tpe else resType
BLOCK(
VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType, structResType)
+ callAsReflective(mparams map (_.tpe), resType)
)
}
}
@@ -490,43 +518,30 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* Some cleanup transformations add members to templates (classes, traits, etc).
* When inside a template (i.e. the body of one of its members), two maps
- * (newDefs and newInits) are available in the tree transformer. Any mapping from
- * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newDefs once the
+ * (newStaticMembers and newStaticInits) are available in the tree transformer. Any mapping from
+ * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newStaticMembers once the
* transformation of the template is finished will be added as a member to the
- * template. Any mapping from a symbol to a tree that is in newInits, will be added
+ * template. Any mapping from a symbol to a tree that is in newStaticInits, will be added
* as a statement of the form "symbol = tree" to the beginning of the default
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- if (!forMSIL) {
- classConstantMeth.clear
- newDefs.clear
- newInits.clear
- var newBody =
- transformTrees(body)
- val firstConstructor =
- treeInfo.firstConstructor(newBody)
- newBody =
- transformTrees(newDefs.toList) ::: (
- for (member <- newBody) yield member match {
- case thePrimaryConstructor@DefDef(mods, name, tparams, vparamss, tpt, rhs) if (thePrimaryConstructor == firstConstructor) =>
- val newRhs = rhs match {
- case theRhs@Block(stats, expr) =>
- treeCopy.Block(theRhs, transformTrees(newInits.toList) ::: stats, expr)
- }
- treeCopy.DefDef(thePrimaryConstructor, mods, name, tparams, vparamss, tpt, newRhs)
- case notThePrimaryConstructor =>
- notThePrimaryConstructor
- }
- )
- treeCopy.Template(tree, parents, self, newBody)
- }
- else super.transform(tree)
+ newStaticMembers.clear
+ newStaticInits.clear
+ symbolsStoredAsStatic.clear
+ val transformedTemplate: Template =
+ if (!forMSIL) {
+ var newBody =
+ transformTrees(body)
+ treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
+ }
+ else super.transform(tree).asInstanceOf[Template]
+ addStaticInits(transformedTemplate) // postprocess to include static ctors
case Literal(c) if (c.tag == ClassTag) && !forMSIL=>
val tpe = c.typeValue
typedWithPos(tree.pos) {
- if (isValueClass(tpe.typeSymbol) || tpe.typeSymbol == definitions.UnitClass) {
+ if (isValueClass(tpe.typeSymbol)) {
if (tpe.typeSymbol == UnitClass)
Select(REF(BoxedUnit_TYPE), BoxedUnit_TYPE)
else
@@ -566,9 +581,117 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
super.transform(tree)
+ /*
+ * This transformation should identify Scala symbol invocations in the tree and replace them
+ * with references to a static member. Also, whenever a class has at least a single symbol invocation
+ * somewhere in its methods, a new static member should be created and initialized for that symbol.
+ * For instance, say we have a Scala class:
+ *
+ * class Cls {
+ * // ...
+ * def someSymbol = 'symbolic
+ * // ...
+ * }
+ *
+ * After transformation, this class looks like this:
+ *
+ * class Cls {
+ * private "static" val <some_name>$symbolic = Symbol("symbolic")
+ * // ...
+ * def someSymbol = <some_name>$symbolic
+ * // ...
+ * }
+ *
+ * The reasoning behind this transformation is the following. Symbols get interned - they are stored
+ * in a global map which is protected with a lock. The reason for this is making equality checks
+ * quicker. But calling Symbol.apply, although it does return a unique symbol, accesses a locked object,
+ * making symbol access slow. To solve this, the unique symbol from the global symbol map in Symbol
+ * is accessed only once during class loading, and after that, the unique symbol is in the static
+ * member. Hence, it is cheap to both reach the unique symbol and do equality checks on it.
+ *
+ * And, finally, be advised - scala symbol literal and the Symbol class of the compiler
+ * have little in common.
+ */
+ case symapp @ Apply(Select(Select(a @ Ident(nme.scala_), b @ nme.Symbol), nme.apply),
+ List(Literal(Constant(symname: String)))) =>
+ // add the symbol name to a map if it's not there already
+ val rhs = gen.mkCast(Apply(gen.scalaDot(nme.Symbol), List(Literal(Constant(symname)))), symbolType)
+ val staticFieldSym = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
+
+ // create a reference to a static field
+ val ntree = typedWithPos(symapp.pos)(REF(staticFieldSym))
+
+ super.transform(ntree)
case _ =>
super.transform(tree)
}
+
+ /* Returns the symbol and the tree for the symbol field interning a reference to a symbol 'symname'.
+ * If it doesn't exist, i.e. the symbol is encountered the first time,
+ * it creates a new static field definition and initialization and returns it.
+ */
+ private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol =
+ symbolsStoredAsStatic.getOrElseUpdate(symname, {
+ val freshname = unit.fresh.newName(pos, "symbol$")
+ val theTyper = typer.atOwner(tree, currentClass)
+
+ // create a symbol for the static field
+ val stfieldSym = currentClass.newVariable(pos, freshname)
+ .setFlag(PRIVATE | STATIC | SYNTHETIC | FINAL)
+ .setInfo(symbolType)
+ currentClass.info.decls enter stfieldSym
+
+ // create field definition and initialization
+ val stfieldDef = theTyper.typed { atPos(pos)(VAL(stfieldSym) === rhs) }
+ val stfieldInit = theTyper.typed { atPos(pos)(REF(stfieldSym) === rhs) }
+
+ // add field definition to new defs
+ newStaticMembers append stfieldDef
+ newStaticInits append stfieldInit
+
+ stfieldSym
+ })
+
+ /* finds the static ctor DefDef tree within the template if it exists. */
+ private def findStaticCtor(template: Template): Option[Tree] =
+ template.body find {
+ case defdef @ DefDef(mods, nme.CONSTRUCTOR, tparam, vparam, tp, rhs) => defdef.symbol hasFlag STATIC
+ case _ => false
+ }
+
+ /* changes the template for the class so that it contains a static constructor with symbol field inits,
+ * or augments an existing static ctor if one already exists.
+ */
+ private def addStaticInits(template: Template): Template =
+ if (newStaticInits.isEmpty)
+ template
+ else {
+ val newCtor = findStaticCtor(template) match {
+ // in case there already were static ctors - augment existing ones
+ // currently, however, static ctors aren't being generated anywhere else
+ case Some(ctor @ DefDef(mods, name, tparams, vparamss, tpt, rhs)) =>
+ // modify existing static ctor
+ val newBlock = rhs match {
+ case block @ Block(stats, expr) =>
+ // need to add inits to existing block
+ treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
+ case term: TermTree =>
+ // need to create a new block with inits and the old term
+ treeCopy.Block(term, newStaticInits.toList, term)
+ }
+ treeCopy.DefDef(ctor, mods, name, tparams, vparamss, tpt, newBlock)
+ case None =>
+ // create new static ctor
+ val staticCtorSym = currentClass.newConstructor(template.pos)
+ .setFlag(STATIC)
+ .setInfo(UnitClass.tpe)
+ val rhs = Block(newStaticInits.toList, Literal(()))
+ val staticCtorTree = DefDef(staticCtorSym, rhs)
+ localTyper.typed { atPos(template.pos)(staticCtorTree) }
+ }
+ treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
+ }
+
} // CleanUpTransformer
}
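
The symbol-literal rewrite described in the CleanUp comments above replaces every Symbol.apply call site with a read of a per-class cached field that is initialized once (in the emitted bytecode, a static field set from a static constructor). A plain-Scala before/after illustration of the idea (hand-written, with a hypothetical field name, not the compiler's actual output):

    class Before {
      // every call goes through the locked interning map inside Symbol.apply
      def someSymbol: Symbol = Symbol("symbolic")
    }

    class After {
      // what the transformation conceptually emits: look the unique Symbol up once, then reuse it
      private val symbol$1: Symbol = Symbol("symbolic")
      def someSymbol: Symbol = symbol$1
    }

    object SymbolCacheDemo extends App {
      // Symbol.apply interns, so both versions return the very same instance
      assert((new Before).someSymbol eq (new After).someSymbol)
      println((new After).someSymbol)   // 'symbolic
    }
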
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index d8f0290507..34b7b7f95b 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -1,8 +1,7 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
* @author
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -17,6 +16,7 @@ import util.TreeSet
abstract class Constructors extends Transform with ast.TreeDSL {
import global._
import definitions._
+ import collection.mutable
/** the following two members override abstract members in Transform */
val phaseName: String = "constructors"
@@ -24,6 +24,9 @@ abstract class Constructors extends Transform with ast.TreeDSL {
protected def newTransformer(unit: CompilationUnit): Transformer =
new ConstructorTransformer(unit)
+ private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = new mutable.HashMap[Symbol, List[Tree]]
+ private val ctorParams: mutable.Map[Symbol, List[Symbol]] = new mutable.HashMap[Symbol, List[Symbol]]
+
class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
def transformClassTemplate(impl: Template): Template = {
@@ -31,6 +34,9 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val stats = impl.body // the transformed template body
val localTyper = typer.atOwner(impl, clazz)
+ val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
+ val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
+
var constr: DefDef = null // The primary constructor
var constrParams: List[Symbol] = null // ... and its parameters
var constrBody: Block = null // ... and its body
@@ -68,25 +74,32 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
var thisRefSeen: Boolean = false
+ var usesSpecializedField: Boolean = false
// A transformer for expressions that go into the constructor
val intoConstructorTransformer = new Transformer {
+ def isParamRef(sym: Symbol) =
+ (sym hasFlag PARAMACCESSOR) &&
+ sym.owner == clazz &&
+ !(sym.isGetter && sym.accessed.isVariable) &&
+ !sym.isSetter
override def transform(tree: Tree): Tree = tree match {
case Apply(Select(This(_), _), List()) =>
// references to parameter accessor methods of own class become references to parameters
// outer accessors become references to $outer parameter
- if ((tree.symbol hasFlag PARAMACCESSOR) && tree.symbol.owner == clazz)
+ if (isParamRef(tree.symbol))
gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
else
super.transform(tree)
- case Select(This(_), _)
- if ((tree.symbol hasFlag PARAMACCESSOR) && !tree.symbol.isSetter && tree.symbol.owner == clazz) =>
+ case Select(This(_), _) if (isParamRef(tree.symbol)) =>
// references to parameter accessor field of own class become references to parameters
gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
case Select(_, _) =>
thisRefSeen = true
+ if (specializeTypes.specializedTypeVars(tree.symbol).nonEmpty)
+ usesSpecializedField = true
super.transform(tree)
case This(_) =>
thisRefSeen = true
@@ -217,7 +230,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Could symbol's definition be omitted, provided it is not accessed?
// This is the case if the symbol is defined in the current class, and
// ( the symbol is an object private parameter accessor field, or
- // the symbol is an outer accessor of a final class which does not override another outer accesser. )
+ // the symbol is an outer accessor of a final class which does not override another outer accessor. )
def maybeOmittable(sym: Symbol) =
(sym.owner == clazz &&
((sym hasFlag PARAMACCESSOR) && sym.isPrivateLocal ||
@@ -275,12 +288,130 @@ abstract class Constructors extends Transform with ast.TreeDSL {
copyParam(acc, parameter(acc))
}
+ /** Return a single list of statements, merging the generic class constructor with the
+ * specialized stats. The original statements are retyped in the current class, and
+ * assignments to generic fields that have a corresponding specialized assignment in
+ * `specializedStats` are replaced by the specialized assignment.
+ */
+ def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
+ val specBuf = new ListBuffer[Tree]
+ specBuf ++= specializedStats
+
+ def specializedAssignFor(sym: Symbol): Option[Tree] =
+ specializedStats.find {
+ case Assign(sel @ Select(This(_), _), rhs) if sel.symbol.hasFlag(SPECIALIZED) =>
+ val (generic, _, _) = nme.splitSpecializedName(nme.localToGetter(sel.symbol.name))
+ generic == nme.localToGetter(sym.name)
+ case _ => false
+ }
+
+ /** Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
+ * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
+ * variable, but after specialization this is a concrete primitive type, so it would
+ * be an error to pass it to array_update(.., .., Object).
+ */
+ def rewriteArrayUpdate(tree: Tree): Tree = {
+ val array_update = definitions.ScalaRunTimeModule.info.member("array_update")
+ val adapter = new Transformer {
+ override def transform(t: Tree): Tree = t match {
+ case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == array_update =>
+ localTyper.typed(Apply(gen.mkAttributedSelect(xs, definitions.Array_update), List(idx, v)))
+ case _ => super.transform(t)
+ }
+ }
+ adapter.transform(tree)
+ }
+
+ log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
+ val res = for (s <- originalStats; val stat = s.duplicate) yield {
+ log("merge: looking at " + stat)
+ val stat1 = stat match {
+ case Assign(sel @ Select(This(_), field), _) =>
+ specializedAssignFor(sel.symbol).getOrElse(stat)
+ case _ => stat
+ }
+ if (stat1 ne stat) {
+ log("replaced " + stat + " with " + stat1)
+ specBuf -= stat1
+ }
+
+ if (stat1 eq stat) {
+ assert(ctorParams(genericClazz).length == constrParams.length)
+ // this is just to make private fields public
+ (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrParams, null, true))(stat1)
+
+ val stat2 = rewriteArrayUpdate(stat1)
+ // statements coming from the original class need retyping in the current context
+ if (settings.debug.value) log("retyping " + stat2)
+
+ val d = new specializeTypes.Duplicator
+ d.retyped(localTyper.context1.asInstanceOf[d.Context],
+ stat2,
+ genericClazz,
+ clazz,
+ Map.empty)
+ } else
+ stat1
+ }
+ if (specBuf.nonEmpty)
+ println("residual specialized constructor statements: " + specBuf)
+ res
+ }
+
+ /** Add an 'if' around the statements coming after the super constructor. This
+ * guard is necessary if the code uses specialized fields. A specialized field is
+ * initialized in the subclass constructor, but the accessors are (already) overridden
+ * and pointing to the (empty) fields. To fix this, a class with specialized fields
+ * will not run its constructor statements if the instance is specialized. The specialized
+ * subclass includes a copy of those constructor statements, and runs them. To flag that a class
+ * has specialized fields, and their initialization should be deferred to the subclass, method
+ * 'specInstance$' is added in phase specialize.
+ */
+ def guardSpecializedInitializer(stats0: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats0 else {
+ // split the statements in presuper and postsuper
+ var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
+ if (postfix.nonEmpty) {
+ prefix = prefix :+ postfix.head
+ postfix = postfix.tail
+ }
+
+ if (usesSpecializedField && shouldGuard && postfix.nonEmpty) {
+ // save them for duplication in the specialized subclass
+ guardedCtorStats(clazz) = postfix
+ ctorParams(clazz) = constrParams
+
+ val tree =
+ If(
+ Apply(
+ Select(
+ Apply(gen.mkAttributedRef(specializedFlag), List()),
+ definitions.getMember(definitions.BooleanClass, nme.UNARY_!)),
+ List()),
+ Block(postfix, Literal(())),
+ EmptyTree)
+
+ prefix ::: List(localTyper.typed(tree))
+ } else if (clazz.hasFlag(SPECIALIZED)) {
+ // add initialization from its generic class constructor
+ val (genericName, _, _) = nme.splitSpecializedName(clazz.name)
+ val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
+ assert(genericClazz != NoSymbol)
+
+ guardedCtorStats.get(genericClazz) match {
+ case Some(stats1) =>
+ val merged = mergeConstructors(genericClazz, stats1, postfix)
+ prefix ::: merged
+ case None => stats0
+ }
+ } else stats0
+ }
+
// Assemble final constructor
defBuf += treeCopy.DefDef(
constr, constr.mods, constr.name, constr.tparams, constr.vparamss, constr.tpt,
treeCopy.Block(
constrBody,
- paramInits ::: constrPrefixBuf.toList ::: constrStatBuf.toList,
+ paramInits ::: constrPrefixBuf.toList ::: guardSpecializedInitializer(constrStatBuf.toList),
constrBody.expr));
// Unlink all fields that can be dropped from class scope
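
The guardSpecializedInitializer addition above wraps the post-super constructor statements of a class with specialized fields in a test, so they run only when the instance is not a specialized subclass; the specialized subclass then executes its own merged copy of those statements. A much-simplified, hand-written sketch of that control flow (specInstance stands in for the synthetic specInstance$ flag; this is not the compiler's generated code):

    // Generic[T] stands in for a class whose fields get specialized.
    class Generic[T](init: T) {
      protected def specInstance: Boolean = false
      var field: T = _
      if (!specInstance) {      // guard around the post-super constructor statements
        field = init
      }
    }

    // The specialized subclass carries its own copy of the guarded statements.
    class SpecializedInt(init: Int) extends Generic[Int](init) {
      override protected def specInstance = true
      field = init
    }

    object GuardDemo extends App {
      println(new Generic("x").field)        // x  (initialized by the generic constructor)
      println(new SpecializedInt(42).field)  // 42 (initialized by the subclass's copy)
    }
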
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 7c57b2e16f..8f05dbdac2 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -10,7 +9,7 @@ package transform
import scala.tools.nsc.symtab.classfile.ClassfileConstants._
import scala.collection.mutable.{HashMap,ListBuffer}
import scala.collection.immutable.Set
-import scala.tools.nsc.util.Position
+import scala.util.control.ControlThrowable
import symtab._
import Flags._
@@ -18,7 +17,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
{
import global._ // the global environment
import definitions._ // standard classes and methods
- // @S: XXX: why is this here? earsure is a typer, if you comment this
+ // @S: XXX: why is this here? erasure is a typer, if you comment this
// out erasure still works, uses its own typed methods.
lazy val typerXXX = this.typer
import typerXXX.{typed} // methods to type trees
@@ -31,6 +30,8 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
def newTransformer(unit: CompilationUnit): Transformer =
new ErasureTransformer(unit)
+ override def keepsTypeParams = false
+
// -------- erasure on types --------------------------------------------------------
/** An extractor object for generic arrays */
@@ -39,7 +40,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
/** Is `tp` an unbounded generic type (i.e. which could be instantiated
* with primitive as well as class types)?.
*/
- private def genericCore(tp: Type): Type = tp match {
+ private def genericCore(tp: Type): Type = tp.normalize match {
case TypeRef(_, argsym, _) if (argsym.isAbstractType && !(argsym.owner hasFlag JAVA)) =>
tp
case ExistentialType(tparams, restp) =>
@@ -52,7 +53,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* then Some(N, T) where N is the number of Array constructors enclosing `T`,
* otherwise None. Existentials on any level are ignored.
*/
- def unapply(tp: Type): Option[(Int, Type)] = tp match {
+ def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match {
case TypeRef(_, ArrayClass, List(arg)) =>
genericCore(arg) match {
case NoType =>
@@ -75,6 +76,13 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
case _ => 0
}
+ // @M #2585 when generating a java generic signature that includes a selection of an inner class p.I, (p = `pre`, I = `cls`)
+ // must rewrite to p'.I, where p' refers to the class that directly defines the nested class I
+ // see also #2585 marker in javaSig: there, type arguments must be included (use pre.baseType(cls.owner))
+ // requires cls.isClass
+ @inline private def rebindInnerClass(pre: Type, cls: Symbol): Type =
+ if(cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
+
/** <p>
* The erasure <code>|T|</code> of a type <code>T</code>. This is:
* </p>
@@ -90,7 +98,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* - For a typeref scala.Array+[T] where T is not an abstract type, scala.Array+[|T|].
* - For a typeref scala.Any or scala.AnyVal, java.lang.Object.
* - For a typeref scala.Unit, scala.runtime.BoxedUnit.
- * - For a typeref P.C[Ts] where C refers to a class, |P|.C.
+ * - For a typeref P.C[Ts] where C refers to a class, |P|.C. (Where P is first rebound to the class that directly defines C.)
* - For a typeref P.C[Ts] where C refers to an alias type, the erasure of C's alias.
* - For a typeref P.C[Ts] where C refers to an abstract type, the
* erasure of C's upper bound.
@@ -109,6 +117,29 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* </ul>
*/
val erasure = new TypeMap {
+
+ // Compute the dominant part of the intersection type with given `parents` according to new spec.
+ def intersectionDominator(parents: List[Type]): Type =
+ if (parents.isEmpty) ObjectClass.tpe
+ else {
+ val psyms = parents map (_.typeSymbol)
+ if (psyms contains ArrayClass) {
+ // treat arrays specially
+ arrayType(
+ intersectionDominator(
+ parents filter (_.typeSymbol == ArrayClass) map (_.typeArgs.head)))
+ } else {
+ // implement new spec for erasure of refined types.
+ def isUnshadowed(psym: Symbol) =
+ !(psyms exists (qsym => (psym ne qsym) && (qsym isNonBottomSubClass psym)))
+ val cs = parents.iterator.filter { p => // isUnshadowed is a bit expensive, so try classes first
+ val psym = p.typeSymbol
+ psym.isClass && !psym.isTrait && isUnshadowed(psym)
+ }
+ (if (cs.hasNext) cs else parents.iterator.filter(p => isUnshadowed(p.typeSymbol))).next()
+ }
+ }
+
def apply(tp: Type): Type = {
tp match {
case ConstantType(_) =>
@@ -120,11 +151,11 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
else if (args.head.typeSymbol == NothingClass || args.head.typeSymbol == NullClass) arrayType(ObjectClass.tpe)
else typeRef(apply(pre), sym, args map this)
- else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) erasedTypeRef(ObjectClass)
+ else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) erasedTypeRef(ObjectClass)
else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
- else if (sym.isClass)
- typeRef(apply(if (sym.owner.isClass) sym.owner.tpe else pre), sym, List())
- else apply(sym.info)
+ else if (sym.isRefinementClass) apply(intersectionDominator(tp.parents))
+ else if (sym.isClass) typeRef(apply(rebindInnerClass(pre, sym)), sym, List()) // #2585
+ else apply(sym.info) // alias type or abstract type
case PolyType(tparams, restpe) =>
apply(restpe)
case ExistentialType(tparams, restpe) =>
@@ -134,13 +165,13 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
cloneSymbols(params) map (p => p.setInfo(apply(p.tpe))),
if (restpe.typeSymbol == UnitClass)
erasedTypeRef(UnitClass)
- else if (settings.Xexperimental.value)
- apply(mt.resultType(params map (_.tpe))) // this gets rid of DeBruijnTypes
+ else if (settings.YdepMethTpes.value)
+ // this replaces each typeref that refers to an argument by the type `p.tpe` of the actual argument p (p in params)
+ apply(mt.resultType(params map (_.tpe)))
else
apply(restpe))
case RefinedType(parents, decls) =>
- if (parents.isEmpty) erasedTypeRef(ObjectClass)
- else apply(parents.head)
+ apply(intersectionDominator(parents))
case AnnotatedType(_, atp, _) =>
apply(atp)
case ClassInfoType(parents, decls, clazz) =>
@@ -163,7 +194,8 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
traverse(st.supertype)
case TypeRef(pre, sym, args) =>
if (sym == ArrayClass) args foreach traverse
- else if (sym.isTypeParameterOrSkolem || sym.isExistential || !args.isEmpty) result = true
+ else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true
+ else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585
else if (!sym.owner.isPackageClass) traverse(pre)
case PolyType(_, _) | ExistentialType(_, _) =>
result = true
@@ -171,8 +203,8 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
if (!parents.isEmpty) traverse(parents.head)
case ClassInfoType(parents, _, _) =>
parents foreach traverse
- case AnnotatedType(_, atp, _) =>
- traverse(atp)
+ case AnnotatedType(_, atp, _) =>
+ traverse(atp)
case _ =>
mapOver(tp)
}
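
The hunks above thread rebindInnerClass (ticket #2585) through both the erasure map and NeedsSigCollector: when a nested class is reached through a prefix that merely inherits it, the prefix is rebound to the class that actually defines it before erasing or deciding whether a generic signature is needed. A minimal sketch of the kind of selection involved follows; the names C, D, I and the reflection probe are invented for illustration and are not part of the patch.

    // Hypothetical shape for #2585: the inner class I is defined by C[T] but is
    // selected through `d`, whose class D only inherits it, so the emitted
    // signature has to refer back to C.I rather than D.I.
    class C[T] { class I }
    class D extends C[String]

    object InnerPrefixDemo {
      val d = new D
      def mk: d.I = new d.I

      def main(args: Array[String]): Unit =
        // toGenericString decodes the Signature attribute written by javaSig.
        println(getClass.getMethods.find(_.getName == "mk").get.toGenericString)
    }
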
@@ -180,56 +212,138 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
}
- private def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp)
+ def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp)
- private lazy val tagOfClass = Map[Symbol,Char](
- ByteClass -> BYTE_TAG,
- CharClass -> CHAR_TAG,
- DoubleClass -> DOUBLE_TAG,
- FloatClass -> FLOAT_TAG,
- IntClass -> INT_TAG,
- LongClass -> LONG_TAG,
- ShortClass -> SHORT_TAG,
- BooleanClass -> BOOL_TAG,
- UnitClass -> VOID_TAG
+ // only refer to type params that will actually make it into the sig, this excludes:
+ // * higher-order type parameters (aka !sym.owner.isTypeParameterOrSkolem)
+ // * parameters of methods
+ // * type members not visible in the enclosing template
+ private def isTypeParameterInSig(sym: Symbol, initialSymbol: Symbol) = (
+ !sym.isHigherOrderTypeParameter &&
+ sym.isTypeParameterOrSkolem && (
+ (initialSymbol.enclClassChain.exists(sym isNestedIn _)) ||
+ (initialSymbol.isMethod && initialSymbol.typeParams.contains(sym))
+ )
)
+ // Ensure every '.' in the generated signature immediately follows
+ // a close angle bracket '>'. Any which do not are replaced with '$'.
+ // This arises due to multiply nested classes in the face of the
+ // rewriting explained at rebindInnerClass. This should be done in a
+ // more rigorous way up front rather than catching it after the fact,
+ // but that will be more involved.
+ private def dotCleanup(sig: String): String = {
+ var last: Char = '\0'
+ sig map {
+ case '.' if last != '>' => last = '.' ; '$'
+ case ch => last = ch ; ch
+ }
+ }
+
+ /** This object is only used for sanity testing when -check:genjvm is set.
+ * In that case we make sure that the erasure of the `normalized' type
+ * is the same as the erased type that's generated. Normalization means
+ * unboxing some primitive types and further simplifications as they are done in jsig.
+ */
+ val prepareSigMap = new TypeMap {
+ def squashBoxed(tp: Type): Type = tp.normalize match {
+ case t @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve squashBoxed
+ if (parents1 eq parents) tp
+ else RefinedType(parents1, decls)
+ case t @ ExistentialType(tparams, tpe) =>
+ val tpe1 = squashBoxed(tpe)
+ if (tpe1 eq tpe) t
+ else ExistentialType(tparams, tpe1)
+ case t =>
+ if (boxedClass contains t.typeSymbol) ObjectClass.tpe
+ else tp
+ }
+ def apply(tp: Type): Type = tp.normalize match {
+ case tp1 @ TypeBounds(lo, hi) =>
+ val lo1 = squashBoxed(apply(lo))
+ val hi1 = squashBoxed(apply(hi))
+ if ((lo1 eq lo) && (hi1 eq hi)) tp1
+ else TypeBounds(lo1, hi1)
+ case tp1 @ TypeRef(pre, sym, args) =>
+ def argApply(tp: Type) = {
+ val tp1 = apply(tp)
+ if (tp1.typeSymbol == UnitClass) ObjectClass.tpe
+ else squashBoxed(tp1)
+ }
+ if (sym == ArrayClass && args.nonEmpty)
+ if (unboundedGenericArrayLevel(tp1) == 1) ObjectClass.tpe
+ else mapOver(tp1)
+ else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
+ ObjectClass.tpe
+ else if (sym == UnitClass)
+ BoxedUnitClass.tpe
+ else if (sym == NothingClass)
+ RuntimeNothingClass.tpe
+ else if (sym == NullClass)
+ RuntimeNullClass.tpe
+ else {
+ val pre1 = apply(pre)
+ val args1 = args mapConserve argApply
+ if ((pre1 eq pre) && (args1 eq args)) tp1
+ else TypeRef(pre1, sym, args1)
+ }
+ case tp1 @ MethodType(params, restpe) =>
+ val params1 = mapOver(params)
+ val restpe1 = if (restpe.normalize.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
+ if ((params1 eq params) && (restpe1 eq restpe)) tp1
+ else MethodType(params1, restpe1)
+ case tp1 @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve apply
+ if (parents1 eq parents) tp1
+ else RefinedType(parents1, decls)
+ case t @ ExistentialType(tparams, tpe) =>
+ val tpe1 = apply(tpe)
+ if (tpe1 eq tpe) t
+ else ExistentialType(tparams, tpe1)
+ case tp1: ClassInfoType =>
+ tp1
+ case tp1 =>
+ mapOver(tp1)
+ }
+ }
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
+ def javaSig(sym0: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
+ def boxedSig(tp: Type) = jsig(tp, primitiveOK = false)
- def jsig(tp: Type): String = jsig2(false, List(), tp)
+ def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
+ case RefinedType(parents, _) => parents map normalize
+ case tp => tp :: Nil
+ }
- def jsig2(toplevel: Boolean, tparams: List[Symbol], tp0: Type): String = {
+ def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
val tp = tp0.dealias
tp match {
case st: SubType =>
- jsig2(toplevel, tparams, st.supertype)
+ jsig(st.supertype, existentiallyBound, toplevel, primitiveOK)
case ExistentialType(tparams, tpe) =>
- jsig2(toplevel, tparams, tpe)
+ jsig(tpe, tparams, toplevel, primitiveOK)
case TypeRef(pre, sym, args) =>
def argSig(tp: Type) =
- if (tparams contains tp.typeSymbol) {
+ if (existentiallyBound contains tp.typeSymbol) {
val bounds = tp.typeSymbol.info.bounds
- if (!(AnyRefClass.tpe <:< bounds.hi)) "+"+jsig(bounds.hi)
- else if (!(bounds.lo <:< NullClass.tpe)) "-"+jsig(bounds.lo)
+ if (!(AnyRefClass.tpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
+ else if (!(bounds.lo <:< NullClass.tpe)) "-" + boxedSig(bounds.lo)
else "*"
- } else if (tp.typeSymbol == UnitClass) {
- jsig(ObjectClass.tpe)
} else {
- boxedClass get tp.typeSymbol match {
- case Some(boxed) => jsig(boxed.tpe)
- case None => jsig(tp)
- }
+ boxedSig(tp)
}
def classSig: String =
- "L"+atPhase(currentRun.icodePhase)(sym.fullNameString + global.genJVM.moduleSuffix(sym)).replace('.', '/')
+ "L"+atPhase(currentRun.icodePhase)(sym.fullName + global.genJVM.moduleSuffix(sym)).replace('.', '/')
def classSigSuffix: String =
"."+sym.name
- if (sym == ArrayClass)
- ARRAY_TAG.toString+(args map jsig).mkString
- else if (sym.isTypeParameterOrSkolem && !sym.owner.isTypeParameterOrSkolem /*not a higher-order type parameter, as these are suppressed*/)
+ if (sym == ArrayClass) {
+ if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectClass.tpe)
+ else ARRAY_TAG.toString+(args map (jsig(_))).mkString
+ }
+ else if (isTypeParameterInSig(sym, sym0.enclClass))
TVAR_TAG.toString+sym.name+";"
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
jsig(ObjectClass.tpe)
@@ -239,44 +353,57 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
jsig(RuntimeNothingClass.tpe)
else if (sym == NullClass)
jsig(RuntimeNullClass.tpe)
- else if (isValueClass(sym))
- tagOfClass(sym).toString
- else if (sym.isClass)
- {
- if (needsJavaSig(pre)) {
- val s = jsig(pre)
- if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + classSigSuffix
- else classSig
- } else classSig
- } + {
- if (args.isEmpty) "" else "<"+(args map argSig).mkString+">"
- } + ";"
+ else if (isValueClass(sym)) {
+ if (!primitiveOK) jsig(ObjectClass.tpe)
+ else if (sym == UnitClass) jsig(BoxedUnitClass.tpe)
+ else abbrvTag(sym).toString
+ }
+ else if (sym.isClass) {
+ val preRebound = pre.baseType(sym.owner) // #2585
+ dotCleanup(
+ (
+ if (needsJavaSig(preRebound)) {
+ val s = jsig(preRebound, existentiallyBound)
+ if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + classSigSuffix
+ else classSig
+ }
+ else classSig
+ ) + (
+ if (args.isEmpty) "" else
+ "<"+(args map argSig).mkString+">"
+ ) + (
+ ";"
+ )
+ )
+ }
else jsig(erasure(tp))
case PolyType(tparams, restpe) =>
- def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
- case RefinedType(parents, _) => parents map normalize
- case tp => List(tp)
- }
+ assert(tparams.nonEmpty)
def boundSig(bounds: List[Type]) = {
- def isClassBound(t: Type) = !t.typeSymbol.isTrait
- val classBound = bounds find isClassBound match {
- case Some(t) => jsig(t)
- case None => ""
- }
- ":"+classBound+(for (t <- bounds if !isClassBound(t)) yield ":"+jsig(t)).mkString
+ val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
+
+ ":" + (
+ if (isClass.isEmpty) "" else boxedSig(isClass.head)
+ ) + (
+ isTrait map (x => ":" + boxedSig(x)) mkString
+ )
}
- assert(!tparams.isEmpty)
- def paramSig(tsym: Symbol) = tsym.name+boundSig(hiBounds(tsym.info.bounds))
- (if (toplevel) "<"+(tparams map paramSig).mkString+">" else "")+jsig(restpe)
+ def paramSig(tsym: Symbol) = tsym.name + boundSig(hiBounds(tsym.info.bounds))
+
+ val paramString = if (toplevel) tparams map paramSig mkString ("<", "", ">") else ""
+ paramString + jsig(restpe)
case MethodType(params, restpe) =>
- "("+(params map (_.tpe) map jsig).mkString+")"+
- (if (restpe.typeSymbol == UnitClass || sym.isConstructor) VOID_TAG.toString else jsig(restpe))
- case RefinedType(parents, decls) if (!parents.isEmpty) =>
- jsig(parents.head)
+ "("+(params map (_.tpe) map (jsig(_))).mkString+")"+
+ (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
+ case RefinedType(parent :: _, decls) =>
+ boxedSig(parent)
case ClassInfoType(parents, _, _) =>
- (parents map jsig).mkString
+ (parents map (boxedSig(_))).mkString
case AnnotatedType(_, atp, _) =>
jsig(atp)
+ case BoundedWildcardType(bounds) =>
+ println("something's wrong: "+sym0+":"+sym0.tpe+" has a bounded wildcard type")
+ jsig(bounds.hi)
case _ =>
val etp = erasure(tp)
if (etp eq tp) throw new UnknownSig
@@ -284,12 +411,8 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
}
if (needsJavaSig(info)) {
- try {
- //println("Java sig of "+sym+" is "+jsig2(true, List(), sym.info))//DEBUG
- Some(jsig2(true, List(), info))
- } catch {
- case ex: UnknownSig => None
- }
+ try Some(jsig(info, toplevel = true))
+ catch { case ex: UnknownSig => None }
}
else None
}
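
Beyond the structural rework, javaSig above is what feeds the JVM Signature attribute, so its output is directly observable through Java reflection. A small probe, assuming a 2.8-style build; SigDemo and its members are invented for the example.

    // Without the Signature attribute emitted by javaSig, reflection would only
    // report the erased Object/List types for these members.
    class SigDemo {
      def id[T](x: T): T = x
      def heads[A <: AnyRef](xss: List[List[A]]): List[A] = xss map (_.head)
    }

    object SigDemo {
      def main(args: Array[String]): Unit =
        classOf[SigDemo].getDeclaredMethods.toList sortBy (_.getName) foreach { m =>
          println(m.toGenericString)   // e.g. "public <T> T SigDemo.id(T)"
        }
    }
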
@@ -300,9 +423,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
def erasedTypeRef(sym: Symbol): Type =
typeRef(erasure(sym.owner.tpe), sym, List())
- /** Remove duplicate references to class Object in a list of parent classes
- * todo: needed?
- */
+ /** Remove duplicate references to class Object in a list of parent classes */
private def removeDoubleObject(tps: List[Type]): List[Type] = tps match {
case List() => List()
case tp :: tps1 =>
@@ -340,7 +461,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else if (sym == Object_isInstanceOf || sym == ArrayClass)
PolyType(sym.info.typeParams, erasure(sym.info.resultType))
else if (sym.isAbstractType)
- mkTypeBounds(WildcardType, WildcardType)
+ TypeBounds(WildcardType, WildcardType)
else if (sym.isTerm && sym.owner == ArrayClass) {
if (sym.isClassConstructor)
tp match {
@@ -398,6 +519,24 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
override def newTyper(context: Context) = new Eraser(context)
+ /** An extractor object for boxed expressions
+ object Boxed {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case LabelDef(name, params, Boxed(rhs)) =>
+ Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe)
+ case Select(_, _) if tree.symbol == BoxedUnit_UNIT =>
+ Some(Literal(()) setPos tree.pos setType UnitClass.tpe)
+ case Block(List(unboxed), ret @ Select(_, _)) if ret.symbol == BoxedUnit_UNIT =>
+ Some(if (unboxed.tpe.typeSymbol == UnitClass) tree
+ else Block(List(unboxed), Literal(()) setPos tree.pos setType UnitClass.tpe))
+ case Apply(fn, List(unboxed)) if isBox(fn.symbol) =>
+ Some(unboxed)
+ case _ =>
+ None
+ }
+ }
+ */
+
/** The modifier typer which retypes with erased types. */
class Eraser(context: Context) extends Typer(context) {
@@ -413,21 +552,17 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else BLOCK(tree, REF(BoxedUnit_UNIT))
case x =>
assert(x != ArrayClass)
- (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
+ tree match {
+ case Apply(boxFun, List(arg)) if (isUnbox(tree.symbol)) =>
+ log("boxing an unbox: " + tree)
+ log("replying with " + arg)
+ arg
+ case _ =>
+ (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
+ }
})
}
- /** generate ScalaRuntime.boxArray(tree)
- * !!! todo: optimize this in case the runtime type is known
- */
- private def boxArray(tree: Tree): Tree = tree match {
- case LabelDef(name, params, rhs) =>
- val rhs1 = boxArray(rhs)
- treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
- case _ =>
- typedPos(tree.pos) { gen.mkRuntimeCall(nme.boxArray, List(tree)) }
- }
-
/** Unbox <code>tree</code> of boxed type to expected type <code>pt</code>.
*
* @param tree the given tree
@@ -435,6 +570,11 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* @return the unboxed tree
*/
private def unbox(tree: Tree, pt: Type): Tree = tree match {
+/*
+ case Boxed(unboxed) =>
+ println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
+ adaptToType(unboxed, pt)
+ */
case LabelDef(name, params, rhs) =>
val rhs1 = unbox(rhs, pt)
treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
@@ -450,28 +590,18 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
/** Generate a synthetic cast operation from <code>tree.tpe</code> to <code>pt</code>.
+ * @pre pt eq pt.normalize
*/
private def cast(tree: Tree, pt: Type): Tree =
tree AS_ATTR pt
- /** Is symbol a member of unboxed arrays (which will be expanded directly
- * later)?
- *
- * @param sym ..
- * @return <code>true</code> if ..
- */
- private def isUnboxedArrayMember(sym: Symbol) = sym.name match {
- case nme.apply | nme.length | nme.update => true
- case _ => sym.owner == ObjectClass
- }
-
private def isUnboxedValueMember(sym: Symbol) =
sym != NoSymbol && isValueClass(sym.owner)
/** Adapt <code>tree</code> to expected type <code>pt</code>.
*
* @param tree the given tree
- * @param pt the expected type.
+ * @param pt the expected type
* @return the adapted tree
*/
private def adaptToType(tree: Tree, pt: Type): Tree = {
@@ -481,7 +611,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
tree
else if (isValueClass(tree.tpe.typeSymbol) && !isValueClass(pt.typeSymbol))
adaptToType(box(tree), pt)
- else if (tree.tpe.isInstanceOf[MethodType] && tree.tpe.paramTypes.isEmpty) {
+ else if (tree.tpe.isInstanceOf[MethodType] && tree.tpe.params.isEmpty) {
if (!tree.symbol.isStable) assert(false, "adapt "+tree+":"+tree.tpe+" to "+pt)
adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
} else if (pt <:< tree.tpe)
@@ -552,7 +682,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
//Console.println("adaptMember: " + tree);
tree match {
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List()) if tree.symbol == Any_asInstanceOf =>
- val qual1 = typedQualifier(qual)
+ val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
val qualClass = qual1.tpe.typeSymbol
val targClass = targ.tpe.typeSymbol
/*
@@ -573,7 +703,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else if (tree.symbol.owner == AnyClass)
adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, name))))
else {
- var qual1 = typedQualifier(qual);
+ var qual1 = typedQualifier(qual)
if ((isValueClass(qual1.tpe.typeSymbol) && !isUnboxedValueMember(tree.symbol)))
qual1 = box(qual1)
else if (!isValueClass(qual1.tpe.typeSymbol) && isUnboxedValueMember(tree.symbol))
@@ -581,7 +711,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
if (isValueClass(tree.symbol.owner) && !isValueClass(qual1.tpe.typeSymbol))
tree.symbol = NoSymbol
- else if (qual1.tpe.isInstanceOf[MethodType] && qual1.tpe.paramTypes.isEmpty) {
+ else if (qual1.tpe.isInstanceOf[MethodType] && qual1.tpe.params.isEmpty) {
assert(qual1.symbol.isStable, qual1.symbol);
qual1 = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
@@ -622,7 +752,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
Console.println("exception when typing " + tree)
Console.println(er.msg + " in file " + context.owner.sourceFile)
er.printStackTrace
- throw new Error
+ abort()
}
def adaptCase(cdef: CaseDef): CaseDef = {
val body1 = adaptToType(cdef.body, tree1.tpe)
@@ -714,7 +844,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
val opc = new overridingPairs.Cursor(root) {
override def exclude(sym: Symbol): Boolean =
(!sym.isTerm || sym.hasFlag(PRIVATE) || super.exclude(sym)
- // specialized members have no type history before 'specialize', causing duble def errors for curried defs
+ // specialized members have no type history before 'specialize', causing double def errors for curried defs
|| !sym.hasTypeAt(currentRun.refchecksPhase.id))
override def matches(sym1: Symbol, sym2: Symbol): Boolean =
@@ -887,155 +1017,154 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* </ul>
*/
private val preTransformer = new Transformer {
- override def transform(tree: Tree): Tree = {
- if (tree.symbol == ArrayClass && !tree.isType) return tree // !!! needed?
- val tree1 = tree match {
- case ClassDef(mods, name, tparams, impl) =>
- if (settings.debug.value)
- log("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
- treeCopy.ClassDef(tree, mods, name, List(), impl)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
- case TypeDef(_, _, _, _) =>
- EmptyTree
- case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
- if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
- unboundedGenericArrayLevel(arg.tpe) > 0) =>
- val level = unboundedGenericArrayLevel(arg.tpe)
- def isArrayTest(arg: Tree) =
- gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
- typedPos(tree.pos) {
- if (level == 1) isArrayTest(qual)
- else
- gen.evalOnce(qual, currentOwner, unit) { qual1 =>
- gen.mkAnd(
- Apply(TypeApply(Select(qual1(), fun.symbol),
- List(TypeTree(erasure(arg.tpe)))),
- List()),
- isArrayTest(qual1()))
- }
- }
- case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
- fun.symbol != Object_asInstanceOf &&
- fun.symbol != Object_isInstanceOf) =>
- // leave all other type tests/type casts, remove all other type applications
- fun
- case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
- if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
- // convert calls to apply/update/length on generic arrays to
- // calls of ScalaRunTime.array_xxx method calls
- typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
+ def preErase(tree: Tree): Tree = tree match {
+ case ClassDef(mods, name, tparams, impl) =>
+ if (settings.debug.value)
+ log("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
+ treeCopy.ClassDef(tree, mods, name, List(), impl)
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
+ case TypeDef(_, _, _, _) =>
+ EmptyTree
+ case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
+ if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
+ unboundedGenericArrayLevel(arg.tpe) > 0) =>
+ val level = unboundedGenericArrayLevel(arg.tpe)
+ def isArrayTest(arg: Tree) =
+ gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
+ typedPos(tree.pos) {
+ if (level == 1) isArrayTest(qual)
else
- // store exact array erasure in map to be retrieved later when we might
- // need to do the cast in adaptMember
- treeCopy.Apply(
- tree,
- SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
- args)
- case Apply(fn, args) =>
- if (fn.symbol == Any_asInstanceOf)
- fn match {
- case TypeApply(Select(qual, _), List(targ)) =>
- if (qual.tpe <:< targ.tpe) {
- atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
- } else if (isNumericValueClass(qual.tpe.typeSymbol) &&
- isNumericValueClass(targ.tpe.typeSymbol)) {
- // convert numeric type casts
- val cname = newTermName("to" + targ.tpe.typeSymbol.name)
- val csym = qual.tpe.member(cname)
- assert(csym != NoSymbol)
- atPos(tree.pos) { Apply(Select(qual, csym), List()) }
- } else
- tree
+ gen.evalOnce(qual, currentOwner, unit) { qual1 =>
+ gen.mkAnd(
+ Apply(TypeApply(Select(qual1(), fun.symbol),
+ List(TypeTree(erasure(arg.tpe)))),
+ List()),
+ isArrayTest(qual1()))
}
- // todo: also handle the case where the singleton type is buried in a compound
- else if (fn.symbol == Any_isInstanceOf)
- fn match {
- case TypeApply(sel @ Select(qual, name), List(targ)) =>
- def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
- Apply(
- TypeApply(
- Select(q(), Object_isInstanceOf) setPos sel.pos,
- List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
- List()) setPos tree.pos
- targ.tpe match {
- case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ }
+ case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
+ fun.symbol != Object_asInstanceOf &&
+ fun.symbol != Object_isInstanceOf) =>
+ // leave all other type tests/type casts, remove all other type applications
+ preErase(fun)
+ case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
+ if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
+ // convert calls to apply/update/length on generic arrays to
+ // calls of ScalaRunTime.array_xxx method calls
+ typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
+ else
+ // store exact array erasure in map to be retrieved later when we might
+ // need to do the cast in adaptMember
+ treeCopy.Apply(
+ tree,
+ SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
+ args)
+
+ case Apply(fn @ Select(qual, _), Nil) if (fn.symbol == Any_## || fn.symbol == Object_##) =>
+ Apply(gen.mkAttributedRef(scalaRuntimeHash), List(qual))
+
+ case Apply(fn, args) =>
+ if (fn.symbol == Any_asInstanceOf)
+ fn match {
+ case TypeApply(Select(qual, _), List(targ)) =>
+ if (qual.tpe <:< targ.tpe) {
+ atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
+ } else if (isNumericValueClass(qual.tpe.typeSymbol) &&
+ isNumericValueClass(targ.tpe.typeSymbol)) {
+ // convert numeric type casts
+ val cname = newTermName("to" + targ.tpe.typeSymbol.name)
+ val csym = qual.tpe.member(cname)
+ assert(csym != NoSymbol)
+ atPos(tree.pos) { Apply(Select(qual, csym), List()) }
+ } else
+ tree
+ }
+ // todo: also handle the case where the singleton type is buried in a compound
+ else if (fn.symbol == Any_isInstanceOf)
+ fn match {
+ case TypeApply(sel @ Select(qual, name), List(targ)) =>
+ def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
+ Apply(
+ TypeApply(
+ Select(q(), Object_isInstanceOf) setPos sel.pos,
+ List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
+ List()) setPos tree.pos
+ targ.tpe match {
+ case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
+ val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ atPos(tree.pos) {
+ Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
+ }
+ case RefinedType(parents, decls) if (parents.length >= 2) =>
+ gen.evalOnce(qual, currentOwner, unit) { q =>
atPos(tree.pos) {
- Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
- }
- case RefinedType(parents, decls) if (parents.length >= 2) =>
- gen.evalOnce(qual, currentOwner, unit) { q =>
- atPos(tree.pos) {
- parents map mkIsInstanceOf(q) reduceRight gen.mkAnd
- }
+ parents map mkIsInstanceOf(q) reduceRight gen.mkAnd
}
- case _ =>
- tree
- }
- case _ => tree
- }
- else {
- def doDynamic(fn: Tree, qual: Tree): Tree = {
- if (fn.symbol.owner.isRefinementClass && fn.symbol.allOverriddenSymbols.isEmpty)
- ApplyDynamic(qual, args) setSymbol fn.symbol setPos tree.pos
- else tree
- }
- fn match {
- case Select(qual, _) => doDynamic(fn, qual)
- case TypeApply(fni@Select(qual, _), _) => doDynamic(fni, qual)// type parameters are irrelevant in case of dynamic call
- case _ =>
- tree
- }
+ }
+ case _ =>
+ tree
+ }
+ case _ => tree
}
-
- case Select(_, _) =>
- if (tree.symbol.owner.isRefinementClass) {
- val overridden = tree.symbol.allOverriddenSymbols
- assert(!overridden.isEmpty, tree.symbol)
- tree.symbol = overridden.head
+ else {
+ def doDynamic(fn: Tree, qual: Tree): Tree = {
+ if (fn.symbol.owner.isRefinementClass && fn.symbol.allOverriddenSymbols.isEmpty)
+ ApplyDynamic(qual, args) setSymbol fn.symbol setPos tree.pos
+ else tree
}
- tree
+ fn match {
+ case Select(qual, _) => doDynamic(fn, qual)
+ case TypeApply(fni@Select(qual, _), _) => doDynamic(fni, qual)// type parameters are irrelevant in case of dynamic call
+ case _ =>
+ tree
+ }
+ }
- case Template(parents, self, body) =>
- assert(!currentOwner.isImplClass)
- //Console.println("checking no dble defs " + tree)//DEBUG
- checkNoDoubleDefs(tree.symbol.owner)
- treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
+ case Select(_, _) =>
+ // println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
+ if (tree.symbol.owner.isRefinementClass) {
+ val overridden = tree.symbol.allOverriddenSymbols
+ assert(!overridden.isEmpty, tree.symbol)
+ tree.symbol = overridden.head
+ }
+ tree
- case Match(selector, cases) =>
- Match(Typed(selector, TypeTree(selector.tpe)), cases)
+ case Template(parents, self, body) =>
+ assert(!currentOwner.isImplClass)
+ //Console.println("checking no dble defs " + tree)//DEBUG
+ checkNoDoubleDefs(tree.symbol.owner)
+ treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
- case Literal(ct) if ct.tag == ClassTag
- && ct.typeValue.typeSymbol != definitions.UnitClass =>
- treeCopy.Literal(tree, Constant(erasure(ct.typeValue)))
+ case Match(selector, cases) =>
+ Match(Typed(selector, TypeTree(selector.tpe)), cases)
- case _ =>
- tree
- }
- tree1 match {
- case EmptyTree | TypeTree() =>
- tree1 setType erasure(tree1.tpe)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- val result = super.transform(tree1) setType null
- tpt.tpe = erasure(tree.symbol.tpe).resultType
- result
- case _ =>
- case class MyError(count : Int, ex : AssertionError) extends Error(ex.getMessage)
- try {
+ case Literal(ct) if ct.tag == ClassTag
+ && ct.typeValue.typeSymbol != definitions.UnitClass =>
+ treeCopy.Literal(tree, Constant(erasure(ct.typeValue)))
+
+ case _ =>
+ tree
+ }
+
+ override def transform(tree: Tree): Tree =
+ if (tree.symbol == ArrayClass && !tree.isType) tree // !!! needed?
+ else {
+ val tree1 = preErase(tree)
+ // println("preErase: "+ tree +" = "+ tree1)
+ val res = tree1 match {
+ case EmptyTree | TypeTree() =>
+ tree1 setType erasure(tree1.tpe)
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ val result = super.transform(tree1) setType null
+ tpt.tpe = erasure(tree1.symbol.tpe).resultType
+ result
+ case _ =>
super.transform(tree1) setType null
- } catch {
- case e @ MyError(n, ex) if n > 5 => throw e
- case MyError(n,ex) =>
- Console.println(tree1)
- throw MyError(n + 1, ex)
-// case ex : AssertionError =>
-// Console.println(tree1)
-// throw MyError(0, ex)
-// case ex => throw ex
- }
+ }
+ // println("xform: "+ res)
+ res
}
- }
}
/** The main transform function: Pretransform the tree, and then
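
The most visible behavioural change in this file is the refined-type rule: erasure of an intersection now goes through intersectionDominator instead of simply taking parents.head. A minimal sketch of the effect, with invented types (Logging, Animal, Dog); under the previous rule the parameter below would have erased to the first parent, the Logging interface.

    // With the dominator rule the unshadowed class Animal wins over the trait
    // Logging, so `Logging with Animal` erases to Animal at the bytecode level.
    trait Logging
    abstract class Animal { def name: String }
    class Dog extends Animal with Logging { def name = "dog" }

    object IntersectionErasureDemo {
      def describe(x: Logging with Animal): String = x.name

      def main(args: Array[String]): Unit = {
        println(describe(new Dog))
        val m = getClass.getMethods.find(_.getName == "describe").get
        println(m.getParameterTypes.toList)   // expected: List(class Animal)
      }
    }
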
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index f8221d13e0..da3f71456f 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -62,8 +61,14 @@ abstract class ExplicitOuter extends InfoTransform
result
}
+ /** Issue a migration warning for instance checks which might be on an Array and
+ * for which the type parameter conforms to Seq, because these answers changed in 2.8.
+ */
+ def isArraySeqTest(lhs: Type, rhs: Type) =
+ ArrayClass.tpe <:< lhs.widen && rhs.widen.matchesPattern(SeqClass.tpe)
+
def outerAccessor(clazz: Symbol): Symbol = {
- val firstTry = clazz.info.decl(clazz expandedName nme.OUTER)
+ val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
else clazz.info.decls find (_.outerSource == clazz) getOrElse NoSymbol
}
@@ -84,7 +89,7 @@ abstract class ExplicitOuter extends InfoTransform
* <p>
* Add an outer accessor <code>$outer$$C</code> to every inner class
* with fully qualified name <code>C</code> that is not an interface.
- * The outer accesssor is abstract for traits, concrete for other
+ * The outer accessor is abstract for traits, concrete for other
* classes.
* </p>
* <p>
@@ -105,12 +110,9 @@ abstract class ExplicitOuter extends InfoTransform
def transformInfo(sym: Symbol, tp: Type): Type = tp match {
case MethodType(params, restpe1) =>
val restpe = transformInfo(sym, restpe1)
- if (sym.owner.isTrait && ((sym hasFlag SUPERACCESSOR) || sym.isModule)) { // 5
+ if (sym.owner.isTrait && ((sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isModule)) { // 5
sym.makeNotPrivate(sym.owner)
}
- // moved form the term transformer
- if (sym.owner.isTrait && (sym hasFlag (ACCESSOR | SUPERACCESSOR)))
- sym.makeNotPrivate(sym.owner); //(2)
if (sym.owner.isTrait && (sym hasFlag PROTECTED)) sym setFlag notPROTECTED // 6
if (sym.isClassConstructor && isInner(sym.owner)) { // 1
val p = sym.newValueParameter(sym.pos, "arg" + nme.OUTER)
@@ -154,7 +156,15 @@ abstract class ExplicitOuter extends InfoTransform
if (restp eq restp1) tp else PolyType(tparams, restp1)
case _ =>
- tp
+ // Local fields of traits need to be unconditionally unprivatized.
+ // Reason: Those fields might need to be unprivatized if referenced by an inner class.
+ // On the other hand, mixing in the trait into a separately compiled
+ // class needs to have a common naming scheme, independently of whether
+ // the field was accessed from an inner class or not. See #2946
+ if (sym.owner.isTrait && sym.hasFlag(LOCAL) &&
+ (sym.getter(sym.owner.toInterface) == NoSymbol))
+ sym.makeNotPrivate(sym.owner)
+ tp
}
/** A base class for transformers that maintain <code>outerParam</code>
@@ -291,7 +301,6 @@ abstract class ExplicitOuter extends InfoTransform
* </p>
*/
class ExplicitOuterTransformer(unit: CompilationUnit) extends OuterPathTransformer(unit) {
-
/** The definition tree of the outer accessor of current class
*/
def outerFieldDef: Tree = VAL(outerField(currentClass)) === EmptyTree
@@ -320,7 +329,9 @@ abstract class ExplicitOuter extends InfoTransform
val path =
if (mixinClass.owner.isTerm) THIS(mixinClass.owner.enclClass)
else gen.mkAttributedQualifier(currentClass.thisType baseType mixinClass prefix)
- val rhs = ExplicitOuterTransformer.this.transform(path)
+ // Need to cast for nested outer refs in presence of self-types. See ticket #3274.
+ val rhs = gen.mkAsInstanceOf(ExplicitOuterTransformer.this.transform(path),
+ outerAcc.info.resultType)
// @S: atPos not good enough because of nested atPos in DefDef method, which gives position from wrong class!
rhs setPos currentClass.pos
@@ -350,8 +361,7 @@ abstract class ExplicitOuter extends InfoTransform
localTyper typed (DEF(method) === {
new ChangeOwnerTraverser(currentOwner, method) traverse guard
- new TreeSymSubstituter(vs, method.paramss.head) traverse guard
- guard
+ new TreeSymSubstituter(vs, method.paramss.head) transform (guard)
})
}
@@ -479,8 +489,14 @@ abstract class ExplicitOuter extends InfoTransform
matchTranslation(mch)
case _ =>
- val x = super.transform(tree)
+ if (settings.Xmigration28.value) tree match {
+ case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
+ if (isArraySeqTest(qual.tpe, args.head.tpe))
+ unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
+ case _ => ()
+ }
+ val x = super.transform(tree)
if (x.tpe eq null) x
else x setType transformInfo(currentOwner, x.tpe)
}
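
The migration check added above (isArraySeqTest plus the warning in the transformer) targets instance tests whose answer changed in 2.8, where Array is a plain Java array rather than a Seq. A minimal sketch of the kind of test it flags; the demo object is invented, and the printed result assumes a 2.8 compiler (2.7 answered true here).

    object ArraySeqMigrationDemo {
      def main(args: Array[String]): Unit = {
        // The shape isArraySeqTest looks for: the static type of `xs` could be
        // an Array, and the tested type conforms to Seq.
        val xs: Any = Array(1, 2, 3)
        println(xs.isInstanceOf[Seq[_]])   // false on 2.8; warned under -Xmigration
      }
    }
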
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index f5650a74a6..1bee37bfe6 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index 42c2d1ee43..fa93a9b534 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -23,7 +22,7 @@ trait InfoTransform extends Transform {
new Phase(prev)
protected def changesBaseClasses = true
- protected def keepsTypeParams = false
+ protected def keepsTypeParams = true
class Phase(prev: scala.tools.nsc.Phase) extends super.Phase(prev) {
override val keepsTypeParams = InfoTransform.this.keepsTypeParams
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index a6e639b8e7..12c85504cf 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -11,7 +10,6 @@ import symtab._
import Flags._
import util.TreeSet
import scala.collection.mutable.{HashMap, LinkedHashMap, ListBuffer}
-import scala.tools.nsc.util.{Position, NoPosition}
abstract class LambdaLift extends InfoTransform {
import global._
@@ -151,19 +149,23 @@ abstract class LambdaLift extends InfoTransform {
// The param symbol in the MethodType should not be renamed, only the symbol in scope. This way,
// parameter names for named arguments are not changed. Example: without cloning the MethodType,
// def closure(x: Int) = { () => x }
- // would have the signatrue
+ // would have the signature
// closure: (x$1: Int)() => Int
if (sym.hasFlag(PARAM) && sym.owner.info.paramss.exists(_.contains(sym)))
sym.owner.setInfo(sym.owner.info.cloneInfo(sym.owner))
}
changedFreeVars = true
if (settings.debug.value) log("" + sym + " is free in " + owner);
- if (sym.isVariable && !(sym hasFlag CAPTURED)) {
+ if ((sym.isVariable || (sym.isValue && sym.isLazy)) && !sym.hasFlag(CAPTURED)) {
sym setFlag CAPTURED
val symClass = sym.tpe.typeSymbol;
atPhase(phase.next) {
sym updateInfo (
- if (isValueClass(symClass)) refClass(symClass).tpe else ObjectRefClass.tpe)
+ if (sym.hasAnnotation(VolatileAttr))
+ if (isValueClass(symClass)) volatileRefClass(symClass).tpe else VolatileObjectRefClass.tpe
+ else
+ if (isValueClass(symClass)) refClass(symClass).tpe else ObjectRefClass.tpe
+ )
}
}
}
@@ -332,6 +334,17 @@ abstract class LambdaLift extends InfoTransform {
lifted(MethodType(sym.info.params ::: addParams, sym.info.resultType)))
treeCopy.DefDef(tree, mods, name, tparams, List(vparams ::: freeParams), tpt, rhs)
case ClassDef(mods, name, tparams, impl @ Template(parents, self, body)) =>
+ // Disabled attempt to add getters to freeParams
+ // this does not work yet. Problem is that local symbols need local names
+ // and references to local symbols need to be transformed into
+ // method calls to setters.
+ // def paramGetter(param: Symbol): Tree = {
+ // val getter = param.newGetter setFlag TRANS_FLAG resetFlag PARAMACCESSOR // mark because we have to add them to interface
+ // sym.info.decls.enter(getter)
+ // val rhs = Select(gen.mkAttributedThis(sym), param) setType param.tpe
+ // DefDef(getter, rhs) setPos tree.pos setType NoType
+ // }
+ // val newDefs = if (sym.isTrait) freeParams ::: (ps map paramGetter) else freeParams
treeCopy.ClassDef(tree, mods, name, tparams,
treeCopy.Template(impl, parents, self, body ::: freeParams))
}
@@ -339,6 +352,38 @@ abstract class LambdaLift extends InfoTransform {
tree
}
+/* Something like this will be necessary to eliminate the implementation
+ * restriction from paramGetter above:
+ * We need to pass getters to the interface of an implementation class.
+ private def fixTraitGetters(lifted: List[Tree]): List[Tree] =
+ for (stat <- lifted) yield stat match {
+ case ClassDef(mods, name, tparams, templ @ Template(parents, self, body))
+ if stat.symbol.isTrait && !stat.symbol.isImplClass =>
+ val iface = stat.symbol
+ lifted.find(l => l.symbol.isImplClass && l.symbol.toInterface == iface) match {
+ case Some(implDef) =>
+ val impl = implDef.symbol
+ val implGetters = impl.info.decls.toList filter (_ hasFlag TRANS_FLAG)
+ if (implGetters.nonEmpty) {
+ val ifaceGetters = implGetters map { ig =>
+ ig resetFlag TRANS_FLAG
+ val getter = ig cloneSymbol iface setFlag DEFERRED
+ iface.info.decls enter getter
+ getter
+ }
+ val ifaceGetterDefs = ifaceGetters map (DefDef(_, EmptyTree) setType NoType)
+ treeCopy.ClassDef(
+ stat, mods, name, tparams,
+ treeCopy.Template(templ, parents, self, body ::: ifaceGetterDefs))
+ } else
+ stat
+ case None =>
+ stat
+ }
+ case _ =>
+ stat
+ }
+*/
private def liftDef(tree: Tree): Tree = {
val sym = tree.symbol
if (sym.owner.isAuxiliaryConstructor && sym.isMethod) // # bug 1909
@@ -400,6 +445,13 @@ abstract class LambdaLift extends InfoTransform {
if (elemTree.tpe.typeSymbol != tp.typeSymbol) gen.mkAttributedCast(elemTree, tp) else elemTree
}
else tree1
+ case Block(stats, expr0) =>
+ val (lzyVals, rest) = stats.partition {
+ case stat@ValDef(_, _, _, _) if stat.symbol.isLazy => true
+ case stat@ValDef(_, _, _, _) if stat.symbol.hasFlag(MODULEVAR) => true
+ case _ => false
+ }
+ treeCopy.Block(tree, lzyVals:::rest, expr0)
case _ =>
tree
}
@@ -412,7 +464,7 @@ abstract class LambdaLift extends InfoTransform {
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
def addLifted(stat: Tree): Tree = stat match {
case ClassDef(mods, name, tparams, impl @ Template(parents, self, body)) =>
- val lifted = liftedDefs(stat.symbol).toList map addLifted
+ val lifted = /*fixTraitGetters*/(liftedDefs(stat.symbol).toList map addLifted)
val result = treeCopy.ClassDef(
stat, mods, name, tparams, treeCopy.Template(impl, parents, self, body ::: lifted))
liftedDefs -= stat.symbol
@@ -429,7 +481,7 @@ abstract class LambdaLift extends InfoTransform {
override def transformUnit(unit: CompilationUnit) {
computeFreeVars
atPhase(phase.next)(super.transformUnit(unit))
- assert(liftedDefs.size == 0, liftedDefs.keysIterator.toList)
+ assert(liftedDefs.size == 0, liftedDefs.keys.toList)
}
} // class LambdaLifter
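
The LambdaLift changes above extend CAPTURED boxing to captured local lazy vals and choose a volatile Ref cell when the variable carries @volatile. Below is a hand-written approximation of the boxing itself; scala.runtime.IntRef is the existing cell class, and counterLifted is written out by hand rather than being compiler output.

    object CapturedVarDemo {
      // Source form: the closure captures and mutates the local var `count`.
      def counter(): () => Int = {
        var count = 0
        () => { count += 1; count }
      }

      // Approximation of the lifted form: the captured var becomes a heap
      // allocated IntRef (a VolatileIntRef would be chosen for a @volatile var).
      def counterLifted(): () => Int = {
        val count = new scala.runtime.IntRef(0)
        () => { count.elem += 1; count.elem }
      }

      def main(args: Array[String]): Unit = {
        val c = counter(); val l = counterLifted()
        println(List(c(), c(), l(), l()))   // List(1, 2, 1, 2)
      }
    }
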
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 16c5f8754d..ec283f67fd 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -4,7 +4,7 @@ package transform;
import scala.tools.nsc._
import scala.collection.mutable.HashMap
-abstract class LazyVals extends Transform with ast.TreeDSL {
+abstract class LazyVals extends Transform with TypingTransformers with ast.TreeDSL {
// inherits abstract value `global' and class `Phase' from Transform
import global._ // the global environment
@@ -17,18 +17,10 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
def newTransformer(unit: CompilationUnit): Transformer =
new LazyValues(unit)
- /** Create a new phase which applies transformer */
- override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = new Phase(prev)
-
- /** The phase defined by this transform */
- class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
- def apply(unit: global.CompilationUnit): Unit = newTransformer(unit) transformUnit unit
- }
-
/**
* Transform local lazy accessors to check for the initialized bit.
*/
- class LazyValues(unit: CompilationUnit) extends Transformer {
+ class LazyValues(unit: CompilationUnit) extends TypingTransformer(unit) {
/** map from method symbols to the number of lazy values it defines. */
private val lazyVals = new HashMap[Symbol, Int] {
override def default(meth: Symbol) = 0
@@ -47,8 +39,10 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
*/
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
+ curTree = tree
+
tree match {
- case DefDef(mods, name, tparams, vparams, tpt, rhs) =>
+ case DefDef(mods, name, tparams, vparams, tpt, rhs) => atOwner(tree.symbol) {
val res = if (!sym.owner.isClass && sym.hasFlag(LAZY)) {
val enclosingDummyOrMethod =
if (sym.enclMethod == NoSymbol) sym.owner else sym.enclMethod
@@ -61,8 +55,9 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
super.transform(rhs)
treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
typed(addBitmapDefs(sym, res)))
+ }
- case Template(parents, self, body) =>
+ case Template(parents, self, body) => atOwner(currentOwner) {
val body1 = super.transformTrees(body)
var added = false
val stats =
@@ -76,6 +71,7 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
stat
}
treeCopy.Template(tree, parents, self, stats)
+ }
case _ => super.transform(tree)
}
@@ -94,11 +90,12 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
}
val bmps = bitmaps(methSym) map (ValDef(_, ZERO))
+
+ def isMatch(params: List[Ident]) = (params.tail corresponds methSym.tpe.params)(_.tpe == _.tpe)
+
if (bmps.isEmpty) rhs else rhs match {
case Block(assign, l @ LabelDef(name, params, rhs1))
- if (name.toString.equals("_" + methSym.name)
- && List.forall2(params.tail, methSym.tpe.paramTypes) { (ident, tpe) => ident.tpe == tpe }) =>
- val sym = l.symbol
+ if name.toString == ("_" + methSym.name) && isMatch(params) =>
Block(assign, treeCopy.LabelDef(l, name, params, typed(prependStats(bmps, rhs1))))
case _ => prependStats(bmps, rhs)
@@ -124,7 +121,7 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
* }
* where bitmap$n is an int value acting as a bitmap of initialized values;
* the 'n' is (offset / 32), the MASK is (1 << (offset % 32)). If the value has type
- * unit, no field is used to chache the value, so the resulting code is:
+ * unit, no field is used to cache the value, so the resulting code is:
* {
* if ((bitmap$n & MASK) == 0) {
* <rhs>;
@@ -144,8 +141,10 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
}
assert(res != UNIT || meth.tpe.finalResultType.typeSymbol == UnitClass)
- atPos(tree.pos)(typed {
- def body = { IF ((Ident(bitmapSym) INT_& mask) INT_== ZERO) THEN block ENDIF }
+ val cond = (Ident(bitmapSym) INT_& mask) INT_== ZERO
+
+ atPos(tree.pos)(localTyper.typed {
+ def body = gen.mkDoubleCheckedLocking(meth.enclClass, cond, List(block), Nil)
BLOCK(body, res)
})
}
@@ -168,6 +167,10 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
bmps(n)
else {
val sym = meth.newVariable(meth.pos, nme.bitmapName(n)).setInfo(IntClass.tpe)
+ atPhase(currentRun.typerPhase) {
+ sym addAnnotation AnnotationInfo(VolatileAttr.tpe, Nil, Nil)
+ }
+
bitmaps(meth) = (sym :: bmps).reverse
sym
}
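
With the changes above, local lazy vals are initialized through gen.mkDoubleCheckedLocking over a bitmap variable that additionally receives a @volatile annotation. Below is a hand-written sketch of the shape the accessor takes; the names and the enclosing object are invented, and in the emitted code the bitmap ends up as a @volatile field rather than a plain local.

    object LocalLazyDemo {
      // Source form: the initializer must run at most once, even under races.
      def force(): Int = {
        lazy val x = { println("init"); 42 }
        x + x
      }

      // Approximation of the expansion: test the bitmap bit and only enter the
      // synchronized block (which re-checks it) while the bit is still clear.
      def forceExpanded(): Int = {
        var bitmap = 0
        var x = 0
        def xLzy(): Int = {
          if ((bitmap & 1) == 0)
            this.synchronized {
              if ((bitmap & 1) == 0) {
                x = { println("init"); 42 }
                bitmap |= 1
              }
            }
          x
        }
        xLzy() + xLzy()
      }

      def main(args: Array[String]): Unit =
        println(List(force(), forceExpanded()))   // "init" twice, then List(84, 84)
    }
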
diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
index da4961b345..a6a4a8d22c 100644
--- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala
+++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Gilles Dubochet
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -79,7 +78,7 @@ abstract class LiftCode extends Transform with Reifiers {
gen.mkAttributedRef(definitions.getModule(name))
else {
val name = className(c)
- if (name.length() == 0) throw new Error("don't know how to inject " + value)
+ if (name.length() == 0) abort("don't know how to inject " + value)
val injectedArgs = new ListBuffer[Tree]
for (i <- 0 until c.productArity)
injectedArgs += inject(c.productElement(i))
@@ -103,7 +102,7 @@ abstract class LiftCode extends Transform with Reifiers {
case null =>
gen.mkAttributedRef(definitions.getModule("scala.reflect.NoType"))
case _ =>
- throw new Error("don't know how to inject " + value)
+ abort("don't know how to inject " + value)
}
}
} // Injector
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 0daad7f2a7..e46a427e0c 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -1,15 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
import symtab._
import Flags._
-import scala.tools.nsc.util.{Position,NoPosition}
import collection.mutable.{ListBuffer, HashMap}
abstract class Mixin extends InfoTransform with ast.TreeDSL {
@@ -20,7 +18,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The name of the phase: */
val phaseName: String = "mixin"
- /** The phase might set the fiollowing new flags: */
+ /** The phase might set the following new flags: */
override def phaseNewFlags: Long = lateMODULE | notABSTRACT
/** This map contains a binding (class -> info) if
@@ -136,7 +134,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def addMember(clazz: Symbol, member: Symbol): Symbol = {
if (settings.debug.value) log("new member of " + clazz + ":" + member.defString)
clazz.info.decls enter member
- member setFlag MIXEDIN
+ member.setFlag(MIXEDIN)
}
def needsExpandedSetterName(field: Symbol) =
@@ -148,8 +146,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* only once per class. The mixedin flag is used to remember whether late
* members have been added to an interface.
* - lazy fields don't get a setter.
- *
- * @param clazz ...
*/
def addLateInterfaceMembers(clazz: Symbol) {
if ((treatedClassInfos get clazz) != Some(clazz.info)) {
@@ -176,7 +172,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
setter.setInfo(MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe))
if (needsExpandedSetterName(field)) {
//println("creating expanded setter from "+field)
- setter.name = clazz.expandedSetterName(setter.name)
+ setter.name = nme.expandedSetterName(setter.name, clazz)
}
setter
}
@@ -213,7 +209,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - if a member M of T is forwarded to the implementation class, add
* a forwarder for M unless one exists already.
* The alias of the forwarder is the static member it forwards to.
- * - for every abstract accessor in T, add a field and an implementation for that acessor
+ * - for every abstract accessor in T, add a field and an implementation for that accessor
* - for every super accessor in T, add an implementation of that accessor
* - for every module in T, add a module
*/
@@ -235,12 +231,27 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
for (member <- impl.info.decls.toList) {
if (isForwarded(member)) {
val imember = member.overriddenSymbol(iface)
- //Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
+ // atPhase(currentRun.erasurePhase){
+ // println(""+(clazz, iface, clazz.typeParams, iface.typeParams, imember, clazz.thisType.baseType(iface), clazz.thisType.baseType(iface).memberInfo(imember), imember.info substSym(iface.typeParams, clazz.typeParams) ))
+ // }
+ // Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
if (imember.overridingSymbol(clazz) == NoSymbol &&
clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives.contains(imember)) {
+ val newSym = atPhase(currentRun.erasurePhase){
+ val res = imember.cloneSymbol(clazz)
+ // since we used the member (imember) from the interface that represents the trait that's being mixed in,
+ // have to instantiate the interface type params (that may occur in imember's info) as they are seen from the class
+ // we can't use the member that we get from the implementation class, as it's a clone that was made after erasure,
+ // and thus it does not know its info at the beginning of erasure anymore
+ // optimize: no need if iface has no typeparams
+ if(iface.typeParams nonEmpty) res.setInfo(clazz.thisType.baseType(iface).memberInfo(imember))
+ res
+ } // clone before erasure got rid of type info we'll need to generate a javaSig
+ // now we'll have the type info at (the beginning of) erasure in our history,
+ newSym.updateInfo(imember.info.cloneInfo(newSym)) // and now newSym has the info that's been transformed to fit this period (no need for asSeenFrom as phase.erasedTypes)
val member1 = addMember(
clazz,
- member.cloneSymbol(clazz) setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
+ newSym setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
member1.asInstanceOf[TermSymbol] setAlias member;
}
}
@@ -391,6 +402,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
&& !(currentOwner.isGetter && currentOwner.accessed == sym) // getter
&& !definitions.isValueClass(sym.tpe.resultType.typeSymbol)
&& sym.owner == templ.symbol.owner
+ && !sym.hasFlag(LAZY)
&& !tree.isDef) {
log("added use in: " + currentOwner + " -- " + tree)
usedIn(sym) ::= currentOwner
@@ -418,7 +430,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
class MixinTransformer(unit : CompilationUnit) extends Transformer {
/** Within a static implementation method: the parameter referring to the
- * current object undefined evrywhere else.
+ * current object undefined everywhere else.
*/
private var self: Symbol = _
@@ -464,16 +476,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (!currentOwner.isTrait) addMixedinMembers(currentOwner,unit)
else if (currentOwner hasFlag lateINTERFACE) addLateInterfaceMembers(currentOwner)
tree
- case DefDef(mods, name, tparams, List(vparams), tpt, rhs) if currentOwner.isImplClass =>
- if (isImplementedStatically(sym)) {
- sym setFlag notOVERRIDE
- self = sym.newValue(sym.pos, nme.SELF)
- .setFlag(PARAM)
- .setInfo(toInterface(currentOwner.typeOfThis));
- val selfdef = ValDef(self) setType NoType
- treeCopy.DefDef(tree, mods, name, tparams, List(selfdef :: vparams), tpt, rhs)
+ case DefDef(mods, name, tparams, List(vparams), tpt, rhs) =>
+ if (currentOwner.isImplClass) {
+ if (isImplementedStatically(sym)) {
+ sym setFlag notOVERRIDE
+ self = sym.newValue(sym.pos, nme.SELF)
+ .setFlag(PARAM)
+ .setInfo(toInterface(currentOwner.typeOfThis));
+ val selfdef = ValDef(self) setType NoType
+ treeCopy.DefDef(tree, mods, name, tparams, List(selfdef :: vparams), tpt, rhs)
+ } else {
+ EmptyTree
+ }
} else {
- EmptyTree
+ if (currentOwner.isTrait && sym.isSetter && !atPhase(currentRun.picklerPhase)(sym.isDeferred)) {
+ sym.addAnnotation(AnnotationInfo(TraitSetterAnnotationClass.tpe, List(), List()))
+ }
+ tree
}
case Apply(tapp @ TypeApply(fn, List(arg)), List()) =>
if (arg.tpe.typeSymbol.isImplClass) {
@@ -571,7 +590,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val newSyms = newDefs map (_.symbol)
def isNotDuplicate(tree: Tree) = tree match {
case DefDef(_, _, _, _, _, _) =>
- val sym = tree.symbol;
+ val sym = tree.symbol
!(sym.isDeferred &&
(newSyms exists (nsym => nsym.name == sym.name && (nsym.tpe matches sym.tpe))))
case _ =>
@@ -640,7 +659,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else lhs INT_!= ZERO
}
- /** return a 'lazified' version of rhs.
+ /** return a 'lazified' version of rhs. It uses double-checked locking to ensure
+ * initialization is performed at most once. Private fields used only in this
+ * initializer are subsequently set to null.
+ *
* @param clazz The class symbol
* @param init The tree which initializes the field ( f = <rhs> )
* @param fieldSym The symbol of this lazy field
@@ -649,13 +671,15 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* The result will be a tree of the form
* {
* if ((bitmap$n & MASK) == 0) {
- * synhronized(this) {
+ * synchronized(this) {
* if ((bitmap$n & MASK) == 0) {
- * synhronized(this) {
- * init // l$ = <rhs>
- * }
+ * init // l$ = <rhs>
* bitmap$n = bitmap$n | MASK
- * }}}
+ * }
+ * }
+ * this.f1 = null
+ * ... this.fn = null
+ * }
* l$
* }
* where bitmap$n is an int value acting as a bitmap of initialized values. It is
@@ -671,21 +695,26 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset)
val mask = LIT(1 << (offset % FLAGS_PER_WORD))
def cond = mkTest(clazz, mask, bitmapSym, true)
- val nulls = (lazyValNullables(lzyVal).toList.sort(_.id < _.id) map nullify)
+ val nulls = (lazyValNullables(lzyVal).toList sortBy (_.id) map nullify)
def syncBody = init ::: List(mkSetFlag(clazz, offset), UNIT)
log("nulling fields inside " + lzyVal + ": " + nulls)
- val result =
- IF (cond) THEN BLOCK(
- (gen.mkSynchronized(
- gen mkAttributedThis clazz,
- IF (cond) THEN BLOCK(syncBody: _*) ENDIF
- )
- :: nulls): _*) ENDIF
-
+ val result = gen.mkDoubleCheckedLocking(clazz, cond, syncBody, nulls)
typedPos(init.head.pos)(BLOCK(result, retVal))
}
+ def mkInnerClassAccessorDoubleChecked(clazz: Symbol, rhs: Tree): Tree =
+ rhs match {
+ case Block(List(assign), returnTree) =>
+ val Assign(moduleVarRef, _) = assign
+ val cond = Apply(Select(moduleVarRef, nme.eq),List(Literal(Constant(null))))
+ val doubleSynchrTree = gen.mkDoubleCheckedLocking(clazz, cond, List(assign), Nil)
+ Block(List(doubleSynchrTree), returnTree)
+ case _ =>
+ assert(false, "Invalid getter " + rhs + " for module in class " + clazz)
+ EmptyTree
+ }
+
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position): Tree = {
val bitmapSym = bitmapFor(clazz, offset)
val mask = LIT(1 << (offset % FLAGS_PER_WORD))
@@ -703,6 +732,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* the class constructor is changed to set the initialized bits.
*/
def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = {
+ // TODO: not used?
def findLazyAssignment(stats: List[Tree]): Tree = (
for (s @ Assign(lhs, _) <- stats ; if lhs.symbol hasFlag LAZY) yield s
) head // if there's no assignment then it's a bug and we crash
@@ -731,28 +761,40 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case DefDef(mods, name, tp, vp, tpt, rhs) if sym.isConstructor =>
treeCopy.DefDef(stat, mods, name, tp, vp, tpt, addInitBits(clazz, rhs))
+ case DefDef(mods, name, tp, vp, tpt, rhs)
+ if settings.checkInit.value && !clazz.isTrait && sym.isSetter =>
+ val getter = sym.getter(clazz)
+ if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
+ treeCopy.DefDef(stat, mods, name, tp, vp, tpt,
+ Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter)))), UNIT))
+ else
+ stat
+ case DefDef(mods, name, tp, vp, tpt, rhs)
+ if sym.isModule && !clazz.isTrait && !sym.hasFlag(BRIDGE) =>
+ val rhs1 = mkInnerClassAccessorDoubleChecked(clazz, rhs)
+ treeCopy.DefDef(stat, mods, name, tp, vp, tpt, typedPos(stat.pos)(rhs1))
case _ => stat
}
stats1
}
- /** Does this field require an intialized bit? */
+ /** Does this field require an initialized bit? */
def needsInitFlag(sym: Symbol) = {
val res = (settings.checkInit.value
&& sym.isGetter
&& !sym.isInitializedToDefault
- && !sym.hasFlag(PARAMACCESSOR)
+ && !sym.hasFlag(PARAMACCESSOR | SPECIALIZED)
&& !sym.accessed.hasFlag(PRESUPER)
&& !sym.isOuterAccessor)
- if (settings.debug.value) {
- log("needsInitFlag(" + sym.fullNameString + "): " + res)
- log("\tsym.isGetter: " + sym.isGetter)
- log("\t!isInitializedToDefault: " + !sym.isInitializedToDefault + sym.hasFlag(DEFAULTINIT) + sym.hasFlag(ACCESSOR) + sym.isTerm)
- log("\t!sym.hasFlag(PARAMACCESSOR): " + !sym.hasFlag(PARAMACCESSOR))
- //println("\t!sym.accessed.hasFlag(PRESUPER): " + !sym.accessed.hasFlag(PRESUPER))
- log("\t!sym.isOuterAccessor: " + !sym.isOuterAccessor)
- }
+// if (settings.debug.value) {
+// log("needsInitFlag(" + sym.fullName + "): " + res)
+// log("\tsym.isGetter: " + sym.isGetter)
+// log("\t!isInitializedToDefault: " + !sym.isInitializedToDefault + sym.hasFlag(DEFAULTINIT) + sym.hasFlag(ACCESSOR) + sym.isTerm)
+// log("\t!sym.hasFlag(PARAMACCESSOR): " + !sym.hasFlag(PARAMACCESSOR))
+// //println("\t!sym.accessed.hasFlag(PRESUPER): " + !sym.accessed.hasFlag(PRESUPER))
+// log("\t!sym.isOuterAccessor: " + !sym.isOuterAccessor)
+// }
res
}
@@ -820,7 +862,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def buildFieldPositions(clazz: Symbol) {
var fields = usedBits(clazz)
for (f <- clazz.info.decls.iterator if needsInitFlag(f) || f.hasFlag(LAZY)) {
- if (settings.debug.value) log(f.fullNameString + " -> " + fields)
+ if (settings.debug.value) log(f.fullName + " -> " + fields)
fieldOffset(f) = fields
fields += 1
}
@@ -893,7 +935,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// add modules
val vdef = gen.mkModuleVarDef(sym)
addDef(position(sym), vdef)
- addDef(position(sym), gen.mkCachedModuleAccessDef(sym, vdef.symbol))
+
+ val rhs = gen.newModule(sym, vdef.symbol.tpe)
+ val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
+ val rhs1 = mkInnerClassAccessorDoubleChecked(clazz, assignAndRet)
+ addDef(position(sym), DefDef(sym, rhs1))
} else if (!sym.isMethod) {
// add fields
addDef(position(sym), ValDef(sym))
@@ -994,7 +1040,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case Super(_, mix) =>
// change super calls to methods in implementation classes to static calls.
// Transform references super.m(args) as follows:
- // - if `m' refers to a trait, insert a static call to the correspondign static
+ // - if `m' refers to a trait, insert a static call to the corresponding static
// implementation
// - otherwise return tree unchanged
if (mix == nme.EMPTY.toTypeName && currentOwner.enclClass.isImplClass)
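
The shape described in the mkLazyDef comment above can be pictured as ordinary, hand-written Scala. The sketch below is only illustrative (bitmap0, value and init stand in for the synthetic bitmap$n, the lazy field and the nulled helper fields); the transform itself builds the equivalent trees per class via gen.mkDoubleCheckedLocking.

class LazyCell(initializer: () => String) {
  private var bitmap0: Int = 0                  // plays the role of bitmap$n
  private var value: String = _                 // the lazy field itself (l$)
  private var init: () => String = initializer  // helper field used only by the initializer

  def force: String = {
    if ((bitmap0 & 1) == 0) {
      synchronized {
        if ((bitmap0 & 1) == 0) {
          value = init()                        // init  (l$ = <rhs>)
          bitmap0 = bitmap0 | 1                 // bitmap$n = bitmap$n | MASK
        }
      }
      init = null                               // this.f1 = null ... this.fn = null
    }
    value                                       // l$
  }
}

mkInnerClassAccessorDoubleChecked reuses the same helper for nested object accessors, with `moduleVar eq null` as the condition, the single assignment as the synchronized body, and nothing to null out.
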
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 6edab9b758..1500759b30 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -15,7 +14,7 @@ import annotation.tailrec
/** A class that yields a kind of iterator (`Cursor`),
* which yields all pairs of overriding/overridden symbols
* that are visible in some baseclass, unless there's a parent class
- * that aleady contains the same pairs.
+ * that already contains the same pairs.
* @author Martin Odersky
* @version 1.0
*/
@@ -42,7 +41,7 @@ abstract class OverridingPairs {
*/
protected def parents: List[Type] = base.info.parents
- /** Does `sym1` match `sym2` so that it qualifies as overiding.
+ /** Does `sym1` match `sym2` so that it qualifies as overriding.
 *  Types always match. Term symbols match if their member types
* relative to <base>.this do
*/
@@ -99,11 +98,15 @@ abstract class OverridingPairs {
private val size = base.info.baseClasses.length
- /** A map from baseclasses of <base> to ints, with smaller ints meansing lower in
- * lineraizatuon order.
+ /** A map from baseclasses of <base> to ints, with smaller ints meaning lower in
+ * linearization order.
*/
private val index = new HashMap[Symbol, Int]
+  // Note: overridingPairs can be called at odd times by the Eclipse plugin.
+  // Sometimes symbols are not yet defined and we get missing keys.
+ // The implementation here is hardened so that it does not crash on a missing key.
+
{ var i = 0
for (bc <- base.info.baseClasses) {
index(bc) = i
@@ -126,22 +129,37 @@ abstract class OverridingPairs {
{ for (i <- List.range(0, size))
subParents(i) = new BitSet(size);
for (p <- parents) {
- val pIndex = index(p.typeSymbol)
- for (bc <- p.baseClasses)
- if (p.baseType(bc) =:= self.baseType(bc))
- include(subParents(index(bc)), pIndex)
- else if (settings.debug.value)
- log("SKIPPING "+p+" -> "+p.baseType(bc)+" / "+self.baseType(bc)+" from "+base)
+ index get p.typeSymbol match {
+ case Some(pIndex) =>
+ for (bc <- p.baseClasses)
+ if (p.baseType(bc) =:= self.baseType(bc))
+ index get bc match {
+ case Some(bcIndex) =>
+ include(subParents(bcIndex), pIndex)
+ case None =>
+ }
+ else if (settings.debug.value)
+ log("SKIPPING "+p+" -> "+p.baseType(bc)+" / "+self.baseType(bc)+" from "+base)
+ case None =>
+ }
}
- }
+ }
/** Do `sym1` and `sym2` have a common subclass in `parents`?
* In that case we do not follow their overriding pairs
*/
private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = {
- val index1 = index(sym1.owner)
- val index2 = index(sym2.owner)
- intersectionContainsElementLeq(subParents(index1), subParents(index2), index1 min index2)
+ index get sym1.owner match {
+ case Some(index1) =>
+ index get sym2.owner match {
+ case Some(index2) =>
+ intersectionContainsElementLeq(subParents(index1), subParents(index2), index1 min index2)
+ case None =>
+ false
+ }
+ case None =>
+ false
+ }
}
/** The scope entries that have already been visited as overridden
@@ -157,13 +175,13 @@ abstract class OverridingPairs {
/** The current entry candidate for overridden */
private var nextEntry = curEntry
- /** The current candidate symbol for overridding */
+ /** The current candidate symbol for overriding */
var overriding: Symbol = _
- /** If not null: The symbol overridden by overridding */
+ /** If not null: The symbol overridden by overriding */
var overridden: Symbol = _
- //@M: note that next is called once during object initialisation
+ //@M: note that next is called once during object initialization
def hasNext: Boolean = curEntry ne null
@tailrec
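
A minimal sketch of the hardening pattern introduced above: the plain index(...) apply throws on a missing key, while `index get ...` lets the cursor simply skip symbols that have not been entered yet. The names below are illustrative only.

def hasCommonParent(index: Map[String, Int], owner1: String, owner2: String): Boolean =
  (index get owner1, index get owner2) match {
    case (Some(i1), Some(i2)) => i1 == i2   // stand-in for the real subParents/BitSet test
    case _                    => false      // an owner not indexed yet: answer false, do not crash
  }
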
diff --git a/src/compiler/scala/tools/nsc/transform/Reifiers.scala b/src/compiler/scala/tools/nsc/transform/Reifiers.scala
index d4290bff59..eef260f5a3 100644
--- a/src/compiler/scala/tools/nsc/transform/Reifiers.scala
+++ b/src/compiler/scala/tools/nsc/transform/Reifiers.scala
@@ -18,11 +18,14 @@ trait Reifiers {
if (sym.isClass) reflect.Class(fullname)
else if (sym.isType) reflect.TypeField(fullname, reify(sym.info))
else if (sym.isMethod) reflect.Method(fullname, reify(sym.info))
+ else if (sym.isValueParameter) reflect.LocalValue(reflect.NoSymbol, fullname, reify(sym.info))
else reflect.Field(fullname, reify(sym.info));
def reify(sym: Symbol): reflect.Symbol = {
if (sym.isRoot || sym.isRootPackage || sym.isEmptyPackageClass || sym.isEmptyPackage)
reflect.RootSymbol
+ else if (sym.isValueParameter)
+ mkGlobalSymbol(sym.name.toString, sym)
else if (sym.owner.isTerm)
reflect.NoSymbol
else reify(sym.owner) match {
@@ -216,8 +219,6 @@ trait Reifiers {
if (rsym == reflect.NoSymbol) throw new TypeError("cannot reify symbol: " + tree.symbol)
else reflect.Select(reify(qual), reify(tree.symbol))
- case _ : StubTree => reflect.Literal(0)
-
case Literal(constant) =>
reflect.Literal(constant.value)
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index 33d454534f..6661543d89 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 91b6e1e9d3..ab99e91e33 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1,17 +1,16 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Iulian Dragos
*/
-// $Id$
package scala.tools.nsc
package transform
import scala.tools.nsc.symtab.Flags
import scala.tools.nsc.util.FreshNameCreator
-import scala.tools.nsc.util.Position
import scala.collection.{mutable, immutable}
+import immutable.Set
/** Specialize code on types.
*/
@@ -31,6 +30,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
type TypeEnv = immutable.Map[Symbol, Type]
def emptyEnv: TypeEnv = immutable.ListMap.empty[Symbol, Type]
+ import definitions.SpecializedClass
object TypeEnv {
/** Return a new type environment binding specialized type parameters of sym to
* the given args. Expects the lists to have the same length.
@@ -43,7 +43,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env
}
- /** Is this typeenv included in `other'? All type variables in this environement
+ /** Is this typeenv included in `other'? All type variables in this environment
* are defined in `other' and bound to the same type.
*/
def includes(t1: TypeEnv, t2: TypeEnv) = {
@@ -55,8 +55,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Reduce the given environment to contain mappins only for type variables in tps. */
- def reduce(env: TypeEnv, tps: immutable.Set[Symbol]): TypeEnv = {
+ /** Reduce the given environment to contain mappings only for type variables in tps. */
+ def restrict(env: TypeEnv, tps: immutable.Set[Symbol]): TypeEnv = {
env filter { kv => tps.contains(kv._1)}
}
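
A tiny illustration of the renamed TypeEnv.restrict, with plain strings standing in for Symbols and Types:

val env  = Map("T" -> "Int", "U" -> "Double")
val kept = env filter { kv => Set("T").contains(kv._1) }   // Map("T" -> "Int")
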
@@ -83,7 +83,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
/** For a given class and concrete type arguments, give its specialized class */
- val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.HashMap
+ val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
+
+ /** Returns the generic class that was specialized to 'cls', or
+ * 'cls' itself if cls is not a specialized subclass.
+ */
+ def genericClass(cls: Symbol): Symbol =
+ if (cls.hasFlag(SPECIALIZED))
+ cls.info.parents.head.typeSymbol
+ else
+ cls
/** Map a method symbol to a list of its specialized overloads in the same class. */
private val overloads: mutable.Map[Symbol, List[Overload]] = new mutable.HashMap[Symbol, List[Overload]] {
@@ -92,12 +101,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString: String =
- "specalized overload " + sym + " in " + env
+ "specialized overload " + sym + " in " + env
}
- /** The annotation used to mark specialized type parameters. */
- lazy val SpecializedClass = definitions.getClass("scala.specialized")
-
protected def newTransformer(unit: CompilationUnit): Transformer =
new SpecializationTransformer(unit)
@@ -111,6 +117,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* type bounds of other @specialized type parameters (and not in its result type).
*/
def degenerate = false
+
+ def isAccessor = false
}
/** Symbol is a special overloaded method of 'original', in the environment env. */
@@ -124,11 +132,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
/** Symbol is a specialized accessor for the `target' field. */
- case class SpecializedAccessor(target: Symbol) extends SpecializedInfo
+ case class SpecializedAccessor(target: Symbol) extends SpecializedInfo {
+ override def isAccessor = true
+ }
/** Symbol is a specialized method whose body should be the target's method body. */
case class Implementation(target: Symbol) extends SpecializedInfo
+ /** Symbol is a specialized override paired with `target'. */
+ case class SpecialOverride(target: Symbol) extends SpecializedInfo
+
/** An Inner class that specializes on a type parameter of the enclosing class. */
case class SpecializedInnerClass(target: Symbol, env: TypeEnv) extends SpecializedInfo
@@ -159,7 +172,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def hasSpecializedParams(clazz: Symbol): Boolean =
!specializedParams(clazz).isEmpty
- /** Return specialized type paramters. */
+ /** Return specialized type parameters. */
def specializedParams(sym: Symbol): List[Symbol] =
splitParams(sym.info.typeParams)._1
@@ -174,7 +187,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if !args.isEmpty =>
val pre1 = this(pre)
- val args1 = args map this
+ val args1 = args// map this
val unspecArgs = unspecializedArgs(sym, args)
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
case Some(sym1) =>
@@ -183,7 +196,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case None =>
typeRef(pre1, sym, args1)
}
- case _ => mapOver(tp)
+ case _ => tp // mapOver(tp)
}
}
@@ -197,30 +210,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def specializedName(sym: Symbol, env: TypeEnv): Name = {
val tvars = if (sym.isClass) env.keySet
- else specializedTypeVars(sym.info).intersect(env.keySet)
+ else specializedTypeVars(sym).intersect(env.keySet)
val (methparams, others) = tvars.toList.partition(_.owner.isMethod)
- val tvars1 = methparams.sortWith(_.name.toString < _.name.toString)
- val tvars2 = others.sortWith(_.name.toString < _.name.toString)
- log("specName(" + sym + ") env " + env)
+ val tvars1 = methparams sortBy (_.name.toString)
+ val tvars2 = others sortBy (_.name.toString)
+ if (settings.debug.value) log("specName(%s) env: %s tvars: %s ".format(sym, env, (tvars1, tvars2)))
specializedName(sym.name, tvars1 map env, tvars2 map env)
}
+ /** Great moments in backward compatibility: the specialized names in 2.8.1
+ * were created based on data which mapped "Long" to "L" instead of "J", so we
+ * preserve that here.
+ */
+ private def abbrvTag281(t: Type) = definitions.abbrvTag(t.typeSymbol) match {
+ case 'J' => 'L'
+ case ch => ch
+ }
+
/** Specialize name for the two list of types. The first one denotes
* specialization on method type parameters, the second on outer environment.
*/
private def specializedName(name: Name, types1: List[Type], types2: List[Type]): Name = {
- def split: (String, String, String) = {
- if (name.endsWith("$sp")) {
- val name1 = name.subName(0, name.length - 3)
- val idxC = name1.lastPos('c')
- val idxM = name1.lastPos('m', idxC)
- (name1.subName(0, idxM - 1).toString,
- name1.subName(idxC + 1, name1.length).toString,
- name1.subName(idxM + 1, idxC).toString)
- } else
- (name.toString, "", "")
- }
-
if (nme.INITIALIZER == name || (types1.isEmpty && types2.isEmpty))
name
else if (nme.isSetterName(name))
@@ -228,64 +238,43 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else if (nme.isLocalName(name))
nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
else {
- val (base, cs, ms) = split
- newTermName(base + "$"
- + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
- + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
+ val (base, cs, ms) = nme.splitSpecializedName(name)
+ newTermName(base.toString + "$"
+ + "m" + ms + types1.map(abbrvTag281).mkString("", "", "")
+ + "c" + cs + types2.map(abbrvTag281).mkString("", "", "$sp"))
}
}
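
An illustrative reading of the name scheme assembled above (hedged, the member names are hypothetical): tags for specialized method type parameters follow "m", tags for class type parameters follow "c", and abbrvTag281 keeps encoding Long as 'L' for 2.8.1 compatibility.

class Cell[@specialized A](var a: A) {
  def get: A = a                               // A = Int via the class: roughly get$mcI$sp
  def pick[@specialized B](x: B, y: B): B = x  // B = Int via the method: roughly pick$mIc$sp
}
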
- lazy val primitiveTypes = Map(
- "Unit" -> definitions.UnitClass.tpe,
- "Boolean" -> definitions.BooleanClass.tpe,
- "Byte" -> definitions.ByteClass.tpe,
- "Short" -> definitions.ShortClass.tpe,
- "Char" -> definitions.CharClass.tpe,
- "Int" -> definitions.IntClass.tpe,
- "Long" -> definitions.LongClass.tpe,
- "Float" -> definitions.FloatClass.tpe,
- "Double" -> definitions.DoubleClass.tpe)
-
-
-
- /** Parse the given string into the list of types it contains.
- *
- * @param str comma-separated string of distinct primitive types.
- */
- def parseTypes(str: String): List[Type] = {
- if (str.trim == "")
- List()
- else {
- val buf = new mutable.ListBuffer[Type]
- for (t <- str.split(','))
- primitiveTypes.get(t.trim) match {
- case Some(tpe) => buf += tpe
- case None =>
- error("Invalid type " + t + ". Expected one of " + primitiveTypes.keysIterator.mkString("", ", ", "."))
- }
- buf.toList
- }
- }
-
- /** Return the concrete types `sym' should be specialized at.
+ lazy val primitiveTypes = List(
+ definitions.UnitClass.tpe,
+ definitions.BooleanClass.tpe,
+ definitions.ByteClass.tpe,
+ definitions.ShortClass.tpe,
+ definitions.CharClass.tpe,
+ definitions.IntClass.tpe,
+ definitions.LongClass.tpe,
+ definitions.FloatClass.tpe,
+ definitions.DoubleClass.tpe)
+
+ /** Return the concrete types `sym' should be specialized at.
*/
def concreteTypes(sym: Symbol): List[Type] =
sym.getAnnotation(SpecializedClass) match {
case Some(AnnotationInfo(_, args, _)) =>
args match {
- case Literal(ct) :: _ =>
- val tpes = parseTypes(ct.stringValue)
+ case Nil =>
+ log(sym + " specialized on everything")
+ primitiveTypes.toList
+ case _ =>
+ val tpes = args.map(_.symbol.companionClass.tpe)
log(sym + " specialized on " + tpes)
tpes
- case _ =>
- log(sym + " specialized on everything")
- primitiveTypes.valuesIterator.toList
}
case _ =>
Nil
}
- /** Return a list of all type environements for all specializations
+ /** Return a list of all type environments for all specializations
* of @specialized types in `tps'.
*/
private def specializations(tps: List[Symbol]): List[TypeEnv] = {
@@ -306,21 +295,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else for (v <- values(xs.head); vs <- count(xs.tail, values)) yield v :: vs
}
- /** Does the given tpe need to be specialized in the environment 'env'? */
+ /** Does the given tpe need to be specialized in the environment 'env'?
+ * Specialization is needed for
+ * - members with specialized type parameters found in the given environment
+ * - constructors of specialized classes
+ * - normalized members whose type bounds appear in the environment
+ */
private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = {
- def needsIt(tpe: Type): Boolean = tpe match {
- case TypeRef(pre, sym, args) =>
- (env.keysIterator.contains(sym)
- || (args exists needsIt))
- case PolyType(tparams, resTpe) => needsIt(resTpe)
- case MethodType(argTpes, resTpe) =>
- (argTpes exists (sym => needsIt(sym.tpe))) || needsIt(resTpe)
- case ClassInfoType(parents, stats, sym) =>
- stats.toList exists (s => needsIt(s.info))
- case _ => false
- }
-
- (needsIt(sym.info)
+ (specializedTypeVars(sym).intersect(env.keySet).nonEmpty
+ || (sym.isClassConstructor && sym.enclClass.typeParams.exists(_.hasAnnotation(SpecializedClass)))
|| (isNormalizedMember(sym) && info(sym).typeBoundsIn(env)))
}
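
A hedged example of what the rules above select, using a hypothetical class:

class Vec[@specialized(Int, Double) T](val head: T) {
  def map(f: T => T): Vec[T] = new Vec(f(head))   // its type mentions T: needs specialization
  def size: Int = 1                               // no specialized type variable: left generic
}
// concreteTypes for T is List(Int, Double); a bare @specialized would mean the whole
// primitiveTypes list. The constructor needs specialization as well, because the
// enclosing class has a @specialized type parameter.
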
@@ -332,22 +315,41 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}))
- private def specializedTypeVars(tpe: List[Type]): immutable.Set[Symbol] =
+ def specializedTypeVars(tpe: List[Type]): immutable.Set[Symbol] =
tpe.foldLeft(immutable.ListSet.empty[Symbol]: immutable.Set[Symbol]) {
(s, tp) => s ++ specializedTypeVars(tp)
}
- /** Return the set of @specialized type variables mentioned by the given type. */
- private def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match {
+ def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] =
+ atPhase(currentRun.typerPhase)(specializedTypeVars(sym.info))
+
+ /** Return the set of @specialized type variables mentioned by the given type.
+ * It only counts type variables that appear:
+ * - naked
+ * - as arguments to type constructors in @specialized positions
+   *   (arrays are considered as Array[@specialized T])
+ */
+ def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match {
case TypeRef(pre, sym, args) =>
- if (sym.isTypeParameter && sym.hasAnnotation(SpecializedClass))
- specializedTypeVars(args) + sym
- else
+ if (sym.isAliasType)
+ specializedTypeVars(tpe.normalize)
+ else if ( sym.isTypeParameter && sym.hasAnnotation(SpecializedClass)
+ || (sym.isTypeSkolem && sym.deSkolemize.hasAnnotation(SpecializedClass)))
+ immutable.ListSet.empty + sym
+ else if (sym == definitions.ArrayClass)
specializedTypeVars(args)
+ else {
+ val extra = for ((tp, arg) <- sym.typeParams.zip(args) if tp.hasAnnotation(SpecializedClass))
+ yield specializedTypeVars(arg).toList
+ immutable.ListSet.empty[Symbol] ++ extra.flatten
+ }
+
case PolyType(tparams, resTpe) =>
specializedTypeVars(tparams map (_.info)) ++ specializedTypeVars(resTpe)
+
case MethodType(argSyms, resTpe) =>
specializedTypeVars(argSyms map (_.tpe)) ++ specializedTypeVars(resTpe)
+
case ExistentialType(_, res) => specializedTypeVars(res)
case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
case TypeBounds(hi, lo) => specializedTypeVars(hi) ++ specializedTypeVars(lo)
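
For instance, assuming T is marked @specialized, the hypothetical signature below contributes T through the naked result type and through Array[T], but not through List[T], because List's type parameter is not @specialized:

def f[@specialized T](xs: Array[T], ys: List[T]): T = xs(0)
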
@@ -373,18 +375,55 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
typeEnv(cls) = env
this.specializedClass((clazz, env)) = cls
+ // declarations of the newly specialized class 'cls'
val decls1 = new Scope
+ // original unspecialized type parameters
+ var oldClassTParams: List[Symbol] = Nil
+
+ // unspecialized type parameters of 'cls' (cloned)
+ var newClassTParams: List[Symbol] = Nil
+
+ // has to be a val in order to be computed early. It is later called
+ // within 'atPhase(next)', which would lead to an infinite cycle otherwise
val specializedInfoType: Type = {
val (_, unspecParams) = splitParams(clazz.info.typeParams)
- val tparams1 = cloneSymbols(unspecParams, cls)
- var parents = List(subst(env, clazz.tpe).subst(unspecParams, tparams1 map (_.tpe)))
+ oldClassTParams = unspecParams
+ newClassTParams = cloneSymbols(unspecParams, cls) map subst(env)
+
+ def applyContext(tpe: Type) =
+ subst(env, tpe).subst(unspecParams, newClassTParams map (_.tpe))
+
+ /** Return a list of specialized parents to be re-mixed in a specialized subclass.
+ * Assuming env = [T -> Int] and
+ * class Integral[@specialized T] extends Numeric[T]
+ * and Numeric[U] is specialized on U, this produces List(Numeric$mcI).
+ *
+     *  so that class Integral$mcI extends Integral[Int] with Numeric$mcI.
+ */
+ def specializedParents(parents: List[Type]): List[Type] = {
+ val res = new mutable.ListBuffer[Type]
+ for (p <- parents) {
+ val stp = atPhase(phase.next)(specializedType(p))
+ if (stp != p)
+ if (p.typeSymbol.isTrait) res += stp
+ else if (currentRun.compiles(clazz))
+ reporter.warning(clazz.pos, p.typeSymbol + " must be a trait. Specialized version of "
+ + clazz + " will inherit generic " + p)
+ }
+ res.reverse.toList
+ }
+
+ var parents = List(applyContext(atPhase(currentRun.typerPhase)(clazz.tpe)))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
- val infoType = ClassInfoType(parents, decls1, cls)
- if (tparams1.isEmpty) infoType else PolyType(tparams1, infoType)
+ val extraSpecializedMixins = specializedParents(clazz.info.parents.map(applyContext))
+ log("extraSpecializedMixins: " + extraSpecializedMixins)
+ val infoType = ClassInfoType(parents ::: extraSpecializedMixins, decls1, cls)
+ if (newClassTParams.isEmpty) infoType else PolyType(newClassTParams, infoType)
}
+ log("specializedClass " + cls + ": " + specializedInfoType)
atPhase(phase.next)(cls.setInfo(specializedInfoType))
val fullEnv = outerEnv ++ env
@@ -396,11 +435,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def enterMember(sym: Symbol): Symbol = {
typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment
- sym.setInfo(sym.info.substThis(clazz, ThisType(cls)))
+ sym.setInfo(sym.info.substThis(clazz, ThisType(cls)).subst(oldClassTParams, newClassTParams map (_.tpe)))
decls1.enter(subst(fullEnv)(sym))
}
- /** Create and enter in scope an overriden symbol m1 for `m' that forwards
+ /** Create and enter in scope an overridden symbol m1 for `m' that forwards
* to `om'. `om' is a fresh, special overload of m1 that is an implementation
* of `m'. For example, for a
*
@@ -423,15 +462,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
info(specMember) = Forward(om)
- info(om) = Implementation(original)
+ info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
+ overloads(specMember) = Overload(om, typeEnv(om)) :: overloads(specMember)
enterMember(om)
}
log("specializedClass: " + cls)
for (m <- normMembers if needsSpecialization(outerEnv ++ env, m) && satisfiable(fullEnv)) {
- log(" * looking at: " + m)
+ if (settings.debug.value) log(" * looking at: " + m)
if (!m.isDeferred) concreteSpecMethods += m
// specialized members have to be overridable.
@@ -446,20 +486,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val NormalizedMember(original) = info(m)
if (!conflicting(env ++ typeEnv(m))) {
if (info(m).degenerate) {
- log("degenerate normalized member " + m + " info(m): " + info(m))
+ if (settings.debug.value) log("degenerate normalized member " + m + " info(m): " + info(m))
val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
info(specMember) = Implementation(original)
typeEnv(specMember) = env ++ typeEnv(m)
} else {
val om = forwardToOverload(m)
- log("normalizedMember " + m + " om: " + om + " typeEnv(om): " + typeEnv(om))
+ if (settings.debug.value) log("normalizedMember " + m + " om: " + om + " typeEnv(om): " + typeEnv(om))
}
} else
log("conflicting env for " + m + " env: " + env)
} else if (m.isDeferred) { // abstract methods
val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
- log("deferred " + specMember.fullNameString + " is forwarded")
+ if (settings.debug.value) log("deferred " + specMember.fullName + " is forwarded")
info(specMember) = new Forward(specMember) {
override def target = m.owner.info.member(specializedName(m, env))
@@ -480,7 +520,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def overrideIn(clazz: Symbol, sym: Symbol) = {
val sym1 = sym.cloneSymbol(clazz)
.setFlag(OVERRIDE | SPECIALIZED)
- .resetFlag(DEFERRED | CASEACCESSOR | ACCESSOR | PARAMACCESSOR | LAZY)
+ .resetFlag(DEFERRED | CASEACCESSOR | PARAMACCESSOR | LAZY)
sym1.setInfo(sym1.info.asSeenFrom(clazz.tpe, sym1.owner))
}
@@ -491,20 +531,21 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
enterMember(specVal)
// create accessors
- log("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
+ if (settings.debug.value)
+ log("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
if (nme.isLocalName(m.name)) {
val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)).setInfo(MethodType(List(), specVal.info))
val origGetter = overrideIn(cls, m.getter(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
enterMember(origGetter)
- log("created accessors: " + specGetter + " orig: " + origGetter)
+ if (settings.debug.value) log("created accessors: " + specGetter + " orig: " + origGetter)
clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa =>
val cfaGetter = overrideIn(cls, cfa)
info(cfaGetter) = SpecializedAccessor(specVal)
enterMember(cfaGetter)
- log("found case field accessor for " + m + " added override " + cfaGetter);
+ if (settings.debug.value) log("found case field accessor for " + m + " added override " + cfaGetter);
}
if (specVal.isVariable && m.setter(clazz) != NoSymbol) {
@@ -526,18 +567,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
typeEnv(specClass) = fullEnv
specClass.name = specializedName(specClass, fullEnv)
enterMember(specClass)
- log("entered specialized class with info " + specClass.fullNameString + ": " + specClass.info)
+ log("entered specialized class " + specClass.fullName)
info(specClass) = SpecializedInnerClass(m, fullEnv)
}
}
cls
}
- log("specializeClass " + clazz.fullNameString)
+ log("specializeClass " + clazz.fullName)
val decls1 = (clazz.info.decls.toList flatMap { m: Symbol =>
if (m.isAnonymousClass) List(m) else {
normalizeMember(m.owner, m, outerEnv) flatMap { normalizedMember =>
val ms = specializeMember(m.owner, normalizedMember, outerEnv, clazz.info.typeParams)
+// atPhase(currentRun.typerPhase)(println("normalizedMember.info: " + normalizedMember.info)) // bring the info to the typer phase
+ // interface traits have concrete members now
+ if (ms.nonEmpty && clazz.isTrait && clazz.isInterface)
+ clazz.resetFlag(INTERFACE)
+
if (normalizedMember.isMethod) {
val newTpe = subst(outerEnv, normalizedMember.info)
if (newTpe != normalizedMember.info) // only do it when necessary, otherwise the method type might be at a later phase already
@@ -570,8 +616,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* m$D[ U](x: Double, y: U)
*/
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
- if (sym.isMethod && !sym.info.typeParams.isEmpty) {
- val (stps, tps) = splitParams(sym.info.typeParams)
+ if (settings.debug.value) log("normalizeMember: " + sym.fullName)
+ if (sym.isMethod && !atPhase(currentRun.typerPhase)(sym.typeParams.isEmpty)) {
+ var (stps, tps) = splitParams(sym.info.typeParams)
+ val unusedStvars = stps filterNot (specializedTypeVars(sym.info).toList contains)
+ if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isSynthetic) {
+ reporter.warning(sym.pos, "%s %s unused or used in non-specializable positions."
+ .format(unusedStvars.mkString("", ", ", ""), if (unusedStvars.length == 1) "is" else "are"))
+ unusedStvars foreach (_.removeAnnotation(SpecializedClass))
+ stps = stps filterNot (unusedStvars contains)
+ tps = sym.info.typeParams filterNot (_.hasAnnotation(SpecializedClass))
+ }
val res = sym :: (for (env <- specializations(stps) if needsSpecialization(env, sym)) yield {
val keys = env.keysIterator.toList;
val vals = env.valuesIterator.toList
@@ -586,7 +641,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specMember.setInfo(polyType(tps1, methodType))
- log("expanded member: " + sym + ": " + sym.info + " -> " + specMember + ": " + specMember.info + " env: " + env)
+ if (settings.debug.value) log("expanded member: " + sym + ": " + sym.info + " -> " + specMember + ": " + specMember.info + " env: " + env)
info(specMember) = NormalizedMember(sym)
overloads(sym) = Overload(specMember, env) :: overloads(sym)
specMember
@@ -596,8 +651,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else List(sym)
}
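
A hedged example of code that would trigger the new warning above: T occurs only inside List[T], which is not a specializable position, so the annotation is reported as unused and dropped before the member is normalized.

def count[@specialized T](xs: List[T]): Int = xs.length
// expected diagnostic, roughly: "type T is unused or used in non-specializable positions."
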
- /** Specialize member `m' w.r.t. to the outer environment and the type parameters of
- * the innermost enclosing class.
+ /** Specialize member `m' w.r.t. to the outer environment and the type
+ * parameters of the innermost enclosing class.
*
* Turns 'private' into 'protected' for members that need specialization.
*
@@ -607,16 +662,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializeOn(tparams: List[Symbol]): List[Symbol] =
for (spec <- specializations(tparams)) yield {
if (sym.hasFlag(PRIVATE)) sym.resetFlag(PRIVATE).setFlag(PROTECTED)
+ sym.resetFlag(FINAL)
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
- typeEnv(specMember) = outerEnv ++ spec
+ typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
+ if (settings.debug.value) log("added specialized overload: %s in env: %s".format(specMember, typeEnv(specMember)))
overloads(sym) = Overload(specMember, spec) :: overloads(sym)
specMember
}
if (sym.isMethod) {
-// log("specializeMember " + sym + " with own stps: " + specializedTypes(sym.info.typeParams))
+ if (settings.debug.value) log("specializeMember %s with tps: %s stvars(sym): %s".format(sym, tps, specializedTypeVars(sym)))
val tps1 = if (sym.isConstructor) tps filter (tp => sym.info.paramTypes.contains(tp)) else tps
- val tps2 = tps1 intersect specializedTypeVars(sym.info).toList
+ val tps2 = tps1 intersect specializedTypeVars(sym).toList
if (!sym.isDeferred) concreteSpecMethods += sym
specializeOn(tps2) map {m => info(m) = SpecialOverload(sym, typeEnv(m)); m}
@@ -629,7 +686,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specMember = sym.cloneSymbol(owner) // this method properly duplicates the symbol's info
specMember.name = specializedName(sym, env)
- specMember.setInfo(subst(env, specMember.info))
+ specMember.setInfo(subst(env, specMember.info.asSeenFrom(owner.thisType, sym.owner)))
.setFlag(SPECIALIZED)
.resetFlag(DEFERRED | CASEACCESSOR | ACCESSOR | LAZY)
}
@@ -646,35 +703,83 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def specialOverrides(clazz: Symbol): List[Symbol] = {
log("specialOverrides(" + clazz + ")")
- val opc = new overridingPairs.Cursor(clazz)
+
+ /** Return the overridden symbol in syms that needs a specialized overriding symbol,
+ * together with its specialization environment. The overridden symbol may not be
+     *  the one closest to 'overriding' in a given hierarchy.
+ *
+     *  A method m needs a special override if
+ * * m overrides a method whose type contains specialized type variables
+ * * there is a valid specialization environment that maps the overridden method type to m's type.
+ */
+ def needsSpecialOverride(overriding: Symbol, syms: List[Symbol]): (Symbol, TypeEnv) = {
+ def missingSpecializations(baseTvar: Symbol, derivedTvar: Symbol): Set[Type] = {
+ val baseSet = concreteTypes(baseTvar).toSet
+ val derivedSet = concreteTypes(derivedTvar).toSet
+ baseSet diff derivedSet
+ }
+
+ def checkOverriddenTParams(overridden: Symbol) {
+ if (currentRun.compiles(overriding))
+ for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams);
+ val missing = missingSpecializations(baseTvar, derivedTvar)
+ if missing.nonEmpty)
+ reporter.error(derivedTvar.pos,
+ "Type parameter has to be specialized at least for the same types as in the overridden method. Missing " +
+ "types: " + missing.mkString("", ", ", ""))
+ }
+
+ for (overridden <- syms) {
+ if (settings.debug.value)
+ log("Overridden: " + overridden.fullName + ": " + overridden.info
+ + "\n by " + overriding.fullName + ": " + overriding.info)
+ val stvars = specializedTypeVars(overridden.info)
+ if (!stvars.isEmpty) {
+ if (settings.debug.value) log("\t\tspecializedTVars: " + stvars)
+ checkOverriddenTParams(overridden)
+
+ val env = unify(overridden.info, overriding.info, emptyEnv)
+ if (settings.debug.value)
+ log("\t\tenv: " + env + "isValid: " + TypeEnv.isValid(env, overridden)
+ + "found: " + atPhase(phase.next)(overridden.owner.info.decl(specializedName(overridden, env))))
+ if (!TypeEnv.restrict(env, stvars).isEmpty
+ && TypeEnv.isValid(env, overridden)
+ && atPhase(phase.next)(overridden.owner.info.decl(specializedName(overridden, env))) != NoSymbol)
+ return (overridden, env)
+ }
+ }
+ (NoSymbol, emptyEnv)
+ }
+
val oms = new mutable.ListBuffer[Symbol]
- while (opc.hasNext) {
- log("\toverriding pairs: " + opc.overridden.fullNameString + ": " + opc.overridden.info
- + " overriden by " + opc.overriding.fullNameString + ": " + opc.overriding.info)
- if (opc.overriding.owner == clazz && !specializedTypeVars(opc.overridden.info).isEmpty) {
- log("\t\tspecializedTVars: " + specializedTypeVars(opc.overridden.info))
- val env = unify(opc.overridden.info, opc.overriding.info, emptyEnv)
- log("\t\tenv: " + env + "isValid: "
- + TypeEnv.isValid(env, opc.overridden)
- + " exists: " + opc.overridden.owner.info.decl(specializedName(opc.overridden, env)))
- if (!env.isEmpty
- && TypeEnv.isValid(env, opc.overridden)
- && opc.overridden.owner.info.decl(specializedName(opc.overridden, env)) != NoSymbol) {
- log("Added specialized overload for " + opc.overriding.fullNameString + " in env: " + env)
- val om = specializedOverload(clazz, opc.overridden, env)
- if (!opc.overriding.isDeferred) {
- concreteSpecMethods += opc.overriding
- info(om) = Implementation(opc.overriding)
- info(opc.overriding) = Forward(om)
- }
- overloads(opc.overriding) = Overload(om, env) :: overloads(opc.overriding)
- oms += om
- atPhase(phase.next)(
- assert(opc.overridden.owner.info.decl(om.name) != NoSymbol,
- "Could not find " + om.name + " in " + opc.overridden.owner.info.decls))
+ for (overriding <- clazz.info.decls;
+ val (overridden, env) = needsSpecialOverride(overriding, overriding.allOverriddenSymbols)
+ if overridden != NoSymbol) {
+ val om = specializedOverload(clazz, overridden, env)
+ log("Added specialized overload for %s in env: %s with type: %s".format(overriding.fullName, env, om.info))
+ typeEnv(om) = env
+ concreteSpecMethods += overriding
+ if (!overriding.isDeferred) { // concrete method
+ // if the override is a normalized member, 'om' gets the implementation from
+ // its original target, and adds the environment of the normalized member (that is,
+ // any specialized /method/ type parameter bindings)
+ info(om) = info.get(overriding) match {
+ case Some(NormalizedMember(target)) =>
+ typeEnv(om) = env ++ typeEnv(overriding)
+ SpecialOverride(target)
+ case _ => SpecialOverride(overriding)
}
+ info(overriding) = Forward(om)
+ om setPos overriding.pos
+ } else { // abstract override
+ if (settings.debug.value) log("abstract override " + overriding.fullName + " with specialized " + om.fullName)
+ info(om) = Forward(overriding)
}
- opc.next
+ overloads(overriding) = Overload(om, env) :: overloads(overriding)
+ oms += om
+ atPhase(phase.next)(
+ assert(overridden.owner.info.decl(om.name) != NoSymbol,
+ "Could not find " + om.name + " in " + overridden.owner.info.decls))
}
oms.toList
}
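
A hedged example of the situation needsSpecialOverride looks for: the overridden method's type mentions a specialized type variable, and unifying it with the concrete override yields the environment T -> Int, so a specialized overload (an apply$mcI$sp-style method) is generated next to the plain override.

abstract class Fun[@specialized T] { def apply(x: T): T }
class Succ extends Fun[Int] { def apply(x: Int): Int = x + 1 }
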
@@ -709,6 +814,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case (RefinedType(_, _), RefinedType(_, _)) => env
case (AnnotatedType(_, tp1, _), tp2) => unify(tp2, tp1, env)
case (ExistentialType(_, res1), _) => unify(tp2, res1, env)
+ case _ =>
+ log("don't know how to unify %s [%s] with %s [%s]".format(tp1, tp1.getClass, tp2, tp2.getClass))
+ env
}
}
@@ -724,7 +832,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def default(key: Symbol) = emptyEnv
}
- /** Apply type bindings in the given environement `env' to all declarations. */
+ /** Apply type bindings in the given environment `env' to all declarations. */
private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
decls map subst(env)
@@ -739,8 +847,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def mapOver(tp: Type): Type = tp match {
case ClassInfoType(parents, decls, clazz) =>
val parents1 = parents mapConserve (this);
- val decls1 = mapOver(decls.toList);
- if ((parents1 eq parents) && (decls1 eq decls)) tp
+ val declsList = decls.toList
+ val decls1 = mapOver(declsList);
+ if ((parents1 eq parents) && (decls1 eq declsList)) tp
else ClassInfoType(parents1, new Scope(decls1), clazz)
case AnnotatedType(annots, atp, selfsym) =>
val annots1 = mapOverAnnotations(annots)
@@ -772,23 +881,35 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else tpe)
}
- /** Type transformation.
+ /** Type transformation. It is applied to all symbols, compiled or loaded.
+ * If it is a 'no-specialization' run, it is applied only to loaded symbols.
*/
override def transformInfo(sym: Symbol, tpe: Type): Type = {
- val res = tpe match {
- case PolyType(targs, ClassInfoType(base, decls, clazz)) =>
+ val res = if (!settings.nospecialization.value || !currentRun.compiles(sym)) tpe match {
+ case PolyType(targs, ClassInfoType(base, decls, clazz))
+ if clazz != definitions.RepeatedParamClass
+ && clazz != definitions.JavaRepeatedParamClass
+ && !clazz.hasFlag(JAVA) =>
val parents = base map specializedType
- PolyType(targs, ClassInfoType(parents, new Scope(specializeClass(clazz, typeEnv(clazz))), clazz))
-
- case ClassInfoType(base, decls, clazz) if !clazz.isPackageClass =>
-// val parents = base map specializedType
- log("transformInfo " + clazz )
- val res = ClassInfoType(base map specializedType, new Scope(specializeClass(clazz, typeEnv(clazz))), clazz)
+ if (settings.debug.value) log("transformInfo (poly) " + clazz + " with parents1: " + parents + " ph: " + phase)
+// if (clazz.name.toString == "$colon$colon")
+// (new Throwable).printStackTrace
+ PolyType(targs, ClassInfoType(parents,
+ new Scope(specializeClass(clazz, typeEnv(clazz)) ::: specialOverrides(clazz)),
+ clazz))
+
+ case ClassInfoType(base, decls, clazz) if !clazz.isPackageClass && !clazz.hasFlag(JAVA) =>
+ atPhase(phase.next)(base.map(_.typeSymbol.info))
+ val parents = base map specializedType
+ if (settings.debug.value) log("transformInfo " + clazz + " with parents1: " + parents + " ph: " + phase)
+ val res = ClassInfoType(base map specializedType,
+ new Scope(specializeClass(clazz, typeEnv(clazz)) ::: specialOverrides(clazz)),
+ clazz)
res
case _ =>
tpe
- }
+ } else tpe
res
}
@@ -805,13 +926,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def conflicting(env: TypeEnv, warn: (Position, String) => Unit): Boolean =
env exists { case (tvar, tpe) =>
- if (!(subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi))) {
+ if (!((subst(env, tvar.info.bounds.lo) <:< tpe)
+ && (tpe <:< subst(env, tvar.info.bounds.hi)))) {
warn(tvar.pos, "Bounds prevent specialization for " + tvar)
true
} else false
}
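
A hedged example of a binding this check rejects: for a declaration like the one below, the environment T -> Int fails the upper-bound test, so "Bounds prevent specialization for T" is issued when that environment is attempted.

class Box[@specialized T <: AnyRef](val value: T)
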
- /** The type environemnt is sound w.r.t. to all type bounds or only soft
+ /** The type environment is sound w.r.t. to all type bounds or only soft
* conflicts appear. An environment is sound if all bindings are within
* the bounds of the given type variable. A soft conflict is a binding
* that does not fall within the bounds, but whose bounds contain
@@ -842,15 +964,55 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
satisfiable(env, silent)
}
- import java.io.PrintWriter
-
- /*************************** Term transformation ************************************/
-
class Duplicator extends {
val global: SpecializeTypes.this.global.type = SpecializeTypes.this.global
} with typechecker.Duplicators
- import global.typer.typed
+ /** A tree symbol substituter that substitutes on type skolems.
+ * If a type parameter is a skolem, it looks for the original
+ * symbol in the 'from' and maps it to the corresponding new
+ * symbol. The new symbol should probably be a type skolem as
+ * well (not enforced).
+ *
+ * All private members are made protected in order to be accessible from
+ * specialized classes.
+ */
+ class ImplementationAdapter(from: List[Symbol],
+ to: List[Symbol],
+ targetClass: Symbol,
+ addressFields: Boolean) extends TreeSymSubstituter(from, to) {
+ override val symSubst = new SubstSymMap(from, to) {
+ override def matches(sym1: Symbol, sym2: Symbol) =
+ if (sym2.isTypeSkolem) sym2.deSkolemize eq sym1
+ else sym1 eq sym2
+ }
+
+ private def isAccessible(sym: Symbol): Boolean =
+ (currentClass == sym.owner.enclClass) && (currentClass != targetClass)
+
+ private def shouldMakePublic(sym: Symbol): Boolean =
+ sym.hasFlag(PRIVATE | PROTECTED) && (addressFields || !nme.isLocalName(sym.name))
+
+ /** All private members that are referenced are made protected,
+ * in order to be accessible from specialized subclasses.
+ */
+ override def transform(tree: Tree): Tree = tree match {
+ case Select(qual, name) =>
+ val sym = tree.symbol
+ if (sym.hasFlag(PRIVATE))
+ if (settings.debug.value)
+ log("seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b"
+ .format(sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name)))
+ if (shouldMakePublic(sym) && !isAccessible(sym)) {
+ if (settings.debug.value) log("changing private flag of " + sym)
+ sym.makeNotPrivate(sym.owner)
+ }
+ super.transform(tree)
+
+ case _ =>
+ super.transform(tree)
+ }
+ }
def specializeCalls(unit: CompilationUnit) = new TypingTransformer(unit) {
/** Map a specializable method to its rhs, when not deferred. */
@@ -863,42 +1025,44 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
class CollectMethodBodies extends Traverser {
override def traverse(tree: Tree) = tree match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) if concreteSpecMethods(tree.symbol) || tree.symbol.isConstructor =>
- log("adding body of " + tree.symbol)
- body(tree.symbol) = rhs
-// body(tree.symbol) = tree // whole method
- parameters(tree.symbol) = vparamss map (_ map (_.symbol))
- super.traverse(tree)
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ if (concreteSpecMethods(tree.symbol) || tree.symbol.isConstructor) {
+ if (settings.debug.value) log("adding body of " + tree.symbol)
+ body(tree.symbol) = rhs
+ // body(tree.symbol) = tree // whole method
+ parameters(tree.symbol) = vparamss map (_ map (_.symbol))
+ } // no need to descend further down inside method bodies
+
case ValDef(mods, name, tpt, rhs) if concreteSpecMethods(tree.symbol) =>
body(tree.symbol) = rhs
- super.traverse(tree)
+ //super.traverse(tree)
case _ =>
super.traverse(tree)
}
}
- import posAssigner._
-
override def transform(tree: Tree): Tree = {
val symbol = tree.symbol
/** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
def specSym(qual: Tree): Option[Symbol] = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv)
- log("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
+ log("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
+ .format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
if (!env.isEmpty) { // a method?
- val specMember = overload(symbol, env)
- if (specMember.isDefined) Some(specMember.get.sym)
- else { // a field?
- val specMember = qual.tpe.member(specializedName(symbol, env))
- if (specMember ne NoSymbol) Some(specMember)
- else None
- }
+ val specMember = qual.tpe.member(specializedName(symbol, env))
+ if (specMember ne NoSymbol)
+ if (typeEnv(specMember) == env) Some(specMember)
+ else {
+ log("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
+ None
+ }
+ else None
} else None
}
def maybeTypeApply(fun: Tree, targs: List[Tree]) =
- if (targs.isEmpty)fun else TypeApply(fun, targs)
+ if (targs.isEmpty) fun else TypeApply(fun, targs)
curTree = tree
tree match {
@@ -910,20 +1074,22 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
Apply(
Select(New(TypeTree(findSpec(tpt.tpe))), nme.CONSTRUCTOR),
transformTrees(args))))
- } else tree
+ } else super.transform(tree)
- case TypeApply(Select(qual, name), targs) if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
- log("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
+ case TypeApply(Select(qual, name), targs)
+ if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
+ if (settings.debug.value) log("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
val qual1 = transform(qual)
specSym(qual1) match {
case Some(specMember) =>
- log("found " + specMember)
+ if (settings.debug.value) log("found " + specMember.fullName)
assert(symbol.info.typeParams.length == targs.length)
val env = typeEnv(specMember)
val residualTargs =
- for ((tvar, targ) <-symbol.info.typeParams.zip(targs) if !env.isDefinedAt(tvar))
+ for ((tvar, targ) <- symbol.info.typeParams.zip(targs) if !env.isDefinedAt(tvar))
yield targ
- assert(residualTargs.length == specMember.info.typeParams.length)
+ assert(residualTargs.length == specMember.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, symbol.info.typeParams, env))
val tree1 = maybeTypeApply(Select(qual1, specMember), residualTargs)
log("rewrote " + tree + " to " + tree1)
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
@@ -931,32 +1097,44 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case None => super.transform(tree)
}
- case Select(qual, name) if (/*!symbol.isMethod
- &&*/ !specializedTypeVars(symbol.info).isEmpty
- && name != nme.CONSTRUCTOR) =>
- val qual1 = transform(qual)
- log("checking for unification at " + tree + " with sym.tpe: " + symbol.tpe + " and tree.tpe: " + tree.tpe + " at " + tree.pos.line)
- val env = unify(symbol.tpe, tree.tpe, emptyEnv)
- log("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
- if (!env.isEmpty) {
- val specMember = overload(symbol, env)
- if (specMember.isDefined) {
- log("** routing " + tree + " to " + specMember.get.sym.fullNameString + " tree: " + Select(qual1, specMember.get.sym.name))
- localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.get.sym.name)))
- } else {
- val specMember = qual1.tpe.member(specializedName(symbol, env))
- if (specMember ne NoSymbol) {
- log("** using spec member " + specMember)
- localTyper.typed(atPos(tree.pos)(Select(qual1, specMember.name)))
- } else
+ case Select(qual, name) =>
+ if (settings.debug.value)
+ log("[%s] looking at Select: %s sym: %s: %s [tree.tpe: %s]".format(tree.pos.line, tree, symbol, symbol.info, tree.tpe))
+
+ if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) {
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv)
+ if (settings.debug.value) log("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
+ if (!env.isEmpty) {
+ val specMember = overload(symbol, env)
+ if (specMember.isDefined) {
+ log("** routing " + tree + " to " + specMember.get.sym.fullName)
+ localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
+ } else {
+ val qual1 = transform(qual)
+ val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
+ if (specMember ne NoSymbol) {
+ log("** using spec member " + specMember + ": " + specMember.tpe)
+ val tree1 = atPos(tree.pos)(Select(qual1, specMember))
+ if (specMember.isMethod)
+ localTyper.typedOperator(tree1)
+ else
+ localTyper.typed(tree1)
+ } else
+ treeCopy.Select(tree, qual1, name)
+ }
+ } else
+ super.transform(tree)
+ } else overloads(symbol).find(_.sym.info =:= symbol.info) match {
+ case Some(specMember) =>
+ val qual1 = transform(qual)
+ if (settings.debug.value) log("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym.name))
+ localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym.name)))
+ case None =>
super.transform(tree)
- }
- } else
- super.transform(tree)
+ }
case PackageDef(pid, stats) =>
- tree.symbol.info // make sure specializations have been peformed
- log("PackageDef owner: " + symbol)
+ tree.symbol.info // make sure specializations have been performed
atOwner(tree, symbol) {
val specMembers = implSpecClasses(stats) map localTyper.typed
treeCopy.PackageDef(tree, pid, transformStats(stats ::: specMembers, symbol.moduleClass))
@@ -966,73 +1144,79 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed)
if (!symbol.isPackageClass)
(new CollectMethodBodies)(tree)
- treeCopy.Template(tree, parents, self, atOwner(currentOwner)(transformTrees(body ::: specMembers)))
+ val parents1 = currentOwner.info.parents.zipWithIndex.map {
+ case (tpe, idx) => TypeTree(tpe) setPos parents(idx).pos
+ }
+ treeCopy.Template(tree,
+ parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ ,
+ self,
+ atOwner(currentOwner)(transformTrees(body ::: specMembers)))
case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) if info.isDefinedAt(symbol) =>
if (symbol.isConstructor) {
val t = atOwner(symbol) {
val superRef: Tree = Select(Super(nme.EMPTY.toTypeName, nme.EMPTY.toTypeName), nme.CONSTRUCTOR)
- forwardCall(tree.pos, superRef, vparamss)
+ forwardCtorCall(tree.pos, superRef, vparamss, symbol.owner)
+ }
+ if (symbol.isPrimaryConstructor) localTyper typed {
+ atPos(symbol.pos)(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, Block(List(t), Literal(()))))
+ } else {
+ // duplicate the original constructor
+ duplicateBody(ddef, info(symbol).target)
}
- val tree1 = atPos(symbol.pos)(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, Block(List(t), Literal(()))))
- log(tree1)
- localTyper.typed(tree1)
} else info(symbol) match {
case Implementation(target) =>
- assert(body.isDefinedAt(target), "sym: " + symbol.fullNameString + " target: " + target.fullNameString)
+ assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
- log("implementation: " + tree1)
+ if (settings.debug.value) log("implementation: " + tree1)
val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
case NormalizedMember(target) =>
- log("normalized member " + symbol + " of " + target)
-
- if (conflicting(typeEnv(symbol))) {
- val targs = makeTypeArguments(symbol, target)
- log("targs: " + targs)
- val call =
- forwardCall(tree.pos,
- TypeApply(
- Select(This(symbol.owner), target),
- targs map TypeTree),
- vparamss)
- log("call: " + call)
- localTyper.typed(
- treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt,
- maybeCastTo(symbol.info.finalResultType,
- target.info.subst(target.info.typeParams, targs).finalResultType,
- call)))
-
-/* copy.DefDef(tree, mods, name, tparams, vparamss, tpt,
- typed(Apply(gen.mkAttributedRef(definitions.Predef_error),
- List(Literal("boom! you stepped on a bug. This method should never be called.")))))*/
+ if (target.isDeferred || conflicting(typeEnv(symbol))) {
+ treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt,
+ localTyper.typed(
+ Apply(gen.mkAttributedRef(definitions.Predef_error),
+ List(Literal("boom! you stepped on a bug. This method should never be called.")))))
} else {
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
- log("implementation: " + tree1)
+ if (settings.debug.value) log("implementation: " + tree1)
val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
}
+ case SpecialOverride(target) =>
+ assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
+ if (settings.debug.value) log("moving implementation: " + body(target))
+ // we have an rhs, specialize it
+ val tree1 = addBody(ddef, target)
+ (new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
+ if (settings.debug.value)
+ println("changed owners, now: " + tree1)
+ val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
+ treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
+
+
case SpecialOverload(original, env) =>
- log("completing specialized " + symbol.fullNameString + " calling " + original)
+ if (settings.debug.value) log("completing specialized " + symbol.fullName + " calling " + original)
val t = DefDef(symbol, { vparamss =>
val fun = Apply(Select(This(symbol.owner), original),
makeArguments(original, vparamss.head))
- maybeCastTo(symbol.owner.info.memberType(symbol).finalResultType,
- symbol.owner.info.memberType(original).finalResultType,
- fun)
+ gen.maybeMkAsInstanceOf(fun,
+ symbol.owner.thisType.memberType(symbol).finalResultType,
+ symbol.owner.thisType.memberType(original).finalResultType)
})
- log("created " + t)
+ if (settings.debug.value) log("created " + t)
localTyper.typed(t)
case fwd @ Forward(_) =>
val rhs1 = forwardCall(tree.pos, gen.mkAttributedRef(symbol.owner.thisType, fwd.target), vparamss)
- log("completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
+ if (settings.debug.value)
+ log("completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
localTyper.typed(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs1))
case SpecializedAccessor(target) =>
@@ -1043,16 +1227,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
localTyper.typed(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs1))
}
- case ValDef(mods, name, tpt, rhs) if symbol.hasFlag(SPECIALIZED) =>
+ case ValDef(mods, name, tpt, rhs) if symbol.hasFlag(SPECIALIZED) && !symbol.hasFlag(PARAMACCESSOR) =>
assert(body.isDefinedAt(symbol.alias))
val tree1 = treeCopy.ValDef(tree, mods, name, tpt, body(symbol.alias).duplicate)
- log("now typing: " + tree1 + " in " + tree.symbol.owner.fullNameString)
+ if (settings.debug.value) log("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
val d = new Duplicator
- d.retyped(localTyper.context1.asInstanceOf[d.Context],
+ val tree2 = d.retyped(localTyper.context1.asInstanceOf[d.Context],
tree1,
symbol.alias.enclClass,
symbol.enclClass,
typeEnv(symbol.alias) ++ typeEnv(tree.symbol))
+ val ValDef(mods1, name1, tpt1, rhs1) = tree2
+ treeCopy.ValDef(tree1, mods1, name1, tpt1, transform(rhs1))
+
+// val tree1 =
+// treeCopy.ValDef(tree, mods, name, tpt,
+// localTyper.typed(
+// Apply(Select(Super(currentClass, nme.EMPTY), symbol.alias.getter(symbol.alias.owner)),
+// List())))
+// if (settings.debug.value) log("replaced ValDef: " + tree1 + " in " + tree.symbol.owner.fullName)
+// tree1
+
+ case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
+ if (sup.symbol.info.parents != atPhase(phase.prev)(sup.symbol.info.parents)) =>
+
+ def parents = sup.symbol.info.parents
+ if (settings.debug.value) log(tree + " parents changed from: " + atPhase(phase.prev)(parents) + " to: " + parents)
+
+ val res = localTyper.typed(
+ Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
+ if (settings.debug.value) log("retyping call to super, from: " + symbol + " to " + res.symbol)
+ res
case _ =>
super.transform(tree)
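
The Template and DefDef cases above rewrite the parents and constructors of classes touched by specialization. As a rough, user-level illustration (the generated member names below are only indicative, not taken from this patch):

    class Box[@specialized T](val value: T) {
      def get: T = value
    }
    // Sketch of the effect: the phase synthesizes subclasses along the lines of
    // Box$mcI$sp extends Box[Int], overriding `get` with a primitive-typed body and
    // giving the specialized primary constructor a forwarding call (cf. forwardCtorCall).
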
@@ -1065,12 +1270,40 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
(tparams1, tparams map (_.symbol))
}
- private def duplicateBody(tree: DefDef, target: Symbol): Tree = {
+
+ private def duplicateBody(tree: DefDef, source: Symbol) = {
+ val symbol = tree.symbol
+ val meth = addBody(tree, source)
+ if (settings.debug.value) log("now typing: " + meth + " in " + symbol.owner.fullName)
+ val d = new Duplicator
+ try {
+ d.retyped(localTyper.context1.asInstanceOf[d.Context],
+ meth,
+ source.enclClass,
+ symbol.enclClass,
+ typeEnv(source) ++ typeEnv(symbol))
+ } catch {
+ case e =>
+ println("error compiling %s [%s]".format(unit, tree.pos))
+ throw e
+ }
+ }
+
+
+ /** Put the body of 'source' as the right hand side of the method 'tree'.
+ * The destination method gets fresh symbols for type and value parameters,
+ * and the body is updated to refer to the new symbols, with owners adjusted accordingly.
+ * However, if the same source tree is used in more than one place, full re-typing
+ * is necessary. @see method duplicateBody
+ */
+ private def addBody(tree: DefDef, source: Symbol): DefDef = {
val symbol = tree.symbol
- log("specializing body of" + symbol.fullNameString + ": " + symbol.info)
+ if (settings.debug.value) log("specializing body of " + symbol.fullName + ": " + symbol.info)
val DefDef(mods, name, tparams, vparamss, tpt, _) = tree
- val (_, origtparams) = splitParams(target.typeParams)
- log("substituting " + origtparams + " for " + symbol.typeParams)
+// val (_, origtparams) = splitParams(source.typeParams)
+ val boundTvars = typeEnv(symbol).keySet
+ val origtparams = source.typeParams.filter(!boundTvars(_))
+ if (settings.debug.value) log("substituting " + origtparams + " for " + symbol.typeParams)
// skolemize type parameters
val (oldtparams, newtparams) = reskolemize(tparams)
@@ -1080,86 +1313,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
ValDef(param.cloneSymbol(symbol).setInfo(param.info.substSym(oldtparams, newtparams)))
})
- // replace value and type paremeters of the old method with the new ones
+ // replace value and type parameters of the old method with the new ones
val symSubstituter = new ImplementationAdapter(
- parameters(target).flatten ::: origtparams,
- vparamss1.flatten.map(_.symbol) ::: newtparams)
- val adapter = new AdaptSpecializedValues
- val tmp = symSubstituter(adapter(body(target).duplicate))
+ parameters(source).flatten ::: origtparams,
+ vparamss1.flatten.map(_.symbol) ::: newtparams,
+ source.enclClass,
+ false) // don't make private fields public
+ val tmp = symSubstituter(body(source).duplicate)
tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
- val meth = treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
-
- log("now typing: " + meth + " in " + symbol.owner.fullNameString)
- val d = new Duplicator
- d.retyped(localTyper.context1.asInstanceOf[d.Context],
- meth,
- target.enclClass,
- symbol.enclClass,
- typeEnv(target) ++ typeEnv(symbol))
- }
-
- /** A tree symbol substituter that substitutes on type skolems.
- * If a type parameter is a skolem, it looks for the original
- * symbol in the 'from' and maps it to the corresponding new
- * symbol. The new symbol should probably be a type skolem as
- * well (not enforced).
- *
- * All private members are made protected in order to be accessible from
- * specialized classes.
- */
- class ImplementationAdapter(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
- override val symSubst = new SubstSymMap(from, to) {
- override def matches(sym1: Symbol, sym2: Symbol) =
- if (sym2.isTypeSkolem) sym2.deSkolemize eq sym1
- else sym1 eq sym2
- }
-
- /** All private members that are referenced are made protected,
- * in order to be accessible from specialized subclasses.
- */
- override def traverse(tree: Tree): Unit = tree match {
- case Select(qual, name) =>
- if (tree.symbol.hasFlag(PRIVATE | PROTECTED)) {
- log("changing private flag of " + tree.symbol)
-// tree.symbol.resetFlag(PRIVATE).setFlag(PROTECTED)
- tree.symbol.resetFlag(PRIVATE | PROTECTED)
- }
- super.traverse(tree)
-
- case _ =>
- super.traverse(tree)
- }
- }
-
- /** Does the given tree need a cast to a type parameter's upper bound?
- * A cast is needed for values of type A, where A is a specialized type
- * variable with a non-trivial upper bound. When A is specialized, its
- * specialization may not satisfy the upper bound. We generate casts to
- * be able to type check code. Such methods will never be called, as they
- * are not visible to the user. The compiler will insert such calls only when
- * the bounds are satisfied.
- */
- private class AdaptSpecializedValues extends Transformer {
- private def needsCast(tree: Tree): Boolean = {
- val sym = tree.tpe.typeSymbol
- (sym.isTypeParameterOrSkolem
- && sym.hasAnnotation(SpecializedClass)
- && sym.info.bounds.hi != definitions.AnyClass.tpe
- /*&& !(tree.tpe <:< sym.info.bounds.hi)*/)
- }
-
- override def transform(tree: Tree): Tree = {
- val tree1 = super.transform(tree)
- if (needsCast(tree1)) {
- log("inserting cast for " + tree1 + " tpe: " + tree1.tpe)
- val tree2 = gen.mkAsInstanceOf(tree1, tree1.tpe.typeSymbol.info.bounds.hi)
- log(" casted to: " + tree2)
- tree2
- } else
- tree1
- }
- def apply(t: Tree): Tree = transform(t)
+ treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
}
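
duplicateBody/addBody above copy the body of the generic method into its specialized counterpart: value and type parameters get fresh (skolemized) symbols, owners are adjusted, and the result is re-typed by the Duplicator in the specialized environment. A hedged sketch of the source-level outcome (the synthetic name is illustrative only):

    class Vec[@specialized T](val data: Array[T]) {
      def head: T = data(0)
    }
    // Conceptually, the specialized override receives a copy of the body,
    // re-typed with T bound to the primitive type, e.g.:
    //   def head$mcI$sp(): Int = data(0)
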
def warn(clazz: Symbol)(pos: Position, err: String) =
@@ -1171,47 +1334,81 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def makeSpecializedMembers(cls: Symbol): List[Tree] = {
// add special overrides first
- if (!cls.hasFlag(SPECIALIZED))
- for (m <- specialOverrides(cls)) cls.info.decls.enter(m)
+// if (!cls.hasFlag(SPECIALIZED))
+// for (m <- specialOverrides(cls)) cls.info.decls.enter(m)
val mbrs = new mutable.ListBuffer[Tree]
+ var hasSpecializedFields = false
for (m <- cls.info.decls.toList
if m.hasFlag(SPECIALIZED)
&& (m.sourceFile ne null)
&& satisfiable(typeEnv(m), warn(cls))) {
- log("creating tree for " + m.fullNameString)
+ log("creating tree for " + m.fullName)
if (m.isMethod) {
+ if (info(m).target.isGetterOrSetter) hasSpecializedFields = true
if (m.isClassConstructor) {
val origParamss = parameters(info(m).target)
- assert(origParamss.length == 1) // we are after uncurry
val vparams =
for ((tp, sym) <- m.info.paramTypes zip origParamss(0))
yield m.newValue(sym.pos, specializedName(sym, typeEnv(cls)))
.setInfo(tp)
.setFlag(sym.flags)
+
// param accessors for private members (the others are inherited from the generic class)
- for (param <- vparams if cls.info.nonPrivateMember(param.name) == NoSymbol;
- val acc = param.cloneSymbol(cls).setFlag(PARAMACCESSOR | PRIVATE)) {
- log("param accessor for " + acc.fullNameString)
- cls.info.decls.enter(acc)
- mbrs += ValDef(acc, EmptyTree).setType(NoType).setPos(m.pos)
- }
+ if (m.isPrimaryConstructor)
+ for (param <- vparams if cls.info.nonPrivateMember(param.name) == NoSymbol;
+ val acc = param.cloneSymbol(cls).setFlag(PARAMACCESSOR | PRIVATE)) {
+ cls.info.decls.enter(acc)
+ mbrs += ValDef(acc, EmptyTree).setType(NoType).setPos(m.pos)
+ }
+
// ctor
- mbrs += DefDef(m, Modifiers(m.flags), List(vparams) map (_ map ValDef), EmptyTree)
- } else
- mbrs += DefDef(m, { paramss => EmptyTree })
+ mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), List(vparams) map (_ map ValDef), EmptyTree))
+ } else {
+ mbrs += atPos(m.pos)(DefDef(m, { paramss => EmptyTree }))
+ }
} else if (m.isValue) {
mbrs += ValDef(m, EmptyTree).setType(NoType).setPos(m.pos)
} else if (m.isClass) {
// mbrs +=
// ClassDef(m, Template(m.info.parents map TypeTree, emptyValDef, List())
// .setSymbol(m.newLocalDummy(m.pos)))
-// log("created synthetic class: " + m.fullNameString)
+// log("created synthetic class: " + m.fullName)
+ }
+ }
+ if (hasSpecializedFields) {
+ import definitions.BooleanClass
+
+ val isSpecializedInstance = cls.hasFlag(SPECIALIZED) || cls.info.parents.exists(_.typeSymbol.hasFlag(SPECIALIZED))
+ val sym = cls.newMethod(nme.SPECIALIZED_INSTANCE, cls.pos)
+ .setInfo(MethodType(Nil, BooleanClass.tpe))
+ cls.info.decls.enter(sym)
+ mbrs += atPos(sym.pos) {
+ DefDef(sym, Literal(isSpecializedInstance).setType(BooleanClass.tpe)).setType(NoType)
}
}
mbrs.toList
}
+
+ /** Create specialized class definitions */
+ def implSpecClasses(trees: List[Tree]): List[Tree] = {
+ val buf = new mutable.ListBuffer[Tree]
+ for (tree <- trees)
+ tree match {
+ case ClassDef(_, _, _, impl) =>
+ tree.symbol.info // force specialization
+ for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) {
+ val parents = specCls.info.parents.map(TypeTree)
+ buf +=
+ ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+ .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
+ log("created synthetic class: " + specCls + " of " + sym1 + " in env: " + env)
+ }
+ case _ =>
+ }
+ buf.toList
+ }
}
private def forwardCall(pos: util.Position, receiver: Tree, paramss: List[List[ValDef]]): Tree = {
@@ -1219,23 +1416,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
atPos(pos) { (receiver /: argss) (Apply) }
}
- /** Create specialized class definitions */
- def implSpecClasses(trees: List[Tree]): List[Tree] = {
- val buf = new mutable.ListBuffer[Tree]
- for (tree <- trees)
- tree match {
- case ClassDef(_, _, _, impl) =>
- tree.symbol.info // force specialization
- for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) {
- buf +=
- ClassDef(specCls, Template(specCls.info.parents map TypeTree, emptyValDef, List())
- .setSymbol(specCls.newLocalDummy(sym1.pos)))
- log("created synthetic class: " + specCls + " of " + sym1 + " in env: " + env)
- }
- case _ =>
- }
- log(buf)
- buf.toList
+ private def forwardCtorCall(pos: util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
+ val argss = paramss map (_ map (x =>
+ if (x.name.endsWith("$sp") && clazz.info.member(nme.originalName(x.name)).isPublic)
+ gen.mkAsInstanceOf(Literal(Constant(null)), x.symbol.tpe)
+ else
+ Ident(x.symbol))
+ )
+ atPos(pos) { (receiver /: argss) (Apply) }
}
/** Concrete methods that use a specialized type, or override such methods. */
@@ -1254,21 +1442,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
for (tp <- owner.info.memberType(target).typeParams)
yield
if (!env.isDefinedAt(tp))
- typeRef(NoPrefix, from.info.typeParams.find(_ == tp.name).get, Nil)
+ typeRef(NoPrefix, from.info.typeParams.find(_.name == tp.name).get, Nil)
else if ((env(tp) <:< tp.info.bounds.hi) && (tp.info.bounds.lo <:< env(tp)))
env(tp)
else tp.info.bounds.hi
}
- /** Cast `tree' to 'pt', unless tpe is a subtype of pt, or pt is Unit. */
- def maybeCastTo(pt: Type, tpe: Type, tree: Tree): Tree =
- if ((pt == definitions.UnitClass.tpe) || (tpe <:< pt)) {
- log("no need to cast from " + tpe + " to " + pt)
- tree
- } else
- gen.mkAsInstanceOf(tree, pt)
-
-
private def makeArguments(fun: Symbol, vparams: List[Symbol]): List[Tree] = {
def needsCast(tp1: Type, tp2: Type): Boolean =
!(tp1 <:< tp2)
@@ -1296,8 +1475,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
+ informProgress("specializing " + unit)
override def transform(tree: Tree) =
- atPhase(phase.next) {
+ if (settings.nospecialization.value) tree
+ else atPhase(phase.next) {
val res = specializeCalls(unit).transform(tree)
res
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 93cb5baefa..9b54dd9428 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -1,8 +1,7 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Iulian Dragos
*/
-// $Id$
package scala.tools.nsc
package transform
@@ -39,9 +38,6 @@ abstract class TailCalls extends Transform
}
}
- /** The @tailrec annotation indicates TCO is mandatory */
- private def tailrecRequired(defdef: DefDef) = defdef.symbol hasAnnotation TailrecClass
-
/**
* A Tail Call Transformer
*
@@ -72,7 +68,7 @@ abstract class TailCalls extends Transform
* are optimized. Since 'this' is not a local variable, a dummy local val
* is added and used as a label parameter. The backend knows to load
* the corresponding argument in the 'this' (local at index 0). This dummy local
- * is never used and should be cleand up by dead code elmination (when enabled).
+ * is never used and should be cleaned up by dead code elimination (when enabled).
* </p>
* <p>
* This phase has been moved before pattern matching to catch more
@@ -105,6 +101,9 @@ abstract class TailCalls extends Transform
/** Tells whether we are in a (possible) tail position */
var tailPos = false
+ /** The reason this method could not be optimized. */
+ var tailrecFailReason = "it contains a recursive call not in tail position"
+
/** Is the label accessed? */
var accessed = false
@@ -138,7 +137,8 @@ abstract class TailCalls extends Transform
t
}
- private var ctx: Context = new Context()
+ private var ctx: Context = new Context()
+ private def enclosingType = ctx.currentMethod.enclClass.typeOfThis
/** Rewrite this tree to contain no tail recursive calls */
def transform(tree: Tree, nctx: Context): Tree = {
@@ -150,28 +150,61 @@ abstract class TailCalls extends Transform
}
override def transform(tree: Tree): Tree = {
+ /** A possibly polymorphic apply to be considered for tail call transformation.
+ */
+ def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
+ def isRecursiveCall = ctx.currentMethod eq fun.symbol
+ def isMandatory = ctx.currentMethod hasAnnotation TailrecClass
+ def isEligible = ctx.currentMethod.isEffectivelyFinal
+ def transformArgs = transformTrees(args, mkContext(ctx, false))
+ def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
+ def defaultTree = treeCopy.Apply(tree, target, transformArgs)
+
+ def sameTypeOfThis(receiver: Tree) =
+ receiver.tpe.widen =:= enclosingType.widen
+
+ /** Records failure reason in Context for reporting.
+ */
+ def cannotRewrite(reason: String) = {
+ if (isMandatory)
+ ctx.tailrecFailReason = reason
+
+ defaultTree
+ }
+ def rewriteTailCall(receiver: Tree, otherArgs: List[Tree]): Tree = {
+ log("Rewriting tail recursive method call at: " + fun.pos)
+
+ ctx.accessed = true
+ typed { atPos(fun.pos)(Apply(Ident(ctx.label), receiver :: otherArgs)) }
+ }
+
+ if (!isRecursiveCall) defaultTree
+ else if (!isEligible) cannotRewrite("it is neither private nor final so can be overridden")
+ else if (!ctx.tailPos) cannotRewrite("it contains a recursive call not in tail position")
+ else if (!matchesTypeArgs) cannotRewrite("it is called recursively with different type arguments")
+ else fun match {
+ case Select(_, _) if forMSIL => cannotRewrite("it cannot be optimized on MSIL")
+ case Select(qual, _) if !sameTypeOfThis(qual) => cannotRewrite("it changes type of 'this' on a polymorphic recursive call")
+ case Select(qual, _) => rewriteTailCall(qual, transformArgs)
+ case _ => rewriteTailCall(This(currentClass), transformArgs)
+ }
+ }
+
tree match {
case dd @ DefDef(mods, name, tparams, vparams, tpt, rhs) =>
log("Entering DefDef: " + name)
- var isTransformed = false
val newCtx = mkContext(ctx)
newCtx.currentMethod = tree.symbol
newCtx.makeLabel()
- val currentClassParam = tree.symbol.newSyntheticValueParam(currentClass.tpe)
+ val currentClassParam = tree.symbol.newSyntheticValueParam(currentClass.typeOfThis)
newCtx.label.setInfo(MethodType(currentClassParam :: tree.symbol.tpe.params, tree.symbol.tpe.finalResultType))
newCtx.tailPos = true
- val isEligible = newCtx.currentMethod.isFinal || (newCtx.currentMethod.enclClass hasFlag Flags.MODULE)
- // If -Ytailrecommend is given, we speculatively try transforming ineligible methods and
- // report where we would have been successful.
- val recommend = settings.Ytailrec.value
- val savedFlags: Option[Long] = if (recommend) Some(newCtx.currentMethod.flags) else None
-
- if (isEligible || recommend) {
- if (recommend)
- newCtx.currentMethod.flags |= Flags.FINAL
+ val isEligible = newCtx.currentMethod.isEffectivelyFinal
+ val isMandatory = dd.symbol.hasAnnotation(TailrecClass) && !forMSIL // @tailrec annotation indicates mandatory transformation
+ if (isEligible) {
newCtx.tparams = Nil
log(" Considering " + name + " for tailcalls")
tree.symbol.tpe match {
@@ -185,34 +218,27 @@ abstract class TailCalls extends Transform
val t1 = treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt, {
val transformed = transform(rhs, newCtx)
- savedFlags foreach (newCtx.currentMethod.flags = _)
transformed match {
case newRHS if isEligible && newCtx.accessed =>
log("Rewrote def " + newCtx.currentMethod)
- isTransformed = true
val newThis = newCtx.currentMethod
. newValue (tree.pos, nme.THIS)
- . setInfo (currentClass.tpe)
+ . setInfo (currentClass.typeOfThis)
. setFlag (Flags.SYNTHETIC)
typed(atPos(tree.pos)(Block(
List(ValDef(newThis, This(currentClass))),
LabelDef(newCtx.label, newThis :: (vparams.flatten map (_.symbol)), newRHS)
)))
- case _ if recommend =>
- if (newCtx.accessed)
- unit.warning(dd.pos, "method is tailrecommended")
- // transform with the original flags restored
- transform(rhs, newCtx)
+ case rhs =>
+ if (isMandatory)
+ unit.error(dd.pos, "could not optimize @tailrec annotated method: " + newCtx.tailrecFailReason)
- case rhs => rhs
+ rhs
}
})
- if (!forMSIL && !isTransformed && tailrecRequired(dd))
- unit.error(dd.pos, "could not optimize @tailrec annotated method")
-
log("Leaving DefDef: " + name)
t1
@@ -269,50 +295,16 @@ abstract class TailCalls extends Transform
case Typed(expr, tpt) => super.transform(tree)
case Apply(tapply @ TypeApply(fun, targs), vargs) =>
- lazy val defaultTree = treeCopy.Apply(tree, tapply, transformTrees(vargs, mkContext(ctx, false)))
- if ( ctx.currentMethod.isFinal &&
- ctx.tailPos &&
- isSameTypes(ctx.tparams, targs map (_.tpe.typeSymbol)) &&
- isRecursiveCall(fun)) {
- fun match {
- case Select(receiver, _) =>
- val recTpe = receiver.tpe.widen
- val enclTpe = ctx.currentMethod.enclClass.typeOfThis
- // make sure the type of 'this' doesn't change through this polymorphic recursive call
- if (!forMSIL &&
- (receiver.tpe.typeParams.isEmpty ||
- (receiver.tpe.widen == ctx.currentMethod.enclClass.typeOfThis)))
- rewriteTailCall(fun, receiver :: transformTrees(vargs, mkContext(ctx, false)))
- else
- defaultTree
- case _ => rewriteTailCall(fun, This(currentClass) :: transformTrees(vargs, mkContext(ctx, false)))
- }
- } else
- defaultTree
+ rewriteApply(tapply, fun, targs, vargs)
case TypeApply(fun, args) =>
super.transform(tree)
- case Apply(fun, args) if (fun.symbol == definitions.Boolean_or ||
- fun.symbol == definitions.Boolean_and) =>
- treeCopy.Apply(tree, fun, transformTrees(args))
-
case Apply(fun, args) =>
- lazy val defaultTree = treeCopy.Apply(tree, fun, transformTrees(args, mkContext(ctx, false)))
- if (ctx.currentMethod.isFinal &&
- ctx.tailPos &&
- isRecursiveCall(fun)) {
- fun match {
- case Select(receiver, _) =>
- if (!forMSIL)
- rewriteTailCall(fun, receiver :: transformTrees(args, mkContext(ctx, false)))
- else
- defaultTree
- case _ => rewriteTailCall(fun, This(currentClass) :: transformTrees(args, mkContext(ctx, false)))
- }
- } else
- defaultTree
-
+ if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
+ treeCopy.Apply(tree, fun, transformTrees(args))
+ else
+ rewriteApply(fun, fun, Nil, args)
case Super(qual, mix) =>
tree
@@ -333,33 +325,5 @@ abstract class TailCalls extends Transform
def transformTrees(trees: List[Tree], nctx: Context): List[Tree] =
trees map ((tree) => transform(tree, nctx))
-
- private def rewriteTailCall(fun: Tree, args: List[Tree]): Tree = {
- log("Rewriting tail recursive method call at: " +
- (fun.pos))
- ctx.accessed = true
- //println("fun: " + fun + " args: " + args)
- val t = atPos(fun.pos)(Apply(Ident(ctx.label), args))
- // println("TAIL: "+t)
- typed(t)
- }
-
- private def isSameTypes(ts1: List[Symbol], ts2: List[Symbol]): Boolean = {
- def isSameType(t1: Symbol, t2: Symbol) = {
- t1 == t2
- }
- List.forall2(ts1, ts2)(isSameType)
- }
-
- /** Returns <code>true</code> if the fun tree refers to the same method as
- * the one saved in <code>ctx</code>.
- *
- * @param fun the expression that is applied
- * @return <code>true</code> if the tree symbol refers to the innermost
- * enclosing method
- */
- private def isRecursiveCall(fun: Tree): Boolean =
- (fun.symbol eq ctx.currentMethod)
}
-
}
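
The rewritten TailCalls transformer above turns eligible self-recursive calls into jumps to a synthetic label and, for @tailrec-annotated methods, reports the recorded failure reason when it cannot. A minimal sketch of both outcomes (illustrative code, not part of this patch):

    import scala.annotation.tailrec

    object TailDemo {
      // Optimizable: the recursive call is in tail position and the method cannot be overridden.
      @tailrec def length(xs: List[Int], acc: Int = 0): Int =
        if (xs.isEmpty) acc else length(xs.tail, acc + 1)

      // Would be rejected if annotated with @tailrec: the recursive call is not in tail
      // position, so the transformer reports "it contains a recursive call not in tail position".
      def sum(xs: List[Int]): Int =
        if (xs.isEmpty) 0 else xs.head + sum(xs.tail)
    }
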
diff --git a/src/compiler/scala/tools/nsc/transform/Transform.scala b/src/compiler/scala/tools/nsc/transform/Transform.scala
index e3a8dd0659..2fc3eee59c 100644
--- a/src/compiler/scala/tools/nsc/transform/Transform.scala
+++ b/src/compiler/scala/tools/nsc/transform/Transform.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index ac72383195..de5355de61 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -1,13 +1,11 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package transform
-import util.Position
import scala.collection.mutable.{Map, HashMap}
/** A base class for transforms.
@@ -19,19 +17,22 @@ trait TypingTransformers {
import global._
abstract class TypingTransformer(unit: CompilationUnit) extends Transformer {
- var localTyper: analyzer.Typer = analyzer.newTyper(
- analyzer.rootContext(unit, EmptyTree, true))
+ var localTyper: analyzer.Typer =
+ if (phase.erasedTypes)
+ erasure.newTyper(erasure.rootContext(unit, EmptyTree, true)).asInstanceOf[analyzer.Typer]
+ else
+ analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true))
protected var curTree: Tree = _
protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
/** a typer for each enclosing class */
- var typers: Map[Symbol, analyzer.Typer] = new HashMap
+ val typers: Map[Symbol, analyzer.Typer] = new HashMap
override def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
-// println("ttransformer atOwner: " + owner + " isPackage? " + owner.isPackage)
+// println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
typers += Pair(owner, localTyper)
val result = super.atOwner(owner)(trans)
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index eec523a2b8..8e3722dd99 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -1,15 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author
*/
-// $Id$
package scala.tools.nsc
package transform
import symtab.Flags._
import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.util.Position
/*<export>*/
/** - uncurry all symbol and tree types (@see UnCurryPhase)
@@ -47,9 +45,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
// ------ Type transformation --------------------------------------------------------
-//@MAT: uncurry and uncurryType fully expand type aliases in their input and output
-// note: don't normalize higher-kined types -- @M TODO: maybe split those uses of normalize?
-// OTOH, should be a problem as calls to normalize only occur on types with kind * in principle (in well-typed programs)
+// uncurry and uncurryType expand type aliases
private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp
private def isUnboundedGeneric(tp: Type) = tp match {
@@ -63,14 +59,14 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
tp match {
case MethodType(params, MethodType(params1, restpe)) =>
apply(MethodType(params ::: params1, restpe))
- case MethodType(formals, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
- assert(false, "unexpected curried method types with intervening exitential")
+ case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
+ assert(false, "unexpected curried method types with intervening existential")
tp0
- case mt: ImplicitMethodType =>
- apply(MethodType(mt.params, mt.resultType))
- case PolyType(List(), restpe) =>
+ case MethodType(h :: t, restpe) if h.isImplicit =>
+ apply(MethodType(h.cloneSymbol.resetFlag(IMPLICIT) :: t, restpe))
+ case PolyType(List(), restpe) => // nullary method type
apply(MethodType(List(), restpe))
- case PolyType(tparams, restpe) =>
+ case PolyType(tparams, restpe) => // polymorphic nullary method type, since it didn't occur in a higher-kinded position
PolyType(tparams, apply(MethodType(List(), restpe)))
case TypeRef(pre, ByNameParamClass, List(arg)) =>
apply(functionType(List(), arg))
@@ -83,6 +79,29 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
expandAlias(mapOver(tp))
}
}
+
+//@M TODO: better fix for the gross hack that conflates polymorphic nullary method types with type functions
+// `[tpars] tref` (PolyType(tpars, tref)) could uncurry to either:
+// - `[tpars]() tref` (PolyType(tpars, MethodType(List(), tref)))
+// a nullary method type uncurries to a method with an empty argument list
+// - `[tpars] tref` (PolyType(tpars, tref))
+// a proper type function -- see mapOverArgs: can only occur in args of TypeRef (right?)
+// the issue comes up when a partial type application gets normalised to a polytype, like `[A] Function1[X, A]`;
+// we should not apply the uncurry transform to such a type
+// see #2594 for an example
+
+ // decide whether PolyType represents a nullary method type (only if type has kind *)
+ // for higher-kinded types, leave PolyType intact
+ override def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
+ map2Conserve(args, tparams) { (arg, tparam) =>
+ arg match {
+ // is this a higher-kinded position? (TODO: confirm this is the only case)
+ case PolyType(tparams, restpe) if tparam.typeParams.nonEmpty => // higher-kinded type param
+ PolyType(tparams, apply(restpe)) // could not be a nullary method type
+ case _ =>
+ this(arg)
+ }
+ }
}
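
The comment block above distinguishes polymorphic nullary method types from type functions, which PolyType conflates. A small sketch of the two cases (illustrative names):

    object UncurryDemo {
      // A polymorphic nullary method type: uncurry gives it an empty parameter list,
      // i.e. conceptually `def empty[A](): List[A]`.
      def empty[A]: List[A] = Nil

      // A type constructor used as a higher-kinded argument: its normalized form is a
      // PolyType that must stay a type function and must not be uncurried (cf. #2594).
      trait Functor[F[_]] { def map[A, B](fa: F[A])(f: A => B): F[B] }
      type IntReader[A] = Int => A
      def demo(f: Functor[IntReader]) = f
    }
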
private val uncurryType = new TypeMap {
@@ -154,7 +173,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
throw ex
}
- /* Is tree a reference `x' to a call by name parameter that neeeds to be converted to
+ /* Is tree a reference `x' to a call by name parameter that needs to be converted to
* x.apply()? Note that this is not the case if `x' is used as an argument to another
* call by name parameter.
*/
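
The comment above concerns references to by-name parameters: after uncurry they are Function0 values, so plain references must become applications (unless they are passed on to another by-name parameter). A hedged sketch:

    object ByNameDemo {
      def twice(x: => Int): Int = x + x
      // conceptually after uncurry: def twice(x: () => Int): Int = x.apply() + x.apply()
    }
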
@@ -168,8 +187,8 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
* additional parameter sections of a case class are skipped.
*/
def uncurryTreeType(tp: Type): Type = tp match {
- case MethodType(formals, MethodType(formals1, restpe)) if (inPattern) =>
- uncurryTreeType(MethodType(formals, restpe))
+ case MethodType(params, MethodType(params1, restpe)) if (inPattern) =>
+ uncurryTreeType(MethodType(params, restpe))
case _ =>
uncurry(tp)
}
@@ -178,7 +197,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
/** The type of a non-local return expression with given argument type */
private def nonLocalReturnExceptionType(argtype: Type) =
- appliedType(NonLocalReturnExceptionClass.typeConstructor, List(argtype))
+ appliedType(NonLocalReturnControlClass.typeConstructor, List(argtype))
/** A hashmap from method symbols to non-local return keys */
private val nonLocalReturnKeys = new HashMap[Symbol, Symbol]
@@ -196,9 +215,9 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
/** Generate a non-local return throw with given return expression from given method.
* I.e. for the method's non-local return key, generate:
*
- * throw new NonLocalReturnException(key, expr)
+ * throw new NonLocalReturnControl(key, expr)
* todo: maybe clone a pre-existing exception instead?
- * (but what to do about excaptions that miss their targets?)
+ * (but what to do about exceptions that miss their targets?)
*/
private def nonLocalReturnThrow(expr: Tree, meth: Symbol) =
localTyper.typed {
@@ -215,7 +234,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
* try {
* body
* } catch {
- * case ex: NonLocalReturnException[_] =>
+ * case ex: NonLocalReturnControl[_] =>
* if (ex.key().eq(key)) ex.value()
* else throw ex
* }
@@ -227,7 +246,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
val ex = meth.newValue(body.pos, nme.ex) setInfo extpe
val pat = Bind(ex,
Typed(Ident(nme.WILDCARD),
- AppliedTypeTree(Ident(NonLocalReturnExceptionClass),
+ AppliedTypeTree(Ident(NonLocalReturnControlClass),
List(Bind(nme.WILDCARD.toTypeName,
EmptyTree)))))
val rhs =
@@ -276,11 +295,11 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
* new $anon()
*
* transform a function node (x => body) of type PartialFunction[T, R] where
- * body = x match { case P_i if G_i => E_i }_i=1..n
+ * body = expr match { case P_i if G_i => E_i }_i=1..n
* to:
*
* class $anon() extends Object() with PartialFunction[T, R] with ScalaObject {
- * def apply(x: T): R = (x: @unchecked) match {
+ * def apply(x: T): R = (expr: @unchecked) match {
* { case P_i if G_i => E_i }_i=1..n
* def isDefinedAt(x: T): boolean = (x: @unchecked) match {
* case P_1 if G_1 => true
@@ -297,68 +316,63 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
*/
def transformFunction(fun: Function): Tree = {
val fun1 = deEta(fun)
+ def owner = fun.symbol.owner
+ def targs = fun.tpe.typeArgs
+ def isPartial = fun.tpe.typeSymbol == PartialFunctionClass
+
if (fun1 ne fun) fun1
else {
- val anonClass = fun.symbol.owner.newAnonymousFunctionClass(fun.pos)
- .setFlag(FINAL | SYNTHETIC | inConstructorFlag)
- val formals = fun.tpe.typeArgs.init
- val restpe = fun.tpe.typeArgs.last
- anonClass setInfo ClassInfoType(
- List(ObjectClass.tpe, fun.tpe, ScalaObjectClass.tpe), new Scope, anonClass);
- val applyMethod = anonClass.newMethod(fun.pos, nme.apply).setFlag(FINAL)
- applyMethod.setInfo(MethodType(applyMethod.newSyntheticValueParams(formals), restpe))
- anonClass.info.decls enter applyMethod;
- for (vparam <- fun.vparams) vparam.symbol.owner = applyMethod;
- new ChangeOwnerTraverser(fun.symbol, applyMethod).traverse(fun.body);
- def applyMethodDef(body: Tree) =
- DefDef(Modifiers(FINAL), nme.apply, List(), List(fun.vparams), TypeTree(restpe), body)
- .setSymbol(applyMethod)
-/*
- def toStringMethodDefs = fun match {
- case etaExpansion(_, fn, _) if (fn.hasSymbol) =>
- List(
- DefDef(Modifiers(FINAL | OVERRIDE), nme.toString_, List(), List(List()), TypeTree(StringClass.tpe),
- Literal(fn.symbol.name)))
- case _ =>
- List()
+ val (formals, restpe) = (targs.init, targs.last)
+ val anonClass = owner newAnonymousFunctionClass fun.pos setFlag (FINAL | SYNTHETIC | inConstructorFlag)
+ def parents =
+ if (isFunctionType(fun.tpe)) List(abstractFunctionForFunctionType(fun.tpe))
+ else List(ObjectClass.tpe, fun.tpe)
+
+ anonClass setInfo ClassInfoType(parents, new Scope, anonClass)
+ val applyMethod = anonClass.newMethod(fun.pos, nme.apply) setFlag FINAL
+ applyMethod setInfo MethodType(applyMethod newSyntheticValueParams formals, restpe)
+ anonClass.info.decls enter applyMethod
+
+ fun.vparams foreach (_.symbol.owner = applyMethod)
+ new ChangeOwnerTraverser(fun.symbol, applyMethod) traverse fun.body
+
+ def mkUnchecked(tree: Tree) = {
+ def newUnchecked(expr: Tree) = Annotated(New(gen.scalaDot(UncheckedClass.name), List(Nil)), expr)
+ tree match {
+ case Match(selector, cases) => atPos(tree.pos) { Match(newUnchecked(selector), cases) }
+ case _ => tree
+ }
}
-*/
- def mkUnchecked(tree: Tree) = tree match {
- case Match(selector, cases) =>
- atPos(tree.pos) {
- Match(
- Annotated(New(TypeTree(UncheckedClass.tpe), List(List())), selector),
- cases)
- }
- case _ =>
- tree
+
+ def applyMethodDef() = {
+ val body = if (isPartial) mkUnchecked(fun.body) else fun.body
+ DefDef(Modifiers(FINAL), nme.apply, Nil, List(fun.vparams), TypeTree(restpe), body) setSymbol applyMethod
}
- val members = {
- if (fun.tpe.typeSymbol == PartialFunctionClass) {
- val isDefinedAtMethod = anonClass.newMethod(fun.pos, nme.isDefinedAt).setFlag(FINAL)
- isDefinedAtMethod.setInfo(MethodType(isDefinedAtMethod.newSyntheticValueParams(formals),
- BooleanClass.tpe))
- anonClass.info.decls enter isDefinedAtMethod
- def idbody(idparam: Symbol) = fun.body match {
- case Match(_, cases) =>
- val substParam = new TreeSymSubstituter(List(fun.vparams.head.symbol), List(idparam));
- def transformCase(cdef: CaseDef): CaseDef =
- substParam(
- resetLocalAttrs(
- CaseDef(cdef.pat.duplicate, cdef.guard.duplicate, Literal(true))))
- if (cases exists treeInfo.isDefaultCase) Literal(true)
- else
- Match(
- Ident(idparam),
- (cases map transformCase) :::
- List(CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(false))))
- }
- List(applyMethodDef(mkUnchecked(fun.body)),
- DefDef(isDefinedAtMethod, mkUnchecked(idbody(isDefinedAtMethod.paramss.head.head))))
- } else {
- List(applyMethodDef(fun.body))
- }
- } /* ::: toStringMethodDefs */
+ def isDefinedAtMethodDef() = {
+ val m = anonClass.newMethod(fun.pos, nme.isDefinedAt) setFlag FINAL
+ m setInfo MethodType(m newSyntheticValueParams formals, BooleanClass.tpe)
+ anonClass.info.decls enter m
+
+ val Match(selector, cases) = fun.body
+ val vparam = fun.vparams.head.symbol
+ val idparam = m.paramss.head.head
+ val substParam = new TreeSymSubstituter(List(vparam), List(idparam))
+ def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t))
+
+ def transformCase(cdef: CaseDef): CaseDef =
+ substTree(CaseDef(cdef.pat.duplicate, cdef.guard.duplicate, Literal(true)))
+ def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(false))
+
+ DefDef(m, mkUnchecked(
+ if (cases exists treeInfo.isDefaultCase) Literal(true)
+ else Match(substTree(selector.duplicate), (cases map transformCase) ::: List(defaultCase))
+ ))
+ }
+
+ val members =
+ if (isPartial) List(applyMethodDef, isDefinedAtMethodDef)
+ else List(applyMethodDef)
+
localTyper.typed {
atPos(fun.pos) {
Block(
@@ -366,7 +380,6 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
Typed(
New(TypeTree(anonClass.tpe), List(List())),
TypeTree(fun.tpe)))
-
}
}
}
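
transformFunction above expands a partial-function literal into an anonymous class with an apply method (the original match, under @unchecked) and a synthesized isDefinedAt whose case bodies are replaced by true, plus a default case yielding false. Illustrative input (not from this patch):

    val pf: PartialFunction[Int, String] = {
      case 1 => "one"
      case 2 => "two"
    }
    // Sketch: pf.isDefinedAt(3) == false because the generated default case returns false.
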
@@ -387,13 +400,20 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
val predef = gen.mkAttributedRef(PredefModule)
val meth =
if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol))
- Select(predef, "wrapRefArray")
+ TypeApply(Select(predef, "wrapRefArray"), List(TypeTree(elemtp)))
else if (isValueClass(elemtp.typeSymbol))
Select(predef, "wrap"+elemtp.typeSymbol.name+"Array")
else
TypeApply(Select(predef, "genericWrapArray"), List(TypeTree(elemtp)))
- val adaptedTree = // need to cast to Array[elemtp], as arrays are not covariant
- gen.mkCast(tree, arrayType(elemtp))
+ val pt = arrayType(elemtp)
+ val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant
+ if (tree.tpe <:< pt) tree
+ else gen.mkCast(
+ if (elemtp.typeSymbol == AnyClass && isValueClass(tree.tpe.typeArgs.head.typeSymbol))
+ gen.mkRuntimeCall("toObjectArray", List(tree))
+ else
+ tree,
+ arrayType(elemtp))
Apply(meth, List(adaptedTree))
}
}
@@ -436,17 +456,19 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
if (isJava &&
suffix.tpe.typeSymbol == ArrayClass &&
isValueClass(suffix.tpe.typeArgs.head.typeSymbol) &&
- fun.tpe.paramTypes.last.typeSymbol == ArrayClass &&
- fun.tpe.paramTypes.last.typeArgs.head.typeSymbol == ObjectClass)
+ { val lastFormal2 = fun.tpe.params.last.tpe
+ lastFormal2.typeSymbol == ArrayClass &&
+ lastFormal2.typeArgs.head.typeSymbol == ObjectClass
+ })
suffix = localTyper.typedPos(pos) {
gen.mkRuntimeCall("toObjectArray", List(suffix))
}
}
- args.take(formals.length - 1) ::: List(suffix setType formals.last)
+ args.take(formals.length - 1) ::: List(suffix setType lastFormal)
case _ =>
args
}
- List.map2(formals, args1) { (formal, arg) =>
+ (formals, args1).zipped map { (formal, arg) =>
if (formal.typeSymbol != ByNameParamClass) {
arg
} else if (isByNameRef(arg)) {
@@ -461,6 +483,16 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
}
}
+ /** For removing calls to specially designated methods.
+ */
+ def elideIntoUnit(tree: Tree): Tree = Literal(()) setPos tree.pos setType UnitClass.tpe
+ def isElidable(tree: Tree) = {
+ val sym = tree.symbol
+ // XXX settings.noassertions.value temporarily retained to avoid
+ // breakage until a reasonable interface is settled upon.
+ sym != null && sym.elisionLevel.exists(x => x < settings.elidebelow.value || settings.noassertions.value)
+ }
+
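
isElidable above compares a symbol's elision level against the -Xelide-below setting (settings.elidebelow) and elideIntoUnit replaces matching calls with (). A usage sketch, assuming the standard scala.annotation.elidable API:

    import scala.annotation.elidable
    import scala.annotation.elidable._

    object LogDemo {
      @elidable(FINE) def debug(msg: String): Unit = println(msg)
      // Compiled with -Xelide-below CONFIG (or any level above FINE),
      // calls such as debug("hi") are elided to () by the code above.
    }
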
// ------ The tree transformers --------------------------------------------------------
def mainTransform(tree: Tree): Tree = {
@@ -565,21 +597,15 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
treeCopy.UnApply(tree, fn1, args1)
case Apply(fn, args) =>
- // XXX settings.noassertions.value temporarily retained to avoid
- // breakage until a reasonable interface is settled upon.
- def elideFunctionCall(sym: Symbol) =
- sym != null && sym.elisionLevel.exists(x => x < settings.elideLevel.value || settings.noassertions.value)
-
- if (elideFunctionCall(fn.symbol)) {
- Literal(()).setPos(tree.pos).setType(UnitClass.tpe)
- } else if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head)) {
+ if (isElidable(fn))
+ elideIntoUnit(tree)
+ else if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- } else {
+ else
withNeedLift(true) {
- val formals = fn.tpe.paramTypes;
+ val formals = fn.tpe.paramTypes
treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
}
- }
case Assign(Select(_, _), _) =>
withNeedLift(true) { super.transform(tree) }
@@ -618,16 +644,21 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
} setType uncurryTreeType(tree.tpe)
def postTransform(tree: Tree): Tree = atPhase(phase.next) {
- def applyUnary(): Tree =
- if (tree.symbol.isMethod &&
- (!tree.tpe.isInstanceOf[PolyType] || tree.tpe.typeParams.isEmpty)) {
- if (!tree.tpe.isInstanceOf[MethodType]) tree.tpe = MethodType(List(), tree.tpe);
- atPos(tree.pos)(Apply(tree, List()) setType tree.tpe.resultType)
- } else if (tree.isType) {
- TypeTree(tree.tpe) setPos tree.pos
- } else {
- tree
+ def applyUnary(): Tree = {
+ def needsParens = tree.symbol.isMethod && (!tree.tpe.isInstanceOf[PolyType] || tree.tpe.typeParams.isEmpty)
+ def repair = {
+ if (!tree.tpe.isInstanceOf[MethodType])
+ tree.tpe = MethodType(Nil, tree.tpe)
+
+ atPos(tree.pos)(Apply(tree, Nil) setType tree.tpe.resultType)
}
+
+ if (isElidable(tree)) elideIntoUnit(tree) // was not seen in mainTransform
+ else if (needsParens) repair
+ else if (tree.isType) TypeTree(tree.tpe) setPos tree.pos
+ else tree
+ }
+
tree match {
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
val rhs1 = nonLocalReturnKeys.get(tree.symbol) match {
@@ -636,11 +667,6 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
}
treeCopy.DefDef(tree, mods, name, tparams, List(vparamss.flatten), tpt, rhs1)
case Try(body, catches, finalizer) =>
- // If warnings are enabled, alert about promiscuously catching cases.
- if (settings.YwarnCatches.value)
- for (cd <- catches find treeInfo.catchesThrowable)
- unit.warning(cd.pos, "catch clause swallows everything: not advised.")
-
if (catches forall treeInfo.isCatchCase) tree
else {
val exname = unit.fresh.newName(tree.pos, "ex$")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index ef3b7afa95..23553e6879 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -1,12 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
+import util.Statistics._
+
/** The main attribution phase.
*/
trait Analyzer extends AnyRef
@@ -20,6 +21,7 @@ trait Analyzer extends AnyRef
with SyntheticMethods
with Unapplies
with NamesDefaults
+ with TypeDiagnostics
{
val global : Global
import global._
@@ -31,26 +33,60 @@ trait Analyzer extends AnyRef
val runsRightAfter = None
def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
override val checkable = false
+ override def keepsTypeParams = false
+
def apply(unit: CompilationUnit) {
newNamer(rootContext(unit)).enterSym(unit.body)
}
}
}
- var typerTime = 0L
+ object packageObjects extends SubComponent {
+ val global: Analyzer.this.global.type = Analyzer.this.global
+ val phaseName = "packageobjects"
+ val runsAfter = List[String]()
+ val runsRightAfter= Some("namer")
+
+ def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
+ override val checkable = false
+ import global._
+
+ val openPackageObjectsTraverser = new Traverser {
+ override def traverse(tree: Tree): Unit = tree match {
+ case ModuleDef(_, _, _) =>
+ if (tree.symbol.name == nme.PACKAGEkw) {
+ loaders.openPackageModule(tree.symbol)()
+ }
+ case ClassDef(_, _, _, _) => () // make it fast
+ case _ => super.traverse(tree)
+ }
+ }
+
+ def apply(unit: CompilationUnit) {
+ openPackageObjectsTraverser(unit.body)
+ }
+ }
+ }
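
The new packageobjects subcomponent opens package modules right after namer, so that members of a package object become visible as members of the enclosing package. A minimal sketch of the kind of definition it handles:

    // file a/package.scala
    package object a {
      def helper(n: Int): Int = n + 1
    }
    // elsewhere, `a.helper(1)` resolves because openPackageModule has entered the
    // package object's members into package a's scope.
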
object typerFactory extends SubComponent {
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
val runsAfter = List[String]()
- val runsRightAfter = Some("namer")
+ val runsRightAfter = Some("packageobjects")
def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
+ override def keepsTypeParams = false
resetTyper()
+ // the log accumulates entries over time, even though it should not (Adriaan, Martin said so).
+ // Lacking a better fix, we clear it here (before the phase is created, meaning for each
+ // compiler run). This is good enough for the resident compiler, which was the most affected.
+ undoLog.clear()
override def run {
- val start = if (util.Statistics.enabled) System.nanoTime() else 0L
+ val start = startTimer(typerNanos)
currentRun.units foreach applyPhase
- if (util.Statistics.enabled)
- typerTime += System.nanoTime() - start
+ undoLog.clear()
+ // need to clear it after as well or 10K+ accumulated entries are
+ // uncollectable the rest of the way.
+ stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index ba328b9f48..5e11cf7bb6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
@@ -34,7 +33,7 @@ abstract class ConstantFolder {
* @param tree ...
* @param pt ...
*/
- def apply(tree: Tree, pt: Type): Tree = fold(tree, tree.tpe match {
+ def apply(tree: Tree, pt: Type): Tree = fold(apply(tree), tree.tpe match {
case ConstantType(x) => x convertTo pt
case _ => null
})
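
With the change above, apply(tree, pt) first folds the tree by itself before attempting the conversion to the expected type, and foldBinop promotes mixed numeric operands via math.max of their type tags. A small sketch of expressions the folder handles:

    object FoldDemo {
      final val six  = 2 * 3     // folded to the constant 6 (a ConstantType)
      final val wide = 1 + 2.5   // numeric promotion: folded as the Double constant 3.5
    }
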
@@ -42,7 +41,7 @@ abstract class ConstantFolder {
private def fold(tree: Tree, compX: => Constant): Tree =
try {
val x = compX
- if ((x ne null) && x.tag != UnitTag) tree setType mkConstantType(x)
+ if ((x ne null) && x.tag != UnitTag) tree setType ConstantType(x)
else tree
} catch {
case _: ArithmeticException => tree // the code will crash at runtime,
@@ -154,7 +153,7 @@ abstract class ConstantFolder {
private def foldBinop(op: Name, x: Constant, y: Constant): Constant = {
val optag =
if (x.tag == y.tag) x.tag
- else if (isNumeric(x.tag) && isNumeric(y.tag)) Math.max(x.tag, y.tag)
+ else if (x.isNumeric && y.isNumeric) math.max(x.tag, y.tag)
else NoTag
try optag match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 4da47d58dd..16ac0254b7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -1,15 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
import symtab.Flags._
-import scala.tools.nsc.util.{Position,NoPosition}
import scala.collection.mutable.ListBuffer
+import annotation.tailrec
/** This trait ...
*
@@ -91,8 +90,8 @@ trait Contexts { self: Analyzer =>
}
class Context private[typechecker] {
var unit: CompilationUnit = _
- var tree: Tree = _ // Tree associated with this context
- var owner: Symbol = NoSymbol// The current owner
+ var tree: Tree = _ // Tree associated with this context
+ var owner: Symbol = NoSymbol // The current owner
var scope: Scope = _ // The current scope
var outer: Context = _ // The next outer context
var enclClass: Context = _ // The next outer context whose tree is a
@@ -103,7 +102,7 @@ trait Contexts { self: Analyzer =>
// not inherited to child contexts
var depth: Int = 0
var imports: List[ImportInfo] = List() // currently visible imports
- var openImplicits: List[Type] = List() // types for which implicit arguments
+ var openImplicits: List[(Type,Symbol)] = List() // types for which implicit arguments
// are currently searched
// for a named application block (Tree) the corresponding NamedApplyInfo
var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
@@ -111,7 +110,10 @@ trait Contexts { self: Analyzer =>
var inConstructorSuffix = false // are we in a secondary constructor
// after the this constructor call?
var returnsSeen = false // for method context: were returns encountered?
- var inSelfSuperCall = false // is this a context for a constructor self or super call?
+ var inSelfSuperCall = false // is this context (enclosed in) a constructor call?
+ // (the call to the super or self constructor in the first line of a constructor)
+ // in this context the object's fields should not be in scope
+
var reportAmbiguousErrors = false
var reportGeneralErrors = false
var diagnostic: List[String] = Nil // these messages are printed when issuing an error
@@ -122,39 +124,7 @@ trait Contexts { self: Analyzer =>
var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
// for type parameters which are narrowed in a GADT
- def intern0 : Context = {
- if (this eq NoContext) return this
- val txt = new Context
- txt.unit = unit
- txt.tree = tree
- txt.owner = owner
- txt.scope = scope
- assert(outer ne this) // stupid
- txt.outer = outer // already interned
- def fix(what : Context) =
- if (what eq this) txt
- else what
- txt.enclClass = fix(enclClass)
- txt.enclMethod = fix(enclMethod)
- txt.implicitsEnabled = implicitsEnabled
- txt.variance = variance
- txt._undetparams = _undetparams
- txt.depth = depth
- txt.imports = imports
- txt.openImplicits = openImplicits
- txt.prefix = prefix
- txt.inConstructorSuffix = inConstructorSuffix
- txt.returnsSeen = returnsSeen
- txt.reportGeneralErrors = reportGeneralErrors
- txt.checking = checking
- txt.retyping = retyping
- txt.savedTypeBounds = savedTypeBounds
- txt
- }
- override def equals(that: Any): Boolean = that match {
- case that: AnyRef if this eq that => true
- case that => super.equals(that)
- }
+ var typingIndent: String = ""
def undetparams = _undetparams
def undetparams_=(ps: List[Symbol]) = {
@@ -168,6 +138,13 @@ trait Contexts { self: Analyzer =>
tparams
}
+ def withImplicitsDisabled[T](op: => T): T = {
+ val saved = implicitsEnabled
+ implicitsEnabled = false
+ try op
+ finally implicitsEnabled = saved
+ }
+
/**
* @param unit ...
* @param tree ...
@@ -180,15 +157,11 @@ trait Contexts { self: Analyzer =>
scope: Scope, imports: List[ImportInfo]): Context = {
val c = new Context
c.unit = unit
- c.tree = sanitize(tree)
+ c.tree = tree
c.owner = owner
c.scope = scope
- c.outer = intern(this)
- def internIf(txt : Context) = {
- if (txt eq this) c.outer // already interned!
- else txt
- }
+ c.outer = this
tree match {
case Template(_, _, _) | PackageDef(_, _) =>
@@ -196,7 +169,7 @@ trait Contexts { self: Analyzer =>
c.prefix = c.owner.thisType
c.inConstructorSuffix = false
case _ =>
- c.enclClass = internIf(this.enclClass)
+ c.enclClass = this.enclClass
c.prefix =
if (c.owner != this.owner && c.owner.isTerm) NoPrefix
else this.prefix
@@ -206,14 +179,16 @@ trait Contexts { self: Analyzer =>
case DefDef(_, _, _, _, _, _) =>
c.enclMethod = c
case _ =>
- c.enclMethod = internIf(this.enclMethod)
+ c.enclMethod = this.enclMethod
}
c.variance = this.variance
c.depth = if (scope == this.scope) this.depth else this.depth + 1
c.imports = imports
+ c.inSelfSuperCall = inSelfSuperCall
c.reportAmbiguousErrors = this.reportAmbiguousErrors
c.reportGeneralErrors = this.reportGeneralErrors
c.diagnostic = this.diagnostic
+ c.typingIndent = typingIndent
c.implicitsEnabled = this.implicitsEnabled
c.checking = this.checking
c.retyping = this.retyping
@@ -253,8 +228,8 @@ trait Contexts { self: Analyzer =>
def make(tree: Tree): Context =
make(tree, owner)
- def makeSilent(reportAmbiguousErrors: Boolean): Context = {
- val c = make(tree)
+ def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = {
+ val c = make(newtree)
c.reportGeneralErrors = false
c.reportAmbiguousErrors = reportAmbiguousErrors
c
@@ -291,34 +266,29 @@ trait Contexts { self: Analyzer =>
argContext
}
- //todo: remove
- def makeConstructorSuffixContext = {
- val c = make(tree)
- c.inConstructorSuffix = true
- c
- }
-
private def diagString =
if (diagnostic.isEmpty) ""
else diagnostic.mkString("\n","\n", "")
- def error(pos: Position, err: Error) {
- val msg = err.getMessage() + diagString
- if (reportGeneralErrors)
- unit.error(pos, if (checking) "**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
- else
- throw err
+ private def addDiagString(msg: String) = {
+ val ds = diagString
+ if (msg endsWith ds) msg else msg + ds
}
- def error(pos: Position, msg: String) {
- val msg1 = msg + diagString
- if (reportGeneralErrors)
- unit.error(pos, if (checking) "**** ERROR DURING INTERNAL CHECKING ****\n" + msg1 else msg1)
- else
- throw new TypeError(pos, msg1)
+ private def unitError(pos: Position, msg: String) =
+ unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
+
+ def error(pos: Position, err: Throwable) =
+ if (reportGeneralErrors) unitError(pos, addDiagString(err.getMessage()))
+ else throw err
+
+ def error(pos: Position, msg: String) = {
+ val msg1 = addDiagString(msg)
+ if (reportGeneralErrors) unitError(pos, msg1)
+ else throw new TypeError(pos, msg1)
}
- def warning(pos: Position, msg: String) {
+ def warning(pos: Position, msg: String) = {
if (reportGeneralErrors) unit.warning(pos, msg)
}
@@ -361,26 +331,33 @@ trait Contexts { self: Analyzer =>
override def toString(): String = {
if (this == NoContext) "NoContext"
else owner.toString() + " @ " + tree.getClass() +
- " " + tree.toString() + ", scope = " + scope.hashCode() +
+ " " + tree.toString() + ", scope = " + scope.## +
" " + scope.toList + "\n:: " + outer.toString()
}
+ /** Is `sub' a subclass of `base' or a companion object of such a subclass?
+ */
+ def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
+ sub.isNonBottomSubClass(base) ||
+ sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
+
/** Return closest enclosing context that defines a superclass of `clazz', or a
* companion module of a superclass of `clazz', or NoContext if none exists */
def enclosingSuperClassContext(clazz: Symbol): Context = {
var c = this.enclClass
while (c != NoContext &&
!clazz.isNonBottomSubClass(c.owner) &&
- !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.linkedClassOfModule)))
+ !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass)))
c = c.outer.enclClass
c
}
- /** Return closest enclosing context that defines a subclass of `clazz', or NoContext
- * if none exists */
+ /** Return closest enclosing context that defines a subclass of `clazz' or a companion
+ * object thereof, or NoContext if no such context exists
+ */
def enclosingSubClassContext(clazz: Symbol): Context = {
var c = this.enclClass
- while (c != NoContext && !c.owner.isNonBottomSubClass(clazz))
+ while (c != NoContext && !isSubClassOrCompanion(c.owner, clazz))
c = c.outer.enclClass
c
}
@@ -394,9 +371,26 @@ trait Contexts { self: Analyzer =>
* @return ...
*/
def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean): Boolean = {
+ @inline def accessWithinLinked(ab: Symbol) = {
+ val linked = ab.linkedClassOfClass
+ // don't have access if there is no linked class
+ // (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
+ // since `accessWithin(NoSymbol) == true` whatever the symbol)
+ (linked ne NoSymbol) && accessWithin(linked)
+ }
+
+ /** Are we inside definition of `ab'? */
+ def accessWithin(ab: Symbol) = {
+ // #3663: we must disregard package nesting if sym isJavaDefined
+ if(sym.isJavaDefined) {
+ // is `o` or one of its transitive owners equal to `ab`?
+ // stops at first package, since further owners can only be surrounding packages
+ @tailrec def abEnclosesStopAtPkg(o: Symbol): Boolean =
+ (o eq ab) || (!o.isPackageClass && (o ne NoSymbol) && abEnclosesStopAtPkg(o.owner))
+ abEnclosesStopAtPkg(owner)
+ } else (owner hasTransOwner ab)
+ }
- /** Are we inside definition of `sym'? */
- def accessWithin(sym: Symbol): Boolean = this.owner.ownersIterator contains sym
/*
var c = this
while (c != NoContext && c.owner != owner) {
@@ -418,17 +412,20 @@ trait Contexts { self: Analyzer =>
(pre == NoPrefix) || {
val ab = sym.accessBoundary(sym.owner)
- ((ab.isTerm || ab == definitions.RootClass)
- ||
- (accessWithin(ab) || accessWithin(ab.linkedClassOfClass)) &&
- (!sym.hasFlag(LOCAL) ||
- (sym hasFlag PROTECTED) && isSubThisType(pre, sym.owner) ||
- pre =:= sym.owner.thisType)
- ||
- (sym hasFlag PROTECTED) &&
- (superAccess ||
- (pre.widen.typeSymbol.isNonBottomSubClass(sym.owner) &&
- (isSubClassOfEnclosing(pre.widen.typeSymbol) || phase.erasedTypes))))
+ ( (ab.isTerm || ab == definitions.RootClass)
+ || (accessWithin(ab) || accessWithinLinked(ab)) &&
+ ( !sym.hasFlag(LOCAL)
+ || sym.owner.isImplClass // allow private local accesses to impl classes
+ || (sym hasFlag PROTECTED) && isSubThisType(pre, sym.owner)
+ || pre =:= sym.owner.thisType
+ )
+ || (sym hasFlag PROTECTED) &&
+ ( superAccess
+ || sym.isConstructor
+ || (pre.widen.typeSymbol.isNonBottomSubClass(sym.owner) &&
+ (isSubClassOfEnclosing(pre.widen.typeSymbol) || phase.erasedTypes))
+ )
+ )
// note: phase.erasedTypes disables last test, because after addinterfaces
// implementation classes are not in the superclass chain. If we enable the
// test, bug780 fails.
@@ -463,93 +460,100 @@ trait Contexts { self: Analyzer =>
implicitsCache = null
if (outer != null && outer != this) outer.resetCache
}
- private def collectImplicits(syms: List[Symbol], pre: Type): List[ImplicitInfo] =
- for (sym <- syms if sym.hasFlag(IMPLICIT) && isAccessible(sym, pre, false))
+
+ /** A symbol `sym` qualifies as an implicit if it has the IMPLICIT flag set,
+    *  it is accessible, and, if it is imported, there is not already a local
+    *  symbol with the same name. Local symbols override imported ones. This fixes #2866.
+ */
+ private def isQualifyingImplicit(sym: Symbol, pre: Type, imported: Boolean) =
+ sym.hasFlag(IMPLICIT) &&
+ isAccessible(sym, pre, false) &&
+ !(imported && {
+ val e = scope.lookupEntry(sym.name)
+ (e ne null) && (e.owner == scope)
+ })
+
+ private def collectImplicits(syms: List[Symbol], pre: Type, imported: Boolean = false): List[ImplicitInfo] =
+ for (sym <- syms if isQualifyingImplicit(sym, pre, imported))
yield new ImplicitInfo(sym.name, pre, sym)
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
val pre = imp.qual.tpe
def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match {
- case List() => List()
- case List(ImportSelector(nme.WILDCARD, _, _, _)) => collectImplicits(pre.implicitMembers, pre)
+ case List() =>
+ List()
+ case List(ImportSelector(nme.WILDCARD, _, _, _)) =>
+ collectImplicits(pre.implicitMembers, pre, imported = true)
case ImportSelector(from, _, to, _) :: sels1 =>
var impls = collect(sels1) filter (info => info.name != from)
if (to != nme.WILDCARD) {
for (sym <- imp.importedSymbol(to).alternatives)
- if (sym.hasFlag(IMPLICIT) && isAccessible(sym, pre, false))
+ if (isQualifyingImplicit(sym, pre, imported = true))
impls = new ImplicitInfo(to, pre, sym) :: impls
}
impls
}
- if (settings.debug.value)
- log("collect implicit imports " + imp + "=" + collect(imp.tree.selectors))//debug
+ //if (settings.debug.value) log("collect implicit imports " + imp + "=" + collect(imp.tree.selectors))//DEBUG
collect(imp.tree.selectors)
}
def implicitss: List[List[ImplicitInfo]] = {
- val nextOuter =
- if (owner.isConstructor) {
- if (outer.tree.isInstanceOf[Template]) outer.outer.outer
- else outer.outer.outer
- } else outer
- // can we can do something smarter to bring back the implicit cache?
+ val nextOuter = if (owner.isConstructor) outer.outer.outer else outer
if (implicitsRunId != currentRunId) {
implicitsRunId = currentRunId
implicitsCache = List()
val newImplicits: List[ImplicitInfo] =
if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
if (!owner.isInitialized) return nextOuter.implicitss
- if (settings.debug.value)
- log("collect member implicits " + owner + ", implicit members = " +
- owner.thisType.implicitMembers)//debug
+ // if (settings.debug.value) log("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
val savedEnclClass = enclClass
this.enclClass = this
val res = collectImplicits(owner.thisType.implicitMembers, owner.thisType)
this.enclClass = savedEnclClass
res
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
- if (settings.debug.value)
- log("collect local implicits " + scope.toList)//debug
+ if (settings.debug.value) log("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope.toList, NoPrefix)
} else if (imports != nextOuter.imports) {
assert(imports.tail == nextOuter.imports)
collectImplicitImports(imports.head)
+ } else if (owner.isPackageClass) {
+ // the corresponding package object may contain implicit members.
+ collectImplicits(owner.tpe.implicitMembers, owner.tpe)
} else List()
implicitsCache = if (newImplicits.isEmpty) nextOuter.implicitss
else newImplicits :: nextOuter.implicitss
}
implicitsCache
}
- override def hashCode = {
- var hc = 0
- implicit def b2i(b : Boolean) = if (b) 1 else 0
- // assum enclClass/enclMethod/outer are all interned already.
- hc += tree.hashCodeStructure
- def f(txt : Context) = if (txt eq this) 0 else System.identityHashCode(txt)
- hc += f(enclClass)
- hc += f(enclMethod)
- hc += f(outer)
- hc += owner.hashCode
- hc += scope.hashCode
- hc += variance.hashCode
- hc += _undetparams.hashCode
- hc += depth
- hc += imports.hashCode
- hc += prefix.hashCode
- hc += inConstructorSuffix
- hc += checking
- hc += retyping
- hc += savedTypeBounds.hashCode
- hc += (if (unit eq null) 0 else unit.hashCode)
- hc
- }
+ /**
+ * Find a symbol in this context or one of its outers.
+ *
+   * Used to find symbols that are owned by methods (or fields); they can't be
+ * found in some scope.
+ *
+ * Examples: companion module of classes owned by a method, default getter
+ * methods of nested methods. See NamesDefaults.scala
+ */
+ def lookup(name: Name, expectedOwner: Symbol) = {
+ var res: Symbol = NoSymbol
+ var ctx = this
+ while(res == NoSymbol && ctx.outer != ctx) {
+ val s = ctx.scope.lookup(name)
+ if (s != NoSymbol && s.owner == expectedOwner)
+ res = s
+ else
+ ctx = ctx.outer
+ }
+ res
+ }
}
class ImportInfo(val tree: Import, val depth: Int) {
/** The prefix expression */
def qual: Tree = tree.symbol.info match {
case ImportType(expr) => expr
- case ErrorType => tree
+ case ErrorType => tree setType NoType // fix for #2870
case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info);//debug
}
@@ -565,16 +569,16 @@ trait Contexts { self: Analyzer =>
var renamed = false
var selectors = tree.selectors
while (selectors != Nil && result == NoSymbol) {
- if (selectors.head.name != nme.WILDCARD)
- notifyImport(name, qual.tpe, selectors.head.name, selectors.head.rename)
+// if (selectors.head.name != nme.WILDCARD) // used to be for IDE
+// notifyImport(name, qual.tpe, selectors.head.name, selectors.head.rename)
if (selectors.head.rename == name.toTermName)
- result = qual.tpe.member(
+ result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
if (name.isTypeName) selectors.head.name.toTypeName else selectors.head.name)
else if (selectors.head.name == name.toTermName)
renamed = true
else if (selectors.head.name == nme.WILDCARD && !renamed)
- result = qual.tpe.member(name)
+ result = qual.tpe.nonLocalMember(name)
selectors = selectors.tail
}
result
@@ -593,23 +597,9 @@ trait Contexts { self: Analyzer =>
}
override def toString() = tree.toString()
-
- override def hashCode = tree.hashCodeStructure + depth
- override def equals(that : Any) = that match {
- case that : ImportInfo =>
- depth == that.depth && (tree equalsStructure that.tree)
- case _ => false
- }
}
case class ImportType(expr: Tree) extends Type {
- override def equals(that : Any) = that match {
- case ImportType(expr) => this.expr == expr
- case _ => false
- }
- override def hashCode = expr.hashCode
override def safeToString = "ImportType("+expr+")"
}
- protected def intern(txt : Context) = txt
-
}
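
A minimal user-level sketch (hypothetical names, not part of this patch) of the import-shadowing
rule that isQualifyingImplicit enforces for #2866: a symbol defined in the local scope takes
precedence over an implicit of the same name brought in by an import, so the search below resolves
to the local value instead of reporting an ambiguity.

    object Imported {
      implicit val x: Int = 1
    }

    object Local {
      import Imported.x
      implicit val x: Int = 2      // the local definition shadows the imported implicit
      def res = implicitly[Int]    // resolves to 2; Imported.x no longer qualifies
    }
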
diff --git a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala b/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
index 82cbd13efd..43ac9182c3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
@@ -137,7 +136,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
protected def factoryName(clazz: Symbol) =
atPhase(ownPhase) { newTermName("new$"+clazz.name) }
- /** Does `clazz' contaion virtual classes? */
+ /** Does `clazz' contain virtual classes? */
protected def containsVirtuals(clazz: Symbol) = clazz.info.decls.toList exists (_.isVirtualClass)
/** The inner classes that need factory methods in `clazz'
@@ -177,7 +176,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
/** The name of the field representing a constructor parameter of a virtual class */
protected def paramFieldName(clazz: Symbol, index: Int) = atPhase(ownPhase) {
- clazz.expandedName(newTermName("param$"+index))
+ nme.expandedName(newTermName("param$"+index), clazz)
}
/** The name of the field representing a constructor parameter of a virtual class */
@@ -252,7 +251,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
val parents2 = addOverriddenVirtuals(clazz) map {
c => typeRef(clazz.owner.thisType, c, typeParams map (_.tpe))
}
- mkTypeBounds(NothingClass.tpe, intersectionType(parents1 ::: parents2))
+ TypeBounds(NothingClass.tpe, intersectionType(parents1 ::: parents2))
}
}
}
@@ -297,7 +296,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
private def copyType(tpe: Type): Type = tpe match {
case MethodType(formals, restpe) => MethodType(formals, copyType(restpe))
case PolyType(List(), restpe) => PolyType(List(), copyType(restpe))
- case PolyType(_, _) => throw new Error("bad case: "+tpe)
+ case PolyType(_, _) => abort("bad case: "+tpe)
case _ => owner.thisType.memberType(abstractType(clazz))
}
def getInfo = copyType(clazz.primaryConstructor.tpe)
@@ -305,9 +304,9 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
factory
}
- def removeDuplicates(ts: List[Type]): List[Type] = ts match {
+ def distinct(ts: List[Type]): List[Type] = ts match {
case List() => List()
- case t :: ts1 => t :: removeDuplicates(ts1 filter (_.typeSymbol != t.typeSymbol))
+ case t :: ts1 => t :: distinct(ts1 filter (_.typeSymbol != t.typeSymbol))
}
/** The concrete class symbol VC$fix in the factory symbol (@see mkFactory)
@@ -333,7 +332,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
}
atPhase(ownPhase.next) {
val parents2 =
- removeDuplicates(parents1.flatMap(addOverriddenVirtuals))
+ distinct(parents1.flatMap(addOverriddenVirtuals))
.map(_.substSym(clazz.typeParams, factory.typeParams))
sym setInfo ClassInfoType(parents2, new Scope, cclazz)
}
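
The renamed `distinct` helper above keeps the first type seen for each type symbol and drops later
ones. A rough stand-alone sketch of the same recursion over plain values, purely for illustration:

    // keep the first element for each key, preserving order
    def distinctBy[A, K](xs: List[A])(key: A => K): List[A] = xs match {
      case Nil      => Nil
      case x :: xs1 => x :: distinctBy(xs1 filter (y => key(y) != key(x)))(key)
    }

    distinctBy(List("ant", "apple", "bee", "bear"))(_.head)   // List(ant, bee)
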
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index c9a2a377c1..dbc3ffbe17 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -85,6 +85,24 @@ abstract class Duplicators extends Analyzer {
typeRef(mapOver(pre), newsym, mapOverArgs(args, newsym.typeParams))
} else
super.mapOver(tpe)
+
+ case SingleType(pre, sym) =>
+ val sym1 = updateSym(sym)
+ if (sym1 ne sym) {
+ log("fixing " + sym + " -> " + sym1)
+ singleType(mapOver(pre), sym1)
+ } else
+ super.mapOver(tpe)
+
+ case ThisType(sym) =>
+ val sym1 = updateSym(sym)
+ if (sym1 ne sym) {
+ log("fixing " + sym + " -> " + sym1)
+ ThisType(sym1)
+ } else
+ super.mapOver(tpe)
+
+
case _ =>
super.mapOver(tpe)
}
@@ -93,15 +111,10 @@ abstract class Duplicators extends Analyzer {
/** Fix the given type by replacing invalid symbols with the new ones. */
def fixType(tpe: Type): Type = {
val tpe1 = envSubstitution(tpe)
- log("tpe1: " + tpe1)
val tpe2: Type = (new FixInvalidSyms)(tpe1)
- val tpe3 = tpe2 match {
- case TypeRef(_, sym, _) if (sym.owner == oldClassOwner) =>
- log("seeing " + sym.fullNameString + " from a different angle")
- tpe2.asSeenFrom(newClassOwner.thisType, oldClassOwner)
- case _ => tpe2
- }
- log("tpe2: " + tpe3)
+ val tpe3 = if (newClassOwner ne null) {
+ tpe2.asSeenFrom(newClassOwner.thisType, oldClassOwner)
+ } else tpe2
tpe3
}
@@ -127,7 +140,7 @@ abstract class Duplicators extends Analyzer {
ldef.symbol = newsym
log("newsym: " + newsym + " info: " + newsym.info)
- case DefDef(_, _, tparams, vparamss, _, rhs) =>
+ case DefDef(_, name, tparams, vparamss, _, rhs) =>
// invalidate parameters
invalidate(tparams ::: vparamss.flatten)
tree.symbol = NoSymbol
@@ -157,7 +170,7 @@ abstract class Duplicators extends Analyzer {
typed(ddef)
}
- /** Special typer method allowing for re-type checking trees. It expects a typed tree.
+ /** Special typer method for re-type checking trees. It expects a typed tree.
* Returns a typed tree that has fresh symbols for all definitions in the original tree.
*
* Each definition tree is visited and its symbol added to the invalidSyms map (except LabelDefs),
@@ -172,10 +185,11 @@ abstract class Duplicators extends Analyzer {
* namer/typer handle them, or Idents that refer to them.
*/
override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
- log("typing " + tree)
+ if (settings.debug.value) log("typing " + tree + ": " + tree.tpe)
if (tree.hasSymbol && tree.symbol != NoSymbol
&& !tree.symbol.isLabel // labels cannot be retyped by the type checker as LabelDef has no ValDef/return type trees
&& invalidSyms.isDefinedAt(tree.symbol)) {
+ if (settings.debug.value) log("removed symbol " + tree.symbol)
tree.symbol = NoSymbol
}
@@ -227,20 +241,49 @@ abstract class Duplicators extends Analyzer {
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
log("selection on this, no type ascription required")
- super.typed(atPos(tree.pos)(Select(This(newClassOwner), sel)), mode, pt)
+ // we use the symbol name instead of the tree name because the symbol may have been
+ // name mangled, rendering the tree name obsolete
+ super.typed(atPos(tree.pos)(Select(This(newClassOwner), tree.symbol.name)), mode, pt)
case This(_) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) =>
// val tree1 = Typed(This(newClassOwner), TypeTree(fixType(tree.tpe.widen)))
val tree1 = This(newClassOwner)
- log("mapped " + tree + " to " + tree1)
+ if (settings.debug.value) log("mapped " + tree + " to " + tree1)
super.typed(atPos(tree.pos)(tree1), mode, pt)
+ case This(_) =>
+ tree.symbol = updateSym(tree.symbol)
+ tree.tpe = null
+ super.typed(tree, mode, pt)
+
case Super(qual, mix) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) =>
val tree1 = Super(qual, mix)
log("changed " + tree + " to " + tree1)
super.typed(atPos(tree.pos)(tree1))
+ case Match(scrut, cases) =>
+ val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrutTpe = scrut1.tpe.widen
+ val cases1 = if (scrutTpe.isFinalType) cases filter {
+ case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe.matchesPattern(fixType(tpt.tpe))
+ case CaseDef(Typed(_, tpt), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe.matchesPattern(fixType(tpt.tpe))
+ case _ => true
+ } else cases
+
+ super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt)
+
+ case EmptyTree =>
+ // no need to do anything, in particular, don't set the type to null, EmptyTree.tpe_= asserts
+ tree
+
case _ =>
+ if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
+ tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
+ }
tree.tpe = null
super.typed(tree, mode, pt)
}
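
The new Match handling above matters for code like the following user-level sketch (not compiler
code; names are illustrative): when a duplicated body is re-typed with a final scrutinee type such
as Int, a case whose typed pattern can no longer match is filtered out instead of producing an
"incompatible pattern" error in the specialized copy.

    def describe[@specialized T](x: T): String = x match {
      case s: String => "a string: " + s
      case _         => "something else: " + x
    }

    describe(42)        // in the Int specialization only the default case can apply
    describe("hello")   // the generic path still handles the String case
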
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 7d75994ef3..7cfc4733d3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
@@ -20,12 +19,13 @@ trait EtaExpansion { self: Analyzer =>
import global._
object etaExpansion {
+ private def isMatch(vparam: ValDef, arg: Tree) = arg match {
+ case Ident(name) => vparam.name == name
+ case _ => false
+ }
+
def unapply(tree: Tree): Option[(List[ValDef], Tree, List[Tree])] = tree match {
- case Function(vparams, Apply(fn, args))
- if (List.forall2(vparams, args) {
- case (vparam, Ident(name)) => vparam.name == name
- case _ => false
- }) =>
+ case Function(vparams, Apply(fn, args)) if (vparams corresponds args)(isMatch) => // @PP: corresponds
Some((vparams, fn, args))
case _ =>
None
@@ -59,7 +59,7 @@ trait EtaExpansion { self: Analyzer =>
// Martin to Sean: I removed the
// else if (n == 0) branch and changed `n' in the line above to `(cnt - 1)'
// this was necessary because otherwise curried eta-expansions would get the same
- // symbol. An example which failes test/files/run/Course-2002-02.scala
+ // symbol. An example which fails test/files/run/Course-2002-02.scala
// todo: review and get rid of the `n' argument (which is unused right now).
}
// { cnt = cnt + 1; newTermName("eta$" + cnt) }
@@ -113,9 +113,7 @@ trait EtaExpansion { self: Analyzer =>
* @return ...
*/
def expand(tree: Tree, tpe: Type): Tree = tpe match {
- case mt: ImplicitMethodType =>
- tree
- case MethodType(paramSyms, restpe) =>
+ case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit =>
val params = paramSyms map (sym =>
ValDef(Modifiers(SYNTHETIC | PARAM),
sym.name, TypeTree(sym.tpe) , EmptyTree))
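
The rewritten extractor relies on `corresponds`, which pairs two sequences element by element and
requires the predicate to hold for every pair (and the lengths to match). A small stand-alone
illustration with plain strings (values are illustrative only):

    val params = List("x", "y")
    val args   = List("x", "y")

    // true: every argument is exactly the matching parameter name, which is what
    // makes Function(params, Apply(fn, args)) an eta-expansion of fn
    (params corresponds args)(_ == _)

    // false: lengths differ, so corresponds returns false immediately
    (params corresponds List("x"))(_ == _)
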
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 6bbacfa311..6f54c96952 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
//todo: rewrite or disllow new T where T is a mixin (currently: <init> not a member of T)
//todo: use inherited type info also for vars and values
@@ -12,8 +11,9 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable.{LinkedHashMap, ListBuffer}
-import scala.tools.nsc.util.{ HashSet, Position, Set, NoPosition, SourceFile }
+import scala.tools.nsc.util.{HashSet, Set, SourceFile}
import symtab.Flags._
+import util.Statistics._
/** This trait provides methods to find various kinds of implicits.
*
@@ -27,16 +27,7 @@ self: Analyzer =>
import definitions._
def traceImplicits = printTypings
-
- var implicitTime = 0L
- var inscopeSucceed = 0L
- var inscopeFail = 0L
- var oftypeSucceed = 0L
- var oftypeFail = 0L
- var manifSucceed = 0L
- var manifFail = 0L
- var hits = 0
- var misses = 0
+ import global.typer.{printTyping, deindentTyping, indentTyping}
/** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch`
* for more info how the search is conducted.
@@ -52,10 +43,22 @@ self: Analyzer =>
* @return A search result
*/
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = {
+ printTyping("Beginning implicit search for "+ tree +" expecting "+ pt + (if(isView) " looking for a view" else ""))
+ indentTyping()
+ val rawTypeStart = startCounter(rawTypeImpl)
+ val findMemberStart = startCounter(findMemberImpl)
+ val subtypeStart = startCounter(subtypeImpl)
+ val start = startTimer(implicitNanos)
if (traceImplicits && !tree.isEmpty && !context.undetparams.isEmpty)
println("typing implicit with undetermined type params: "+context.undetparams+"\n"+tree)
val result = new ImplicitSearch(tree, pt, isView, context.makeImplicit(reportAmbiguous)).bestImplicit
context.undetparams = context.undetparams filterNot (result.subst.from contains _)
+ stopTimer(implicitNanos, start)
+ stopCounter(rawTypeImpl, rawTypeStart)
+ stopCounter(findMemberImpl, findMemberStart)
+ stopCounter(subtypeImpl, subtypeStart)
+ deindentTyping()
+ printTyping("Implicit search yielded: "+ result)
result
}
@@ -103,9 +106,14 @@ self: Analyzer =>
/** Does type `tp` contain an Error type as parameter or result?
*/
private def containsError(tp: Type): Boolean = tp match {
- case PolyType(tparams, restpe) => containsError(restpe)
- case MethodType(params, restpe) => (params map (_.tpe) exists (_.isError)) || containsError(restpe)
- case _ => tp.isError
+ case PolyType(tparams, restpe) =>
+ containsError(restpe)
+ case MethodType(params, restpe) =>
+ for (p <- params)
+ if (p.tpe.isError) return true
+ containsError(restpe)
+ case _ =>
+ tp.isError
}
def isCyclicOrErroneous = try {
@@ -123,8 +131,7 @@ self: Analyzer =>
case _ => false
}
- override def hashCode =
- name.hashCode + pre.hashCode + sym.hashCode
+ override def hashCode = name.## + pre.## + sym.##
override def toString = "ImplicitInfo(" + name + "," + pre + "," + sym + ")"
}
@@ -169,7 +176,7 @@ self: Analyzer =>
object HasMethodMatching {
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
def templateArgType(argtpe: Type) =
- new BoundedWildcardType(mkTypeBounds(argtpe, AnyClass.tpe))
+ new BoundedWildcardType(TypeBounds(argtpe, AnyClass.tpe))
val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "typer$dummy")
val mtpe = MethodType(dummyMethod.newSyntheticValueParams(argtpes map templateArgType), restpe)
memberWildcardType(name, mtpe)
@@ -207,17 +214,18 @@ self: Analyzer =>
*/
class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context)
extends Typer(context0) {
-
+ printTyping("begin implicit search: "+(tree, pt, isView, context.outer.undetparams))
// assert(tree.isEmpty || tree.pos.isDefined, tree)
import infer._
-
/** Is implicit info `info1` better than implicit info `info2`?
*/
- def improves(info1: ImplicitInfo, info2: ImplicitInfo) =
+ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
+ incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) &&
isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
+ }
/** Map all type params in given list to WildcardType
* @param tp The type in which to do the mapping
@@ -226,11 +234,14 @@ self: Analyzer =>
private def tparamsToWildcards(tp: Type, tparams: List[Symbol]) =
tp.instantiateTypeParams(tparams, tparams map (t => WildcardType))
- /* Map a polytype to one in which all type parameters are replaced by wildcards.
+ /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
+ * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+     * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+     * when checking whether `b` is a valid implicit, as we haven't even searched for a value for the implicit arg `x`,
+ * so we have to approximate (otherwise it is excluded a priori).
*/
private def depoly(tp: Type): Type = tp match {
- case PolyType(tparams, restpe) => tparamsToWildcards(restpe, tparams)
- case _ => tp
+ case PolyType(tparams, restpe) => tparamsToWildcards(ApproximateDependentMap(restpe), tparams)
+ case _ => ApproximateDependentMap(tp)
}
/** Does type `dtor` dominate type `dted`?
@@ -243,7 +254,7 @@ self: Analyzer =>
* by replacing variables by their upper bounds,
* - all remaining free type parameters in the type are replaced by WildcardType.
* The _complexity_ of a stripped core type corresponds roughly to the number of
- * nodes in its ast, except that singleton types are widened befoe taking the complexity.
+ * nodes in its ast, except that singleton types are widened before taking the complexity.
* Two types overlap if they have the same type symbol, or
* if one or both are intersection types with a pair of overlapiing parent types.
*/
@@ -282,7 +293,7 @@ self: Analyzer =>
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
- if (util.Statistics.enabled) implcnt += 1
+ incCounter(implicitSearchCount)
/** Issues an error signalling ambiguous implicits */
private def ambiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
@@ -296,10 +307,25 @@ self: Analyzer =>
if (isView) {
val found = pt.typeArgs(0)
val req = pt.typeArgs(1)
- typeErrorMsg(found, req)+
- "\nNote that implicit conversions are not applicable because they are ambiguous:\n "+
- coreMsg+"are possible conversion functions from "+ found+" to "+req
- } else {
+
+ /** A nice spot to explain some common situations a little
+ * less confusingly.
+ */
+ def explanation = {
+ if ((found =:= AnyClass.tpe) && (AnyRefClass.tpe <:< req))
+ "Note: Any is not implicitly converted to AnyRef. You can safely\n" +
+ "pattern match x: AnyRef or cast x.asInstanceOf[AnyRef] to do so."
+ else if ((found <:< AnyValClass.tpe) && (AnyRefClass.tpe <:< req))
+ "Note: primitive types are not implicitly converted to AnyRef.\n" +
+ "You can safely force boxing by casting x.asInstanceOf[AnyRef]."
+ else
+ "Note that implicit conversions are not applicable because they are ambiguous:\n "+
+ coreMsg+"are possible conversion functions from "+ found+" to "+req
+ }
+
+ typeErrorMsg(found, req) + "\n" + explanation
+ }
+ else {
"ambiguous implicit values:\n "+coreMsg + "match expected type "+pt
})
}
@@ -307,6 +333,12 @@ self: Analyzer =>
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
+ /** Replace undetParams in type `tp` by Any/Nothing, according to variance */
+ def approximate(tp: Type) =
+ tp.instantiateTypeParams(undetParams, undetParams map (_ => WildcardType))
+
+ val wildPt = approximate(pt)
+
/** Try to construct a typed tree from given implicit info with given
* expected type.
* Detect infinite search trees for implicits.
@@ -315,14 +347,14 @@ self: Analyzer =>
* @pre <code>info.tpe</code> does not contain an error
*/
private def typedImplicit(info: ImplicitInfo): SearchResult =
- context.openImplicits find (dominates(pt, _)) match {
+ (context.openImplicits find { case (tp, sym) => sym == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
// println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
throw DivergentImplicit
SearchFailure
case None =>
try {
- context.openImplicits = pt :: context.openImplicits
+ context.openImplicits = (pt, tree.symbol) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
typedImplicit0(info)
} catch {
@@ -353,47 +385,62 @@ self: Analyzer =>
case _ => tp.isStable
}
- /** Replace undetParams in type `tp` by Any/Nothing, according to variance */
- def approximate(tp: Type) =
- tp.instantiateTypeParams(undetParams, undetParams map (_ => WildcardType))
-
- /** Instantiated `pt' so that undetermined type parameters are replaced by wildcards
- */
- val wildPt = approximate(pt)
-
/** Does type `tp' match expected type `pt'
* This is the case if either `pt' is a unary function type with a
* HasMethodMatching type as result, and `tp' is a unary function
* or method type whose result type has a method whose name and type
* correspond to the HasMethodMatching type,
* or otherwise if `tp' is compatible with `pt'.
+ * This method is performance critical: 5-8% of typechecking time.
*/
- def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) =
- isCompatible(tp, pt) || {
+ def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) = {
+ val start = startTimer(matchesPtNanos)
+ val result = normSubType(tp, pt) || isView && {
pt match {
- case Function1(arg, HasMethodMatching(name, argtpes, restpe)) =>
- normalize(tp) match {
- case Function1(arg1, res1) =>
- (arg <:< arg1) &&
- (res1.member(name) filter (m => isApplicableSafe(undet, m.tpe, argtpes, restpe))) != NoSymbol
- case _ =>
- false
- }
+ case Function1(arg, res) =>
+ matchesPtView(tp, arg, res, undet)
case _ =>
false
}
}
+ stopTimer(matchesPtNanos, start)
+ result
+ }
+
+ def matchesPtView(tp: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean = tp match {
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit) matchesPtView(restpe, ptarg, ptres, undet)
+ else params.length == 1 && matchesArgRes(params.head.tpe, restpe, ptarg, ptres, undet)
+ case ExistentialType(tparams, qtpe) =>
+ matchesPtView(normalize(qtpe), ptarg, ptres, undet)
+ case Function1(arg1, res1) =>
+ matchesArgRes(arg1, res1, ptarg, ptres, undet)
+ case _ => false
+ }
+
+ def matchesArgRes(tparg: Type, tpres: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean =
+ (ptarg weak_<:< tparg) && {
+ ptres match {
+ case HasMethodMatching(name, argtpes, restpe) =>
+ (tpres.member(name) filter (m =>
+ isApplicableSafe(undet, m.tpe, argtpes, restpe))) != NoSymbol
+ case _ =>
+ tpres <:< ptres
+ }
+ }
- //if (traceImplicits) println("typed impl for "+wildPt+"? "+info.name+":"+depoly(info.tpe)+"/"+undetParams+"/"+isPlausiblyCompatible(info.tpe, wildPt)+"/"+matchesPt(depoly(info.tpe), wildPt, List()))
- if (isPlausiblyCompatible(info.tpe, wildPt) &&
- matchesPt(depoly(info.tpe), wildPt, List()) &&
- isStable(info.pre)) {
+ incCounter(plausiblyCompatibleImplicits)
+
+ printTyping("typed impl for "+wildPt+"? "+info.name +":"+ depoly(info.tpe)+ " orig info= "+ info.tpe +"/"+undetParams+"/"+isPlausiblyCompatible(info.tpe, wildPt)+"/"+matchesPt(depoly(info.tpe), wildPt, List())+"/"+info.pre+"/"+isStable(info.pre))
+ if (matchesPt(depoly(info.tpe), wildPt, List()) && isStable(info.pre)) {
+
+ incCounter(matchingImplicits)
val itree = atPos(tree.pos.focus) {
if (info.pre == NoPrefix) Ident(info.name)
else Select(gen.mkAttributedQualifier(info.pre), info.name)
}
- if (traceImplicits) println("typed impl?? "+info.name+":"+info.tpe+" ==> "+itree+" with pt = "+pt+", wildpt = "+wildPt)
+ printTyping("typedImplicit0 typing"+ itree +" with wildpt = "+ wildPt +" from implicit "+ info.name+":"+info.tpe)
def fail(reason: String): SearchResult = {
if (settings.XlogImplicits.value)
inform(itree+" is not a valid implicit value for "+pt+" because:\n"+reason)
@@ -410,10 +457,12 @@ self: Analyzer =>
else
typed1(itree, EXPRmode, wildPt)
- if (traceImplicits) println("typed implicit "+itree1+":"+itree1.tpe+", pt = "+wildPt)
+ incCounter(typedImplicits)
+
+ printTyping("typed implicit "+itree1+":"+itree1.tpe+", pt = "+wildPt)
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
else adapt(itree1, EXPRmode, wildPt)
- if (traceImplicits) println("adapted implicit "+itree1.symbol+":"+itree2.tpe+" to "+wildPt)
+ printTyping("adapted implicit "+itree1.symbol+":"+itree2.tpe+" to "+wildPt)
def hasMatchingSymbol(tree: Tree): Boolean = (tree.symbol == info.sym) || {
tree match {
case Apply(fun, _) => hasMatchingSymbol(fun)
@@ -427,28 +476,37 @@ self: Analyzer =>
else if (hasMatchingSymbol(itree1)) {
val tvars = undetParams map freshVar
if (matchesPt(itree2.tpe, pt.instantiateTypeParams(undetParams, tvars), undetParams)) {
- if (traceImplicits) println("tvars = "+tvars+"/"+(tvars map (_.constr)))
+ printTyping("tvars = "+tvars+"/"+(tvars map (_.constr)))
val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt),
false, lubDepth(List(itree2.tpe, pt)))
- checkBounds(itree2.pos, NoPrefix, NoSymbol, undetParams, targs, "inferred ") // #2421
+
+ // #2421: check that we correctly instantiated type parameters outside of the implicit tree:
+ checkBounds(itree2.pos, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
- val uninstantiated = new ListBuffer[Symbol]
- val detargs = adjustTypeArgs(undetParams, targs, WildcardType, uninstantiated) // prototype == WildcardType: want to remove all inferred Nothing's
- // even if Nothing was inferred correctly, it's okay to ignore it (if it was the only solution, we'll infer it again next time)
- val (okParams, okArgs) = (undetParams zip detargs) filter {case (p, a) => !uninstantiated.contains(p)} unzip
- // TODO: optimise above line(s?) once `zipped filter` works (oh, the irony! this line is needed to get Zipped to type check...)
-
+ val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, targs) // prototype == WildcardType: want to remove all inferred Nothing's
val subst = new TreeTypeSubstituter(okParams, okArgs)
subst traverse itree2
+ // #2421b: since type inference (which may have been performed during implicit search)
+ // does not check whether inferred arguments meet the bounds of the corresponding parameter (see note in solvedTypes),
+ // must check again here:
+ // TODO: I would prefer to just call typed instead of duplicating the code here, but this is probably a hotspot (and you can't just call typed, need to force re-typecheck)
+ itree2 match {
+ case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
+ case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
+ case _ =>
+ }
+
val result = new SearchResult(itree2, subst)
+ incCounter(foundImplicits)
if (traceImplicits) println("RESULT = "+result)
// println("RESULT = "+itree+"///"+itree1+"///"+itree2)//DEBUG
result
} else {
- if (traceImplicits) println("incompatible: "+itree2.tpe+" does not match "+pt.instantiateTypeParams(undetParams, tvars))
+ printTyping("incompatible: "+itree2.tpe+" does not match "+pt.instantiateTypeParams(undetParams, tvars))
+
SearchFailure
}
} else if (settings.XlogImplicits.value)
@@ -468,10 +526,10 @@ self: Analyzer =>
* - the symbol's type is initialized
* - the symbol comes from a classfile
* - the symbol comes from a different sourcefile than the current one
- * - the symbol's definition comes before, and does not contain the closest enclosing definition,
+ * - the symbol and the accessed symbol's definitions come before, and do not contain the closest enclosing definition, // see #3373
* - the symbol's definition is a val, var, or def with an explicit result type
* The aim of this method is to prevent premature cyclic reference errors
- * by computing the types of only those implicitis for which one of these
+ * by computing the types of only those implicits for which one of these
* conditions is true.
*/
def isValid(sym: Symbol) = {
@@ -487,9 +545,15 @@ self: Analyzer =>
case _ => true
}
}
- def comesBefore(sym: Symbol, owner: Symbol) =
- sym.pos.pointOrElse(0) < owner.pos.pointOrElse(Integer.MAX_VALUE) &&
- !(owner.ownerChain contains sym)
+ def comesBefore(sym: Symbol, owner: Symbol) = {
+ val ownerPos = owner.pos.pointOrElse(Integer.MAX_VALUE)
+ sym.pos.pointOrElse(0) < ownerPos && (
+ if(sym isGetterOrSetter) {
+ val symAcc = sym.accessed // #3373
+ symAcc.pos.pointOrElse(0) < ownerPos &&
+ !(owner.ownerChain exists (o => (o eq sym) || (o eq symAcc))) // probably faster to iterate only once, don't feel like duplicating hasTransOwner for this case
+ } else !(owner hasTransOwner sym)) // faster than owner.ownerChain contains sym
+ }
sym.isInitialized ||
sym.sourceFile == null ||
@@ -510,9 +574,32 @@ self: Analyzer =>
isLocal: Boolean,
invalidImplicits: ListBuffer[Symbol]): Map[ImplicitInfo, SearchResult] = {
+ val start = startCounter(subtypeAppInfos)
+
/** A set containing names that are shadowed by implicit infos */
lazy val shadowed = new HashSet[Name]("shadowed", 512)
+ // #3453
+ // in addition to the implicit symbols that may shadow the implicit with name `name`,
+ // this method tests whether there's a non-implicit symbol with name `name` in scope
+ // inspired by logic in typedIdent
+ def nonImplicitSynonymInScope(name: Name) = {
+ val defEntry = context.scope.lookupEntry(name)
+ (defEntry ne null) &&
+ reallyExists(defEntry.sym) &&
+ !defEntry.sym.isImplicit // the implicit ones are handled by the `shadowed` set above
+ // also, subsumes the test that defEntry.sym ne info.sym
+ // (the `info` that's in scope at the call to nonImplicitSynonymInScope in tryImplicit)
+ }
+
+ /** Is `sym' the standard conforms method in Predef?
+     * Note: DON'T replace this by sym == Predef_conforms, as Predef_conforms is a `def'
+ * which does a member lookup (it can't be a lazy val because we might reload Predef
+ * during resident compilations).
+ */
+ def isConformsMethod(sym: Symbol) =
+ sym.name == nme.conforms && sym.owner == PredefModule.moduleClass
+
/** Try implicit `info` to see whether it is applicable for expected type `pt`.
* This is the case if all of the following holds:
* - the info's type is not erroneous,
@@ -521,18 +608,23 @@ self: Analyzer =>
* - the result of typedImplicit is non-empty.
* @return A search result with an attributed tree containing the implicit if succeeded,
* SearchFailure if not.
+ * @note Extreme hotspot!
*/
- def tryImplicit(info: ImplicitInfo): SearchResult =
+ def tryImplicit(info: ImplicitInfo): SearchResult = {
+ incCounter(triedImplicits)
if (info.isCyclicOrErroneous ||
- (isLocal && shadowed.contains(info.name)) ||
- (isView && (info.sym == Predef_identity || info.sym == Predef_conforms)) //@M this condition prevents no-op conversions, which are a problem (besides efficiency),
- // TODO: remove `info.sym == Predef_identity` once we have a new STARR that only has conforms as an implicit
+ (isLocal && (shadowed.contains(info.name) || nonImplicitSynonymInScope(info.name))) ||
+ (isView && isConformsMethod(info.sym)) ||
+ //@M this condition prevents no-op conversions, which are a problem (besides efficiency),
// one example is removeNames in NamesDefaults, which relies on the type checker failing in case of ambiguity between an assignment/named arg
- ) SearchFailure
- else typedImplicit(info)
+ !isPlausiblyCompatible(info.tpe, wildPt))
+ SearchFailure
+ else
+ typedImplicit(info)
+ }
- def appInfos(is: List[ImplicitInfo]): Map[ImplicitInfo, SearchResult] = {
- var applicable = Map[ImplicitInfo, SearchResult]()
+ def addAppInfos(is: List[ImplicitInfo], m: Map[ImplicitInfo, SearchResult]): Map[ImplicitInfo, SearchResult] = {
+ var applicable = m
for (i <- is)
if (!isValid(i.sym)) invalidImplicits += i.sym
else {
@@ -544,7 +636,16 @@ self: Analyzer =>
applicable
}
- (Map[ImplicitInfo, SearchResult]() /: (iss map appInfos))(_ ++ _)
+ // #3453 -- alternative fix, seems not to be faster than encoding the set as the boolean predicate nonImplicitSynonymInScope
+ // in addition to the *implicit* symbols that may shadow the implicit with name `name` (added to shadowed by addAppInfos)
+ // add names of non-implicit symbols that are in scope (accessible without prefix)
+ // for(sym <- context.scope; if !sym.isImplicit) shadowed addEntry sym.name
+
+ var applicable = Map[ImplicitInfo, SearchResult]()
+ for (is <- iss) applicable = addAppInfos(is, applicable)
+
+ stopCounter(subtypeAppInfos, start)
+ applicable
}
/** Search list of implicit info lists for one matching prototype
@@ -568,11 +669,13 @@ self: Analyzer =>
val applicable = applicableInfos(implicitInfoss, isLocal, invalidImplicits)
if (applicable.isEmpty && !invalidImplicits.isEmpty) {
- infer.setAddendum(tree.pos, () =>
+ setAddendum(tree.pos, () =>
"\n Note: implicit "+invalidImplicits.head+" is not applicable here"+
- "\n because it comes after the application point and it lacks an explicit result type")
+ " because it comes after the application point and it lacks an explicit result type")
}
+ val start = startCounter(subtypeImprovCount)
+
/** A candidate for best applicable info wrt `improves` */
val best = (NoImplicitInfo /: applicable.keysIterator) (
(best, alt) => if (improves(alt, best)) alt else best)
@@ -582,67 +685,49 @@ self: Analyzer =>
val competing = applicable.keySet dropWhile (alt => best == alt || improves(best, alt))
if (!competing.isEmpty) ambiguousImplicitError(best, competing.head, "both", "and", "")
- // Also check that applicable infos that did not get selected are not
- // in (a companion object of) a subclass of (a companion object of) the class
- // containing the winning info.
- // (no longer needed; rules have changed)
- /*
- for (alt <- applicable.keySet) {
- if (isProperSubClassOrObject(alt.sym.owner, best.sym.owner)) {
- ambiguousImplicitError(best, alt,
- "most specific definition is:",
- "yet alternative definition ",
- "is defined in a subclass.\n Both definitions ")
- }
- }
- */
+ stopCounter(subtypeImprovCount, start)
applicable(best)
}
} // end searchImplicit
- /** The implicits made available directly by class type `tp`.
- * If `tp` refers to class C, these are all implicit members of the companion object of C.
- */
- private def implicitsOfClass(tp: Type): List[ImplicitInfo] = tp match {
- case TypeRef(pre, clazz, _) =>
- clazz.initialize.linkedClassOfClass.info.members.toList.filter(_.hasFlag(IMPLICIT)) map
- (sym => new ImplicitInfo(sym.name, pre.memberType(clazz.linkedModuleOfClass), sym))
- case _ =>
- List()
- }
-
/** The parts of a type is the smallest set of types that contains
* - the type itself
* - the parts of its immediate components (prefix and argument)
* - the parts of its base types
+     *    - for alias types and abstract types, we take instead the parts
+     *      of their upper bounds.
+ * @return For those parts that refer to classes with companion objects that
+     *          can be accessed with unambiguous stable prefixes, the implicit infos
+ * which are members of these companion objects.
*/
- private def parts(tp: Type): List[Type] = {
- val partMap = new collection.mutable.LinkedHashMap[Symbol, List[Type]]
- /** Add a new type to partMap, unless a subtype of it with the same
- * type symbol exists already.
- */
- def addType(newtp: Type): Boolean = {
- val tsym = newtp.typeSymbol
- partMap.get(tsym) match {
- case Some(ts) =>
- if (ts exists (_ <:< newtp)) false
- else { partMap.put(tsym, newtp :: ts); true }
- case None =>
- partMap.put(tsym, List(newtp)); true
- }
- }
- /** Enter all parts of `tp` into `partMap`
+ private def companionImplicits(tp: Type): List[List[ImplicitInfo]] = {
+
+ val partMap = new LinkedHashMap[Symbol, Type]
+
+      /** Enter all parts of `tp` into `partMap`.
+ * This method is performance critical: about 2-4% of all type checking is spent here
*/
def getParts(tp: Type) {
tp match {
- case TypeRef(pre, sym, args) if (!sym.isPackageClass) =>
- if (sym.isClass && !sym.isRefinementClass && !sym.isAnonymousClass) {
- if (addType(tp)) {
- for (bc <- sym.ancestors)
- getParts(tp.baseType(bc))
- getParts(pre)
- args foreach getParts
- }
+ case TypeRef(pre, sym, args) =>
+ if (sym.isClass) {
+ if (!((sym.name == nme.REFINE_CLASS_NAME.toTypeName) ||
+ (sym.name startsWith nme.ANON_CLASS_NAME) ||
+ (sym.name == nme.ROOT.toTypeName)))
+ partMap get sym match {
+ case Some(pre1) =>
+ if (!(pre =:= pre1)) partMap(sym) = NoType // ambiguous prefix - ignore implicit members
+ case None =>
+ if (pre.isStable) partMap(sym) = pre
+ val bts = tp.baseTypeSeq
+ var i = 1
+ while (i < bts.length) {
+ getParts(bts(i))
+ i += 1
+ }
+ getParts(pre)
+ args foreach getParts
+ }
} else if (sym.isAliasType) {
getParts(tp.normalize)
} else if (sym.isAbstractType) {
@@ -656,23 +741,29 @@ self: Analyzer =>
for (p <- ps) getParts(p)
case AnnotatedType(_, t, _) =>
getParts(t)
- case ExistentialType(tparams, t) =>
+ case ExistentialType(_, t) =>
+ getParts(t)
+ case PolyType(_, t) =>
getParts(t)
case _ =>
}
}
- /** Gives a list of typerefs with the same type symbol,
- * remove all those that have a prefix which is a supertype
- * of some other elements's prefix.
- */
- def compactify(ts: List[Type]): List[Type] = ts match {
- case List() => ts
- case (t @ TypeRef(pre, _, _)) :: ts1 =>
- if (ts1 exists (_.prefix <:< pre)) compactify(ts1)
- else t :: compactify(ts1 filterNot (pre <:< _.prefix))
- }
+
getParts(tp)
- for ((k, ts) <- partMap.iterator.toList; t <- compactify(ts)) yield t
+ val buf = new ListBuffer[List[ImplicitInfo]]
+ for ((clazz, pre) <- partMap) {
+ if (pre != NoType) {
+ val companion = clazz.companionModule
+ companion.moduleClass match {
+ case mc: ModuleClassSymbol =>
+ buf += (mc.implicitMembers map (im =>
+ new ImplicitInfo(im.name, SingleType(pre, companion), im)))
+ case _ =>
+ }
+ }
+ }
+ //println("companion implicits of "+tp+" = "+buf.toList) // DEBUG
+ buf.toList
}
/** The implicits made available by type `pt`.
@@ -680,106 +771,124 @@ self: Analyzer =>
* such that some part of `tp` has C as one of its superclasses.
*/
private def implicitsOfExpectedType: List[List[ImplicitInfo]] = implicitsCache get pt match {
- case Some(implicitInfoss) => hits += 1; implicitInfoss
- case None => {
- misses += 1
- val implicitInfoss = parts(pt).iterator.map(implicitsOfClass).toList
+ case Some(implicitInfoss) =>
+ incCounter(implicitCacheHits)
+ implicitInfoss
+ case None =>
+ incCounter(implicitCacheMisses)
+ val start = startTimer(subtypeETNanos)
+ val implicitInfoss = companionImplicits(pt)
+ stopTimer(subtypeETNanos, start)
implicitsCache(pt) = implicitInfoss
if (implicitsCache.size >= sizeLimit)
implicitsCache -= implicitsCache.keysIterator.next
implicitInfoss
- }
- }
-
-
- /** The manifest corresponding to type `pt`, provided `pt` is an instance of Manifest.
- */
- private def implicitManifest(pt: Type): Tree = pt.dealias match {
- case TypeRef(_, FullManifestClass, List(arg)) =>
- manifestOfType(arg, true)
- case TypeRef(_, PartialManifestClass, List(arg)) =>
- manifestOfType(arg, false)
- case TypeRef(_, OptManifestClass, List(arg)) =>
- val itree = manifestOfType(arg, false)
- if (itree == EmptyTree) gen.mkAttributedRef(NoManifest) else itree
- case TypeRef(_, tsym, _) if (tsym.isAbstractType) =>
- implicitManifest(pt.bounds.lo)
- case _ =>
- EmptyTree
}
/** Creates a tree that calls the relevant factory method in object
* reflect.Manifest for type 'tp'. An EmptyTree is returned if
* no manifest is found. todo: make this instantiate take type params as well?
*/
- private def manifestOfType(tp: Type, full: Boolean): Tree = {
+ private def manifestOfType(tp: Type, full: Boolean): SearchResult = {
/** Creates a tree that calls the factory method called constructor in object reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
if (args contains EmptyTree) EmptyTree
- else
- typed { atPos(tree.pos.focus) {
- Apply(
- TypeApply(
- Select(gen.mkAttributedRef(if (full) FullManifestModule else PartialManifestModule), constructor),
- List(TypeTree(tparg))
- ),
- args.toList
- )
- }}
+ else typedPos(tree.pos.focus) {
+ Apply(
+ TypeApply(
+ Select(gen.mkAttributedRef(if (full) FullManifestModule else PartialManifestModule), constructor),
+ List(TypeTree(tparg))
+ ),
+ args.toList
+ )
+ }
+
+ /** Creates a tree representing one of the singleton manifests.*/
+ def findSingletonManifest(name: String) = typedPos(tree.pos.focus) {
+ Select(gen.mkAttributedRef(FullManifestModule), name)
+ }
/** Re-wraps a type in a manifest before calling inferImplicit on the result */
def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
inferImplicit(tree, appliedType(manifestClass.typeConstructor, List(tp)), true, false, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
-
- def mot(tp0: Type): Tree = tp0.normalize match {
- case ThisType(_) | SingleType(_, _) =>
- manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp0))
- case ConstantType(value) =>
- manifestOfType(tp0.deconst, full)
- case TypeRef(pre, sym, args) =>
- if (isValueClass(sym) || isPhantomClass(sym)) {
- typed { atPos(tree.pos.focus) {
- Select(gen.mkAttributedRef(FullManifestModule), sym.name.toString)
- }}
- } else if (sym == ArrayClass && args.length == 1) {
- manifestFactoryCall("arrayType", args.head, findSubManifest(args.head))
- } else if (sym.isClass) {
- val suffix = gen.mkClassOf(tp0) :: (args map findSubManifest)
- manifestFactoryCall(
- "classType", tp,
- (if ((pre eq NoPrefix) || pre.typeSymbol.isStaticOwner) suffix
- else findSubManifest(pre) :: suffix): _*)
- } else if (sym.isAbstractType) {
- if (sym.isExistential)
- EmptyTree // todo: change to existential parameter manifest
- else if (sym.isTypeParameterOrSkolem)
- EmptyTree // a manifest should have been found by normal searchImplicit
- else
+ def mot(tp0: Type)(implicit from: List[Symbol] = List(), to: List[Type] = List()): SearchResult = {
+ implicit def wrapResult(tree: Tree): SearchResult =
+ if (tree == EmptyTree) SearchFailure else new SearchResult(tree, new TreeTypeSubstituter(from, to))
+
+ val tp1 = tp0.normalize
+ tp1 match {
+ case ThisType(_) | SingleType(_, _) if !(tp1 exists {tp => tp.typeSymbol.isExistentiallyBound}) => // can't generate a reference to a value that's abstracted over by an existential
+ manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
+ case ConstantType(value) =>
+ manifestOfType(tp1.deconst, full)
+ case TypeRef(pre, sym, args) =>
+ if (isValueClass(sym) || isPhantomClass(sym)) {
+ findSingletonManifest(sym.name.toString)
+ } else if (sym == ObjectClass || sym == AnyRefClass) {
+ findSingletonManifest("Object")
+ } else if (sym == RepeatedParamClass || sym == ByNameParamClass) {
+ EmptyTree
+ } else if (sym == ArrayClass && args.length == 1) {
+ manifestFactoryCall("arrayType", args.head, findManifest(args.head))
+ } else if (sym.isClass) {
+ val classarg0 = gen.mkClassOf(tp1)
+ val classarg = tp match {
+ case ExistentialType(_, _) =>
+ TypeApply(Select(classarg0, Any_asInstanceOf),
+ List(TypeTree(appliedType(ClassClass.typeConstructor, List(tp)))))
+ case _ =>
+ classarg0
+ }
+ val suffix = classarg :: (args map findSubManifest)
manifestFactoryCall(
- "abstractType", tp,
- findSubManifest(pre) :: Literal(sym.name.toString) :: findManifest(tp0.bounds.hi) :: (args map findSubManifest): _*)
- } else {
- EmptyTree // a manifest should have been found by normal searchImplicit
- }
- case RefinedType(parents, decls) =>
- // refinement is not generated yet
- if (parents.length == 1) findManifest(parents.head)
- else manifestFactoryCall("intersectionType", tp, parents map (findSubManifest(_)): _*)
- case ExistentialType(tparams, result) =>
- existentialAbstraction(tparams, result) match {
- case ExistentialType(_, _) => mot(result)
- case t => mot(t)
- }
- case _ =>
- EmptyTree
+ "classType", tp,
+ (if ((pre eq NoPrefix) || pre.typeSymbol.isStaticOwner) suffix
+ else findSubManifest(pre) :: suffix): _*)
+ } else if (sym.isExistentiallyBound && full) {
+ manifestFactoryCall("wildcardType", tp,
+ findManifest(tp.bounds.lo), findManifest(tp.bounds.hi))
+ } else if(undetParams contains sym) { // looking for a manifest of a type parameter that hasn't been inferred by now, can't do much, but let's not fail
+ mot(NothingClass.tpe)(sym :: from, NothingClass.tpe :: to) // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
+ } else {
+ EmptyTree // a manifest should have been found by normal searchImplicit
+ }
+ case RefinedType(parents, decls) =>
+ // refinement is not generated yet
+ if (parents.length == 1) findManifest(parents.head)
+ else if (full) manifestFactoryCall("intersectionType", tp, parents map (findSubManifest(_)): _*)
+ else mot(erasure.erasure.intersectionDominator(parents))
+ case ExistentialType(tparams, result) =>
+ mot(tp1.skolemizeExistential)
+ case _ =>
+ EmptyTree
+ }
}
mot(tp)
}
+ def wrapResult(tree: Tree): SearchResult =
+ if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
+
+ /** The manifest corresponding to type `pt`, provided `pt` is an instance of Manifest.
+ */
+ private def implicitManifestOrOfExpectedType(pt: Type): SearchResult = pt.dealias match {
+ case TypeRef(_, FullManifestClass, List(arg)) =>
+ manifestOfType(arg, true)
+ case TypeRef(_, PartialManifestClass, List(arg)) =>
+ manifestOfType(arg, false)
+ case TypeRef(_, OptManifestClass, List(arg)) =>
+ val res = manifestOfType(arg, false)
+ if (res == SearchFailure) wrapResult(gen.mkAttributedRef(NoManifest)) else res
+ case TypeRef(_, tsym, _) if (tsym.isAbstractType) =>
+ implicitManifestOrOfExpectedType(pt.bounds.lo)
+ case _ =>
+ searchImplicit(implicitsOfExpectedType, false)
+ }
+
/** The result of the implicit search:
* First search implicits visible in current context.
* If that fails, search implicits in expected type `pt`.
@@ -787,34 +896,85 @@ self: Analyzer =>
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val start = System.nanoTime()
+ val failstart = startTimer(inscopeFailNanos)
+ val succstart = startTimer(inscopeSucceedNanos)
+
var result = searchImplicit(context.implicitss, true)
- val timer1 = System.nanoTime()
- if (result == SearchFailure) inscopeFail += timer1 - start else inscopeSucceed += timer1 - start
- if (result == SearchFailure)
- result = searchImplicit(implicitsOfExpectedType, false)
- val timer2 = System.nanoTime()
- if (result == SearchFailure) oftypeFail += timer2 - timer1 else oftypeSucceed += timer2 - timer1
if (result == SearchFailure) {
- val resultTree = implicitManifest(pt)
- if (resultTree != EmptyTree) result = new SearchResult(resultTree, EmptyTreeTypeSubstituter)
+ stopTimer(inscopeFailNanos, failstart)
+ } else {
+ stopTimer(inscopeSucceedNanos, succstart)
+ incCounter(inscopeImplicitHits)
}
- val timer3 = System.nanoTime()
- if (result == SearchFailure) manifFail += timer3 - timer2 else manifSucceed += timer3 - timer2
+ if (result == SearchFailure) {
+ val failstart = startTimer(oftypeFailNanos)
+ val succstart = startTimer(oftypeSucceedNanos)
+
+ result = implicitManifestOrOfExpectedType(pt)
+
+ if (result == SearchFailure) {
+ stopTimer(oftypeFailNanos, failstart)
+ } else {
+ stopTimer(oftypeSucceedNanos, succstart)
+ incCounter(oftypeImplicitHits)
+ }
+ }
+
if (result == SearchFailure && settings.debug.value)
- log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+parts(pt)+implicitsOfExpectedType)
- implicitTime += System.nanoTime() - start
+ log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
+
result
}
def allImplicits: List[SearchResult] = {
val invalidImplicits = new ListBuffer[Symbol]
def search(iss: List[List[ImplicitInfo]], isLocal: Boolean) =
- applicableInfos(iss, isLocal, invalidImplicits).valuesIterator.toList
+ applicableInfos(iss, isLocal, invalidImplicits).values.toList
search(context.implicitss, true) ::: search(implicitsOfExpectedType, false)
}
}
- private val DivergentImplicit = new Exception()
+ object ImplicitNotFoundMsg {
+ def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg map (m => (new Message(sym, m)))
+    // check the message's syntax: should be a string literal that may contain occurrences of the string "${X}",
+ // where `X` refers to a type parameter of `sym`
+ def check(sym: Symbol): Option[String] =
+ sym.getAnnotation(ImplicitNotFoundClass).flatMap(_.stringArg(0) match {
+ case Some(m) => new Message(sym, m) validate
+ case None => Some("Missing argument `msg` on implicitNotFound annotation.")
+ })
+
+
+ class Message(sym: Symbol, msg: String) {
+ // http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html
+ private def interpolate(text: String, vars: Map[String, String]) = { import scala.util.matching.Regex
+ """\$\{([^}]+)\}""".r.replaceAllIn(text, (_: Regex.Match) match {
+ case Regex.Groups(v) => java.util.regex.Matcher.quoteReplacement(vars.getOrElse(v, "")) // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw)
+ })}
+
+ private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
+
+ def format(paramName: Name, paramTp: Type): String = format(paramTp.typeArgs map (_.toString))
+ def format(typeArgs: List[String]): String =
+ interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
+
+ def validate: Option[String] = {
+ import scala.util.matching.Regex; import collection.breakOut
+ // is there a shorter way to avoid the intermediate toList?
+ val refs = Set("""\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_.group(1)).toList : _*)
+ val decls = Set(typeParamNames : _*)
+ (refs &~ decls) match {
+ case s if s isEmpty => None
+ case unboundNames =>
+ val singular = unboundNames.size == 1
+ Some("The type parameter"+( if(singular) " " else "s " )+ unboundNames.mkString(", ") +
+ " referenced in the message of the @implicitNotFound annotation "+( if(singular) "is" else "are" )+
+ " not defined by "+ sym +".")
+ }
+ }
+ }
+ }
}
+class DivergentImplicit extends Exception
+object DivergentImplicit extends DivergentImplicit
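
For illustration, a short sketch (not part of this patch; the Serializer type class is invented) of the user-facing side of the ${X} interpolation validated by ImplicitNotFoundMsg above:

import scala.annotation.implicitNotFound

@implicitNotFound("No Serializer available for ${T}.")
trait Serializer[T] {
  def write(value: T): String
}

object SerializerSketch {
  implicit object StringSerializer extends Serializer[String] {
    def write(value: String) = value
  }

  def save[T](value: T)(implicit s: Serializer[T]): String = s.write(value)

  def main(args: Array[String]) {
    println(save("hello"))
    // save(42)  // would be rejected with: No Serializer available for Int.
  }
}
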
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 8ed799ed60..98bb1828b2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1,14 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
-import scala.tools.nsc.util.{Position, NoPosition}
+
import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import symtab.Flags._
/** This trait ...
@@ -21,11 +20,6 @@ trait Infer {
import global._
import definitions._
- // statistics
- var normM = 0
- var normP = 0
- var normO = 0
-
private final val inferInfo = false //@MDEBUG
/* -- Type parameter inference utility functions --------------------------- */
@@ -33,26 +27,23 @@ trait Infer {
private def assertNonCyclic(tvar: TypeVar) =
assert(tvar.constr.inst != tvar, tvar.origin)
- def isVarArgs(formals: List[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last)
+ def isVarArgs(params: List[Symbol]) = !params.isEmpty && isRepeatedParamType(params.last.tpe)
+ def isVarArgTpes(formals: List[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last)
/** The formal parameter types corresponding to <code>formals</code>.
* If <code>formals</code> has a repeated last parameter, a list of
* (nargs - params.length + 1) copies of its type is returned.
* By-name types are replaced with their underlying type.
*
- * @param formals ...
- * @param nargs ...
+ * @param removeByName if false, ByName parameters are kept rather than replaced by their underlying type. Used in NamesDefaults.
+ * @param removeRepeated if false, the repeated parameter is kept when exactly one argument corresponds to it. Used in NamesDefaults.
*/
- def formalTypes(formals: List[Type], nargs: Int): List[Type] =
- formalTypes(formals, nargs, true)
-
- /** This variant allows keeping ByName parameters. Useed in NamesDefaults. */
- def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean): List[Type] = {
- val formals1 = if (removeByName) formals map {
+ def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
+ val formals1 = if (removeByName) formals mapConserve {
case TypeRef(_, sym, List(arg)) if (sym == ByNameParamClass) => arg
case formal => formal
} else formals
- if (isVarArgs(formals1)) {
+ if (isVarArgTpes(formals1) && (removeRepeated || formals.length != nargs)) {
val ft = formals1.last.normalize.typeArgs.head
formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
} else formals1
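
For illustration, a small sketch (not part of this patch; names invented) of the expansion formalTypes performs for a repeated last parameter, seen from a call site:

object VarargsSketch {
  // formal parameter types: (String, Int*)
  def total(label: String, xs: Int*): String = label + ": " + xs.sum

  def main(args: Array[String]) {
    // type-checked as if the formals were (String, Int, Int, Int)
    println(total("sum", 1, 2, 3))
  }
}
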
@@ -67,19 +58,15 @@ trait Infer {
if (nformals == 1 && actuals.length != 1 && actuals.length <= definitions.MaxTupleArity && !phase.erasedTypes)
List(atPos(pos)(gen.mkTuple(actuals))) else actuals
- /** A fresh type varable with given type parameter as origin.
+ /** A fresh type variable with given type parameter as origin.
*
* @param tparam ...
* @return ...
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
- //todo: remove comments around following privates; right now they cause an IllegalAccess
- // error when built with scalac
-
- /*private*/ class NoInstance(msg: String) extends RuntimeException(msg) with ControlException
-
- /*private*/ class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
+ private class NoInstance(msg: String) extends Throwable(msg) with ControlThrowable { }
+ private class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
override def getMessage(): String = getmsg()
}
@@ -93,7 +80,7 @@ trait Infer {
object instantiate extends TypeMap {
private var excludedVars = scala.collection.immutable.Set[TypeVar]()
def apply(tp: Type): Type = tp match {
- case WildcardType | NoType =>
+ case WildcardType | BoundedWildcardType(_) | NoType =>
throw new NoInstance("undetermined type")
case tv @ TypeVar(origin, constr) =>
if (constr.inst == NoType) {
@@ -118,12 +105,12 @@ trait Infer {
* @return ...
*/
private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match {
- case WildcardType | NoType =>
+ case WildcardType | BoundedWildcardType(_) | NoType =>
false
case NoPrefix | ThisType(_) | ConstantType(_) =>
true
case TypeRef(pre, sym, args) =>
- isFullyDefined(pre) && (args.isEmpty || (args forall isFullyDefined))
+ isFullyDefined(pre) && (args forall isFullyDefined)
case SingleType(pre, sym) =>
isFullyDefined(pre)
case RefinedType(ts, decls) =>
@@ -156,38 +143,43 @@ trait Infer {
if (!solve(tvars, tparams, variances, upper, depth)) {
// no panic, it's good enough to just guess a solution, we'll find out
// later whether it works.
+// @M danger, Will Robinson! this means that you should never trust inferred type arguments!
+// need to call checkBounds on the args/typars or type1 on the tree for the expression that results from type inference
+// see e.g., #2421: implicit search had been ignoring this caveat
// throw new DeferredNoInstance(() =>
// "no solution exists for constraints"+(tvars map boundsString))
}
for (tvar <- tvars)
if (tvar.constr.inst == tvar)
if (tvar.origin.typeSymbol.info eq ErrorType) {
- // this can happen if during solving a cyclic type paramater
+ // this can happen if during solving a cyclic type parameter
// such as T <: T gets completed. See #360
tvar.constr.inst = ErrorType
} else assert(false, tvar.origin+" at "+tvar.origin.typeSymbol.owner)
tvars map instantiate
}
- def skipImplicit(tp: Type) =
- if (tp.isInstanceOf[ImplicitMethodType]) tp.resultType else tp
+ def skipImplicit(tp: Type) = tp match {
+ case mt: MethodType if mt.isImplicit => mt.resultType
+ case _ => tp
+ }
/** Automatically perform the following conversions on expression types:
* A method type becomes the corresponding function type.
* A nullary method type becomes its result type.
* Implicit parameters are skipped.
+ * This method seems to be performance critical.
*/
- def normalize(tp: Type): Type = skipImplicit(tp) match {
- case MethodType(params, restpe) if (!restpe.isDependent) =>
- if (util.Statistics.enabled) normM += 1
+ def normalize(tp: Type): Type = tp match {
+ case mt @ MethodType(params, restpe) if mt.isImplicit =>
+ normalize(restpe)
+ case mt @ MethodType(params, restpe) if !restpe.isDependent =>
functionType(params map (_.tpe), normalize(restpe))
- case PolyType(List(), restpe) =>
- if (util.Statistics.enabled) normP += 1
+ case PolyType(List(), restpe) => // nullary method type
normalize(restpe)
case ExistentialType(tparams, qtpe) =>
ExistentialType(tparams, normalize(qtpe))
case tp1 =>
- if (util.Statistics.enabled) normO += 1
tp1 // @MAT aliases already handled by subtyping
}
@@ -196,72 +188,17 @@ trait Infer {
/** The context-dependent inferencer part */
class Inferencer(context: Context) {
-
/* -- Error Messages --------------------------------------------------- */
-
- private var addendumPos: Position = NoPosition
- private var addendum: () => String = _
-
- def setAddendum(pos: Position, msg: () => String) = {
- addendumPos = pos
- addendum = msg
- }
-
def setError[T <: Tree](tree: T): T = {
- if (tree.hasSymbol)
- if (context.reportGeneralErrors) {
- val name = newTermName("<error: " + tree.symbol + ">")
- tree.setSymbol(
- if (tree.isType) context.owner.newErrorClass(name.toTypeName)
- else context.owner.newErrorValue(name))
- } else {
- tree.setSymbol(if (tree.isType) stdErrorClass else stdErrorValue)
- }
- tree.setType(ErrorType)
- }
+ def name = newTermName("<error: " + tree.symbol + ">")
+ def errorClass = if (context.reportGeneralErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
+ def errorValue = if (context.reportGeneralErrors) context.owner.newErrorValue(name) else stdErrorValue
+ def errorSym = if (tree.isType) errorClass else errorValue
- def decode(name: Name): String =
- (if (name.isTypeName) "type " else "value ") + name.decode
-
- def treeSymTypeMsg(tree: Tree): String =
- if (tree.symbol eq null)
- "expression of type " + tree.tpe
- else if (tree.symbol.hasFlag(OVERLOADED))
- "overloaded method " + tree.symbol + " with alternatives " + tree.tpe
- else
- tree.symbol.toString() +
- (if (tree.symbol.isModule) ""
- else if (tree.tpe.paramSectionCount > 0) ": "+tree.tpe
- else " of type "+tree.tpe) +
- (if (tree.symbol.name == nme.apply) tree.symbol.locationString else "")
-
- def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) =
- treeSymTypeMsg(tree) + msg + argtpes.mkString("(", ",", ")") +
- (if (pt == WildcardType) "" else " with expected result type " + pt)
-
- // todo: use also for other error messages
- private def existentialContext(tp: Type) = tp.existentialSkolems match {
- case List() => ""
- case skolems =>
- def disambiguate(ss: List[String]) = ss match {
- case List() => ss
- case s :: ss1 => s :: (ss1 map (s1 => if (s1 == s) "(some other)"+s1 else s1))
- }
- " where "+(disambiguate(skolems map (_.existentialToString)) mkString ", ")
- }
-
- def foundReqMsg(found: Type, req: Type): String =
- withDisambiguation(found, req) {
- ";\n found : " + found.toLongString + existentialContext(found) +
- "\n required: " + req + existentialContext(req)
- }
+ if (tree.hasSymbol)
+ tree setSymbol errorSym
- def typeErrorMsg(found: Type, req: Type) = {
- //println(found.baseTypeSeq)
- "type mismatch" + foundReqMsg(found, req) +
- (if ((found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req))
- "\n possible cause: missing arguments for method or constructor"
- else "")
+ tree setType ErrorType
}
def error(pos: Position, msg: String) {
@@ -275,72 +212,35 @@ trait Infer {
def typeError(pos: Position, found: Type, req: Type) {
if (!found.isErroneous && !req.isErroneous) {
- error(pos,
- typeErrorMsg(found, req)+
- (if (pos != NoPosition && pos == addendumPos) addendum()
- else ""))
- if (settings.explaintypes.value) explainTypes(found, req)
+ error(pos, withAddendum(pos)(typeErrorMsg(found, req)))
+
+ if (settings.explaintypes.value)
+ explainTypes(found, req)
}
}
+ def typeErrorMsg(found: Type, req: Type) = {
+ def isPossiblyMissingArgs = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
+ def missingArgsMsg = if (isPossiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
+
+ "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
+ }
+
def typeErrorTree(tree: Tree, found: Type, req: Type): Tree = {
typeError(tree.pos, found, req)
setError(tree)
}
def explainTypes(tp1: Type, tp2: Type) =
- withDisambiguation(tp1, tp2) { global.explainTypes(tp1, tp2) }
-
- /** If types `tp1' `tp2' contain different type variables with same name
- * differentiate the names by including owner information. Also, if the
- * type error is because of a conflict between two identically named
- * classes and one is in package scala, fully qualify the name so one
- * need not deduce why "java.util.Iterator" and "Iterator" don't match.
- */
- private def withDisambiguation[T](tp1: Type, tp2: Type)(op: => T): T = {
-
- def explainName(sym: Symbol) = {
- if (!sym.name.toString.endsWith(")")) {
- sym.name = newTypeName(sym.name.toString+"(in "+sym.owner+")")
- }
- }
-
- val patches = new ListBuffer[(Symbol, Symbol, Name)]
- for {
- t1 @ TypeRef(_, sym1, _) <- tp1
- t2 @ TypeRef(_, sym2, _) <- tp2
- if sym1 != sym2
- } {
- if (t1.toString == t2.toString) { // type variable collisions
- val name = sym1.name
- explainName(sym1)
- explainName(sym2)
- if (sym1.owner == sym2.owner) sym2.name = newTypeName("(some other)"+sym2.name)
- patches += ((sym1, sym2, name))
- }
- else if (sym1.name == sym2.name) { // symbol name collisions where one is in scala._
- val name = sym1.name
- def scalaQualify(s: Symbol) =
- if (s.owner.isScalaPackageClass) s.name = newTypeName("scala." + s.name)
- List(sym1, sym2) foreach scalaQualify
- patches += ((sym1, sym2, name))
- }
- }
-
- val result = op
-
- for ((sym1, sym2, name) <- patches) {
- sym1.name = name
- sym2.name = name
- }
-
- result
- }
+ withDisambiguation(tp1, tp2)(global.explainTypes(tp1, tp2))
/* -- Tests & Checks---------------------------------------------------- */
/** Check that <code>sym</code> is defined and accessible as a member of
* tree <code>site</code> with type <code>pre</code> in current context.
+ *
+ * Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
+ * since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
*/
def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree =
if (sym.isError) {
@@ -356,6 +256,8 @@ trait Infer {
context.unit.depends += sym.toplevelClass
val sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
+ // Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
+
if (sym1 == NoSymbol) {
if (settings.debug.value) {
Console.println(context)
@@ -364,26 +266,9 @@ trait Infer {
}
accessError("")
} else {
- // Modify symbol's type so that raw types C
- // are converted to existentials C[T] forSome { type T }.
- // We can't do this on class loading because it would result
- // in infinite cycles.
- def cook(sym: Symbol) {
- val tpe1 = rawToExistential(sym.tpe)
- if (tpe1 ne sym.tpe) {
- if (settings.debug.value) println("cooked: "+sym+":"+sym.tpe)
- sym.setInfo(tpe1)
- }
- }
- if (sym1.isTerm) {
- if (sym1 hasFlag JAVA)
- cook(sym1)
- else if (sym1 hasFlag OVERLOADED)
- for (sym2 <- sym1.alternatives)
- if (sym2 hasFlag JAVA)
- cook(sym2)
- }
- //Console.println("check acc " + sym1 + ":" + sym1.tpe + " from " + pre);//DEBUG
+ if(sym1.isTerm)
+ sym1.cookJavaRawInfo() // xform java rawtypes into existentials
+
var owntype = try{
pre.memberType(sym1)
} catch {
@@ -402,40 +287,51 @@ trait Infer {
}
}
- def isPlausiblyPopulated(tp1: Type, tp2: Type): Boolean = true
-
def isPlausiblyCompatible(tp: Type, pt: Type): Boolean = tp match {
case PolyType(_, restpe) =>
isPlausiblyCompatible(restpe, pt)
- case mt: ImplicitMethodType =>
- isPlausiblyCompatible(mt.resultType, pt)
case ExistentialType(tparams, qtpe) =>
isPlausiblyCompatible(qtpe, pt)
- case MethodType(params, _) =>
- val formals = tp.paramTypes
- pt.normalize match {
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit) isPlausiblyCompatible(restpe, pt)
+ else pt match {
case TypeRef(pre, sym, args) =>
- !sym.isClass || {
+ if (sym.isAliasType) {
+ isPlausiblyCompatible(tp, pt.dealias)
+ } else if (sym.isAbstractType) {
+ isPlausiblyCompatible(tp, pt.bounds.lo)
+ } else {
val l = args.length - 1
- l == formals.length &&
- sym == FunctionClass(l) &&
- List.forall2(args, formals) (isPlausiblySubType) &&
- isPlausiblySubType(tp.resultApprox, args.last)
+ l == params.length &&
+ sym == FunctionClass(l) && {
+ var curargs = args
+ var curparams = params
+ while (curparams.nonEmpty) {
+ if (!isPlausiblySubType(curargs.head, curparams.head.tpe))
+ return false
+ curargs = curargs.tail
+ curparams = curparams.tail
+ }
+ isPlausiblySubType(restpe, curargs.head)
+ }
}
case _ =>
- true
+ false
}
case _ =>
- true
+ isPlausiblySubType(tp, pt)
}
- private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = tp1.normalize match {
+ private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = tp1 match {
case TypeRef(_, sym1, _) =>
- !sym1.isClass || {
- tp2.normalize match {
- case TypeRef(_, sym2, _) => !sym2.isClass || (sym1 isSubClass sym2)
- case _ => true
- }
+ if (sym1.isAliasType) isPlausiblySubType(tp1.dealias, tp2)
+ else if (!sym1.isClass) true
+ else tp2 match {
+ case TypeRef(_, sym2, _) =>
+ if (sym2.isAliasType) isPlausiblySubType(tp1, tp2.dealias)
+ else !sym2.isClass || (sym1 isSubClass sym2) || isNumericSubClass(sym1, sym2)
+ case _ =>
+ true
}
case _ =>
true
@@ -443,7 +339,42 @@ trait Infer {
def isCompatible(tp: Type, pt: Type): Boolean = {
val tp1 = normalize(tp)
- (tp1 <:< pt) || isCoercible(tp1, pt)
+ (tp1 weak_<:< pt) || isCoercible(tp1, pt)
+ }
+
+ final def normSubType(tp: Type, pt: Type): Boolean = tp match {
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit) normSubType(restpe, pt)
+ else pt match {
+ case TypeRef(pre, sym, args) =>
+ if (sym.isAliasType) {
+ normSubType(tp, pt.dealias)
+ } else if (sym.isAbstractType) {
+ normSubType(tp, pt.bounds.lo)
+ } else {
+ val l = args.length - 1
+ l == params.length &&
+ sym == FunctionClass(l) && {
+ var curargs = args
+ var curparams = params
+ while (curparams.nonEmpty) {
+ if (!(curargs.head <:< curparams.head.tpe))
+ return false
+ curargs = curargs.tail
+ curparams = curparams.tail
+ }
+ normSubType(restpe, curargs.head)
+ }
+ }
+ case _ =>
+ tp <:< pt
+ }
+ case PolyType(List(), restpe) => // nullary method type
+ normSubType(restpe, pt)
+ case ExistentialType(tparams, qtpe) =>
+ normalize(tp) <:< pt
+ case _ =>
+ tp <:< pt
}
def isCompatibleArg(tp: Type, pt: Type): Boolean = {
@@ -455,7 +386,7 @@ trait Infer {
pt.typeSymbol == UnitClass || // can perform unit coercion
isCompatible(tp, pt) ||
tp.isInstanceOf[MethodType] && // can perform implicit () instantiation
- tp.paramTypes.length == 0 && isCompatible(tp.resultType, pt)
+ tp.params.isEmpty && isCompatible(tp.resultType, pt)
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
@@ -472,8 +403,8 @@ trait Infer {
def isCoercible(tp: Type, pt: Type): Boolean = false
- def isCompatibleArgs(tps: List[Type], pts: List[Type]): Boolean =
- List.map2(tps, pts)((tp, pt) => isCompatibleArg(tp, pt)) forall (x => x)
+ def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
+ (tps corresponds pts)(isCompatibleArg) // @PP: corresponds
/* -- Type instantiation------------------------------------------------ */
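
For illustration, a tiny sketch (not part of this patch; values invented) of the corresponds combinator used above; it succeeds only when both sequences have equal length and the predicate holds pairwise:

object CorrespondsSketch {
  def main(args: Array[String]) {
    val argTps   = List("Int", "String")
    val paramTps = List("Int", "String")
    println((argTps corresponds paramTps)(_ == _))      // true
    println((argTps corresponds List("Int"))(_ == _))   // false: lengths differ
  }
}
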
@@ -510,15 +441,15 @@ trait Infer {
* @param pt ...
* @return ...
*/
- private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type): List[Type] = {
+ private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, checkCompat: (Type, Type) => Boolean = isCompatible): List[Type] = {
val tvars = tparams map freshVar
- if (isCompatible(restpe.instantiateTypeParams(tparams, tvars), pt)) {
+ if (checkCompat(restpe.instantiateTypeParams(tparams, tvars), pt)) {
try {
// If the restpe is an implicit method, and the expected type is fully defined
- // optimze type varianbles wrt to the implicit formals only; ignore the result type.
+ // optimize type variables wrt the implicit formals only; ignore the result type.
// See test pos/jesper.scala
val varianceType = restpe match {
- case mt: ImplicitMethodType if isFullyDefined(pt) =>
+ case mt: MethodType if mt.isImplicit && isFullyDefined(pt) =>
MethodType(mt.params, AnyClass.tpe)
case _ =>
restpe
@@ -577,40 +508,70 @@ trait Infer {
}
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
- List.map2(tparams, tvars) ((tparam, tvar) =>
+ (tparams, tvars).zipped map ((tparam, tvar) =>
instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
else
tvars map (tvar => WildcardType)
}
+ object AdjustedTypeArgs {
+ type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+
+ def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
+ m collect {case (p, Some(a)) => (p, a)} unzip ))
+
+ object Undets {
+ def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{
+ val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
+ val (okArgs, okTparams) = ok.unzip
+ (okArgs, okTparams, nok.keys)
+ })
+ }
+
+ object AllArgsAndUndets {
+ def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{
+ val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
+ val (okArgs, okTparams) = ok.unzip
+ (okArgs, okTparams, m.values.map(_.getOrElse(NothingClass.tpe)), nok.keys)
+ })
+ }
+
+ @inline private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList)
+ @inline private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList)
+ @inline private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList)
+ }
+
/** Retract arguments that were inferred to Nothing because inference failed. Correct types for repeated params.
*
* We detect Nothing-due-to-failure by only retracting a parameter if either:
* - it occurs in an invariant/contravariant position in `restpe`
* - `restpe == WildcardType`
*
- * Retracted parameters are collected in `uninstantiated`.
+ * Retracted parameters are mapped to None.
+ * TODO:
+ * - make sure the performance hit of storing these in a map is acceptable (it's going to be a small map in 90% of the cases, I think)
+ * - refactor further up the callstack so that we don't have to do this post-factum adjustment?
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
+ * @return a map from each tparam to its inferred arg (if inference was successful); tparams that map to None are considered left undetermined:
+ * type parameters that are inferred as `scala.Nothing' and that are not covariant in <code>restpe</code> are taken to be undetermined
*/
- def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type, uninstantiated: ListBuffer[Symbol]): List[Type] = {
+ def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
@inline def notCovariantIn(tparam: Symbol, restpe: Type) =
(varianceInType(restpe)(tparam) & COVARIANT) == 0 // tparam occurred non-covariantly (in invariant or contravariant position)
- List.map2(tparams, targs) {(tparam, targ) =>
- if (targ.typeSymbol == NothingClass && (restpe == WildcardType || notCovariantIn(tparam, restpe))) {
- uninstantiated += tparam
- tparam.tpeHK //@M tparam.tpe was wrong: we only want the type constructor,
- // not the type constructor applied to dummy arguments
- // see ticket 474 for an example that crashes if we use .tpe instead of .tpeHK)
- } else if (targ.typeSymbol == RepeatedParamClass) {
- targ.baseType(SeqClass)
- } else if (targ.typeSymbol == JavaRepeatedParamClass) {
- targ.baseType(ArrayClass)
+ (tparams, targs).zipped.map{ (tparam, targ) =>
+ if (targ.typeSymbol == NothingClass &&
+ (restpe.isWildcard || notCovariantIn(tparam, restpe))) {
+ tparam -> None
} else {
- targ.widen
+ tparam -> Some(
+ if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
+ else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
+ else targ.widen
+ )
}
- }
+ }(collection.breakOut)
}
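
For illustration, a sketch (not part of this patch; names invented) of the collection.breakOut idiom used above, which lets the expected type choose the result builder so the map runs in a single pass:

import scala.collection.breakOut

object BreakOutSketch {
  def main(args: Array[String]) {
    val params = List("T" -> 1, "U" -> 2)
    // map over a List but build a Map directly, with no intermediate List of pairs
    val byName: Map[String, String] = params.map(p => (p._1, p._2.toString))(breakOut)
    println(byName)
  }
}
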
/** Return inferred type arguments, given type parameters, formal parameters,
@@ -626,17 +587,12 @@ trait Infer {
* @param restp the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
- * @param uninstantiated a listbuffer receiving all uninstantiated type parameters
- * (type parameters mapped by the constraint solver to `scala.All'
- * and not covariant in <code>restpe</code> are taken to be
- * uninstantiated. Maps all those type arguments to their
- * corresponding type parameters).
- * @return ...
+ * @return @see adjustTypeArgs
+
* @throws NoInstance
*/
def methTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
- argtpes: List[Type], pt: Type,
- uninstantiated: ListBuffer[Symbol]): List[Type] = {
+ argtpes: List[Type], pt: Type): AdjustedTypeArgs.Result = {
val tvars = tparams map freshVar
if (inferInfo)
println("methTypeArgs tparams = "+tparams+
@@ -644,7 +600,6 @@ trait Infer {
", restpe = "+restpe+
", argtpes = "+argtpes+
", pt = "+pt+
- ", uninstantiated = "+uninstantiated+
", tvars = "+tvars+" "+(tvars map (_.constr)))
if (formals.length != argtpes.length) {
throw new NoInstance("parameter lists differ in length")
@@ -674,7 +629,7 @@ trait Infer {
if (!isFullyDefined(tvar)) tvar.constr.inst = NoType
// Then define remaining type variables from argument types.
- List.map2(argtpes, formals) {(argtpe, formal) =>
+ (argtpes, formals).zipped map { (argtpe, formal) =>
//@M isCompatible has side-effect: isSubtype0 will register subtype checks in the tvar's bounds
if (!isCompatibleArg(argtpe.deconst.instantiateTypeParams(tparams, tvars),
formal.instantiateTypeParams(tparams, tvars))) {
@@ -689,7 +644,7 @@ trait Infer {
val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals),
false, lubDepth(formals) max lubDepth(argtpes))
// val res =
- adjustTypeArgs(tparams, targs, restpe, uninstantiated)
+ adjustTypeArgs(tparams, targs, restpe)
// println("meth type args "+", tparams = "+tparams+", formals = "+formals+", restpe = "+restpe+", argtpes = "+argtpes+", underlying = "+(argtpes map (_.widen))+", pt = "+pt+", uninstantiated = "+uninstantiated.toList+", result = "+res) //DEBUG
// res
}
@@ -708,8 +663,10 @@ trait Infer {
case OverloadedType(pre, alts) =>
alts exists (alt => hasExactlyNumParams(pre.memberType(alt), n))
case _ =>
- formalTypes(tp.paramTypes, n).length == n
+ val len = tp.params.length
+ len == n || isVarArgs(tp.params) && len <= n + 1
}
+
/**
* Verifies whether the named application is valid. The logic is very
* similar to the one in NamesDefaults.removeNames.
@@ -754,6 +711,14 @@ trait Infer {
(argtpes1, argPos, namesOK)
}
+ /** don't do a () to (()) conversion for methods whose second parameter
+ * is a vararg. This is a fairly kludgey way to address #3224.
+ * We'll probably find a better way to do this by identifying
+ * tupled and n-ary methods, but this is something for a future major revision.
+ */
+ def isUnitForVarArgs(args: List[AnyRef], params: List[Symbol]): Boolean =
+ args.length == 0 && params.length == 2 && isVarArgs(params)
+
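
For illustration, a sketch (not part of this patch; the method name is invented) of the corner case behind isUnitForVarArgs: with a vararg second parameter, a zero-argument call must remain an error rather than be repaired by the () to (()) conversion.

object UnitVarargsSketch {
  def record(tag: Any, values: Int*): Int = values.length

  def main(args: Array[String]) {
    println(record("run", 1, 2))   // two explicit arguments: fine
    // record()                    // must stay an error, not be rewritten into record(())
  }
}
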
/** Is there an instantiation of free type variables <code>undetparams</code>
* such that function type <code>ftpe</code> is applicable to
* <code>argtpes</code> and its result conform to <code>pt</code>?
@@ -775,24 +740,25 @@ trait Infer {
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
case MethodType(params, _) =>
- def paramType(param: Symbol) = param.tpe match {
- case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass =>
- tpe
- case tpe =>
- tpe
+ val formals0 = params map { param =>
+ param.tpe match {
+ case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass => tpe
+ case tpe => tpe
+ }
}
- val formals = formalTypes(params map paramType, argtpes0.length)
+ val formals = formalTypes(formals0, argtpes0.length)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
- val tupleArgTpe = actualTypes(argtpes0 map {
+ val tupleArgTpes = actualTypes(argtpes0 map {
// no assignment is treated as named argument here
case NamedType(name, tp) => UnitClass.tpe
case tp => tp
}, formals.length)
- argtpes0.length != tupleArgTpe.length &&
- isApplicable(undetparams, ftpe, tupleArgTpe, pt)
+ argtpes0.length != tupleArgTpes.length &&
+ !isUnitForVarArgs(argtpes0, params) &&
+ isApplicable(undetparams, ftpe, tupleArgTpes, pt)
}
def typesCompatible(argtpes: List[Type]) = {
val restpe = ftpe.resultType(argtpes)
@@ -800,10 +766,10 @@ trait Infer {
isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
} else {
try {
- val uninstantiated = new ListBuffer[Symbol]
- val targs = methTypeArgs(undetparams, formals, restpe, argtpes, pt, uninstantiated)
- (exprTypeArgs(uninstantiated.toList, restpe.instantiateTypeParams(undetparams, targs), pt) ne null) &&
- isWithinBounds(NoPrefix, NoSymbol, undetparams, targs)
+ val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
+ (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, isWeaklyCompatible) ne null) &&
+ isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
} catch {
case ex: NoInstance => false
}
@@ -884,16 +850,17 @@ trait Infer {
case OverloadedType(pre, alts) =>
alts exists (alt => isAsSpecific(pre.memberType(alt), ftpe2))
case et: ExistentialType =>
- et.withTypeVars(isAsSpecific(_, ftpe2))
- case mt: ImplicitMethodType =>
+ isAsSpecific(ftpe1.skolemizeExistential, ftpe2)
+ //et.withTypeVars(isAsSpecific(_, ftpe2))
+ case mt: MethodType if mt.isImplicit =>
isAsSpecific(ftpe1.resultType, ftpe2)
case MethodType(params @ (x :: xs), _) =>
var argtpes = params map (_.tpe)
- if (isVarArgs(argtpes) && isVarArgs(ftpe2.paramTypes))
+ if (isVarArgs(params) && isVarArgs(ftpe2.params))
argtpes = argtpes map (argtpe =>
if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
isApplicable(List(), ftpe2, argtpes, WildcardType)
- case PolyType(tparams, mt: ImplicitMethodType) =>
+ case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
case PolyType(_, MethodType(params @ (x :: xs), _)) =>
isApplicable(List(), ftpe2, params map (_.tpe), WildcardType)
@@ -905,12 +872,10 @@ trait Infer {
alts forall (alt => isAsSpecific(ftpe1, pre.memberType(alt)))
case et: ExistentialType =>
et.withTypeVars(isAsSpecific(ftpe1, _))
- case mt: ImplicitMethodType =>
- isAsSpecific(ftpe1, mt.resultType)
- case PolyType(tparams, mt: ImplicitMethodType) =>
- isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
- case MethodType(_, _) | PolyType(_, MethodType(_, _)) =>
- true
+ case mt: MethodType =>
+ !mt.isImplicit || isAsSpecific(ftpe1, mt.resultType)
+ case PolyType(tparams, mt: MethodType) =>
+ !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
case _ =>
isAsSpecificValueType(ftpe1, ftpe2, List(), List())
}
@@ -942,13 +907,13 @@ trait Infer {
ftpe1.isError || {
val specificCount = (if (isAsSpecific(ftpe1, ftpe2)) 1 else 0) -
(if (isAsSpecific(ftpe2, ftpe1) &&
- // todo: move to isAsSepecific test
- (!ftpe2.isInstanceOf[OverloadedType] || ftpe1.isInstanceOf[OverloadedType]) &&
+ // todo: move to isAsSpecific test
+// (!ftpe2.isInstanceOf[OverloadedType] || ftpe1.isInstanceOf[OverloadedType]) &&
(!phase.erasedTypes || covariantReturnOverride(ftpe1, ftpe2))) 1 else 0)
val subClassCount = (if (isInProperSubClassOrObject(sym1, sym2)) 1 else 0) -
(if (isInProperSubClassOrObject(sym2, sym1)) 1 else 0)
- //println("is more specific? "+sym1+sym1.locationString+"/"+sym2+sym2.locationString+":"+
- // specificCount+"/"+subClassCount)
+// println("is more specific? "+sym1+":"+ftpe1+sym1.locationString+"/"+sym2+":"+ftpe2+sym2.locationString+":"+
+// specificCount+"/"+subClassCount)
specificCount + subClassCount > 0
}
}
@@ -1000,7 +965,7 @@ trait Infer {
def isStrictlyBetter(tpe1: Type, tpe2: Type) = {
def isNullary(tpe: Type): Boolean = tpe match {
case tp: RewrappingTypeProxy => isNullary(tp.underlying)
- case _ => tpe.paramSectionCount == 0 || tpe.paramTypes.isEmpty
+ case _ => tpe.paramSectionCount == 0 || tpe.params.isEmpty
}
def isMethod(tpe: Type): Boolean = tpe match {
case tp: RewrappingTypeProxy => isMethod(tp.underlying)
@@ -1046,79 +1011,16 @@ trait Infer {
(tparams map (_.defString)).mkString("[", ",", "]"))
if (settings.explaintypes.value) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
- List.map2(targs, bounds)((targ, bound) => explainTypes(bound.lo, targ))
- List.map2(targs, bounds)((targ, bound) => explainTypes(targ, bound.hi))
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi))
()
}
}
}
}
- /** Check whether <arg>sym1</arg>'s variance conforms to <arg>sym2</arg>'s variance
- *
- * If <arg>sym2</arg> is invariant, <arg>sym1</arg>'s variance is irrelevant. Otherwise they must be equal.
- */
- def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance)
- /** Check well-kindedness of type application (assumes arities are already checked) -- @M
- *
- * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
- * (checked one type member at a time -- in that case, prefix is the name of the type alias)
- *
- * Type application is just like value application: it's "contravariant" in the sense that
- * the type parameters of the supplied type arguments must conform to the type parameters of
- * the required type parameters:
- * - their bounds must be less strict
- * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
- * - @M TODO: are these conditions correct,sufficient&necessary?
- *
- * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
- * List's type parameter is also covariant and its bounds are weaker than <: Int
- */
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
- def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
-
- // check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
- def checkKindBoundsHK(hkargs: List[Symbol], arg: Symbol, param: Symbol, paramowner: Symbol): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
- // @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
- val hkparams = param.typeParams
-
- if(hkargs.length != hkparams.length) {
- if(arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
- else (List((arg, param)), Nil, Nil)
- } else {
- val _arityMismatches = new ListBuffer[(Symbol, Symbol)]
- val _varianceMismatches = new ListBuffer[(Symbol, Symbol)]
- val _stricterBounds = new ListBuffer[(Symbol, Symbol)]
- def varianceMismatch(a: Symbol, p: Symbol) { _varianceMismatches += ((a, p)) }
- def stricterBound(a: Symbol, p: Symbol) { _stricterBounds += ((a, p)) }
- def arityMismatches(as: Iterable[(Symbol, Symbol)]) { _arityMismatches ++= as }
- def varianceMismatches(as: Iterable[(Symbol, Symbol)]) { _varianceMismatches ++= as }
- def stricterBounds(as: Iterable[(Symbol, Symbol)]) { _stricterBounds ++= as }
-
- for ((hkarg, hkparam) <- hkargs zip hkparams) {
- if (hkparam.typeParams.isEmpty) { // base-case: kind *
- if (!variancesMatch(hkarg, hkparam))
- varianceMismatch(hkarg, hkparam)
-
- // instantiateTypeParams(tparams, targs) --> higher-order bounds may contain references to type arguments
- // substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
- // --> their arguments use different symbols, but are conceptually the same
- // (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- if (!(transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds.substSym(hkparams, hkargs), paramowner) <:< transform(hkarg.info.bounds, owner)))
- stricterBound(hkarg, hkparam)
- } else {
- val (am, vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkarg, hkparam, paramowner)
- arityMismatches(am)
- varianceMismatches(vm)
- stricterBounds(sb)
- }
- }
-
- (_arityMismatches.toList, _varianceMismatches.toList, _stricterBounds.toList)
- }
- }
-
// @M TODO this method is duplicated all over the place (varianceString)
def varStr(s: Symbol): String =
if (s.isCovariant) "covariant"
@@ -1134,32 +1036,22 @@ trait Infer {
}
}
- val errors = new ListBuffer[String]
- (tparams zip targs).foreach{ case (tparam, targ) if (targ.isHigherKinded || !tparam.typeParams.isEmpty) =>
- // @M must use the typeParams of the type targ, not the typeParams of the symbol of targ!!
- val tparamsHO = targ.typeParams
-
- val (arityMismatches, varianceMismatches, stricterBounds) =
- checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner) // NOTE: *not* targ.typeSymbol, which normalizes
- if (!(arityMismatches.isEmpty && varianceMismatches.isEmpty && stricterBounds.isEmpty)){
- errors += (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
- (for ((a, p) <- arityMismatches)
- yield a+qualify(a,p)+ " has "+reporter.countElementsAsString(a.typeParams.length, "type parameter")+", but "+
- p+qualify(p,a)+" has "+reporter.countAsString(p.typeParams.length)).toList.mkString(", ") +
- (for ((a, p) <- varianceMismatches)
- yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
- p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString(", ") +
- (for ((a, p) <- stricterBounds)
- yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+
- p+qualify(p,a)+"'s declared bounds "+p.info).toList.mkString(", "))
- }
- // case (tparam, targ) => println("no check: "+(tparam, targ, tparam.typeParams.isEmpty))
- case _ =>
+ val errors = checkKindBounds0(tparams, targs, pre, owner, true)
+ val errorMessages = new ListBuffer[String]
+ errors foreach {case (targ, tparam, arityMismatches, varianceMismatches, stricterBounds) => errorMessages +=
+ (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
+ (for ((a, p) <- arityMismatches)
+ yield a+qualify(a,p)+ " has "+reporter.countElementsAsString(a.typeParams.length, "type parameter")+", but "+
+ p+qualify(p,a)+" has "+reporter.countAsString(p.typeParams.length)).toList.mkString(", ") +
+ (for ((a, p) <- varianceMismatches)
+ yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
+ p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString(", ") +
+ (for ((a, p) <- stricterBounds)
+ yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+
+ p+qualify(p,a)+"'s declared bounds "+p.info).toList.mkString(", "))
}
-
- errors.toList
+ errorMessages.toList
}
-
/** Substitite free type variables `undetparams' of polymorphic argument
* expression `tree', given two prototypes `strictPt', and `lenientPt'.
* `strictPt' is the first attempt prototype where type parameters
@@ -1182,28 +1074,40 @@ trait Infer {
substExpr(tree, undetparams, targs, lenientPt)
}
- /** Substitute free type variables `undetparams; of polymorphic expression
- * <code>tree</code>, given prototype <code>pt</code>.
- *
- * @param tree ...
- * @param undetparams ...
- * @param pt ...
+ /** Infer type arguments for `tparams` of polymorphic expression in `tree`, given prototype `pt`.
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type, keepNothings: Boolean): List[Symbol] = {
if (inferInfo)
println("infer expr instance "+tree+":"+tree.tpe+"\n"+
" tparams = "+tparams+"\n"+
" pt = "+pt)
- val targs = exprTypeArgs(tparams, tree.tpe, pt)
- val uninstantiated = new ListBuffer[Symbol]
- val detargs = if (keepNothings || (targs eq null)) targs //@M: adjustTypeArgs fails if targs==null, neg/t0226
- else adjustTypeArgs(tparams, targs, WildcardType, uninstantiated)
- val undetparams = uninstantiated.toList
- val detparams = tparams filterNot (undetparams contains _)
- substExpr(tree, detparams, detargs, pt)
+ substAdjustedArgs(tree, tparams, pt, exprTypeArgs(tparams, tree.tpe, pt), keepNothings)
+ }
+
+ /** Infer type arguments for `tparams` of polymorphic expression in `tree`, given prototype `pt`.
+ * Use specified type `treeTp` instead of `tree.tpe`
+ */
+ def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type, treeTp: Type, keepNothings: Boolean): List[Symbol] = {
if (inferInfo)
- println("inferred expr instance "+tree+", detargs = "+detargs+", undetparams = "+undetparams)
- undetparams
+ println("infer expr instance "+tree+":"+tree.tpe+"\n"+
+ " tparams = "+tparams+"\n"+
+ " pt = "+pt)
+ substAdjustedArgs(tree, tparams, pt, exprTypeArgs(tparams, treeTp, pt), keepNothings)
+ }
+
+ /** Substitute targs for tparams, after adjustment by adjustTypeArgs;
+ * return the tparams that were not determined.
+ */
+ def substAdjustedArgs(tree: Tree, tparams: List[Symbol], pt: Type, targs: List[Type], keepNothings: Boolean): List[Symbol] = {
+ if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
+ substExpr(tree, tparams, targs, pt)
+ List()
+ } else {
+ val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, targs)
+ if (inferInfo) println("inferred expr instance for "+ tree +" --> (okParams, okArgs, leftUndet)= "+(okParams, okArgs, leftUndet))
+ substExpr(tree, okParams, okArgs, pt)
+ leftUndet
+ }
}
/** Substitite free type variables `undetparams' of polymorphic argument
@@ -1225,14 +1129,15 @@ trait Infer {
}
}
- /** Substitite free type variables <code>undetparams</code> of application
+ /** Substitute free type variables <code>undetparams</code> of application
* <code>fn(args)</code>, given prototype <code>pt</code>.
*
* @param fn ...
* @param undetparams ...
* @param args ...
* @param pt ...
- * @return Return the list of type parameters that remain uninstantiated.
+ * @return The type parameters that remain uninstantiated,
+ * and that thus have not been substituted.
*/
def inferMethodInstance(fn: Tree, undetparams: List[Symbol],
args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match {
@@ -1247,13 +1152,12 @@ trait Infer {
val formals = formalTypes(params0 map (_.tpe), args.length)
val argtpes = actualTypes(args map (_.tpe.deconst), formals.length)
val restpe = fn.tpe.resultType(argtpes)
- val uninstantiated = new ListBuffer[Symbol]
- val targs = methTypeArgs(undetparams, formals, restpe, argtpes, pt, uninstantiated)
- checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
- val treeSubst = new TreeTypeSubstituter(undetparams, targs)
+ val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")
+ val treeSubst = new TreeTypeSubstituter(okparams, okargs)
treeSubst.traverse(fn)
treeSubst.traverseTrees(args)
- uninstantiated.toList
+ leftUndet
} catch {
case ex: NoInstance =>
errorTree(fn,
@@ -1353,6 +1257,8 @@ trait Infer {
solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (x => COVARIANT), false)
}
+ // this is quite nasty: it destructively changes the info of the syms of e.g., method type params (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
+ // the changes are rolled back by restoreTypeBounds, but might be unintentionally observed in the meantime
def instantiateTypeVar(tvar: TypeVar) {
val tparam = tvar.origin.typeSymbol
if (false &&
@@ -1366,9 +1272,10 @@ trait Infer {
} else {
val (lo, hi) = instBounds(tvar)
if (lo <:< hi) {
- if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi))) {
+ if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi)) // bounds were improved
+ && tparam != lo.typeSymbolDirect && tparam != hi.typeSymbolDirect) { // don't create illegal cycles
context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo mkTypeBounds(lo, hi)
+ tparam setInfo TypeBounds(lo, hi)
if (settings.debug.value) log("new bounds of " + tparam + " = " + tparam.info)
} else {
if (settings.debug.value) log("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
@@ -1380,8 +1287,8 @@ trait Infer {
}
def checkCheckable(pos: Position, tp: Type, kind: String) {
- def patternWarning(tp: Type, prefix: String) = {
- context.unit.uncheckedWarning(pos, prefix+tp+" in type"+kind+" is unchecked since it is eliminated by erasure")
+ def patternWarning(tp0: Type, prefix: String) = {
+ context.unit.uncheckedWarning(pos, prefix+tp0+" in type "+kind+tp+" is unchecked since it is eliminated by erasure")
}
def check(tp: Type, bound: List[Symbol]) {
def isLocalBinding(sym: Symbol) =
@@ -1395,13 +1302,13 @@ trait Infer {
case SingleType(pre, _) =>
check(pre, bound)
case TypeRef(pre, sym, args) =>
- if (sym.isAbstractType)
+ if (sym.isAbstractType) {
if (!isLocalBinding(sym)) patternWarning(tp, "abstract type ")
- else if (sym.isAliasType)
+ } else if (sym.isAliasType) {
check(tp.normalize, bound)
- else if (sym == NothingClass || sym == NullClass || sym == AnyValClass)
+ } else if (sym == NothingClass || sym == NullClass || sym == AnyValClass) {
error(pos, "type "+tp+" cannot be used in a type pattern or isInstanceOf test")
- else
+ } else {
for (arg <- args) {
if (sym == ArrayClass) check(arg, bound)
else arg match {
@@ -1411,6 +1318,7 @@ trait Infer {
patternWarning(arg, "non variable type-argument ")
}
}
+ }
check(pre, bound)
case RefinedType(parents, decls) =>
if (decls.isEmpty) for (p <- parents) check(p, bound)
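
For illustration, a sketch (not part of this patch; names invented) of the kind of pattern the warning above fires on; the String type argument is erased at runtime, so the match cannot actually check it:

object UncheckedSketch {
  def firstString(x: Any): Option[String] = x match {
    case xs: List[String] => xs.headOption   // warning: non variable type-argument String ... is unchecked
    case _                => None
  }
}
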
@@ -1447,7 +1355,12 @@ trait Infer {
def inferTypedPattern(pos: Position, pattp: Type, pt0: Type): Type = {
val pt = widen(pt0)
- checkCheckable(pos, pattp, " pattern")
+
+ /** If we can absolutely rule out a match we can fail fast. */
+ if (pt.isFinalType && !(pt matchesPattern pattp))
+ error(pos, "scrutinee is incompatible with pattern type"+foundReqMsg(pattp, pt))
+
+ checkCheckable(pos, pattp, "pattern ")
if (!(pattp <:< pt)) {
val tpparams = freeTypeParamsOfTerms.collect(pattp)
if (settings.debug.value) log("free type params (1) = " + tpparams)
@@ -1460,24 +1373,11 @@ trait Infer {
if (settings.debug.value) log("free type params (2) = " + ptparams)
val ptvars = ptparams map freshVar
val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
- if (!(isPopulated(tp, pt1) && isInstantiatable(tvars ::: ptvars))) {
- // In ticket #2486 we have this example of code which would fail
- // here without a change:
- //
- // class A[T]
- // class B extends A[Int]
- // class C[T] extends A[T] { def f(t: A[T]) = t match { case x: B => () } }
- //
- // This reports error: pattern type is incompatible with expected type;
- // found : B
- // required: A[T]
- //
- // I am not sure what is the ideal fix, but for the moment I am intercepting
- // it at the last minute and applying a looser check before failing.
- if (!isPlausiblyCompatible(pattp, pt)) {
- error(pos, "pattern type is incompatible with expected type"+foundReqMsg(pattp, pt))
- return pattp
- }
+ // See ticket #2486 for an example of code which would incorrectly
+ // fail without the additional check that !(pattp matchesPattern pt)
+ if (!(isPopulated(tp, pt1) && isInstantiatable(tvars ::: ptvars)) && !(pattp matchesPattern pt)) {
+ error(pos, "pattern type is incompatible with expected type"+foundReqMsg(pattp, pt))
+ return pattp
}
ptvars foreach instantiateTypeVar
}
@@ -1544,7 +1444,7 @@ trait Infer {
}
def checkDead(tree: Tree): Tree = {
- if (settings.Xwarndeadcode.value && tree.tpe != null && tree.tpe.typeSymbol == NothingClass)
+ if (settings.Ywarndeadcode.value && tree.tpe != null && tree.tpe.typeSymbol == NothingClass)
context.warning (tree.pos, "dead code following this construct")
tree
}
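
For illustration, a sketch (not part of this patch; names invented) of code that trips the dead-code warning guarded above when the corresponding -Y option is enabled; the throw has type Nothing, so the statement after it is unreachable:

object DeadCodeSketch {
  def alwaysFails(): Int = {
    throw new RuntimeException("boom")
    42   // "dead code following this construct"
  }
}
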
@@ -1633,7 +1533,7 @@ trait Infer {
* assignment expression.
*/
def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type): Unit = tree.tpe match {
+ argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
tryTwice {
@@ -1644,6 +1544,11 @@ trait Infer {
var allApplicable = alts filter (alt =>
isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt))
+ //log("applicable: "+ (allApplicable map pre.memberType))
+
+ if (varArgsOnly)
+ allApplicable = allApplicable filter (alt => isVarArgs(alt.tpe.params))
+
// if there are multiple, drop those that use a default
// (keep those that use vararg / tupling conversion)
val applicable =
@@ -1655,7 +1560,7 @@ trait Infer {
alts map (_.tpe)
case t => List(t)
}
- mtypes.exists(t => t.paramTypes.length < argtpes.length || // tupling (*)
+ mtypes.exists(t => t.params.length < argtpes.length || // tupling (*)
hasExactlyNumParams(t, argtpes.length)) // same nb or vararg
// (*) more arguments than parameters, but still applicable: tuplig conversion works.
// todo: should not return "false" when paramTypes = (Unit) no argument is given
@@ -1663,6 +1568,7 @@ trait Infer {
})
def improves(sym1: Symbol, sym2: Symbol) =
+// util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)(
sym2 == NoSymbol || sym2.isError ||
isStrictlyMoreSpecific(followApply(pre.memberType(sym1)),
followApply(pre.memberType(sym2)), sym1, sym2)
@@ -1749,7 +1655,7 @@ trait Infer {
if (sym.hasFlag(OVERLOADED)) {
val tparams = new AsSeenFromMap(pre, sym.alternatives.head.owner).mapOver(
sym.alternatives.head.typeParams)
- val bounds = tparams map (_.tpe) //@M TODO: might be affected by change to tpe in Symbol
+ val bounds = tparams map (_.tpeHK) // see e.g., #1236
val tpe =
PolyType(tparams,
OverloadedType(AntiPolyType(pre, bounds), sym.alternatives))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index d4a7b9e1e7..f873a7118f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1,14 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
-import scala.collection.mutable.HashMap
-import scala.tools.nsc.util.Position
+import scala.collection.mutable.{HashMap, WeakHashMap}
+import scala.ref.WeakReference
import symtab.Flags
import symtab.Flags._
@@ -29,7 +28,7 @@ trait Namers { self: Analyzer =>
case TypeRef(pre, sym, args)
if (sym.isTypeSkolem && (tparams contains sym.deSkolemize)) =>
// println("DESKOLEMIZING "+sym+" in "+sym.owner)
- mapOver(rawTypeRef(NoPrefix, sym.deSkolemize, args))
+ mapOver(TypeRef(NoPrefix, sym.deSkolemize, args))
/*
case PolyType(tparams1, restpe) =>
new DeSkolemizeMap(tparams1 ::: tparams).mapOver(tp)
@@ -49,8 +48,10 @@ trait Namers { self: Analyzer =>
// synthetic `copy' (reps `apply', `unapply') methods are added. To compute
// their signatures, the corresponding ClassDef is needed.
// During naming, for each case class module symbol, the corresponding ClassDef
- // is stored in this map.
- private[typechecker] val caseClassOfModuleClass = new HashMap[Symbol, ClassDef]
+ // is stored in this map. The map is cleared lazily: when a new symbol with the
+ // same name is created, the old entry (if present) is wiped out, or the entry is
+ // deleted once it has been used and is no longer needed.
+ private val caseClassOfModuleClass = new WeakHashMap[Symbol, WeakReference[ClassDef]]
// Default getters of constructors are added to the companion object in the
// typeCompleter of the constructor (methodSig). To compute the signature,
@@ -61,7 +62,6 @@ trait Namers { self: Analyzer =>
private[typechecker] val classAndNamerOfModule = new HashMap[Symbol, (ClassDef, Namer)]
def resetNamer() {
- caseClassOfModuleClass.clear
classAndNamerOfModule.clear
}
@@ -76,7 +76,7 @@ trait Namers { self: Analyzer =>
}
def inConstructorFlag: Long =
- if (context.owner.isConstructor && !context.inConstructorSuffix || context.owner.isEarly) INCONSTRUCTOR
+ if (context.owner.isConstructor && !context.inConstructorSuffix || context.owner.isEarlyInitialized) INCONSTRUCTOR
else 0l
def moduleClassFlags(moduleFlags: Long) =
@@ -90,11 +90,12 @@ trait Namers { self: Analyzer =>
sym.flags = flags | lockedFlag
if (sym.isModule && sym.moduleClass != NoSymbol)
updatePosFlags(sym.moduleClass, pos, moduleClassFlags(flags))
- if (sym.owner.isPackageClass &&
- (sym.linkedSym.rawInfo.isInstanceOf[loaders.SymbolLoader] ||
- sym.linkedSym.rawInfo.isComplete && runId(sym.validTo) != currentRunId))
+ var companion: Symbol = NoSymbol
+ if (sym.owner.isPackageClass && {companion = companionSymbolOf(sym, context); true} &&
+ (companion.rawInfo.isInstanceOf[loaders.SymbolLoader] ||
+ companion.rawInfo.isComplete && runId(sym.validTo) != currentRunId))
// pre-set linked symbol to NoType, in case it is not loaded together with this symbol.
- sym.linkedSym.setInfo(NoType)
+ companion.setInfo(NoType)
sym
}
@@ -127,6 +128,7 @@ trait Namers { self: Analyzer =>
unsafeTypeParams foreach(sym => paramContext.scope.enter(sym))
newNamer(paramContext)
}
+
def usePrimary = sym.isTerm && (
(sym hasFlag PARAMACCESSOR) ||
((sym hasFlag PARAM) && sym.owner.isPrimaryConstructor)
@@ -197,7 +199,7 @@ trait Namers { self: Analyzer =>
def enterClassSymbol(tree : ClassDef): Symbol = {
var c: Symbol = context.scope.lookup(tree.name)
- if (c.isType && c.owner.isPackageClass && context.scope == c.owner.info.decls && !currentRun.compiles(c)) {
+ if (c.isType && c.owner.isPackageClass && context.scope == c.owner.info.decls && currentRun.canRedefine(c)) {
updatePosFlags(c, tree.pos, tree.mods.flags)
setPrivateWithin(tree, c, tree.mods)
} else {
@@ -214,9 +216,10 @@ trait Namers { self: Analyzer =>
}
clazz.sourceFile = file
if (clazz.sourceFile ne null) {
- assert(!currentRun.compiles(clazz) || clazz.sourceFile == currentRun.symSource(c));
+ assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(c));
currentRun.symSource(c) = clazz.sourceFile
}
+ registerTopLevelSym(clazz)
}
assert(c.name.toString.indexOf('(') == -1)
c
@@ -229,9 +232,12 @@ trait Namers { self: Analyzer =>
var m: Symbol = context.scope.lookup(tree.name)
val moduleFlags = tree.mods.flags | MODULE | FINAL
if (m.isModule && !m.isPackage && inCurrentScope(m) &&
- (!currentRun.compiles(m) || (m hasFlag SYNTHETIC))) {
+ (currentRun.canRedefine(m) || (m hasFlag SYNTHETIC))) {
updatePosFlags(m, tree.pos, moduleFlags)
setPrivateWithin(tree, m, tree.mods)
+ if (m.moduleClass != NoSymbol)
+ setPrivateWithin(tree, m.moduleClass, tree.mods)
+
context.unit.synthetics -= m
} else {
m = context.owner.newModule(tree.pos, tree.name)
@@ -242,9 +248,10 @@ trait Namers { self: Analyzer =>
m.moduleClass.setFlag(moduleClassFlags(moduleFlags))
setPrivateWithin(tree, m.moduleClass, tree.mods)
}
- if (m.owner.isPackageClass) {
+ if (m.owner.isPackageClass && !m.isPackage) {
m.moduleClass.sourceFile = context.unit.source.file
currentRun.symSource(m) = m.moduleClass.sourceFile
+ registerTopLevelSym(m)
}
m
}
@@ -285,123 +292,152 @@ trait Namers { self: Analyzer =>
* class definition tree.
* @return the companion object symbol.
*/
- def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
- val m: Symbol = context.scope.lookup(tree.name.toTermName).filter(! _.isSourceMethod)
- if (m.isModule && inCurrentScope(m) && currentRun.compiles(m)) m
- else enterSyntheticSym(creator)
- }
+ def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
+ val m = companionModuleOf(tree.symbol, context)
+ // @luc: not sure why "currentRun.compiles(m)" is needed, things break
+ // otherwise. documentation welcome.
+ if (m != NoSymbol && currentRun.compiles(m)) m
+ else enterSyntheticSym(creator)
+ }
+
+ private def enterSymFinishWith(tree: Tree, tparams: List[TypeDef]) {
+ val sym = tree.symbol
+ if (settings.debug.value) log("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.## )
+ var ltype = namerOf(sym).typeCompleter(tree)
+ if (tparams nonEmpty) {
+ //@M! TypeDef's type params are handled differently
+ //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
+ //@M x is only in scope in `A[x <: B]'
+ if(!sym.isAbstractType) //@M TODO: change to isTypeMember ?
+ newNamer(context.makeNewScope(tree, sym)).enterSyms(tparams)
+
+ ltype = new PolyTypeCompleter(tparams, ltype, tree, sym, context) //@M
+ if (sym.isTerm) skolemize(tparams)
+ }
- def enterSym(tree: Tree): Context = try {
-
- def finishWith(tparams: List[TypeDef]) {
- val sym = tree.symbol
- if (settings.debug.value) log("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.hashCode());
- var ltype = namerOf(sym).typeCompleter(tree)
- if (!tparams.isEmpty) {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
- if(!sym.isAbstractType) //@M TODO: change to isTypeMember ?
- newNamer(context.makeNewScope(tree, sym)).enterSyms(tparams)
-
- ltype = new PolyTypeCompleter(tparams, ltype, tree, sym, context) //@M
- if (sym.isTerm) skolemize(tparams)
- }
- def copyIsSynthetic() = sym.owner.info.member(nme.copy).hasFlag(SYNTHETIC)
- if (sym.name == nme.copy && sym.hasFlag(SYNTHETIC) ||
- sym.name.startsWith(nme.copy + "$default$") && copyIsSynthetic()){
- // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
- // work. the copy method has to take exactly the same parameter types as the primary constructor.
- setInfo(sym)(mkTypeCompleter(tree)(copySym => {
+ if (sym.name == nme.copy || sym.name.startsWith(nme.copy + "$default$")) {
+ // it could be a compiler-generated copy method or one of its default getters
+ setInfo(sym)(mkTypeCompleter(tree)(copySym => {
+ def copyIsSynthetic() = sym.owner.info.member(nme.copy).hasFlag(SYNTHETIC)
+ if (sym.hasFlag(SYNTHETIC) && (!sym.hasFlag(DEFAULTPARAM) || copyIsSynthetic())) {
+ // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
+ // work. the copy method has to take exactly the same parameter types as the primary constructor.
val constrType = copySym.owner.primaryConstructor.tpe
val subst = new SubstSymMap(copySym.owner.typeParams, tparams map (_.symbol))
for ((params, cparams) <- tree.asInstanceOf[DefDef].vparamss.zip(constrType.paramss);
(param, cparam) <- params.zip(cparams)) {
// need to clone the type cparam.tpe??? problem is: we don't have the new owner yet (the new param symbol)
param.tpt.setType(subst(cparam.tpe))
- () // @LUC TODO workaround for #1996
}
- ltype.complete(sym)
- }))
- } else setInfo(sym)(ltype)
- }
- def finish = finishWith(List())
+ }
+ ltype.complete(sym)
+ }))
+ } else setInfo(sym)(ltype)
+ }
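A user-level illustration of the special completer above (Point and its fields are illustrative names, not from the patch): the synthetic copy method of a case class repeats the primary constructor's parameters, each defaulting to the corresponding field, and its defaults get getters named copy$default$N.

    case class Point(x: Int, y: Int)
    // the compiler synthesizes, roughly:
    //   def copy(x: Int = this.x, y: Int = this.y): Point = new Point(x, y)
    // together with default getters copy$default$1 and copy$default$2
    val p = Point(1, 2).copy(y = 5)   // Point(1,5)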
- if (tree.symbol == NoSymbol) {
+ def enterIfNotThere(sym: Symbol) {
+ val scope = context.scope
+ var e = scope.lookupEntry(sym.name)
+ while ((e ne null) && (e.owner eq scope) && (e.sym ne sym)) e = e.tail
+ if (!((e ne null) && (e.owner eq scope))) context.scope.enter(sym)
+ }
+
+ def enterSym(tree: Tree): Context = {
+ def finishWith(tparams: List[TypeDef]) { enterSymFinishWith(tree, tparams) }
+ def finish = finishWith(Nil)
+ def sym = tree.symbol
+ if (sym != NoSymbol) {
+ if (forInteractive && sym != null && sym.owner.isTerm) {
+ // this logic is needed in case typer was interrupted half way through and then comes
+ // back to do the tree again. In that case the definitions that were already
+ // attributed as well as any default parameters of such methods need to be
+ // re-entered in the current scope.
+ enterIfNotThere(sym)
+ if (sym.isLazy) {
+ val acc = sym.lazyAccessor
+ if (acc != NoSymbol) enterIfNotThere(acc)
+ }
+ defaultParametersOfMethod(sym) foreach enterIfNotThere
+ }
+ return this.context
+ }
+ try {
val owner = context.owner
tree match {
case PackageDef(pid, stats) =>
tree.symbol = enterPackageSymbol(tree.pos, pid,
if (context.owner == EmptyPackageClass) RootClass else context.owner)
- val namer = newNamer(
- context.make(tree, tree.symbol.moduleClass, tree.symbol.info.decls))
- namer.enterSyms(stats)
+ val namer = newNamer(context.make(tree, sym.moduleClass, sym.info.decls))
+ namer enterSyms stats
+
case tree @ ClassDef(mods, name, tparams, impl) =>
tree.symbol = enterClassSymbol(tree)
finishWith(tparams)
if (mods.isCase) {
+ if (treeInfo.firstConstructorArgs(impl.body).size > MaxFunctionArity)
+ context.error(tree.pos, "Implementation restriction: case classes cannot have more than " + MaxFunctionArity + " parameters.")
+
val m = ensureCompanionObject(tree, caseModuleDef(tree))
- caseClassOfModuleClass(m.moduleClass) = tree
- }
- val constrs = impl.body filter {
- case DefDef(_, name, _, _, _, _) => name == nme.CONSTRUCTOR
- case _ => false
+ caseClassOfModuleClass(m.moduleClass) = new WeakReference(tree)
}
- val hasDefault = constrs.exists(c => {
- val DefDef(_, _, _, vparamss, _, _) = c
- vparamss.exists(_.exists(_.mods hasFlag DEFAULTPARAM))
- })
+ val hasDefault = impl.body flatMap {
+ case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => vparamss.flatten
+ case _ => Nil
+ } exists (_.mods hasFlag DEFAULTPARAM)
+
if (hasDefault) {
- val m = ensureCompanionObject(tree, companionModuleDef(tree, List(gen.scalaScalaObjectConstr)))
+ val m = ensureCompanionObject(tree, companionModuleDef(tree))
classAndNamerOfModule(m) = (tree, null)
}
case tree @ ModuleDef(mods, name, _) =>
tree.symbol = enterModuleSymbol(tree)
- tree.symbol.moduleClass.setInfo(namerOf(tree.symbol).moduleClassTypeCompleter((tree)))
+ sym.moduleClass setInfo namerOf(sym).moduleClassTypeCompleter(tree)
finish
case vd @ ValDef(mods, name, tp, rhs) =>
if ((!context.owner.isClass ||
- (mods.flags & (PRIVATE | LOCAL)) == (PRIVATE | LOCAL).toLong ||
+ (mods.flags & (PRIVATE | LOCAL | CASEACCESSOR)) == (PRIVATE | LOCAL) ||
name.endsWith(nme.OUTER, nme.OUTER.length) ||
context.unit.isJava) &&
!mods.isLazy) {
- tree.symbol = enterInScope(owner.newValue(tree.pos, name)
- .setFlag(mods.flags))
+ val vsym = owner.newValue(tree.pos, name).setFlag(mods.flags);
+ if(context.unit.isJava) setPrivateWithin(tree, vsym, mods) // #3663 -- for Scala fields we assume private[this]
+ tree.symbol = enterInScope(vsym)
finish
} else {
+ val mods1 =
+ if (mods.hasFlag(PRIVATE) && mods.hasFlag(LOCAL) && !mods.isLazy) {
+ context.error(tree.pos, "private[this] not allowed for case class parameters")
+ mods &~ LOCAL
+ } else mods
// add getter and possibly also setter
- val accflags: Long = ACCESSOR |
- (if ((mods.flags & MUTABLE) != 0L) mods.flags & ~MUTABLE & ~PRESUPER
- else mods.flags & ~PRESUPER | STABLE)
if (nme.isSetterName(name))
context.error(tree.pos, "Names of vals or vars may not end in `_='")
// .isInstanceOf[..]: probably for (old) IDE hook. is this obsolete?
- val getter = enterAliasMethod(tree, name, accflags, mods)
+ val getter = enterAccessorMethod(tree, name, getterFlags(mods1.flags), mods1)
setInfo(getter)(namerOf(getter).getterTypeCompleter(vd))
- if ((mods.flags & MUTABLE) != 0L) {
- val setter = enterAliasMethod(tree, nme.getterToSetter(name),
- accflags & ~STABLE & ~CASEACCESSOR,
- mods)
+ if (mods1.isVariable) {
+ val setter = enterAccessorMethod(tree, nme.getterToSetter(name), setterFlags(mods1.flags), mods1)
setInfo(setter)(namerOf(setter).setterTypeCompleter(vd))
}
+
tree.symbol =
- if (mods.isDeferred) {
+ if (mods1.isDeferred) {
getter setPos tree.pos // unfocus getter position, because there won't be a separate value
} else {
val vsym =
if (!context.owner.isClass) {
- assert(mods.isLazy) // if not a field, it has to be a lazy val
- owner.newValue(tree.pos, name + "$lzy" ).setFlag(mods.flags | MUTABLE)
+ assert(mods1.isLazy) // if not a field, it has to be a lazy val
+ owner.newValue(tree.pos, name + "$lzy" ).setFlag((mods1.flags | MUTABLE) & ~IMPLICIT)
} else {
- val mflag = if (mods.isLazy) MUTABLE else 0
- val newflags = mods.flags & FieldFlags | PRIVATE | LOCAL | mflag
-
+ val mFlag = if (mods1.isLazy) MUTABLE else 0
+ val lFlag = if (mods.hasFlag(PRIVATE) && mods.hasFlag(LOCAL)) 0 else LOCAL
+ val newflags = mods1.flags & FieldFlags | PRIVATE | lFlag | mFlag
owner.newValue(tree.pos, nme.getterToLocal(name)) setFlag newflags
}
enterInScope(vsym)
setInfo(vsym)(namerOf(vsym).typeCompleter(tree))
- if (mods.isLazy)
+ if (mods1.isLazy)
vsym.setLazyAccessor(getter)
vsym
@@ -409,7 +445,7 @@ trait Namers { self: Analyzer =>
addBeanGetterSetter(vd, getter)
}
case DefDef(mods, nme.CONSTRUCTOR, tparams, _, _, _) =>
- var sym = owner.newConstructor(tree.pos).setFlag(mods.flags | owner.getFlag(ConstrFlags))
+ val sym = owner.newConstructor(tree.pos).setFlag(mods.flags | owner.getFlag(ConstrFlags))
setPrivateWithin(tree, sym, mods)
tree.symbol = enterInScope(sym)
finishWith(tparams)
@@ -419,7 +455,7 @@ trait Namers { self: Analyzer =>
case TypeDef(mods, name, tparams, _) =>
var flags: Long = mods.flags
if ((flags & PARAM) != 0L) flags |= DEFERRED
- var sym = new TypeSymbol(owner, tree.pos, name).setFlag(flags)
+ val sym = new TypeSymbol(owner, tree.pos, name).setFlag(flags)
setPrivateWithin(tree, sym, mods)
tree.symbol = enterInScope(sym)
finishWith(tparams)
@@ -427,17 +463,18 @@ trait Namers { self: Analyzer =>
enterSym(defn)
case imp @ Import(_, _) =>
tree.symbol = NoSymbol.newImport(tree.pos)
- setInfo(tree.symbol)(namerOf(tree.symbol).typeCompleter(tree))
- return (context.makeNewImport(imp))
+ setInfo(sym)(namerOf(sym).typeCompleter(tree))
+ return context.makeNewImport(imp)
case _ =>
}
}
+ catch {
+ case ex: TypeError =>
+ //Console.println("caught " + ex + " in enterSym")//DEBUG
+ typer.reportTypeError(tree.pos, ex)
+ this.context
+ }
this.context
- } catch {
- case ex: TypeError =>
- //Console.println("caught " + ex + " in enterSym")//DEBUG
- typer.reportTypeError(tree.pos, ex)
- this.context
}
def enterSyntheticSym(tree: Tree): Symbol = {
@@ -453,7 +490,7 @@ trait Namers { self: Analyzer =>
sym
}
- def enterAliasMethod(tree: Tree, name: Name, flags: Long, mods: Modifiers): TermSymbol =
+ def enterAccessorMethod(tree: Tree, name: Name, flags: Long, mods: Modifiers): TermSymbol =
enterNewMethod(tree, name, flags, mods, tree.pos.focus)
private def addBeanGetterSetter(vd: ValDef, getter: Symbol) {
@@ -493,7 +530,7 @@ trait Namers { self: Analyzer =>
// known. instead, uses the same machinery as for the non-bean setter:
// create and enter the symbol here, add the tree in Typer.addGettterSetter.
val setterName = "set" + beanName
- val setter = enterAliasMethod(vd, setterName, flags, mods)
+ val setter = enterAccessorMethod(vd, setterName, flags, mods)
.setPos(vd.pos.focus)
setInfo(setter)(namerOf(setter).setterTypeCompleter(vd))
}
@@ -615,7 +652,7 @@ trait Namers { self: Analyzer =>
clazz.typeOfThis = selfTypeCompleter(self.tpt)
self.symbol = clazz.thisSym.setPos(self.pos)
} else {
- self.tpt.tpe = NoType
+ self.tpt defineType NoType
if (self.name != nme.WILDCARD) {
clazz.typeOfThis = clazz.tpe
self.symbol = clazz.thisSym
@@ -678,7 +715,7 @@ trait Namers { self: Analyzer =>
sym => TypeRef(clazz.owner.thisType, sym, clazz.typeParams map (_.tpe))))
println("Parents of "+clazz+":"+parents)
- // check that virtual classses are only defined as members of templates
+ // check that virtual classes are only defined as members of templates
if (clazz.isVirtualClass && !clazz.owner.isClass)
context.error(
clazz.pos,
@@ -702,29 +739,38 @@ trait Namers { self: Analyzer =>
// add apply and unapply methods to companion objects of case classes,
// unless they exist already; here, "clazz" is the module class
- Namers.this.caseClassOfModuleClass get clazz match {
- case Some(cdef) =>
- addApplyUnapply(cdef, templateNamer)
- caseClassOfModuleClass -= clazz
- case None =>
+ if (clazz.isModuleClass) {
+ Namers.this.caseClassOfModuleClass get clazz match {
+ case Some(cdefRef) =>
+ val cdef = cdefRef()
+ addApplyUnapply(cdef, templateNamer)
+ caseClassOfModuleClass -= clazz
+ case None =>
+ }
}
// add the copy method to case classes; this needs to be done here, not in SyntheticMethods, because
// the namer phase must traverse this copy method to create default getters for its parameters.
- Namers.this.caseClassOfModuleClass get clazz.linkedModuleOfClass.moduleClass match {
- case Some(cdef) =>
- def hasCopy(decls: Scope) = {
- decls.iterator exists (_.name == nme.copy)
- }
- if (!hasCopy(decls) &&
- !parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
- !parents.flatMap(_.baseClasses).removeDuplicates.exists(bc => hasCopy(bc.info.decls)))
- addCopyMethod(cdef, templateNamer)
- case None =>
+ // here, clazz is the ClassSymbol of the case class (not the module).
+ // @check: this seems to work only if the type completer of the class runs before the one of the
+          // module class: the one from the module class removes the entry from caseClassOfModuleClass (see above).
+ if (clazz.isClass && !clazz.hasFlag(MODULE)) {
+ Namers.this.caseClassOfModuleClass get companionModuleOf(clazz, context).moduleClass match {
+ case Some(cdefRef) =>
+ val cdef = cdefRef()
+ def hasCopy(decls: Scope) = {
+ decls.iterator exists (_.name == nme.copy)
+ }
+ if (!hasCopy(decls) &&
+ !parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
+ !parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls)))
+ addCopyMethod(cdef, templateNamer)
+ case None =>
+ }
}
- // if default getters (for constructor defaults) need to be added to that module,
- // here's the namer to use
+ // if default getters (for constructor defaults) need to be added to that module, here's the namer
+ // to use. clazz is the ModuleClass. sourceModule works also for classes defined in methods.
val module = clazz.sourceModule
if (classAndNamerOfModule contains module) {
val (cdef, _) = classAndNamerOfModule(module)
@@ -745,110 +791,74 @@ trait Namers { self: Analyzer =>
val tparamSyms = typer.reenterTypeParams(tparams)
// since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
var vparamSymss = enterValueParams(meth, vparamss)
+ // DEPMETTODO: do we need to skolemize value parameter symbols?
if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt.tpe = context.enclClass.owner.tpe
+ tpt defineType context.enclClass.owner.tpe
tpt setPos meth.pos.focus
}
- if (onlyPresentation && methodArgumentNames != null)
- methodArgumentNames(meth) = vparamss.map(_.map(_.symbol));
-
- def convertToDeBruijn(vparams: List[Symbol], level: Int): TypeMap = new TypeMap {
- def debruijnFor(param: Symbol) =
- DeBruijnIndex(level, vparams indexOf param)
- def apply(tp: Type) = {
- tp match {
- case SingleType(_, sym) =>
- if (settings.Xexperimental.value && sym.owner == meth && (vparams contains sym)) {
-/*
- if (sym hasFlag IMPLICIT) {
- context.error(sym.pos, "illegal type dependence on implicit parameter")
- ErrorType
- } else
-*/
- debruijnFor(sym)
- } else tp
- case MethodType(params, restpe) =>
- val params1 = this.mapOver(params)
- val restpe1 = convertToDeBruijn(vparams, level + 1)(restpe)
- if ((params1 eq params) && (restpe1 eq restpe)) tp
- else copyMethodType(tp, params1, restpe1)
- case _ =>
- mapOver(tp)
- }
- }
-
- // AnnotatedTypes can contain trees in the annotation arguments. When accessing a
- // parameter in an annotation, set the type of the Ident to the DeBruijnIndex
- object treeTrans extends TypeMapTransformer {
- override def transform(tree: Tree): Tree =
- tree match {
- case Ident(name) if (vparams contains tree.symbol) =>
- val dtpe = debruijnFor(tree.symbol)
- val dsym =
- context.owner.newLocalDummy(tree.symbol.pos)
- .newValue(tree.symbol.pos, name)
-
- dsym.setFlag(PARAM)
- dsym.setInfo(dtpe)
- Ident(name).setSymbol(dsym).copyAttrs(tree).setType(dtpe)
- case tree => super.transform(tree)
- }
- }
-
- // for type annotations (which may contain trees)
- override def mapOver(arg: Tree) = Some(treeTrans.transform(arg))
- }
-
- val checkDependencies: TypeTraverser = new TypeTraverser {
- def traverse(tp: Type) = {
- tp match {
- case SingleType(_, sym) =>
- if (sym.owner == meth && (vparamSymss exists (_ contains sym)))
- context.error(
- sym.pos,
- "illegal dependent method type"+
- (if (settings.Xexperimental.value)
- ": parameter appears in the type of another parameter in the same section or an earlier one"
- else ""))
- case _ =>
- mapOver(tp)
- }
- this
- }
- }
/** Called for all value parameter lists, right to left
* @param vparams the symbols of one parameter list
* @param restpe the result type (possibly a MethodType)
*/
def makeMethodType(vparams: List[Symbol], restpe: Type) = {
+ // TODODEPMET: check that we actually don't need to do anything here
// new dependent method types: probably OK already, since 'enterValueParams' above
// enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
// check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
// so re-use / adapt that)
val params = vparams map (vparam =>
if (meth hasFlag JAVA) vparam.setInfo(objToAny(vparam.tpe)) else vparam)
- val restpe1 = convertToDeBruijn(vparams, 1)(restpe) // new dependent types: replace symbols in restpe with the ones in vparams
- if (!vparams.isEmpty && vparams.head.hasFlag(IMPLICIT))
- ImplicitMethodType(params, restpe1)
- else if (meth hasFlag JAVA) JavaMethodType(params, restpe1)
- else MethodType(params, restpe1)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ if (meth hasFlag JAVA) JavaMethodType(params, restpe)
+ else MethodType(params, restpe)
}
- def thisMethodType(restpe: Type) =
+ def thisMethodType(restpe: Type) = {
+ import scala.collection.mutable.ListBuffer
+ val okParams = ListBuffer[Symbol]()
+ // can we relax these restrictions? see test/files/pos/depmet_implicit_oopsla_session_2.scala and neg/depmet_try_implicit.scala for motivation
+ // should allow forward references since type selections on implicit args are like type parameters:
+ // def foo[T](a: T, x: w.T2)(implicit w: ComputeT2[T])
+ // is more compact than: def foo[T, T2](a: T, x: T2)(implicit w: ComputeT2[T, T2])
+ // moreover, the latter is not an encoding of the former, which hides type inference of T2, so you can specify T while T2 is purely computed
+ val checkDependencies: TypeTraverser = new TypeTraverser {
+ def traverse(tp: Type) = {
+ tp match {
+ case SingleType(_, sym) =>
+ if (sym.owner == meth && sym.isValueParameter && !(okParams contains sym))
+ context.error(
+ sym.pos,
+ "illegal dependent method type"+
+ (if (settings.YdepMethTpes.value)
+ ": parameter appears in the type of another parameter in the same section or an earlier one"
+ else ""))
+ case _ =>
+ mapOver(tp)
+ }
+ this
+ }
+ }
+ for(vps <- vparamSymss) {
+ for(p <- vps) checkDependencies(p.info)
+ if(settings.YdepMethTpes.value) okParams ++= vps // can only refer to symbols in earlier parameter sections (if the extension is enabled)
+ }
+ checkDependencies(restpe) // DEPMETTODO: check not needed when they become on by default
+
polyType(
- tparamSyms, // deSkolemized symbols
- if (vparamSymss.isEmpty) PolyType(List(), restpe)
+ tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
+ if (vparamSymss.isEmpty) PolyType(List(), restpe) // nullary method type
// vparamss refer (if they do) to skolemized tparams
- else checkDependencies((vparamSymss :\ restpe) (makeMethodType)))
+ else (vparamSymss :\ restpe) (makeMethodType))
+ }
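To make the restriction concrete, a minimal sketch of what checkDependencies rejects and what the extension guarded by settings.YdepMethTpes above (the -Ydependent-method-types flag) permits; Graph and Node are illustrative names only:

    class Graph { class Node }
    // a parameter type may not depend on a parameter of the same section:
    //   def connect(g: Graph, n: g.Node): Unit = ()   // error: illegal dependent method type
    // with the extension enabled, references to earlier parameter sections are allowed:
    def connect(g: Graph)(n: g.Node): Unit = ()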
var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
val site = meth.owner.thisType
- def overriddenSymbol = intersectionType(meth.owner.info.parents).member(meth.name).filter(sym => {
- // luc: added .syubstSym from skolemized to deSkolemized
+ def overriddenSymbol = intersectionType(meth.owner.info.parents).nonPrivateMember(meth.name).filter(sym => {
+ // luc: added .substSym from skolemized to deSkolemized
// site.memberType(sym): PolyType(tparams, MethodType(..., ...)) ==> all references to tparams are deSkolemized
// thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized. ==> the two didn't match
// for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
@@ -864,20 +874,22 @@ trait Namers { self: Analyzer =>
if (vparam.tpt.isEmpty) vparam.symbol setInfo WildcardType
val overridden = overriddenSymbol
if (overridden != NoSymbol && !(overridden hasFlag OVERLOADED)) {
+ overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
resultPt = site.memberType(overridden) match {
case PolyType(tparams, rt) => rt.substSym(tparams, tparamSyms)
case mt => mt
}
for (vparams <- vparamss) {
- var pfs = resultPt.paramTypes
+ var pps = resultPt.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- vparam.tpt.tpe = pfs.head
+ val paramtpe = pps.head.tpe
+ vparam.symbol setInfo paramtpe
+ vparam.tpt defineType paramtpe
vparam.tpt setPos vparam.pos.focus
- vparam.symbol setInfo pfs.head
}
- pfs = pfs.tail
+ pps = pps.tail
}
resultPt = resultPt.resultType
}
@@ -900,7 +912,7 @@ trait Namers { self: Analyzer =>
}
for (vparams <- vparamss; vparam <- vparams if vparam.tpt.isEmpty) {
context.error(vparam.pos, "missing parameter type")
- vparam.tpt.tpe = ErrorType
+ vparam.tpt defineType ErrorType
}
addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
@@ -910,7 +922,7 @@ trait Namers { self: Analyzer =>
// replace deSkolemized symbols with skolemized ones (for resultPt computed by looking at overridden symbol, right?)
val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
// compute result type from rhs
- tpt.tpe = widenIfNotFinal(meth, typer.computeType(rhs, pt), pt)
+ tpt defineType widenIfNotFinal(meth, typer.computeType(rhs, pt), pt)
tpt setPos meth.pos.focus
tpt.tpe
} else typer.typedType(tpt).tpe
@@ -939,8 +951,9 @@ trait Namers { self: Analyzer =>
// match empty and missing parameter list
if (vparamss.isEmpty && baseParamss == List(Nil)) baseParamss = Nil
if (vparamss == List(Nil) && baseParamss.isEmpty) baseParamss = List(Nil)
- assert(!overrides || vparamss.length == baseParamss.length, ""+ meth.fullNameString + ", "+ overridden.fullNameString)
+ assert(!overrides || vparamss.length == baseParamss.length, ""+ meth.fullName + ", "+ overridden.fullName)
+ // cache the namer used for entering the default getter symbols
var ownerNamer: Option[Namer] = None
var moduleNamer: Option[(ClassDef, Namer)] = None
@@ -950,7 +963,7 @@ trait Namers { self: Analyzer =>
// denotes the parameter lists which are on the left side of the current one. these get added
// to the default getter. Example: "def foo(a: Int)(b: Int = a)" gives "foo$default$1(a: Int) = a"
(List[List[ValDef]]() /: (vparamss))((previous: List[List[ValDef]], vparams: List[ValDef]) => {
- assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullNameString + ", "+ overridden.fullNameString)
+ assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullName + ", "+ overridden.fullName)
var baseParams = if (overrides) baseParamss.head else Nil
for (vparam <- vparams) {
val sym = vparam.symbol
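In user terms, a default getter synthesized here takes the parameter lists to the left of its own section, so the default expression can refer to earlier parameters; a rough sketch (the exact $default$ index follows the parameter's overall position):

    def weight(kg: Double)(scaled: Double = kg * 2): Double = scaled
    // the namer enters, roughly:
    //   def weight$default$N(kg: Double): Double = kg * 2
    // and a call  weight(3.0)()  is later completed as  weight(3.0)(weight$default$N(3.0))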
@@ -972,9 +985,11 @@ trait Namers { self: Analyzer =>
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = meth.owner.linkedModuleOfClass
+ val module = companionModuleOf(meth.owner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
+ if (!classAndNamerOfModule.contains(module))
+ return // fix #3649 (prevent crash in erroneous source code)
val (cdef, nmr) = classAndNamerOfModule(module)
moduleNamer = Some(cdef, nmr)
(cdef, nmr)
@@ -1023,14 +1038,20 @@ trait Namers { self: Analyzer =>
Modifiers(meth.flags & (PRIVATE | PROTECTED | FINAL)) | SYNTHETIC | DEFAULTPARAM | oflag,
name, deftParams, defvParamss, defTpt, defRhs)
}
- meth.owner.resetFlag(INTERFACE) // there's a concrete member now
+ if (!isConstr)
+ meth.owner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
- sym.defaultGetter = default
+ if (forInteractive && default.owner.isTerm) {
+ // enter into map from method symbols to default arguments.
+ // if compiling the same local block several times (which can happen in interactive mode)
+            // we might otherwise not find the default symbol, because the second time the
+ // method symbol will be re-entered in the scope but the default parameter will not.
+ defaultParametersOfMethod(meth) += default
+ }
} else if (baseHasDefault) {
// the parameter does not have a default itself, but the corresponding parameter
// in the base class does.
sym.setFlag(DEFAULTPARAM)
- sym.defaultGetter = baseParams.head.defaultGetter
}
posCounter += 1
if (overrides) baseParams = baseParams.tail
@@ -1052,23 +1073,17 @@ trait Namers { self: Analyzer =>
tp
}
- def verifyOverriding(other: Symbol): Boolean = {
- if(other.unsafeTypeParams.length != tparamSyms.length) {
- context.error(tpsym.pos,
- "The kind of "+tpsym.keyString+" "+tpsym.varianceString + tpsym.nameString+
- " does not conform to the expected kind of " + other.defString + other.locationString + ".")
- false
- } else true
- }
-
- // @M: make sure overriding in refinements respects rudimentary kinding
- // have to do this early, as otherwise we might get crashes: (see neg/bug1275.scala)
- // suppose some parameterized type member is overridden by a type member w/o params,
- // then appliedType will be called on a type that does not expect type args --> crash
- if (tpsym.owner.isRefinementClass && // only needed in refinements
- !tpsym.allOverriddenSymbols.forall{verifyOverriding(_)})
- ErrorType
- else polyType(tparamSyms, tp)
+ // see neg/bug1275, #3419
+ // used to do a rudimentary kind check here to ensure overriding in refinements
+ // doesn't change a type member's arity (number of type parameters),
+ // e.g. trait T { type X[A] }; type S = T{type X}; val x: S
+ // X in x.X[A] will get rebound to the X in the refinement, which does not take any type parameters
+ // this mismatch does not crash the compiler (anymore), but leads to weird type errors,
+ // as x.X[A] will become NoType internally
+      // it's not obvious the error refers to the X in the refinement and not the original X
+ // however, separate compilation requires the symbol info to be loaded to do this check,
+ // but loading the info will probably lead to spurious cyclic errors --> omit the check
+ polyType(tparamSyms, tp)
}
/** Given a case class
@@ -1098,30 +1113,42 @@ trait Namers { self: Analyzer =>
caseClassCopyMeth(cdef) foreach (namer.enterSyntheticSym(_))
}
+
def typeSig(tree: Tree): Type = {
- val sym: Symbol = tree.symbol
- // For definitions, transform Annotation trees to AnnotationInfos, assign
- // them to the sym's annotations. Type annotations: see Typer.typedAnnotated
-
- // We have to parse definition annotatinos here (not in the typer when traversing
- // the MemberDef tree): the typer looks at annotations of certain symbols; if
- // they were added only in typer, depending on the compilation order, they would
- // be visible or not
- val annotated = if (sym.isModule) sym.moduleClass else sym
- // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
- // parse the annotations only once.
- if (!annotated.isInitialized) tree match {
- case defn: MemberDef =>
- val ainfos = defn.mods.annotations filter { _ != null } map { ann =>
- // need to be lazy, #1782
- LazyAnnotationInfo(() => typer.typedAnnotation(ann))
- }
- if (!ainfos.isEmpty)
- annotated.setAnnotations(ainfos)
- if (annotated.isTypeSkolem)
- annotated.deSkolemize.setAnnotations(ainfos)
- case _ =>
+
+ /** For definitions, transform Annotation trees to AnnotationInfos, assign
+ * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
+ * We have to parse definition annotations here (not in the typer when traversing
+ * the MemberDef tree): the typer looks at annotations of certain symbols; if
+ * they were added only in typer, depending on the compilation order, they would
+ * be visible or not
+ */
+ def annotate(annotated: Symbol) = {
+ // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
+ // parse the annotations only once.
+ if (!annotated.isInitialized) tree match {
+ case defn: MemberDef =>
+ val ainfos = defn.mods.annotations filter { _ != null } map { ann =>
+ // need to be lazy, #1782
+ LazyAnnotationInfo(() => typer.typedAnnotation(ann))
+ }
+ if (!ainfos.isEmpty)
+ annotated.setAnnotations(ainfos)
+ if (annotated.isTypeSkolem)
+ annotated.deSkolemize.setAnnotations(ainfos)
+ case _ =>
+ }
}
+
+ val sym: Symbol = tree.symbol
+
+ // @Lukas: I am not sure this is the right way to do things.
+ // We used to only decorate the module class with annotations, which is
+ // clearly wrong. Now we decorate both the class and the object.
+ // But maybe some annotations are only meant for one of these but not for the other?
+ annotate(sym)
+ if (sym.isModule) annotate(sym.moduleClass)
+
val result =
try {
tree match {
@@ -1132,23 +1159,19 @@ trait Namers { self: Analyzer =>
val clazz = sym.moduleClass
clazz.setInfo(newNamer(context.makeNewScope(tree, clazz)).templateSig(impl))
//clazz.typeOfThis = singleType(sym.owner.thisType, sym);
- tree.symbol.setInfo(clazz.tpe) // initialize module to avoid cycles
- if (tree.symbol.name == nme.PACKAGEkw) {
- loaders.openPackageModule(tree.symbol)
- }
clazz.tpe
case DefDef(mods, _, tparams, vparamss, tpt, rhs) =>
newNamer(context.makeNewScope(tree, sym)).methodSig(mods, tparams, vparamss, tpt, rhs)
case vdef @ ValDef(mods, name, tpt, rhs) =>
- val typer1 = typer.constrTyperIf(sym.hasFlag(PARAM | PRESUPER) && sym.owner.isConstructor)
+ val typer1 = typer.constrTyperIf(sym.hasFlag(PARAM | PRESUPER) && !mods.hasFlag(JAVA) && sym.owner.isConstructor)
if (tpt.isEmpty) {
if (rhs.isEmpty) {
context.error(tpt.pos, "missing parameter type");
ErrorType
} else {
- tpt.tpe = widenIfNotFinal(
+ tpt defineType widenIfNotFinal(
sym,
newTyper(typer1.context.make(vdef, sym)).computeType(rhs, WildcardType),
WildcardType)
@@ -1164,7 +1187,7 @@ trait Namers { self: Analyzer =>
val expr1 = typer.typedQualifier(expr)
val base = expr1.tpe
typer.checkStable(expr1)
- if (expr1.symbol.isRootPackage) context.error(tree.pos, "_root_ cannot be imported")
+ if ((expr1.symbol ne null) && expr1.symbol.isRootPackage) context.error(tree.pos, "_root_ cannot be imported")
def checkNotRedundant(pos: Position, from: Name, to: Name): Boolean = {
if (!tree.symbol.hasFlag(SYNTHETIC) &&
!((expr1.symbol ne null) && expr1.symbol.isInterpreterWrapper) &&
@@ -1184,22 +1207,40 @@ trait Namers { self: Analyzer =>
}
true
}
+
+ def isValidSelector(from: Name)(fun : => Unit) {
+ if (base.nonLocalMember(from) == NoSymbol &&
+ base.nonLocalMember(from.toTypeName) == NoSymbol) fun
+ }
+
def checkSelectors(selectors: List[ImportSelector]): Unit = selectors match {
case ImportSelector(from, _, to, _) :: rest =>
if (from != nme.WILDCARD && base != ErrorType) {
- if (base.member(from) == NoSymbol && base.member(from.toTypeName) == NoSymbol)
- context.error(tree.pos, from.decode + " is not a member of " + expr);
+ isValidSelector(from) {
+ if (currentRun.compileSourceFor(expr, from))
+ return typeSig(tree)
+ // for Java code importing Scala objects
+ if (from.endsWith(nme.DOLLARraw))
+ isValidSelector(from.subName(0, from.length -1)) {
+ context.error(tree.pos, from.decode + " is not a member of " + expr)
+ }
+ else
+ context.error(tree.pos, from.decode + " is not a member of " + expr)
+ }
+
if (checkNotRedundant(tree.pos, from, to))
checkNotRedundant(tree.pos, from.toTypeName, to.toTypeName)
}
if (from != nme.WILDCARD && (rest.exists (sel => sel.name == from)))
- context.error(tree.pos, from.decode + " is renamed twice");
+ context.error(tree.pos, from.decode + " is renamed twice")
if ((to ne null) && to != nme.WILDCARD && (rest exists (sel => sel.rename == to)))
- context.error(tree.pos, to.decode + " appears twice as a target of a renaming");
+ context.error(tree.pos, to.decode + " appears twice as a target of a renaming")
checkSelectors(rest)
case Nil =>
}
+
checkSelectors(selectors)
+ transformed(tree) = treeCopy.Import(tree, expr1, selectors)
ImportType(expr1)
}
} catch {
@@ -1211,11 +1252,11 @@ trait Namers { self: Analyzer =>
result match {
case PolyType(tparams, restpe)
if (!tparams.isEmpty && tparams.head.owner.isTerm ||
- // Adriaan: The added conditon below is quite a hack. It seems that HK type parameters is relying
+            // Adriaan: The added condition below is quite a hack. It seems that HK type parameters are relying
// on a pass that forces all infos in the type to get everything right.
// The problem is that the same pass causes cyclic reference errors in
// test pos/cyclics.scala. It turned out that deSkolemize is run way more often than necessary,
- // ruinning it only when needed fixes the cuclic reference errors.
+ // running it only when needed fixes the cyclic reference errors.
// But correcting deSkolemize broke HK types, because we don't do the traversal anymore.
// For the moment I made a special hack to do the traversal if we have HK type parameters.
// Maybe it's not a hack, then we need to document it better. But ideally, we should find
@@ -1268,7 +1309,7 @@ trait Namers { self: Analyzer =>
context.error(sym.pos, "`lazy' definitions may not be initialized early")
if (sym.info.typeSymbol == FunctionClass(0) &&
sym.isValueParameter && sym.owner.isClass && sym.owner.hasFlag(CASE))
- context.error(sym.pos, "pass-by-name arguments not allowed for case class parameters");
+ context.error(sym.pos, "pass-by-name arguments not allowed for case class parameters")
if (sym hasFlag DEFERRED) { // virtual classes count, too
if (sym.hasAnnotation(definitions.NativeAttr))
sym.resetFlag(DEFERRED)
@@ -1295,9 +1336,16 @@ trait Namers { self: Analyzer =>
val tree: Tree
}
+ var lockedCount = 0
+
def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new TypeCompleter {
val tree = t
- override def complete(sym: Symbol) = c(sym)
+ override def complete(sym: Symbol) = try {
+ lockedCount += 1
+ c(sym)
+ } finally {
+ lockedCount -= 1
+ }
}
/** A class representing a lazy type with known type parameters.
@@ -1305,10 +1353,13 @@ trait Namers { self: Analyzer =>
class PolyTypeCompleter(tparams: List[Tree], restp: TypeCompleter, owner: Tree, ownerSym: Symbol, ctx: Context) extends TypeCompleter {
override val typeParams: List[Symbol]= tparams map (_.symbol) //@M
override val tree = restp.tree
- override def complete(sym: Symbol) {
+ override def complete(sym: Symbol) = try {
+ lockedCount += 1
if(ownerSym.isAbstractType) //@M an abstract type's type parameters are entered -- TODO: change to isTypeMember ?
newNamer(ctx.makeNewScope(owner, ownerSym)).enterSyms(tparams) //@M
restp.complete(sym)
+ } finally {
+ lockedCount -= 1
}
}
@@ -1328,6 +1379,40 @@ trait Namers { self: Analyzer =>
} else member.accessed
} else member
+ /**
+ * Finds the companion module of a class symbol. Calling .companionModule
+ * does not work for classes defined inside methods.
+ */
+ def companionModuleOf(clazz: Symbol, context: Context) =
+ try {
+ var res = clazz.companionModule
+ if (res == NoSymbol)
+ res = context.lookup(clazz.name.toTermName, clazz.owner).suchThat(sym =>
+ sym.hasFlag(MODULE) && sym.isCoDefinedWith(clazz))
+ res
+ } catch {
+ case e: InvalidCompanions =>
+ context.error(clazz.pos, e.getMessage)
+ NoSymbol
+ }
+
+ def companionClassOf(module: Symbol, context: Context) =
+ try {
+ var res = module.companionClass
+ if (res == NoSymbol)
+ res = context.lookup(module.name.toTypeName, module.owner).suchThat(_.isCoDefinedWith(module))
+ res
+ } catch {
+ case e: InvalidCompanions =>
+ context.error(module.pos, e.getMessage)
+ NoSymbol
+ }
+
+ def companionSymbolOf(sym: Symbol, context: Context) =
+ if (sym.isTerm) companionClassOf(sym, context)
+ else if (sym.isClass) companionModuleOf(sym, context)
+ else NoSymbol
+
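A sketch of the situation these helpers handle (illustrative code): for definitions local to a method, the class and its companion are entered in the method's scope rather than a class or package scope, so Symbol.companionModule comes up empty and the context-based lookup above is needed.

    def makeUser(name: String) = {
      case class User(name: String)   // User and its synthetic companion live in the
      User(name)                      // enclosing method's scope; clazz.companionModule
    }                                 // would be NoSymbol here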
/** An explanatory note to be added to error messages
* when there's a problem with abstract var defs */
def varNotice(sym: Symbol): String =
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index f19f5516c0..c08c614e1e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -1,14 +1,15 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
-import scala.collection.mutable.ListBuffer
- import symtab.Flags._
+import symtab.Flags._
+
+import scala.collection.mutable.{ListBuffer, WeakHashMap}
+import scala.collection.immutable.Set
/**
* @author Lukas Rytz
@@ -19,6 +20,10 @@ trait NamesDefaults { self: Analyzer =>
import global._
import definitions._
+ val defaultParametersOfMethod = new WeakHashMap[Symbol, Set[Symbol]] {
+ override def default(key: Symbol) = Set()
+ }
+
case class NamedApplyInfo(qual: Option[Tree], targs: List[Tree],
vargss: List[List[Tree]], blockTyper: Typer)
val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null)
@@ -99,7 +104,7 @@ trait NamesDefaults { self: Analyzer =>
import context.unit
/**
- * Transform a function into a block, and assing context.namedApplyBlockInfo to
+   * Transform a function into a block, and assign context.namedApplyBlockInfo to
* the new block as side-effect.
*
* `baseFun' is typed, the resulting block must be typed as well.
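For orientation, the rewrite described here evaluates the arguments once, in call-site order, into fresh locals and then applies them in definition-site order; roughly (x$1 and x$2 stand for the fresh names produced by unit.fresh):

    def resize(width: Int, height: Int): Unit = ()
    resize(height = 10, width = 20)
    // becomes, approximately:
    //   { val x$1 = 10; val x$2 = 20; resize(x$2, x$1) }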
@@ -132,10 +137,17 @@ trait NamesDefaults { self: Analyzer =>
case TypeRef(pre, sym, args)
if (!args.forall(a => context.undetparams contains a.typeSymbol)) =>
args.map(TypeTree(_))
- case _ => Nil
+ case _ =>
+ Nil
}
(baseFun, Nil, targsInSource)
+ case Select(TypeApply(New(TypeTree()), targs), _) if isConstr =>
+ val targsInSource =
+ if (targs.forall(a => context.undetparams contains a.symbol)) Nil
+ else targs
+ (baseFun, Nil, targsInSource)
+
case _ => (baseFun, Nil, Nil)
}
@@ -170,33 +182,52 @@ trait NamesDefaults { self: Analyzer =>
b
}
+ def moduleQual(pos: Position, classType: Type) = {
+ // prefix does 'normalize', which fixes #3384
+ val pre = classType.prefix
+ if (pre == NoType) {
+ None
+ } else {
+ val module = companionModuleOf(baseFun.symbol.owner, context)
+ if (module == NoSymbol) None
+ else Some(atPos(pos.focus)(gen.mkAttributedRef(pre, module)))
+ }
+ }
+
baseFun1 match {
// constructor calls
- case Select(New(TypeTree()), _) if isConstr =>
- blockWithoutQualifier(None)
+ case Select(New(tp @ TypeTree()), _) if isConstr =>
+ // 'moduleQual' fixes #3338. Same qualifier for selecting the companion object as for the class.
+ blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
+ case Select(TypeApply(New(tp @ TypeTree()), _), _) if isConstr =>
+ blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
- case Select(New(Ident(_)), _) if isConstr =>
- blockWithoutQualifier(None)
+ case Select(New(tp @ Ident(_)), _) if isConstr =>
+ // 'moduleQual' fixes #3344
+ blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
+ case Select(TypeApply(New(tp @ Ident(_)), _), _) if isConstr =>
+ blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
- case Select(nev @ New(sel @ Select(qual, typeName)), constr) if isConstr =>
- // #2057
- val module = baseFun.symbol.owner.linkedModuleOfClass
- val defaultQual =
- if (module == NoSymbol) None
- else Some(atPos(qual.pos.focus)(gen.mkAttributedSelect(qual.duplicate, module)))
+ case Select(New(tp @ Select(qual, _)), _) if isConstr =>
// in `new q.C()', q is always stable
assert(treeInfo.isPureExpr(qual), qual)
- blockWithoutQualifier(defaultQual)
+ // 'moduleQual' fixes #2057
+ blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
+ case Select(TypeApply(New(tp @ Select(qual, _)), _), _) if isConstr =>
+ assert(treeInfo.isPureExpr(qual), qual)
+ blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
// super constructor calls
- case Select(Super(_, _), _) if isConstr =>
- blockWithoutQualifier(None)
+ case Select(sp @ Super(_, _), _) if isConstr =>
+ // 'moduleQual' fixes #3207. selection of the companion module of the
+        // superclass needs to have the same prefix as the superclass.
+ blockWithoutQualifier(moduleQual(baseFun.pos, sp.symbol.tpe.parents.head))
// self constructor calls (in secondary constructors)
- case Select(qual, name) if isConstr =>
- assert(treeInfo.isPureExpr(qual), qual)
- blockWithoutQualifier(None)
+ case Select(tp, name) if isConstr =>
+ assert(treeInfo.isPureExpr(tp), tp)
+ blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
// other method calls
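The moduleQual calls in the constructor cases above share one idea: constructor defaults live as init$default$N members of the companion object, and that companion has to be selected through the same prefix as the class being instantiated. A rough sketch (hypothetical nesting):

    class Outer { class C(val x: Int = 1) }
    val o = new Outer
    new o.C()
    // the missing argument is taken from the companion reached through the same prefix o:
    //   new o.C(o.C.init$default$1)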
@@ -223,20 +254,34 @@ trait NamesDefaults { self: Analyzer =>
*/
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
val context = blockTyper.context
- val symPs = List.map2(args, paramTypes)((arg, tpe) => {
+ val symPs = (args, paramTypes).zipped map ((arg, tpe) => {
val byName = tpe.typeSymbol == ByNameParamClass
+ val (argTpe, repeated) =
+ if (tpe.typeSymbol == RepeatedParamClass) arg match {
+ case Typed(expr, tpt @ Ident(name)) if name == nme.WILDCARD_STAR.toTypeName =>
+ (expr.tpe, true)
+ case _ =>
+ (seqType(arg.tpe), true)
+ } else (arg.tpe, false)
val s = context.owner.newValue(arg.pos, unit.fresh.newName(arg.pos, "x$"))
- val valType = if (byName) functionType(List(), arg.tpe)
- else arg.tpe
+ val valType = if (byName) functionType(List(), argTpe)
+ else if (repeated) argTpe
+ else argTpe
s.setInfo(valType)
- (context.scope.enter(s), byName)
+ (context.scope.enter(s), byName, repeated)
})
- List.map2(symPs, args)((symP, arg) => {
- val (sym, byName) = symP
+ (symPs, args).zipped map ((symP, arg) => {
+ val (sym, byName, repeated) = symP
// resetAttrs required for #2290. given a block { val x = 1; x }, when wrapping into a function
// () => { val x = 1; x }, the owner of symbol x must change (to the apply method of the function).
val body = if (byName) blockTyper.typed(Function(List(), resetLocalAttrs(arg)))
- else arg
+ else if (repeated) arg match {
+ case Typed(expr, tpt @ Ident(name)) if name == nme.WILDCARD_STAR.toTypeName =>
+ expr
+ case _ =>
+ val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
+ blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
+ } else arg
atPos(body.pos)(ValDef(sym, body).setType(NoType))
})
}
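The by-name and repeated cases above change both the type of the temporary and how it is read back; a hand-written approximation (x$1 stands for a fresh name):

    def log(msg: => String): Unit = ()
    // by-name: the temporary is a thunk, re-applied at the call:
    //   log(expensive())   ~>   val x$1: () => String = () => expensive(); log(x$1())
    def sum(xs: Int*): Int = xs.sum
    // repeated: a  ys: _*  argument keeps its sequence, a plain argument is wrapped in Seq(...),
    // and the temporary is passed back with the _* marker:
    //   sum(ys: _*)        ~>   val x$1 = ys; sum(x$1: _*)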
@@ -264,17 +309,18 @@ trait NamesDefaults { self: Analyzer =>
// ValDef's in the block), change the arguments to these local values.
case Apply(expr, typedArgs) =>
// typedArgs: definition-site order
- val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false)
+ val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false, false)
// valDefs: call-site order
val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos),
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = List.map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
+ val refArgs = (reorderArgs(valDefs, argPos), formals).zipped map ((vDef, tpe) => {
val ref = gen.mkAttributedRef(vDef.symbol)
atPos(vDef.pos.focus) {
// for by-name parameters, the local value is a nullary function returning the argument
if (tpe.typeSymbol == ByNameParamClass) Apply(ref, List())
+ else if (tpe.typeSymbol == RepeatedParamClass) Typed(ref, Ident(nme.WILDCARD_STAR.toTypeName))
else ref
}
})
@@ -315,18 +361,19 @@ trait NamesDefaults { self: Analyzer =>
*
* Example: given
* def foo(x: Int = 2, y: String = "def")
- * foo(1)
+ * foo(y = "lt")
* the argument list (y = "lt") is transformed to (y = "lt", x = foo$default$1())
*/
def addDefaults(givenArgs: List[Tree], qual: Option[Tree], targs: List[Tree],
- previousArgss: List[List[Tree]], params: List[Symbol], pos: util.Position): (List[Tree], List[Symbol]) = {
+ previousArgss: List[List[Tree]], params: List[Symbol],
+ pos: util.Position, context: Context): (List[Tree], List[Symbol]) = {
if (givenArgs.length < params.length) {
val (missing, positional) = missingParams(givenArgs, params)
if (missing forall (_.hasFlag(DEFAULTPARAM))) {
val defaultArgs = missing map (p => {
var default1 = qual match {
- case Some(q) => gen.mkAttributedSelect(q.duplicate, p.defaultGetter)
- case None => gen.mkAttributedRef(p.defaultGetter)
+ case Some(q) => gen.mkAttributedSelect(q.duplicate, defaultGetter(p, context))
+ case None => gen.mkAttributedRef(defaultGetter(p, context))
}
default1 = if (targs.isEmpty) default1
else TypeApply(default1, targs.map(_.duplicate))
@@ -343,6 +390,32 @@ trait NamesDefaults { self: Analyzer =>
}
/**
+ * For a parameter with default argument, find the method symbol of
+ * the default getter.
+ */
+ def defaultGetter(param: Symbol, context: Context): Symbol = {
+ val i = param.owner.paramss.flatten.findIndexOf(p => p.name == param.name) + 1
+ if (i > 0) {
+ if (param.owner.isConstructor) {
+ val defGetterName = "init$default$"+ i
+ val mod = companionModuleOf(param.owner.owner, context)
+ mod.info.member(defGetterName)
+ } else {
+ val defGetterName = param.owner.name +"$default$"+ i
+ // isClass also works for methods in objects, owner is the ModuleClassSymbol
+ if (param.owner.owner.isClass) {
+ // .toInterface: otherwise we get the method symbol of the impl class
+ param.owner.owner.toInterface.info.member(defGetterName)
+ } else {
+ // the owner of the method is another method. find the default
+ // getter in the context.
+ context.lookup(defGetterName, param.owner.owner)
+ }
+ }
+ } else NoSymbol
+ }
+
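The lookup above mirrors where Namers enters the getters: constructor defaults become init$default$N members of the companion object, method defaults become name$default$N members next to the method, with N counting the parameter's position. Illustrative sketch:

    class Config(val host: String = "localhost", val port: Int = 8080)
    // the companion object gains:  def init$default$1 = "localhost";  def init$default$2 = 8080
    def retry(times: Int = 3): Unit = ()
    // the enclosing scope gains:   def retry$default$1: Int = 3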
+ /**
* Removes name assignments from args. Additionally, returns an array mapping
   * argument indices from call-site-order to definition-site-order.
*
@@ -370,7 +443,7 @@ trait NamesDefaults { self: Analyzer =>
} else if (argPos contains pos) {
errorTree(arg, "parameter specified twice: "+ name)
} else {
- // for named arguments, check wether the assignment expression would
+ // for named arguments, check whether the assignment expression would
// typecheck. if it does, report an ambiguous error.
val param = params(pos)
val paramtpe = params(pos).tpe.cloneInfo(param)
@@ -386,20 +459,41 @@ trait NamesDefaults { self: Analyzer =>
case _ => super.apply(tp)
}
}
- val res = typer.silent(_.typed(arg, subst(paramtpe))) match {
+ val reportAmbiguousErrors = typer.context.reportAmbiguousErrors
+ typer.context.reportAmbiguousErrors = false
+
+ val typedAssign = try {
+ typer.silent(_.typed(arg, subst(paramtpe)))
+ } catch {
+ // `silent` only catches and returns TypeErrors which are not CyclicReferences
+ // fix for #3685
+ case cr @ CyclicReference(sym, info) if (sym.name == param.name) =>
+ if (sym.isVariable || sym.isGetter && sym.accessed.isVariable) {
+ // named arg not allowed
+              typer.context.error(sym.pos, "variable definition needs type because the name is used as a named argument in the definition.")
+ typer.infer.setError(arg)
+ } else cr // named arg OK
+ }
+ val res = typedAssign match {
case _: TypeError =>
- positionalAllowed = false
+ // if the named argument is on the original parameter
+ // position, positional after named is allowed.
+ if (index != pos)
+ positionalAllowed = false
argPos(index) = pos
rhs
case t: Tree =>
- // this throws an exception that's caught in `tryTypedApply` (as it uses `silent`)
- // unfortunately, tryTypedApply recovers from the exception if you use errorTree(arg, ...) and conforms is allowed as a view (see tryImplicit in Implicits)
- // because it tries to produce a new qualifier (if the old one was P, the new one will be conforms.apply(P)), and if that works, it pretends nothing happened
- // so, to make sure tryTypedApply fails, would like to pass EmptyTree instead of arg, but can't do that because eventually setType(ErrorType) is called, and EmptyTree only accepts NoType as its tpe
- // thus, we need to disable conforms as a view...
+ if (!t.isErroneous) {
+ // this throws an exception that's caught in `tryTypedApply` (as it uses `silent`)
+ // unfortunately, tryTypedApply recovers from the exception if you use errorTree(arg, ...) and conforms is allowed as a view (see tryImplicit in Implicits)
+ // because it tries to produce a new qualifier (if the old one was P, the new one will be conforms.apply(P)), and if that works, it pretends nothing happened
+ // so, to make sure tryTypedApply fails, would like to pass EmptyTree instead of arg, but can't do that because eventually setType(ErrorType) is called, and EmptyTree only accepts NoType as its tpe
+ // thus, we need to disable conforms as a view...
errorTree(arg, "reference to "+ name +" is ambiguous; it is both, a parameter\n"+
"name of the method and the name of a variable currently in scope.")
+ } else t // error was reported above
}
+ typer.context.reportAmbiguousErrors = reportAmbiguousErrors
//@M note that we don't get here when an ambiguity was detected (during the computation of res),
// as errorTree throws an exception
typer.context.undetparams = udp
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index aaefab1f74..73ef7472ac 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
@@ -10,7 +9,6 @@ package typechecker
import symtab.Flags._
import collection.mutable.{HashSet, HashMap}
import transform.InfoTransform
-import scala.tools.nsc.util.{Position, NoPosition}
import scala.collection.mutable.ListBuffer
/** <p>
@@ -56,7 +54,7 @@ abstract class RefChecks extends InfoTransform {
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isModule && !sym.isStatic) {
sym setFlag (lateMETHOD | STABLE)
- PolyType(List(), tp)
+ PolyType(Nil, tp)
} else tp
val toJavaRepeatedParam = new TypeMap {
@@ -68,17 +66,27 @@ abstract class RefChecks extends InfoTransform {
}
}
+ val toScalaRepeatedParam = new TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case tp @ TypeRef(pre, JavaRepeatedParamClass, args) =>
+ typeRef(pre, RepeatedParamClass, args)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer = typer;
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
+ var checkedCombinations = Set[List[Type]]()
// only one overloaded alternative is allowed to define default arguments
private def checkDefaultsInOverloaded(clazz: Symbol) {
def check(members: List[Symbol]): Unit = members match {
case x :: xs =>
- if (x.paramss.exists(_.exists(p => p.hasFlag(DEFAULTPARAM)))) {
+ if (x.paramss.exists(_.exists(p => p.hasFlag(DEFAULTPARAM))) && !nme.isProtectedAccessor(x.name)) {
val others = xs.filter(alt => {
alt.name == x.name &&
alt.paramss.exists(_.exists(_.hasFlag(DEFAULTPARAM))) &&
@@ -87,7 +95,7 @@ abstract class RefChecks extends InfoTransform {
if (!others.isEmpty) {
val all = x :: others
val rest = if (all.exists(_.owner != clazz)) ".\nThe members with defaults are defined in "+
- all.map(_.owner).mkString("", " and ", ".")
+ all.map(_.owner).mkString("", " and ", ".") else "."
unit.error(clazz.pos, "in "+ clazz +", multiple overloaded alternatives of "+ x +
" define default arguments"+ rest)
}
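In user terms, the check rejects overloads that both declare defaults, since their synthesized name$default$N getters would clash; a minimal sketch of the rejected shape (hypothetical class):

    class Printer {
      def show(s: String, nl: Boolean = true): Unit = ()
      def show(i: Int, nl: Boolean = true): Unit = ()
      // error: in class Printer, multiple overloaded alternatives of method show define default arguments
    }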
@@ -179,7 +187,7 @@ abstract class RefChecks extends InfoTransform {
* 4. Check that every member with an `override' modifier
* overrides some other member.
*/
- private def checkAllOverrides(clazz: Symbol) {
+ private def checkAllOverrides(clazz: Symbol, typesOnly: Boolean = false) {
case class MixinOverrideError(member: Symbol, msg: String)
@@ -191,7 +199,7 @@ abstract class RefChecks extends InfoTransform {
case List(MixinOverrideError(_, msg)) =>
unit.error(clazz.pos, msg)
case MixinOverrideError(member, msg) :: others =>
- val others1 = others.map(_.member.name.decode).filter(member.name.decode != _).removeDuplicates
+ val others1 = others.map(_.member.name.decode).filter(member.name.decode != _).distinct
unit.error(
clazz.pos,
msg+(if (others1.isEmpty) ""
@@ -233,13 +241,15 @@ abstract class RefChecks extends InfoTransform {
tp1 <:< tp2
}
- /** Check that all conditions for overriding <code>other</code> by
- * <code>member</code> of class <code>clazz</code> are met.
+ /** Check that all conditions for overriding `other` by `member`
+ * of class `clazz` are met.
*/
def checkOverride(clazz: Symbol, member: Symbol, other: Symbol) {
+ def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
+ def isRootOrNone(sym: Symbol) = sym == RootClass || sym == NoSymbol
def overrideError(msg: String) {
- if (other.tpe != ErrorType && member.tpe != ErrorType) {
+ if (noErrorType) {
val fullmsg =
"overriding "+infoStringWithLocation(other)+";\n "+
infoString(member)+" "+msg+
@@ -253,17 +263,17 @@ abstract class RefChecks extends InfoTransform {
}
def overrideTypeError() {
- if (other.tpe != ErrorType && member.tpe != ErrorType) {
+ if (noErrorType) {
overrideError("has incompatible type")
}
}
+ def accessFlagsToString(sym: Symbol)
+ = flagsToString(sym getFlag (PRIVATE | PROTECTED), if (!sym.hasAccessBoundary) "" else sym.privateWithin.name.toString)
+
def overrideAccessError() {
- val pwString = if (other.privateWithin == NoSymbol) ""
- else other.privateWithin.name.toString
- val otherAccess = flagsToString(other getFlag (PRIVATE | PROTECTED), pwString)
- overrideError("has weaker access privileges; it should be "+
- (if (otherAccess == "") "public" else "at least "+otherAccess))
+ val otherAccess = accessFlagsToString(other)
+ overrideError("has weaker access privileges; it should be "+ (if (otherAccess == "") "public" else "at least "+otherAccess))
}
//Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG
@@ -292,56 +302,72 @@ abstract class RefChecks extends InfoTransform {
def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) =
!(syms1 exists (syms2 contains))
- if (member hasFlag PRIVATE) { // (1.1)
- overrideError("has weaker access privileges; it should not be private")
- }
- val mb = member.accessBoundary(member.owner)
- val ob = other.accessBoundary(member.owner)
- if (mb != RootClass && mb != NoSymbol && // todo: change
- (ob == RootClass || ob == NoSymbol || !ob.hasTransOwner(mb) ||
- (other hasFlag PROTECTED) && !(member hasFlag PROTECTED))) {
- overrideAccessError()
+ if (typesOnly) checkOverrideTypes()
+ else {
+ // o: public | protected | package-protected (aka java's default access)
+ // ^-may be overridden by member with access privileges-v
+ // m: public | public/protected | public/protected/package-protected-in-same-package-as-o
+
+ if (member.isPrivate) // (1.1)
+ overrideError("has weaker access privileges; it should not be private")
+
+ // todo: align accessibility implication checking with isAccessible in Contexts
+ val ob = other.accessBoundary(member.owner)
+ val mb = member.accessBoundary(member.owner)
+ def isOverrideAccessOK = member.isPublic || { // member is public, definitely same or relaxed access
+ (!other.isProtected || member.isProtected) && // if o is protected, so is m
+ ((!isRootOrNone(ob) && ob.hasTransOwner(mb)) || // m relaxes o's access boundary
+ other.isJavaDefined) // overriding a protected java member, see #3946
+ }
+ if (!isOverrideAccessOK) {
+ overrideAccessError()
+ } else if (other.isClass || other.isModule) {
+ overrideError("cannot be used here - classes and objects cannot be overridden");
+ } else if (!other.isDeferred && (member.isClass || member.isModule)) {
+ overrideError("cannot be used here - classes and objects can only override abstract types");
+ } else if (other hasFlag FINAL) { // (1.2)
+ overrideError("cannot override final member");
+ } else if (!other.isDeferred && !(member hasFlag (OVERRIDE | ABSOVERRIDE | SYNTHETIC))) { // (1.3), SYNTHETIC because of DEVIRTUALIZE
+ overrideError("needs `override' modifier");
+ } else if ((other hasFlag ABSOVERRIDE) && other.isIncompleteIn(clazz) && !(member hasFlag ABSOVERRIDE)) {
+ overrideError("needs `abstract override' modifiers")
+ } else if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
+ (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.hasFlag(LAZY)) {
+ overrideError("cannot override a mutable variable")
+ } else if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
+ !(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) &&
+ !member.isDeferred && !other.isDeferred &&
+ intersectionIsEmpty(member.allOverriddenSymbols, other.allOverriddenSymbols)) {
+ overrideError("cannot override a concrete member without a third member that's overridden by both "+
+ "(this rule is designed to prevent ``accidental overrides'')")
+ } else if (other.isStable && !member.isStable) { // (1.4)
+ overrideError("needs to be a stable, immutable value")
+ } else if (member.isValue && (member hasFlag LAZY) &&
+ other.isValue && !other.isSourceMethod && !other.isDeferred && !(other hasFlag LAZY)) {
+ overrideError("cannot override a concrete non-lazy value")
+ } else if (other.isValue && (other hasFlag LAZY) && !other.isSourceMethod && !other.isDeferred &&
+ member.isValue && !(member hasFlag LAZY)) {
+ overrideError("must be declared lazy to override a concrete lazy value")
+ } else {
+ checkOverrideTypes()
+ }
}
- else if (other.isClass || other.isModule) {
- overrideError("cannot be used here - classes and objects cannot be overridden");
- } else if (!other.isDeferred && (member.isClass || member.isModule)) {
- overrideError("cannot be used here - classes and objects can only override abstract types");
- } else if (other hasFlag FINAL) { // (1.2)
- overrideError("cannot override final member");
- } else if (!other.isDeferred && !(member hasFlag (OVERRIDE | ABSOVERRIDE | SYNTHETIC))) { // (1.3), SYNTHETIC because of DEVIRTUALIZE
- overrideError("needs `override' modifier");
- } else if ((other hasFlag ABSOVERRIDE) && other.isIncompleteIn(clazz) && !(member hasFlag ABSOVERRIDE)) {
- overrideError("needs `abstract override' modifiers")
- } else if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
- (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.hasFlag(LAZY)) {
- overrideError("cannot override a mutable variable")
- } else if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
- !(member.owner isSubClass other.owner) &&
- !member.isDeferred && !other.isDeferred &&
- intersectionIsEmpty(member.allOverriddenSymbols, other.allOverriddenSymbols)) {
- overrideError("cannot override a concrete member without a third member that's overridden by both "+
- "(this rule is designed to prevent ``accidental overrides'')")
- } else if (other.isStable && !member.isStable) { // (1.4)
- overrideError("needs to be a stable, immutable value")
- } else if (member.isValue && (member hasFlag LAZY) &&
- other.isValue && !other.isSourceMethod && !other.isDeferred && !(other hasFlag LAZY)) {
- overrideError("cannot override a concrete non-lazy value")
- } else if (other.isValue && (other hasFlag LAZY) && !other.isSourceMethod && !other.isDeferred &&
- member.isValue && !(member hasFlag LAZY)) {
- overrideError("must be declared lazy to override a concrete lazy value")
- } else {
+
+ def checkOverrideTypes() {
if (other.isAliasType) {
- //if (!member.typeParams.isEmpty) // (1.5) @MAT
+ //if (!member.typeParams.isEmpty) (1.5) @MAT
// overrideError("may not be parameterized");
- //if (!other.typeParams.isEmpty) // (1.5) @MAT
+ //if (!other.typeParams.isEmpty) (1.5) @MAT
// overrideError("may not override parameterized type");
// @M: substSym
+
if (!(self.memberType(member).substSym(member.typeParams, other.typeParams) =:= self.memberType(other))) // (1.6)
overrideTypeError();
} else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
// overrideError("may not be parameterized");
- var memberTp = self.memberType(member)
+
+ val memberTp = self.memberType(member)
val otherTp = self.memberInfo(other)
if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
overrideTypeError(); // todo: do an explaintypes with bounds here
@@ -368,11 +394,31 @@ abstract class RefChecks extends InfoTransform {
"The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+
member.varianceString + member.nameString+ " does not conform to its expected kind."+
kindErrors.toList.mkString("\n", ", ", ""))
+ } else if (member.isAbstractType) {
+ if (memberTp.isVolatile && !otherTp.bounds.hi.isVolatile)
+ overrideError("is a volatile type; cannot override a type with non-volatile upper bound")
}
} else if (other.isTerm) {
- if (!overridesType(self.memberInfo(member), self.memberInfo(other))) { // 8
+ other.cookJavaRawInfo() // #2454
+ val memberTp = self.memberType(member)
+ val otherTp = self.memberType(other)
+ if (!overridesType(memberTp, otherTp)) { // 8
overrideTypeError()
- explainTypes(self.memberInfo(member), self.memberInfo(other))
+ explainTypes(memberTp, otherTp)
+ }
+
+ if (member.isStable && !otherTp.isVolatile) {
+ if (memberTp.isVolatile)
+ overrideError("has a volatile type; cannot override a member with non-volatile type")
+ else memberTp.normalize.resultType match {
+ case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
+ // might mask some inconsistencies -- check overrides
+ checkedCombinations += rt.parents
+ val tsym = rt.typeSymbol;
+ if (tsym.pos == NoPosition) tsym setPos member.pos
+ checkAllOverrides(tsym, typesOnly = true)
+ case _ =>
+ }
}
}
}
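A quick illustration of the lazy-val pairing rules enforced above: a lazy member may not override a concrete strict value, and a strict member may not override a concrete lazy one. The sketch below (class and member names are invented) is deliberately rejected with the two messages quoted in the code:

    class A {
      val x      = 1
      lazy val y = 2
    }
    class B extends A {
      override lazy val x = 3 // error: cannot override a concrete non-lazy value
      override val y      = 4 // error: must be declared lazy to override a concrete lazy value
    }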
@@ -380,49 +426,86 @@ abstract class RefChecks extends InfoTransform {
val opc = new overridingPairs.Cursor(clazz)
while (opc.hasNext) {
- //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullNameString + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullNameString + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
+ //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
if (!opc.overridden.isClass) checkOverride(clazz, opc.overriding, opc.overridden);
opc.next
}
printMixinOverrideErrors()
- // 2. Check that only abstract classes have deferred members
- if (clazz.isClass && !clazz.isTrait) {
+ // Verifying a concrete class has nothing unimplemented.
+ if (clazz.isClass && !clazz.isTrait && !(clazz hasFlag ABSTRACT) && !typesOnly) {
+ val abstractErrors = new ListBuffer[String]
+ def abstractErrorMessage =
+ // a little formatting polish
+ if (abstractErrors.size <= 2) abstractErrors mkString " "
+ else abstractErrors.tail.mkString(abstractErrors.head + ":\n", "\n", "")
+
def abstractClassError(mustBeMixin: Boolean, msg: String) {
- unit.error(clazz.pos,
- (if (clazz.isAnonymousClass || clazz.isModuleClass) "object creation impossible"
- else if (mustBeMixin) clazz.toString() + " needs to be a mixin"
- else clazz.toString() + " needs to be abstract") + ", since " + msg);
- clazz.setFlag(ABSTRACT)
+ def prelude = (
+ if (clazz.isAnonymousClass || clazz.isModuleClass) "object creation impossible"
+ else if (mustBeMixin) clazz + " needs to be a mixin"
+ else clazz + " needs to be abstract"
+ ) + ", since"
+
+ if (abstractErrors.isEmpty) abstractErrors ++= List(prelude, msg)
+ else abstractErrors += msg
}
- // Find a concrete Java method that overrides `sym' under the erasure model.
- // Bridge symbols qualify.
- // Used as a fall back if no overriding symbol of a Java abstract method can be found
+
def javaErasedOverridingSym(sym: Symbol): Symbol =
clazz.tpe.nonPrivateMemberAdmitting(sym.name, BRIDGE).filter(other =>
- !other.isDeferred &&
- (other hasFlag JAVA) && {
- val tp1 = erasure.erasure(clazz.thisType.memberType(sym))
- val tp2 = erasure.erasure(clazz.thisType.memberType(other))
+ !other.isDeferred && other.isJavaDefined && {
+ def uncurryAndErase(tp: Type) = erasure.erasure(uncurry.transformInfo(sym, tp)) // #3622: erasure operates on uncurried types -- note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
+ val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
+ val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
atPhase(currentRun.erasurePhase.next)(tp1 matches tp2)
})
- for (member <- clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE))
- if (member.isDeferred && !(clazz hasFlag ABSTRACT) &&
- !isAbstractTypeWithoutFBound(member) &&
- !((member hasFlag JAVA) && javaErasedOverridingSym(member) != NoSymbol)) {
- abstractClassError(
- false, infoString(member) + " is not defined" + analyzer.varNotice(member))
- } else if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz)) {
- val other = member.superSymbol(clazz);
- abstractClassError(true,
- infoString(member) + " is marked `abstract' and `override'" +
- (if (other != NoSymbol)
- " and overrides incomplete superclass member " + infoString(other)
- else ""))
+ def ignoreDeferred(member: Symbol) =
+ isAbstractTypeWithoutFBound(member) ||
+ (member.isJavaDefined &&
+ (currentRun.erasurePhase == NoPhase || // the test requires atPhase(erasurePhase.next) so shouldn't be done if the compiler has no erasure phase available
+ javaErasedOverridingSym(member) != NoSymbol))
+
+ // 2. Check that only abstract classes have deferred members
+ def checkNoAbstractMembers() = {
+ // Avoid spurious duplicates: first gather any missing members.
+ def memberList = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE)
+ val (missing, rest) = memberList partition (m => m.isDeferred && !ignoreDeferred(m))
+ // Group missing members by the underlying symbol.
+ val grouped = missing groupBy (analyzer underlying _ name)
+
+ for (member <- missing) {
+ def undefined(msg: String) = abstractClassError(false, infoString(member) + " is not defined" + msg)
+ val underlying = analyzer.underlying(member)
+
+ // Give a specific error message for abstract vars based on why it fails:
+ // It could be unimplemented, have only one accessor, or be uninitialized.
+ if (underlying.isVariable) {
+ // If both getter and setter are missing, squelch the setter error.
+ val isMultiple = grouped(underlying.name).size > 1
+ // TODO: messages shouldn't be spread over two files, and varNotice is not a clear name
+ if (member.isSetter && isMultiple) ()
+ else undefined(
+ if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)"
+ else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)"
+ else analyzer.varNotice(member)
+ )
+ }
+ else undefined("")
}
+ // Check the remainder for invalid absoverride.
+ for (member <- rest ; if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz))) {
+ val other = member.superSymbol(clazz)
+ val explanation =
+ if (other != NoSymbol) " and overrides incomplete superclass member " + infoString(other)
+ else ", but no concrete implementation could be found in a base class"
+
+ abstractClassError(true, infoString(member) + " is marked `abstract' and `override'" + explanation)
+ }
+ }
+
// 3. Check that concrete classes do not have deferred definitions
// that are not implemented in a subclass.
// Note that this is not the same as (2); In a situation like
@@ -433,8 +516,8 @@ abstract class RefChecks extends InfoTransform {
// (3) is violated but not (2).
def checkNoAbstractDecls(bc: Symbol) {
for (decl <- bc.info.decls.iterator) {
- if (decl.isDeferred && !isAbstractTypeWithoutFBound(decl)) {
- val impl = decl.matchingSymbol(clazz.thisType)
+ if (decl.isDeferred && !ignoreDeferred(decl)) {
+ val impl = decl.matchingSymbol(clazz.thisType, admit = VBRIDGE)
if (impl == NoSymbol || (decl.owner isSubClass impl.owner)) {
abstractClassError(false, "there is a deferred declaration of "+infoString(decl)+
" which is not implemented in a subclass"+analyzer.varNotice(decl))
@@ -445,24 +528,62 @@ abstract class RefChecks extends InfoTransform {
if (!parents.isEmpty && parents.head.typeSymbol.hasFlag(ABSTRACT))
checkNoAbstractDecls(parents.head.typeSymbol)
}
- if (!(clazz hasFlag ABSTRACT)) checkNoAbstractDecls(clazz)
+
+ checkNoAbstractMembers()
+ if (abstractErrors.isEmpty)
+ checkNoAbstractDecls(clazz)
+
+ if (abstractErrors.nonEmpty)
+ unit.error(clazz.pos, abstractErrorMessage)
}
- /** Does there exists a symbol declared in class `inclazz` with name `name` and
- * whose type seen as a member of `class.thisType` matches `tpe`?
+ /** Returns whether there is a symbol declared in class `inclazz`
+ * (which must be different from `clazz`) whose name and type
+ * seen as a member of `class.thisType` matches `member`'s.
*/
- def hasMatchingSym(inclazz: Symbol, name: Name, tpe: Type): Boolean =
- inclazz.info.nonPrivateDecl(name).filter(sym =>
- !sym.isTerm || (tpe matches clazz.thisType.memberType(sym))) != NoSymbol
+ def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = {
+ val isVarargs = hasRepeatedParam(member.tpe)
+ lazy val varargsType = toJavaRepeatedParam(member.tpe)
+
+ def isSignatureMatch(sym: Symbol) = !sym.isTerm || {
+ val symtpe = clazz.thisType memberType sym
+ def matches(tp: Type) = tp matches symtpe
+
+ matches(member.tpe) || (isVarargs && matches(varargsType))
+ }
+ /** The rules for accessing members which have an access boundary are more
+ * restrictive in java than scala. Since java has no concept of package nesting,
+ * a member with "default" (package-level) access can only be accessed by members
+ * in the exact same package. Example:
+ *
+ * package a.b;
+ * public class JavaClass { void foo() { } }
+ *
+ * The member foo() can be accessed only from members of package a.b, and not
+ * nested packages like a.b.c. In the analogous scala class:
+ *
+ * package a.b
+ * class ScalaClass { private[b] def foo() = () }
+ *
+ * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic
+ * is restricting the set of matching signatures according to the above semantics.
+ */
+ def javaAccessCheck(sym: Symbol) = (
+ !inclazz.isJavaDefined // not a java defined member
+ || !sym.hasAccessBoundary // no access boundary
+ || sym.isProtected // marked protected in java, thus accessible to subclasses
+ || sym.privateWithin == member.enclosingPackageClass // exact package match
+ )
+ def classDecls = inclazz.info.nonPrivateDecl(member.name)
+ def matchingSyms = classDecls filter (sym => isSignatureMatch(sym) && javaAccessCheck(sym))
+
+ (inclazz != clazz) && (matchingSyms != NoSymbol)
+ }
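A pure-Scala counterpart of the access-boundary example in the comment above (package and class names are invented): a `private[b]` member is visible from the nested package `a.b.c`, which is exactly the asymmetry with Java's default access that javaAccessCheck compensates for.

    package a.b {
      class ScalaClass { private[b] def foo() = () }
    }
    package a.b.c {
      class User {
        def callIt() = (new a.b.ScalaClass).foo() // compiles: a.b.c is enclosed in package b
      }
    }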
// 4. Check that every defined member with an `override' modifier overrides some other member.
for (member <- clazz.info.decls.toList)
if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
- !(clazz.ancestors exists { bc =>
- hasMatchingSym(bc, member.name, member.tpe) ||
- hasRepeatedParam(member.tpe) &&
- hasMatchingSym(bc, member.name, toJavaRepeatedParam(member.tpe))
- })) {
+ !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
unit.error(member.pos, member.toString() + " overrides nothing");
member resetFlag OVERRIDE
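Rule 4 is the source of the familiar "overrides nothing" error. A minimal sketch of a program it rejects (names invented):

    class A { def f(x: Int) = x }
    class B extends A {
      override def f(x: String) = x // error: method f overrides nothing
    }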
@@ -484,7 +605,7 @@ abstract class RefChecks extends InfoTransform {
/** validate all base types of a class in reverse linear order. */
def register(tp: Type) {
-// if (clazz.fullNameString.endsWith("Collection.Projection"))
+// if (clazz.fullName.endsWith("Collection.Projection"))
// println("validate base type "+tp)
val baseClass = tp.typeSymbol
if (baseClass.isClass) {
@@ -581,11 +702,12 @@ abstract class RefChecks extends InfoTransform {
case NoPrefix => ;
case ThisType(_) => ;
case ConstantType(_) => ;
- case DeBruijnIndex(_, _) => ;
+ // case DeBruijnIndex(_, _) => ;
case SingleType(pre, sym) =>
validateVariance(pre, variance)
case TypeRef(pre, sym, args) =>
- if (sym.isAliasType && relativeVariance(sym) == AnyVariance)
+// println("validate "+sym+" at "+relativeVariance(sym))
+ if (sym.isAliasType/* && relativeVariance(sym) == AnyVariance*/)
validateVariance(tp.normalize, variance)
else if (sym.variance != NoVariance) {
val v = relativeVariance(sym)
@@ -663,7 +785,7 @@ abstract class RefChecks extends InfoTransform {
class LevelInfo(val outer: LevelInfo) {
val scope: Scope = if (outer eq null) new Scope else new Scope(outer.scope)
- var maxindex: Int = Math.MIN_INT
+ var maxindex: Int = Int.MinValue
var refpos: Position = _
var refsym: Symbol = _
}
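LevelInfo records, per nesting level, the highest statement index referenced so far (maxindex) together with the offending reference (refpos, refsym); transformStat later compares a definition's own index against it to reject forward references to strict local definitions, as in this invented sketch:

    object FwdRefDemo {
      def broken = {
        println(x)  // error: forward reference extends over definition of value x
        val x = 1
        x
      }
    }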
@@ -685,10 +807,11 @@ abstract class RefChecks extends InfoTransform {
index = index + 1;
stat match {
case ClassDef(_, _, _, _) | DefDef(_, _, _, _, _, _) | ModuleDef(_, _, _) | ValDef(_, _, _, _) =>
- assert(stat.symbol != NoSymbol, stat);//debug
- if (stat.symbol.isLocal) {
- currentLevel.scope.enter(stat.symbol)
- symIndex(stat.symbol) = index;
+ //assert(stat.symbol != NoSymbol, stat);//debug
+ val sym = stat.symbol.lazyAccessorOrSelf
+ if (sym.isLocal) {
+ currentLevel.scope.enter(sym)
+ symIndex(sym) = index;
}
case _ =>
}
@@ -731,7 +854,7 @@ abstract class RefChecks extends InfoTransform {
sym = sym.info.bounds.hi.widen.typeSymbol
sym
}
- val formal = underlyingClass(fn.tpe.paramTypes.head)
+ val formal = underlyingClass(fn.tpe.params.head.tpe)
val actual = underlyingClass(args.head.tpe)
val receiver = underlyingClass(qual.tpe)
def nonSensibleWarning(what: String, alwaysEqual: Boolean) =
@@ -781,11 +904,63 @@ abstract class RefChecks extends InfoTransform {
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
pushLevel()
- enterSyms(stats)
- var index = -1
- val stats1 = stats flatMap { stat => index += 1; transformStat(stat, index) }
- popLevel()
- stats1
+ try {
+ enterSyms(stats)
+ var index = -1
+ stats flatMap { stat => index += 1; transformStat(stat, index) }
+ }
+ finally popLevel()
+ }
+
+ /** Eliminate ModuleDefs.
+ * - A top level object is replaced with their module class.
+ * - An inner object is transformed into a module var, created on first access.
+ *
+ * In both cases, this transformation returns the list of replacement trees:
+ * - Top level: the module class accessor definition
+ * - Inner: a class definition, declaration of module var, and module var accessor
+ */
+ private def eliminateModuleDefs(tree: Tree): List[Tree] = {
+ val ModuleDef(mods, name, impl) = tree
+ val sym = tree.symbol
+
+ val classSym = sym.moduleClass
+ val cdef = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType
+
+ def findOrCreateModuleVar() = localTyper.typedPos(tree.pos) {
+ lazy val createModuleVar = gen.mkModuleVarDef(sym)
+ sym.owner.info.decl(nme.moduleVarName(sym.name.toTermName)) match {
+ // In case we are dealing with local symbol then we already have
+        // If we are dealing with a local symbol, a forward-reference error
+        // has already been reported for it
+ case vsym => ValDef(vsym)
+ }
+ }
+ def createStaticModuleAccessor() = atPhase(phase.next) {
+ val method = (
+ sym.owner.newMethod(sym.pos, sym.name.toTermName)
+ setFlag (sym.flags | STABLE) resetFlag MODULE setInfo PolyType(Nil, sym.moduleClass.tpe)
+ )
+ sym.owner.info.decls enter method
+ localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym.tpe))
+ }
+ def createInnerModuleAccessor(vdef: Tree) = List(
+ vdef,
+ localTyper.typedPos(tree.pos) {
+ val vsym = vdef.symbol
+ atPhase(phase.next) {
+ val rhs = gen.newModule(sym, vsym.tpe)
+ val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs)
+ DefDef(sym, body.changeOwner(vsym -> sym))
+ }
+ }
+ )
+ transformTrees(cdef :: {
+ if (sym.isStatic)
+ if (sym.allOverriddenSymbols.isEmpty) Nil
+ else List(createStaticModuleAccessor())
+ else createInnerModuleAccessor(findOrCreateModuleVar)
+ })
}
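In source terms, the inner-object case is roughly the rewrite sketched below; the identifiers containing `$` are invented for illustration, the real transformation works on symbols and trees, and initialization-order and thread-safety details are handled elsewhere.

    // class Outer { object Inner { val n = 1 } }
    // becomes, roughly:
    class Outer {
      class Inner$impl { val n = 1 }                  // the module class
      private[this] var Inner$module: Inner$impl = _  // the module var
      def Inner: Inner$impl = {                       // accessor: creates the module on first access
        if (Inner$module eq null) Inner$module = new Inner$impl
        Inner$module
      }
    }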
/** Implements lazy value accessors:
@@ -794,338 +969,369 @@ abstract class RefChecks extends InfoTransform {
* - for all other lazy values z the accessor is a block of this form:
* { z = <rhs>; z } where z can be an identifier or a field.
*/
- def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
- case ModuleDef(mods, name, impl) =>
- val sym = tree.symbol
- val cdef = ClassDef(mods | MODULE, name, List(), impl)
- .setPos(tree.pos)
- .setSymbol(sym.moduleClass)
- .setType(NoType)
- if (sym.isStatic) {
- if (!sym.allOverriddenSymbols.isEmpty) {
- val factory = sym.owner.newMethod(sym.pos, sym.name)
- .setFlag(sym.flags | STABLE).resetFlag(MODULE)
- .setInfo(PolyType(List(), sym.moduleClass.tpe))
- sym.owner.info.decls.enter(factory)
- val ddef =
- atPhase(phase.next) {
- localTyper.typed {
- gen.mkModuleAccessDef(factory, sym.tpe)
- }
- }
- transformTrees(List(cdef, ddef))
- } else {
- List(transform(cdef))
- }
- } else {
- val vdef = localTyper.typedPos(tree.pos) { gen.mkModuleVarDef(sym) }
- val ddef =
- atPhase(phase.next) {
- localTyper.typed {
- if (sym.owner.isTrait) gen.mkModuleAccessDcl(sym)
- else gen.mkCachedModuleAccessDef(sym, vdef.symbol)
- }
- }
-
- if (sym.owner.isTrait) transformTrees(List(cdef, ddef))
- else transformTrees(List(cdef, vdef, ddef))
- }
+ private def makeLazyAccessor(tree: Tree, rhs: Tree): List[Tree] = {
+ val vsym = tree.symbol
+ assert(vsym.isTerm, vsym)
+ val hasUnitType = vsym.tpe.typeSymbol == UnitClass
+ val lazySym = vsym.lazyAccessor
+ assert(lazySym != NoSymbol, vsym)
+
+ // for traits, this is further transformed in mixins
+ val body = (
+ if (tree.symbol.owner.isTrait || hasUnitType) rhs
+ else gen.mkAssignAndReturn(vsym, rhs)
+ )
+ val lazyDef = atPos(tree.pos)(DefDef(lazySym, body.changeOwner(vsym -> lazySym)))
+ log("Made lazy def: " + lazyDef)
+
+ if (hasUnitType) List(typed(lazyDef))
+ else List(
+ typed(ValDef(vsym)),
+ atPhase(phase.next)(typed(lazyDef))
+ )
+ }
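The `{ z = <rhs>; z }` shape described in the comment above corresponds, at the source level, to roughly the split below. All names are illustrative, and the initialized-flag and synchronization that make the accessor actually lazy are added by later phases.

    class LazyShape {
      private def compute() = 42

      // lazy val z = compute()   is split, roughly, into:
      private[this] var z$value: Int = _
      def z: Int = { z$value = compute(); z$value }
    }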
+ def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
+ case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
- val tree1 = transform(tree); // important to do before forward reference check
- val ValDef(_, _, _, rhs) = tree1
- if (tree.symbol.hasFlag(LAZY)) {
- assert(tree.symbol.isTerm, tree.symbol)
- val vsym = tree.symbol
- val hasUnitType = (tree.symbol.tpe.typeSymbol == definitions.UnitClass)
- val lazyDefSym = vsym.lazyAccessor
- assert(lazyDefSym != NoSymbol, vsym)
- val ownerTransformer = new ChangeOwnerTraverser(vsym, lazyDefSym)
- val lazyDef = atPos(tree.pos)(
- DefDef(lazyDefSym, ownerTransformer(
- if (tree.symbol.owner.isTrait // for traits, this is further tranformed in mixins
- || hasUnitType) rhs
- else Block(List(
- Assign(gen.mkAttributedRef(vsym), rhs)),
- gen.mkAttributedRef(vsym)))))
- log("Made lazy def: " + lazyDef)
- if (hasUnitType)
- typed(lazyDef) :: Nil
- else
- typed(ValDef(vsym, EmptyTree)) :: typed(lazyDef) :: Nil
- } else {
- if (tree.symbol.isLocal && index <= currentLevel.maxindex && !tree.symbol.hasFlag(LAZY)) {
- if (settings.debug.value) Console.println(currentLevel.refsym);
- unit.error(currentLevel.refpos, "forward reference extends over definition of " + tree.symbol);
+ val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
+ if (tree.symbol.isLazy)
+ makeLazyAccessor(tree, rhs)
+ else {
+ val lazySym = tree.symbol.lazyAccessorOrSelf
+ if (lazySym.isLocal && index <= currentLevel.maxindex) {
+ if (settings.debug.value)
+ Console.println(currentLevel.refsym)
+ unit.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym)
}
List(tree1)
}
-
- case Import(_, _) =>
- List()
-
- case _ =>
- List(transform(tree))
+ case Import(_, _) => Nil
+ case _ => List(transform(tree))
}
- override def transform(tree: Tree): Tree = try {
-
- /* Check whether argument types conform to bounds of type parameters */
- def checkBounds(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type]): Unit =
- checkBoundsWithPos(pre, owner, tparams, argtps, tree.pos)
- def checkBoundsWithPos(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type], pos: Position): Unit = try {
- typer.infer.checkBounds(pos, pre, owner, tparams, argtps, "");
- } catch {
+ /* Check whether argument types conform to bounds of type parameters */
+ private def checkBounds(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type], pos: Position): Unit =
+ try typer.infer.checkBounds(pos, pre, owner, tparams, argtps, "")
+ catch {
case ex: TypeError =>
unit.error(pos, ex.getMessage());
if (settings.explaintypes.value) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
- List.map2(argtps, bounds)((targ, bound) => explainTypes(bound.lo, targ))
- List.map2(argtps, bounds)((targ, bound) => explainTypes(targ, bound.hi))
+ (argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
+ (argtps, bounds).zipped map ((targ, bound) => explainTypes(targ, bound.hi))
()
}
}
+ private def isIrrefutable(pat: Tree, seltpe: Type): Boolean = pat match {
+ case Apply(_, args) =>
+ val clazz = pat.tpe.typeSymbol
+ clazz == seltpe.typeSymbol &&
+ (clazz.isClass && clazz.isCase) &&
+ (args corresponds clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes)(isIrrefutable)
+ case Typed(pat, tpt) =>
+ seltpe <:< tpt.tpe
+ case Ident(nme.WILDCARD) =>
+ true
+ case Bind(_, pat) =>
+ isIrrefutable(pat, seltpe)
+ case _ =>
+ false
+ }
- def isIrrefutable(pat: Tree, seltpe: Type): Boolean = {
- val result = pat match {
- case Apply(_, args) =>
- val clazz = pat.tpe.typeSymbol;
- clazz == seltpe.typeSymbol &&
- clazz.isClass && (clazz hasFlag CASE) &&
- List.forall2(
- args,
- clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes)(isIrrefutable)
- case Typed(pat, tpt) =>
- seltpe <:< tpt.tpe
- case Ident(nme.WILDCARD) =>
- true
- case Bind(_, pat) =>
- isIrrefutable(pat, seltpe)
- case _ =>
- false
- }
- //Console.println("is irefutable? " + pat + ":" + pat.tpe + " against " + seltpe + ": " + result);//DEBUG
- result
+ /** If symbol is deprecated, and the point of reference is not enclosed
+ * in either a deprecated member or a scala bridge method, issue a warning.
+ */
+ private def checkDeprecated(sym: Symbol, pos: Position) {
+ if (sym.isDeprecated && !currentOwner.ownerChain.exists(_.isDeprecated)) {
+ val dmsg = sym.deprecationMessage
+ val msg = sym.toString + sym.locationString +" is deprecated"+
+ (if (dmsg.isDefined) ": "+ dmsg.get else "")
+ unit.deprecationWarning(pos, msg)
}
+ }
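The warning assembled here is triggered by any reference to a deprecated symbol outside a deprecated scope; a minimal, invented example:

    object OldApi { @deprecated("use NewApi instead") def run() = () }
    object Caller { def go() = OldApi.run() }
    // warning: method run in object OldApi is deprecated: use NewApi instead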
+ /** Similar to deprecation: check if the symbol is marked with @migration
+ * indicating it has changed semantics between versions.
+ */
+ private def checkMigration(sym: Symbol, pos: Position) = {
+ for (msg <- sym.migrationMessage)
+ unit.warning(pos, "%s%s has changed semantics:\n%s".format(sym, sym.locationString, msg))
+ }
- /** If symbol is deprecated and is not contained in a deprecated definition,
- * issue a deprecated warning
- */
- def checkDeprecated(sym: Symbol, pos: Position) {
- if (sym.isDeprecated && !currentOwner.ownerChain.exists(_.isDeprecated)) {
- val dmsg = sym.deprecationMessage
- val msg = sym.toString + sym.locationString +" is deprecated"+
- (if (dmsg.isDefined) ": "+ dmsg.get
- else "")
- unit.deprecationWarning(pos, msg)
- }
+ /** Check that a deprecated val or def does not override a
+ * concrete, non-deprecated method. If it does, then
+ * deprecation is meaningless.
+ */
+ private def checkDeprecatedOvers(tree: Tree) {
+ val symbol = tree.symbol
+ if (symbol.isDeprecated) {
+ val concrOvers =
+ symbol.allOverriddenSymbols.filter(sym =>
+ !sym.isDeprecated && !sym.isDeferred)
+ if(!concrOvers.isEmpty)
+ unit.deprecationWarning(
+ tree.pos,
+ symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
+ concrOvers.map(_.name.decode).mkString(" ", ", ", ""))
}
+ }
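A small, invented example of the situation this guards against: deprecating an override while the inherited member stays concrete and non-deprecated, so callers never see the deprecation.

    class Api { def compute() = 1 }
    class NewApi extends Api {
      @deprecated("use computeFast instead")
      override def compute() = 2 // warning: overrides concrete, non-deprecated symbol(s): compute
      def computeFast() = 2
    }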
+ private def isRepeatedParamArg(tree: Tree) = currentApplication match {
+ case Apply(fn, args) =>
+ !args.isEmpty && (args.last eq tree) &&
+ fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe)
+ case _ =>
+ false
+ }
+ private def checkTypeRef(tp: Type, pos: Position) = tp match {
+ case TypeRef(pre, sym, args) =>
+ checkDeprecated(sym, pos)
+ if(sym.hasFlag(JAVA))
+ sym.typeParams foreach (_.cookJavaRawInfo())
+ if (!tp.isHigherKinded)
+ checkBounds(pre, sym.owner, sym.typeParams, args, pos)
+ case _ =>
+ }
- /** Check that a deprecated val or def does not override a
- * concrete, non-deprecated method. If it does, then
- * deprecation is meaningless.
- */
- def checkDeprecatedOvers() {
- val symbol = tree.symbol
- if (symbol.isDeprecated) {
- val concrOvers =
- symbol.allOverriddenSymbols.filter(sym =>
- !sym.isDeprecated && !sym.isDeferred)
- if(!concrOvers.isEmpty)
- unit.deprecationWarning(
- tree.pos,
- symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
- concrOvers.map(_.name.decode).mkString(" ", ", ", ""))
- }
- }
+ private def checkAnnotations(tpes: List[Type], pos: Position) = tpes foreach (tp => checkTypeRef(tp, pos))
+ private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
- def isRepeatedParamArg(tree: Tree) = currentApplication match {
- case Apply(fn, args) =>
- !args.isEmpty && (args.last eq tree) &&
- fn.tpe.paramTypes.length == args.length && isRepeatedParamType(fn.tpe.paramTypes.last)
- case _ =>
- false
+ private def applyRefchecksToAnnotations(tree: Tree): Unit = {
+ def applyChecks(annots: List[AnnotationInfo]) = {
+ checkAnnotations(annots map (_.atp), tree.pos)
+ transformTrees(annots flatMap (_.args))
}
- def isCaseApply(sym : Symbol) = sym.isSourceMethod && sym.hasFlag(CASE) && sym.name == nme.apply
+ tree match {
+ case m: MemberDef =>
+ val sym = m.symbol
+ applyChecks(sym.annotations)
+ // validate implicitNotFoundMessage
+ analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
+ unit.warning(tree.pos, "Invalid implicitNotFound message for %s%s:\n%s".format(sym, sym.locationString, warn))
+ }
+ case tpt@TypeTree() =>
+ if(tpt.original != null) {
+ tpt.original foreach {
+ case dc@TypeTreeWithDeferredRefCheck() => applyRefchecksToAnnotations(dc.check()) // #2416
+ case _ =>
+ }
+ }
- def checkTypeRef(tp: Type, pos: Position) = tp match {
- case TypeRef(pre, sym, args) =>
- checkDeprecated(sym, pos)
- if (!tp.isHigherKinded)
- checkBoundsWithPos(pre, sym.owner, sym.typeParams, args, pos)
+ doTypeTraversal(tree) {
+ case AnnotatedType(annots, _, _) => applyChecks(annots)
+ case _ =>
+ }
case _ =>
}
- def checkAnnotations(tpes: List[(Type, Position)]) {
- for ((tp, pos) <- tpes) checkTypeRef(tp, pos)
+ }
+
+ private def transformCaseApply(tree: Tree, ifNot: => Unit) = {
+ val sym = tree.symbol
+
+ if (sym.isSourceMethod && sym.hasFlag(CASE) && sym.name == nme.apply)
+ toConstructor(tree.pos, tree.tpe)
+ else {
+ ifNot
+ tree
}
+ }
- val savedLocalTyper = localTyper
- val savedCurrentApplication = currentApplication
+ private def transformApply(tree: Apply): Tree = tree match {
+ case Apply(
+ Select(qual, nme.filter),
+ List(Function(
+ List(ValDef(_, pname, tpt, _)),
+ Match(_, CaseDef(pat1, _, _) :: _))))
+ if ((pname startsWith nme.CHECK_IF_REFUTABLE_STRING) &&
+ isIrrefutable(pat1, tpt.tpe) && (qual.tpe <:< tree.tpe)) =>
+
+ transform(qual)
+
+ case Apply(Select(New(tpt), name), args)
+ if (tpt.tpe.typeSymbol == ArrayClass && args.length >= 2) =>
+ unit.deprecationWarning(tree.pos,
+ "new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead")
+ val manif = {
+ var etpe = tpt.tpe
+ for (_ <- args) { etpe = etpe.typeArgs.headOption.getOrElse(NoType) }
+ if (etpe == NoType) {
+ unit.error(tree.pos, "too many dimensions for array creation")
+ Literal(Constant(null))
+ } else {
+ localTyper.getManifestTree(tree.pos, etpe, false)
+ }
+ }
+ val newResult = localTyper.typedPos(tree.pos) {
+ new ApplyToImplicitArgs(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.ofDim), args), List(manif))
+ }
+ currentApplication = tree
+ newResult
+
+ case Apply(fn, args) =>
+ checkSensible(tree.pos, fn, args)
+ currentApplication = tree
+ tree
+ }
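The multi-dimensional Array branch above points users at Array.ofDim; the replacement it suggests looks like this (a hypothetical snippet, only Array.ofDim comes from the warning text):

    object ArrayDimDemo {
      // deprecated in this era:  new Array[Int](3, 4)
      val grid: Array[Array[Int]] = Array.ofDim[Int](3, 4) // suggested replacement
    }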
+ private def transformSelect(tree: Select): Tree = {
+ val Select(qual, name) = tree
val sym = tree.symbol
- var result = tree
- def doTypeTraversal(f: (Type) => Unit) =
- if (!inPattern) {
- for (tp <- tree.tpe) f(tp)
+ /** Note: if a symbol has both @deprecated and @migration annotations and both
+ * warnings are enabled, only the first one checked here will be emitted.
+ * I assume that's a consequence of some code trying to avoid noise by suppressing
+ * warnings after the first, but I think it'd be better if we didn't have to
+ * arbitrarily choose one as more important than the other.
+ */
+ checkDeprecated(sym, tree.pos)
+ if (settings.Xmigration28.value)
+ checkMigration(sym, tree.pos)
+
+ if (currentClass != sym.owner && (sym hasFlag LOCAL)) {
+ var o = currentClass
+ var hidden = false
+ while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
+ hidden = o.isTerm || o.isPrivateLocal
+ o = o.owner
}
+ if (!hidden) escapedPrivateLocals += sym
+ }
- // Apply RefChecks to annotations. Makes sure the annotations conform to
- // type bounds (bug #935), issues deprecation warnings for symbols used
- // inside annotations.
- tree match {
- case m: MemberDef =>
- checkAnnotations(m.symbol.annotations.map(a => (a.atp, tree.pos)))
- transformTrees(m.symbol.annotations.flatMap(_.args))
- case TypeTree() => doTypeTraversal {
- case AnnotatedType(annots, _, _) =>
- checkAnnotations(annots.map(a => (a.atp, tree.pos)))
- transformTrees(annots.flatMap(_.args))
- case _ =>
+ def checkSuper(mix: Name) =
+ // term should have been eliminated by super accessors
+ assert(!(qual.symbol.isTrait && sym.isTerm && mix == nme.EMPTY.toTypeName))
+
+ transformCaseApply(tree,
+ qual match {
+ case Super(_, mix) => checkSuper(mix)
+ case _ =>
}
- case _ =>
+ )
+ }
+ private def transformIf(tree: If): Tree = {
+ val If(cond, thenpart, elsepart) = tree
+ def unitIfEmpty(t: Tree): Tree =
+ if (t == EmptyTree) Literal(()).setPos(tree.pos).setType(UnitClass.tpe) else t
+
+ cond.tpe match {
+ case ConstantType(value) =>
+ val res = if (value.booleanValue) thenpart else elsepart
+ unitIfEmpty(res)
+ case _ => tree
}
+ }
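transformIf folds conditionals whose condition has a constant type, so a guard on a `final val` literal drops the dead branch already at refchecks. An invented sketch:

    object ConstantIfDemo {
      final val Tracing = false     // constant type Boolean(false)
      def trace(msg: String): Unit =
        if (Tracing) println(msg)   // folded: the missing else-branch becomes the unit literal ()
    }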
- tree match {
- case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(definitions.NativeAttr) =>
- tree.symbol.resetFlag(DEFERRED)
- result = transform(treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
- typed(Apply(gen.mkAttributedRef(definitions.Predef_error), List(Literal("native method stub"))))))
-
- case DefDef(_, _, _, _, _, _) =>
- checkDeprecatedOvers()
-
- case ValDef(_, _, _, _) =>
- checkDeprecatedOvers()
-
- case Template(parents, self, body) =>
- localTyper = localTyper.atOwner(tree, currentOwner)
- validateBaseTypes(currentOwner)
- checkDefaultsInOverloaded(currentOwner)
- val bridges = addVarargBridges(currentOwner)
- checkAllOverrides(currentOwner)
- if (bridges.nonEmpty)
- result = treeCopy.Template(tree, parents, self, body ::: bridges)
-
- case TypeTree() =>
- val existentialParams = new ListBuffer[Symbol]
- doTypeTraversal { // check all bounds, except those that are
- // existential type parameters
- case ExistentialType(tparams, tpe) =>
- existentialParams ++= tparams
- case t: TypeRef =>
- val exparams = existentialParams.toList
- val wildcards = exparams map (_ => WildcardType)
- checkTypeRef(t.subst(exparams, wildcards), tree.pos)
- case _ =>
- }
+ override def transform(tree: Tree): Tree = {
+ val savedLocalTyper = localTyper
+ val savedCurrentApplication = currentApplication
+ try {
+ val sym = tree.symbol
- case TypeApply(fn, args) =>
- checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe))
- if (isCaseApply(sym)) result = toConstructor(tree.pos, tree.tpe)
-
- case Apply(
- Select(qual, nme.filter),
- List(Function(
- List(ValDef(_, pname, tpt, _)),
- Match(_, CaseDef(pat1, _, _) :: _))))
- if ((pname startsWith nme.CHECK_IF_REFUTABLE_STRING) &&
- isIrrefutable(pat1, tpt.tpe) && (qual.tpe <:< tree.tpe)) =>
- result = qual
-
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe.typeSymbol == ArrayClass && args.length >= 2) =>
- unit.deprecationWarning(tree.pos,
- "new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead")
- val manif = {
- var etpe = tpt.tpe
- for (_ <- args) { etpe = etpe.typeArgs.headOption.getOrElse(NoType) }
- if (etpe == NoType) {
- unit.error(tree.pos, "too many dimensions for array creation")
- Literal(Constant(null))
- } else {
- localTyper.getManifestTree(tree.pos, etpe, false)
+ // Apply RefChecks to annotations. Makes sure the annotations conform to
+ // type bounds (bug #935), issues deprecation warnings for symbols used
+ // inside annotations.
+ applyRefchecksToAnnotations(tree)
+ var result: Tree = tree match {
+ case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
+ tree.symbol.resetFlag(DEFERRED)
+ transform(treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
+ typed(Apply(gen.mkAttributedRef(Predef_error), List(Literal("native method stub"))))))
+
+ case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
+ checkDeprecatedOvers(tree)
+ tree
+
+ case Template(parents, self, body) =>
+ localTyper = localTyper.atOwner(tree, currentOwner)
+ validateBaseTypes(currentOwner)
+ checkDefaultsInOverloaded(currentOwner)
+ val bridges = addVarargBridges(currentOwner)
+ checkAllOverrides(currentOwner)
+
+ if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
+ else tree
+
+ case dc@TypeTreeWithDeferredRefCheck() => assert(false, "adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc"); dc
+ case tpt@TypeTree() =>
+ if(tpt.original != null) {
+ tpt.original foreach {
+ case dc@TypeTreeWithDeferredRefCheck() =>
+ transform(dc.check()) // #2416 -- only call transform to do refchecks, but discard results
+ // tpt has the right type if the deferred checks are ok
+ case _ =>
+ }
}
- }
- result = localTyper.typedPos(tree.pos) {
- Apply(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.ofDim), args), List(manif))
- }
- currentApplication = tree
- case Apply(fn, args) =>
- checkSensible(tree.pos, fn, args)
- currentApplication = tree
+ val existentialParams = new ListBuffer[Symbol]
+ doTypeTraversal(tree) { // check all bounds, except those that are
+ // existential type parameters
+ case ExistentialType(tparams, tpe) =>
+ existentialParams ++= tparams
+ case t: TypeRef =>
+ val exparams = existentialParams.toList
+ val wildcards = exparams map (_ => WildcardType)
+ checkTypeRef(t.subst(exparams, wildcards), tree.pos)
+ case _ =>
+ }
+ tree
- case If(cond, thenpart, elsepart) =>
- cond.tpe match {
- case ConstantType(value) =>
- result = if (value.booleanValue) thenpart else elsepart;
- if (result == EmptyTree) result = Literal(()).setPos(tree.pos).setType(UnitClass.tpe)
- case _ =>
- }
+ case TypeApply(fn, args) =>
+ checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe), tree.pos)
+ transformCaseApply(tree, ())
+
+ case x @ Apply(_, _) =>
+ transformApply(x)
- case New(tpt) =>
- enterReference(tree.pos, tpt.tpe.typeSymbol)
+ case x @ If(_, _, _) =>
+ transformIf(x)
- case Typed(expr, tpt @ Ident(name)) if (name == nme.WILDCARD_STAR.toTypeName) =>
- if (!isRepeatedParamArg(tree))
+ case New(tpt) =>
+ enterReference(tree.pos, tpt.tpe.typeSymbol)
+ tree
+
+ case Typed(expr, tpt @ Ident(name)) if name == nme.WILDCARD_STAR.toTypeName && !isRepeatedParamArg(tree) =>
unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
"(such annotations are only allowed in arguments to *-parameters)")
+ tree
- case Ident(name) =>
- if (isCaseApply(sym))
- result = toConstructor(tree.pos, tree.tpe)
- else if (name != nme.WILDCARD && name != nme.WILDCARD_STAR.toTypeName) {
- assert(sym != NoSymbol, tree)//debug
- enterReference(tree.pos, sym)
- }
+ case Ident(name) =>
+ transformCaseApply(tree,
+ if (name != nme.WILDCARD && name != nme.WILDCARD_STAR.toTypeName) {
+ assert(sym != NoSymbol, tree) //debug
+ enterReference(tree.pos, sym)
+ }
+ )
- case Select(qual, name) =>
- checkDeprecated(sym, tree.pos)
- if (currentClass != sym.owner && (sym hasFlag LOCAL)) {
- var o = currentClass
- var hidden = false
- while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
- hidden = o.isTerm || o.isPrivateLocal
- o = o.owner
- }
- if (!hidden) escapedPrivateLocals += sym
- }
- if (isCaseApply(sym))
- result = toConstructor(tree.pos, tree.tpe)
- else qual match {
- case Super(qualifier, mix) =>
- val base = qual.symbol;
- //Console.println("super: " + tree + " in " + base);//DEBUG
- assert(!(base.isTrait && sym.isTerm && mix == nme.EMPTY.toTypeName)) // term should have been eliminated by super accessors
- case _ =>
- }
- case _ =>
- }
- result = result match {
- case CaseDef(pat, guard, body) =>
- inPattern = true
- val pat1 = transform(pat)
- inPattern = false
- treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
- case _ =>
- super.transform(result)
- }
- result match {
- case ClassDef(_, _, _, _)
- | TypeDef(_, _, _, _) =>
- if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
- varianceValidator.traverse(result)
- case _ =>
+ case x @ Select(_, _) =>
+ transformSelect(x)
+
+ case _ => tree
+ }
+ result = result match {
+ case CaseDef(pat, guard, body) =>
+ inPattern = true
+ val pat1 = transform(pat)
+ inPattern = false
+ treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ case _ =>
+ super.transform(result)
+ }
+ result match {
+ case ClassDef(_, _, _, _)
+ | TypeDef(_, _, _, _) =>
+ if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
+ varianceValidator.traverse(result)
+ case _ =>
+ }
+ result
+ } catch {
+ case ex: TypeError =>
+ if (settings.debug.value) ex.printStackTrace();
+ unit.error(tree.pos, ex.getMessage())
+ tree
+ } finally {
+ localTyper = savedLocalTyper
+ currentApplication = savedCurrentApplication
}
- localTyper = savedLocalTyper
- currentApplication = savedCurrentApplication
- result
- } catch {
- case ex: TypeError =>
- if (settings.debug.value) ex.printStackTrace();
- unit.error(tree.pos, ex.getMessage())
- tree
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 5001f8b9bf..76cdf2e9c8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -1,15 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import util.Position
/** This phase adds super accessors for all super calls that
* either appear in a trait or have as a target a member of some outer class.
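A minimal, invented example of source that needs such an accessor: a super call inside a trait cannot be compiled as a direct super invocation, so the phase introduces a forwarder in the trait.

    class Base { def msg = "base" }
    trait Loud extends Base {
      override def msg = super.msg + "!" // this super call gets a generated super accessor
    }
    class LoudBase extends Base with Loud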
@@ -27,7 +25,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// inherits abstract value `global' and class `Phase' from Transform
import global._
- import typer.typed
+ import definitions.{ IntClass, UnitClass, ByNameParamClass, Any_asInstanceOf, Object_## }
/** the following two members override abstract members in Transform */
val phaseName: String = "superaccessors"
@@ -38,33 +36,28 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
class SuperAccTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
private var validCurrentOwner = true
private var accDefs: List[(Symbol, ListBuffer[Tree])] = List()
- private val typer = analyzer.newTyper(analyzer.rootContext(unit))
private def accDefBuf(clazz: Symbol) = accDefs find (_._1 == clazz) match {
case Some((_, buf)) => buf
case None => throw new AssertionError("no acc def buf for "+clazz)
}
-/*
- private def transformArgs(args: List[Tree], formals: List[Type]) = {
- if (!formals.isEmpty && formals.last.symbol == definitions.ByNameParamClass)
- ((args take (formals.length - 1) map transform) :::
- withInvalidOwner { args drop (formals.length - 1) map transform })
- else
- args map transform
- }
-*/
- private def transformArgs(args: List[Tree], formals: List[Type]) =
- List.map2(args, formals){ (arg, formal) =>
- if (formal.typeSymbol == definitions.ByNameParamClass)
- withInvalidOwner { checkPackedConforms(transform(arg), formal.typeArgs.head) }
+
+ private def transformArgs(args: List[Tree], params: List[Symbol]) =
+ ((args, params).zipped map { (arg, param) =>
+ if (param.tpe.typeSymbol == ByNameParamClass)
+ withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
else transform(arg)
- } :::
- (args drop formals.length map transform)
+ }) :::
+ (args drop params.length map transform)
private def checkPackedConforms(tree: Tree, pt: Type): Tree = {
if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) {
- val packed = typer.packedType(tree, NoSymbol)
- if (!(packed <:< pt)) typer.infer.typeError(tree.pos, packed, pt)
+ val packed = localTyper.packedType(tree, NoSymbol)
+ if (!(packed <:< pt)) {
+ val errorContext = localTyper.context.make(localTyper.context.tree)
+ errorContext.reportGeneralErrors = true
+ analyzer.newTyper(errorContext).infer.typeError(tree.pos, packed, pt)
+ }
}
tree
}
@@ -81,15 +74,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
other = linked.info.decl(sym.name.toTermName).filter(_.isModule)
if (other != NoSymbol)
unit.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+
- "\nand its companion "+sym.owner.linkedModuleOfClass+" also defines "+
+ "\nand its companion "+sym.owner.companionModule+" also defines "+
other)
}
}
- private def transformSuperSelect(tree: Tree) = tree match {
- case Select(sup @ Super(_, mix), name) =>
+ private def transformSuperSelect(tree: Tree): Tree = tree match {
+ // Intercept super.## and translate it to this.##
+ // which is fine since it's final.
+ case Select(sup @ Super(_, _), nme.HASHHASH) =>
+ Select(gen.mkAttributedThis(sup.symbol), Object_##) setType IntClass.tpe
+
+ case Select(sup @ Super(_, mix), name) =>
val sym = tree.symbol
val clazz = sup.symbol
+
if (sym.isDeferred) {
val member = sym.overridingSymbol(clazz);
if (mix != nme.EMPTY.toTypeName || member == NoSymbol ||
@@ -115,7 +114,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
superAcc.setInfo(superAccTpe.cloneInfo(superAcc))
//println("creating super acc "+superAcc+":"+superAcc.tpe)//DEBUG
clazz.info.decls enter superAcc;
- accDefBuf(clazz) += typed(DefDef(superAcc, EmptyTree))
+ accDefBuf(clazz) += typers(clazz).typed(DefDef(superAcc, EmptyTree))
}
atPos(sup.pos) {
Select(gen.mkAttributedThis(clazz), superAcc) setType tree.tpe;
@@ -128,112 +127,120 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
tree
}
- override def transform(tree: Tree): Tree = try { tree match {
- case ClassDef(_, _, _, _) =>
- checkCompanionNameClashes(tree.symbol)
- val decls = tree.symbol.info.decls
- for (sym <- decls.toList) {
- if (sym.privateWithin.isClass && !sym.privateWithin.isModuleClass &&
- !sym.hasFlag(EXPANDEDNAME) && !sym.isConstructor) {
- decls.unlink(sym)
- sym.expandName(sym.privateWithin)
- decls.enter(sym)
- }
- }
- super.transform(tree)
- case ModuleDef(_, _, _) =>
- checkCompanionNameClashes(tree.symbol)
- super.transform(tree)
- case Template(parents, self, body) =>
- val ownAccDefs = new ListBuffer[Tree];
- accDefs = (currentOwner, ownAccDefs) :: accDefs;
-
- // ugly hack... normally, the following line should not be
- // necessary, the 'super' method taking care of that. but because
- // that one is iterating through parents (and we dont want that here)
- // we need to inline it.
- curTree = tree
- val body1 = atOwner(currentOwner) { transformTrees(body) }
- accDefs = accDefs.tail;
- treeCopy.Template(tree, parents, self, ownAccDefs.toList ::: body1);
-
- case TypeApply(sel @ Select(This(_), name), args) =>
- val sym = tree.symbol
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for " + tree);
- transform(makeAccessor(sel.asInstanceOf[Select], args))
- } else
- tree
+ override def transform(tree: Tree): Tree = {
+ val sym = tree.symbol
- case Select(qual @ This(_), name) =>
- val sym = tree.symbol
- if ((sym hasFlag PARAMACCESSOR) && (sym.alias != NoSymbol)) {
- val result = typed {
- Select(
- Super(qual.symbol, nme.EMPTY.toTypeName/*qual.symbol.info.parents.head.symbol.name*/) setPos qual.pos,
- sym.alias) setPos tree.pos
- }
+ def mayNeedProtectedAccessor(sel: Select, args: List[Tree], goToSuper: Boolean) =
+ if (needsProtectedAccessor(sym, tree.pos)) {
if (settings.debug.value)
- Console.println("alias replacement: " + tree + " ==> " + result);//debug
- transformSuperSelect(result)
- } else {
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for " + tree);
- transform(makeAccessor(tree.asInstanceOf[Select], List(EmptyTree)))
- } else
- tree
- }
- case Select(sup @ Super(_, mix), name) =>
- val sym = tree.symbol
- if (sym.isValue && !sym.isMethod || sym.hasFlag(ACCESSOR)) {
- unit.error(tree.pos, "super may be not be used on "+
- (if (sym.hasFlag(ACCESSOR)) sym.accessed else sym))
- }
- transformSuperSelect(tree)
+ log("Adding protected accessor for " + tree)
- case TypeApply(sel @ Select(qual, name), args) =>
- val sym = tree.symbol
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for tree: " + tree);
- transform(makeAccessor(sel.asInstanceOf[Select], args))
- } else
+ transform(makeAccessor(sel, args))
+ }
+ else if (goToSuper) super.transform(tree)
+ else tree
+
+ try tree match {
+ case ClassDef(_, _, _, _) =>
+ checkCompanionNameClashes(sym)
+ val decls = sym.info.decls
+ for (s <- decls.toList) {
+ if (s.privateWithin.isClass && !s.isProtected && !s.privateWithin.isModuleClass &&
+ !s.hasFlag(EXPANDEDNAME) && !s.isConstructor) {
+ decls.unlink(s)
+ s.expandName(s.privateWithin)
+ decls.enter(s)
+ }
+ }
+ if (settings.verbose.value && onlyPresentation && !sym.isAnonymousClass) {
+ println("========== scaladoc of "+sym+" =============================")
+ println(toJavaDoc(expandedDocComment(sym)))
+ for (member <- sym.info.members) {
+ println(member+":"+sym.thisType.memberInfo(member)+"\n"+
+ toJavaDoc(expandedDocComment(member, sym)))
+ for ((useCase, comment, pos) <- useCases(member, sym)) {
+ println("usecase "+useCase+":"+useCase.info)
+ println(toJavaDoc(comment))
+ }
+ }
+ }
super.transform(tree)
-
- case Select(qual, name) =>
- val sym = tree.symbol
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for tree: " + tree);
- transform(makeAccessor(tree.asInstanceOf[Select], List(EmptyTree)))
- } else
+ case ModuleDef(_, _, _) =>
+ checkCompanionNameClashes(sym)
super.transform(tree)
+ case Template(parents, self, body) =>
+ val ownAccDefs = new ListBuffer[Tree];
+ accDefs = (currentOwner, ownAccDefs) :: accDefs;
+
+ // ugly hack... normally, the following line should not be
+ // necessary, the 'super' method taking care of that. but because
+      // that one is iterating through parents (and we don't want that here)
+ // we need to inline it.
+ curTree = tree
+ val body1 = atOwner(currentOwner) { transformTrees(body) }
+ accDefs = accDefs.tail;
+ treeCopy.Template(tree, parents, self, ownAccDefs.toList ::: body1);
+
+ case TypeApply(sel @ Select(This(_), name), args) =>
+ mayNeedProtectedAccessor(sel, args, false)
+
+ case sel @ Select(qual @ This(_), name) =>
+ if ((sym hasFlag PARAMACCESSOR)
+ && (sym.alias != NoSymbol)) {
+ val result = localTyper.typed {
+ Select(
+ Super(qual.symbol, nme.EMPTY.toTypeName/*qual.symbol.info.parents.head.symbol.name*/) setPos qual.pos,
+ sym.alias) setPos tree.pos
+ }
+ if (settings.debug.value)
+ Console.println("alias replacement: " + tree + " ==> " + result);//debug
+ localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
+ }
+ else mayNeedProtectedAccessor(sel, List(EmptyTree), false)
- case Assign(lhs @ Select(qual, name), rhs) =>
- if (lhs.symbol.isVariable &&
- lhs.symbol.hasFlag(JAVA) &&
- needsProtectedAccessor(lhs.symbol, tree.pos)) {
- if (settings.debug.value) log("Adding protected setter for " + tree)
- val setter = makeSetter(lhs);
- if (settings.debug.value)
- log("Replaced " + tree + " with " + setter);
- transform(typed(Apply(setter, List(qual, rhs))))
- } else
+ case Select(sup @ Super(_, mix), name) =>
+ if (sym.isValue && !sym.isMethod || sym.hasFlag(ACCESSOR)) {
+          unit.error(tree.pos, "super may not be used on "+
+ (if (sym.hasFlag(ACCESSOR)) sym.accessed else sym))
+ }
+ transformSuperSelect(tree)
+
+ case TypeApply(sel @ Select(qual, name), args) =>
+ mayNeedProtectedAccessor(sel, args, true)
+
+ case sel @ Select(qual, name) =>
+ mayNeedProtectedAccessor(sel, List(EmptyTree), true)
+
+ case Assign(lhs @ Select(qual, name), rhs) =>
+ if (lhs.symbol.isVariable &&
+ lhs.symbol.hasFlag(JAVA) &&
+ needsProtectedAccessor(lhs.symbol, tree.pos)) {
+ if (settings.debug.value) log("Adding protected setter for " + tree)
+ val setter = makeSetter(lhs);
+ if (settings.debug.value)
+ log("Replaced " + tree + " with " + setter);
+ transform(localTyper.typed(Apply(setter, List(qual, rhs))))
+ } else
+ super.transform(tree)
+
+ case Apply(fn, args) =>
+ assert(fn.tpe != null, tree)
+ treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.params))
+ case Function(vparams, body) =>
+ withInvalidOwner {
+ treeCopy.Function(tree, vparams, transform(body))
+ }
+ case _ =>
super.transform(tree)
+ }
+ catch {
+ case ex : AssertionError =>
+ if (sym != null && sym != NoSymbol)
+ Console.println("TRANSFORM: " + tree.symbol.sourceFile)
- case Apply(fn, args) =>
- assert(fn.tpe != null, tree)
- treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.paramTypes))
- case Function(vparams, body) =>
- withInvalidOwner {
- treeCopy.Function(tree, vparams, transform(body))
- }
- case _ =>
- super.transform(tree)
- }} catch {
- case ex : AssertionError =>
- if (tree.symbol != null && tree.symbol != NoSymbol)
- Console.println("TRANSFORM: " + tree.symbol.sourceFile)
- Console.println("TREE: " + tree)
- throw ex
+ Console.println("TREE: " + tree)
+ throw ex
+ }
}
override def atOwner[A](owner: Symbol)(trans: => A): A = {
@@ -265,12 +272,13 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case _ => Nil
}
+
assert(clazz != NoSymbol, sym)
if (settings.debug.value) log("Decided for host class: " + clazz)
val accName = nme.protName(sym.originalName)
val hasArgs = sym.tpe.paramTypes != Nil
- val memberType = sym.tpe // transform(sym.tpe)
+ val memberType = refchecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
// if the result type depends on the this type of an enclosing class, the accessor
// has to take an object of exactly this type, otherwise it's more general
@@ -295,7 +303,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val obj = protAcc.paramss.head.head // receiver
protAcc.paramss.tail.zip(allParamTypes(sym.tpe)).foldLeft(Select(Ident(obj), sym): Tree) (
(fun, pvparams) => {
- Apply(fun, (List.map2(pvparams._1, pvparams._2) { (v, origTpe) => makeArg(v, obj, origTpe) } ))
+ Apply(fun, (pvparams._1, pvparams._2).zipped map (makeArg(_, obj, _)))
})
})
@@ -311,7 +319,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
if (settings.debug.value)
log("Replaced " + tree + " with " + res)
- if (hasArgs) typer.typedOperator(res) else typer.typed(res)
+ if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
}
/** Adapt the given argument in call to protected member.
@@ -342,7 +350,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
if (isDependentType) {
val preciseTpe = expectedTpe.asSeenFrom(singleType(NoPrefix, obj), ownerClass) //typeRef(singleType(NoPrefix, obj), v.tpe.symbol, List())
- TypeApply(Select(res, definitions.Any_asInstanceOf),
+ TypeApply(Select(res, Any_asInstanceOf),
List(TypeTree(preciseTpe)))
} else res
}
@@ -368,7 +376,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
if (protAcc == NoSymbol) {
protAcc = clazz.newMethod(field.pos, nme.protSetterName(field.originalName))
protAcc.setInfo(MethodType(protAcc.newSyntheticValueParams(List(clazz.typeOfThis, field.tpe)),
- definitions.UnitClass.tpe))
+ UnitClass.tpe))
clazz.info.decls.enter(protAcc)
val code = DefDef(protAcc, {
val obj :: value :: Nil = protAcc.paramss.head;
@@ -400,34 +408,43 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
* classes, this has to be signaled as error.
*/
private def needsProtectedAccessor(sym: Symbol, pos: Position): Boolean = {
- def errorRestriction(msg: String) {
- unit.error(pos, "Implementation restriction: " + msg)
+ val clazz = currentOwner.enclClass
+ def accessibleThroughSubclassing =
+ validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait
+
+ def packageAccessBoundry(sym: Symbol) = {
+ val b = sym.accessBoundary(sym.owner)
+ if (b.isPackageClass) b
+ else b.enclosingPackageClass
}
- val res = /* settings.debug.value && */
- ((sym hasFlag PROTECTED)
- && !sym.owner.isPackageClass
- && (!validCurrentOwner || !(currentOwner.enclClass.thisSym isSubClass sym.owner))
- && (enclPackage(sym.owner) != enclPackage(currentOwner))
- && (enclPackage(sym.owner) == enclPackage(sym.accessBoundary(sym.owner))))
-
- if (res) {
- val host = hostForAccessorOf(sym, currentOwner.enclClass)
- // bug #1393 - as things stand now the "host" could be a package.
- if (host.isPackageClass) false
- else if (host.thisSym != host) {
- if (host.thisSym.tpe.typeSymbol.hasFlag(JAVA))
- errorRestriction(currentOwner.enclClass + " accesses protected " + sym
- + " from self type " + host.thisSym.tpe)
- false
- } else res
- } else res
+ val isCandidate = (
+ sym.isProtected
+ && sym.isJavaDefined
+ && !sym.definedInPackage
+ && !accessibleThroughSubclassing
+ && (sym.owner.enclosingPackageClass != currentOwner.enclosingPackageClass)
+ && (sym.owner.enclosingPackageClass == packageAccessBoundry(sym))
+ )
+ val host = hostForAccessorOf(sym, clazz)
+ def isSelfType = !(host.tpe <:< host.typeOfThis) && {
+ if (host.typeOfThis.typeSymbol.isJavaDefined)
+ unit.error(pos, "Implementation restriction: " +
+ "%s accesses protected %s from self type %s.".format(clazz, sym, host.typeOfThis)
+ )
+ true
+ }
+ def isJavaProtected = host.isTrait && sym.isJavaDefined && {
+ unit.error(pos, "Implementation restriction: " +
+ """|%s accesses protected %s inside a concrete trait method.
+ |Add an accessor in a class extending %s as a workaround.""".stripMargin.format(
+ clazz, sym, sym.enclClass)
+ )
+ true
+ }
+ isCandidate && !host.isPackageClass && !isSelfType && !isJavaProtected
}
- /** Return the enclosing package of the given symbol. */
- private def enclPackage(sym: Symbol): Symbol =
- if ((sym == NoSymbol) || sym.isPackageClass) sym else enclPackage(sym.owner)
-
/** Return the innermost enclosing class C of referencingClass for which either
* of the following holds:
* - C is a subclass of sym.owner or
@@ -436,11 +453,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
private def hostForAccessorOf(sym: Symbol, referencingClass: Symbol): Symbol = {
if (referencingClass.isSubClass(sym.owner.enclClass)
|| referencingClass.thisSym.isSubClass(sym.owner.enclClass)
- || enclPackage(referencingClass) == enclPackage(sym.owner)) {
+ || referencingClass.enclosingPackageClass == sym.owner.enclosingPackageClass) {
assert(referencingClass.isClass)
referencingClass
- } else
+ } else if(referencingClass.owner.enclClass != NoSymbol)
hostForAccessorOf(sym, referencingClass.owner.enclClass)
+ else referencingClass
}
/** Is 'tpe' the type of a member of an enclosing class? */
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index a3f628ebb4..60374b3a55 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
@@ -88,14 +87,16 @@ trait SyntheticMethods extends ast.TreeDSL {
typer typed { DEF(method) === LIT(nargs) }
}
- def productElementMethod(accs: List[Symbol]): Tree = {
- val symToTpe = makeTypeConstructor(List(IntClass.tpe), AnyClass.tpe)
- val method = syntheticMethod(nme.productElement, 0, symToTpe)
+ /** Common code for productElement and (currently disabled) productElementName
+ */
+ def perElementMethod(accs: List[Symbol], methodName: Name, resType: Type, caseFn: Symbol => Tree): Tree = {
+ val symToTpe = makeTypeConstructor(List(IntClass.tpe), resType)
+ val method = syntheticMethod(methodName, 0, symToTpe)
val arg = method ARG 0
- val default = List( DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg) )
+ val default = List(DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg))
val cases =
for ((sym, i) <- accs.zipWithIndex) yield
- CASE(LIT(i)) ==> Ident(sym)
+ CASE(LIT(i)) ==> caseFn(sym)
typer typed {
DEF(method) === {
@@ -103,6 +104,11 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
}
+ def productElementMethod(accs: List[Symbol]): Tree =
+ perElementMethod(accs, nme.productElement, AnyClass.tpe, x => Ident(x))
+
+ // def productElementNameMethod(accs: List[Symbol]): Tree =
+ // perElementMethod(accs, nme.productElementName, StringClass.tpe, x => Literal(x.name.toString))
def moduleToStringMethod: Tree = {
val method = syntheticMethod(nme.toString_, FINAL, makeNoArgConstructor(StringClass.tpe))
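For orientation, the productElement that perElementMethod synthesizes for a case class such as Person(name: String, age: Int) behaves roughly like the hand-written sketch below (Product plumbing omitted; PersonLike is an illustrative name, not generated code):

    class PersonLike(val name: String, val age: Int) {
      def productArity: Int = 2
      def productElement(n: Int): Any = n match {
        case 0 => name
        case 1 => age
        case _ => throw new IndexOutOfBoundsException(n.toString)
      }
    }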
@@ -123,8 +129,8 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
- def hashCodeTarget: Name =
- if (settings.Yjenkins.value) "hashCodeJenkins" else nme.hashCode_
+ def hashCodeTarget: Name = nme.hashCode_
+ // if (settings.Yjenkins.value) "hashCodeJenkins" else nme.hashCode_
def equalsSym = syntheticMethod(
nme.equals_, 0, makeTypeConstructor(List(AnyClass.tpe), BooleanClass.tpe)
@@ -173,16 +179,18 @@ trait SyntheticMethods extends ast.TreeDSL {
// returns (Apply, Bind)
def makeTrees(acc: Symbol, cpt: Type): (Tree, Bind) = {
- val varName = context.unit.fresh.newName(clazz.pos.focus, acc.name + "$")
- val (eqMethod, binding) =
- if (isRepeatedParamType(cpt)) (nme.sameElements, Star(WILD()))
- else (nme.EQ , WILD() )
-
- ((varName DOT eqMethod)(Ident(acc)), varName BIND binding)
+ val varName = context.unit.fresh.newName(clazz.pos.focus, acc.name + "$")
+ val isRepeated = isRepeatedParamType(cpt)
+ val binding = if (isRepeated) Star(WILD()) else WILD()
+ val eqMethod: Tree =
+ if (isRepeated) gen.mkRuntimeCall(nme.sameElements, List(Ident(varName), Ident(acc)))
+ else (varName DOT nme.EQ)(Ident(acc))
+
+ (eqMethod, varName BIND binding)
}
// Creates list of parameters and a guard for each
- val (guards, params) = List.map2(clazz.caseFieldAccessors, constrParamTypes)(makeTrees) unzip
+ val (guards, params) = (clazz.caseFieldAccessors, constrParamTypes).zipped map makeTrees unzip
// Verify with canEqual method before returning true.
def canEqualCheck() = {
@@ -219,6 +227,7 @@ trait SyntheticMethods extends ast.TreeDSL {
var newAcc = tree.symbol.cloneSymbol
newAcc.name = context.unit.fresh.newName(tree.symbol.pos.focus, tree.symbol.name + "$")
newAcc setFlag SYNTHETIC resetFlag (ACCESSOR | PARAMACCESSOR | PRIVATE)
+ newAcc.privateWithin = NoSymbol
newAcc = newAcc.owner.info.decls enter newAcc
val result = typer typed { DEF(newAcc) === rhs.duplicate }
log("new accessor method " + result)
@@ -262,6 +271,9 @@ trait SyntheticMethods extends ast.TreeDSL {
Product_productPrefix -> (() => productPrefixMethod),
Product_productArity -> (() => productArityMethod(accessors.length)),
Product_productElement -> (() => productElementMethod(accessors)),
+ // This is disabled pending a reimplementation which doesn't add any
+ // weight to case classes (i.e. inspects the bytecode.)
+ // Product_productElementName -> (() => productElementNameMethod(accessors)),
Product_canEqual -> (() => canEqualMethod)
)
}
@@ -278,14 +290,20 @@ trait SyntheticMethods extends ast.TreeDSL {
ts += impl()
}
- if (clazz.isModuleClass && hasSerializableAnnotation(clazz)) {
- // If you serialize a singleton and then deserialize it twice,
- // you will have two instances of your singleton, unless you implement
- // the readResolve() method (see http://www.javaworld.com/javaworld/
- // jw-04-2003/jw-0425-designpatterns_p.html)
- // question: should we do this for all serializable singletons, or (as currently done)
- // only for those that carry a @serializable annotation?
- if (!hasImplementation(nme.readResolve)) ts += readResolveMethod
+ if (clazz.isModuleClass) {
+ if (!hasSerializableAnnotation(clazz)) {
+ val comp = companionClassOf(clazz, context)
+ if (comp.hasFlag(Flags.CASE) || hasSerializableAnnotation(comp))
+ clazz addAnnotation AnnotationInfo(SerializableAttr.tpe, Nil, Nil)
+ }
+
+ /** If you serialize a singleton and then deserialize it twice,
+ * you will have two instances of your singleton, unless you implement
+ * the readResolve() method (see http://www.javaworld.com/javaworld/
+ * jw-04-2003/jw-0425-designpatterns_p.html)
+ */
+ if (hasSerializableAnnotation(clazz) && !hasImplementation(nme.readResolve))
+ ts += readResolveMethod
}
} catch {
case ex: TypeError =>
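The readResolve idiom the comment above refers to looks roughly like this in user code (a sketch using the 2.8-era @serializable annotation; Settings is an illustrative name, and the synthetic method the patch emits is the automatic equivalent):

    @serializable
    object Settings {
      val port = 8080
      // Without this, deserializing Settings twice yields two distinct instances;
      // returning the existing singleton preserves object identity.
      private def readResolve(): AnyRef = Settings
    }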
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 3e8e803c13..de9da9d814 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -1,14 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.util.{Position, NoPosition}
abstract class TreeCheckers extends Analyzer {
@@ -42,7 +40,6 @@ abstract class TreeCheckers extends Analyzer {
override def newTyper(context: Context): Typer = new TreeChecker(context)
class TreeChecker(context0: Context) extends Typer(context0) {
-
import infer._
override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
@@ -50,104 +47,80 @@ abstract class TreeCheckers extends Analyzer {
case EmptyTree | TypeTree() =>
;
case _ =>
- try {
- if (!tpeOfTree.contains(tree)) {
- tpeOfTree.update(tree, tree.tpe)
- tree.tpe = null
- }
- val newtree = super.typed(tree, mode, pt);
- if ((newtree ne tree) && !newtree.isInstanceOf[Literal])
- error(tree.pos, "trees differ\n old: " + tree + " [" + tree.getClass() +
- "]\n new: " + newtree + " [" + newtree.getClass() + "]")
- } catch {
- case ex: Throwable =>
- Console.println("exception while typing "+tree)
- throw ex
+ if (!tpeOfTree.contains(tree)) {
+ tpeOfTree.update(tree, tree.tpe)
+ tree.tpe = null
}
+ val newtree = super.typed(tree, mode, pt);
+ if ((newtree ne tree) && !newtree.isInstanceOf[Literal])
+ error(tree.pos, "trees differ\n old: " + tree + " [" + tree.getClass() +
+ "]\n new: " + newtree + " [" + newtree.getClass() + "]")
}
tree
}
object precheck extends Traverser {
override def traverse(tree: Tree) {
- try {
- tree match {
- case DefDef(_, _, _, _, _, _) =>
- if (tree.symbol.hasFlag(ACCESSOR) &&
- !tree.symbol.isDeferred &&
- !tree.symbol.tpe.resultType.isInstanceOf[ConstantType]) {
- assert(tree.symbol.accessed != NoSymbol, tree.symbol)
- assert(tree.symbol.accessed.getter(tree.symbol.owner) == tree.symbol ||
- tree.symbol.accessed.setter(tree.symbol.owner) == tree.symbol)
- }
- case ValDef(_, _, _, _) =>
- if (tree.symbol.hasGetter) {
- assert(tree.symbol.getter(tree.symbol.owner) != NoSymbol, tree.symbol)
- }
- case Apply(_, args) =>
- assert(args forall (EmptyTree !=))
- case Select(_, _) =>
- assert(tree.symbol != NoSymbol, tree)
- case This(_) =>
- if (!(tree.symbol.isStatic && (tree.symbol hasFlag MODULE))) {
- var o = currentOwner
- while (o != tree.symbol) {
- o = o.owner
- if (o == NoSymbol) {
- error(tree.pos, "tree symbol "+tree.symbol+" does not point to enclosing class; tree = "+tree)
- return
- }
- }
+ tree match {
+ case DefDef(_, _, _, _, _, _) =>
+ if (tree.symbol.hasFlag(ACCESSOR) &&
+ !tree.symbol.isDeferred &&
+ !tree.symbol.tpe.resultType.isInstanceOf[ConstantType]) {
+ assert(tree.symbol.accessed != NoSymbol, tree.symbol)
+ assert(tree.symbol.accessed.getter(tree.symbol.owner) == tree.symbol ||
+ tree.symbol.accessed.setter(tree.symbol.owner) == tree.symbol)
+ }
+ case ValDef(_, _, _, _) =>
+ if (tree.symbol.hasGetter) {
+ assert(tree.symbol.getter(tree.symbol.owner) != NoSymbol, tree.symbol)
+ }
+ case Apply(_, args) =>
+ assert(args forall (EmptyTree !=))
+ case Select(_, _) =>
+ assert(tree.symbol != NoSymbol, tree)
+ case This(_) =>
+ if (!(tree.symbol.isStatic && (tree.symbol hasFlag MODULE))) {
+ if (currentOwner.ownerChain takeWhile (_ != tree.symbol) exists (_ == NoSymbol)) {
+ error(tree.pos, "tree symbol "+tree.symbol+" does not point to enclosing class; tree = "+tree)
+ return
}
- case _ =>
- }
- if (tree.pos == NoPosition && tree != EmptyTree) {
- error(tree.pos, "tree without position: " + tree)
- } else if ((tree.tpe eq null) && phase.id >= currentRun.typerPhase.id) {
- error(tree.pos, "tree without type: " + tree)
- } else if (tree.isDef && tree.symbol.owner != currentOwner) {
- var owner = currentOwner
- while (owner.isTerm && !owner.isMethod && tree.symbol.owner != owner)
- owner = owner.owner;
- if (tree.symbol.owner != owner) {
- error(tree.pos, "" + tree.symbol + " has wrong owner: " + tree.symbol.owner +
- tree.symbol.owner.locationString + ", should be: " +
- currentOwner + currentOwner.locationString)
}
- } else {
- super.traverse(tree)
+ case _ =>
+ }
+ if (tree.pos == NoPosition && tree != EmptyTree) {
+ error(tree.pos, "tree without position: " + tree)
+ } else if ((tree.tpe eq null) && phase.id >= currentRun.typerPhase.id) {
+ error(tree.pos, "tree without type: " + tree)
+ } else if (tree.isDef && tree.symbol.owner != currentOwner) {
+ var owner = currentOwner
+ while (owner.isTerm && !owner.isMethod && tree.symbol.owner != owner)
+ owner = owner.owner;
+ if (tree.symbol.owner != owner) {
+ error(tree.pos, "" + tree.symbol + " has wrong owner: " + tree.symbol.owner +
+ tree.symbol.owner.locationString + ", should be: " +
+ currentOwner + currentOwner.locationString)
}
- } catch {
- case ex: Throwable =>
- if (settings.debug.value)
- Console.println("exception when traversing " + tree);
- throw(ex)
+ } else {
+ super.traverse(tree)
}
}
}
object postcheck extends Traverser {
override def traverse(tree: Tree) {
- try {
- tree match {
- case EmptyTree | TypeTree() =>
- ;
- case _ =>
- tpeOfTree.get(tree) match {
- case Some(oldtpe) =>
- if (!(oldtpe =:= tree.tpe))
- error(tree.pos, "types differ\n old: " + oldtpe +
- "\n new: " + tree.tpe + "\n tree: " + tree)
- tree.tpe = oldtpe
- super.traverse(tree)
- case None =>
- }
- }
- } catch {
- case ex: Throwable =>
- if (settings.debug.value)
- Console.println("exception when traversing " + tree);
- throw(ex)
+ tree match {
+ case EmptyTree | TypeTree() =>
+ ;
+ case _ =>
+ tpeOfTree.get(tree) match {
+ case Some(oldtpe) =>
+ if (!(oldtpe =:= tree.tpe))
+ error(tree.pos, "types differ\n old: " + oldtpe +
+ "\n new: " + tree.tpe + "\n tree: " + tree)
+ tree.tpe = oldtpe
+ super.traverse(tree)
+ case None =>
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
new file mode 100644
index 0000000000..be576289f6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -0,0 +1,268 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import scala.collection.mutable
+import scala.collection.mutable.ListBuffer
+import scala.util.control.ControlThrowable
+import scala.util.control.Exception.ultimately
+import symtab.Flags._
+import PartialFunction._
+
+/** An interface to enable higher configurability of diagnostic messages
+ * regarding type errors. This is barely a beginning as error messages are
+ * distributed far and wide across the codebase. The plan is to partition
+ * error messages into some broad groups and provide some mechanism for
+ * being more or less verbose on a selective basis. Possible groups include
+ * such examples as
+ *
+ * arity errors
+ * kind errors
+ * variance errors
+ * ambiguity errors
+ * volatility/stability errors
+ * implementation restrictions
+ *
+ * And more, and there is plenty of overlap, so it'll be a process.
+ *
+ * @author Paul Phillips
+ * @version 1.0
+ */
+trait TypeDiagnostics {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+ import global.typer.infer
+
+ /** It can be quite difficult to know which of the many functions called "error"
+ * is being called at any given point in the compiler. To alleviate this I am
+ * renaming such functions inside this trait based on where it originated.
+ */
+ def inferError(pos: Position, msg: String) = infer.error(pos, msg)
+
+ /** The common situation of making sure nothing is erroneous could be
+ * nicer if Symbols, Types, and Trees all implemented some common interface
+ * in which isErroneous and similar would be placed.
+ */
+ def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous)
+ def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous)
+ def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous)
+
+ /** A map of Positions to addendums - if an error involves a position in
+ * the map, the addendum should also be printed.
+ */
+ private var addendums = mutable.Map[Position, () => String]()
+
+ def setAddendum(pos: Position, msg: () => String) =
+ if (pos != NoPosition)
+ addendums(pos) = msg
+
+ def withAddendum(pos: Position) = (_: String) + addendums.getOrElse(pos, () => "")()
+
+ def decodeWithNamespace(name: Name): String = {
+ val prefix = if (name.isTypeName) "type " else "value "
+ prefix + name.decode
+ }
+
+ /** Does the positioned line assigned to t1 precede that of t2?
+ */
+ def linePrecedes(t1: Tree, t2: Tree) = t1.pos.isDefined && t2.pos.isDefined && t1.pos.line < t2.pos.line
+
+ def notAMember(sel: Tree, qual: Tree, name: Name) = {
+ def decoded = decodeWithNamespace(name)
+
+ def msg: String = name match {
+ case nme.CONSTRUCTOR => qual.tpe.widen+" does not have a constructor"
+ case _ =>
+ def memberOf = if (qual.tpe.typeSymbol.isTypeParameterOrSkolem) "type parameter " else ""
+ def possibleCause =
+ if (linePrecedes(qual, sel))
+ "\npossible cause: maybe a semicolon is missing before `"+decoded+"'?"
+ else
+ ""
+
+ decoded+" is not a member of "+ memberOf + qual.tpe.widen + possibleCause
+ }
+ inferError(sel.pos, withAddendum(qual.pos)(msg))
+ }
+
+ /** Only prints the parameter names if they're not synthetic,
+ * since "x$1: Int" does not offer any more information than "Int".
+ */
+ private def methodTypeErrorString(tp: Type) = tp match {
+ case mt @ MethodType(params, resultType) =>
+ def forString =
+ if (params exists (_.isSynthetic)) params map (_.tpe)
+ else params map (_.defString)
+
+ forString.mkString("(", ",", ")") + resultType
+ case x => x.toString
+ }
+
+ def alternatives(tree: Tree): List[Type] = tree.tpe match {
+ case OverloadedType(pre, alternatives) => alternatives map pre.memberType
+ case _ => Nil
+ }
+ def alternativesString(tree: Tree) =
+ alternatives(tree) map (x => " " + methodTypeErrorString(x)) mkString ("", " <and>\n", "\n")
+
+ def missingParameterTypeError(fun: Tree, vparam: ValDef) = {
+ val suffix = if (vparam.mods.isSynthetic) " for expanded function "+fun else ""
+
+ inferError(vparam.pos, "missing parameter type" + suffix)
+ ErrorType
+ }
+
+ def treeSymTypeMsg(tree: Tree): String = {
+ val sym = tree.symbol
+ def hasParams = tree.tpe.paramSectionCount > 0
+ def preResultString = if (hasParams) ": " else " of type "
+
+ def nullMessage = "expression of type " + tree.tpe
+ def overloadedMessage = "overloaded method " + sym + " with alternatives:\n" + alternativesString(tree)
+ def moduleMessage = "" + sym
+ def defaultMessage = moduleMessage + preResultString + tree.tpe
+ def applyMessage = defaultMessage + tree.symbol.locationString
+
+ if (sym == null) nullMessage
+ else if (sym.isOverloaded) overloadedMessage
+ else if (sym.isModule) moduleMessage
+ else if (sym.name == nme.apply) applyMessage
+ else defaultMessage
+ }
+
+ def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = {
+ def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")")
+
+ def resType = if (pt isWildcard) "" else " with expected result type " + pt
+ def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt
+
+ withDisambiguation(allTypes: _*) {
+ treeSymTypeMsg(tree) + msg + asParams(argtpes) + resType
+ }
+ }
+
+ def disambiguate(ss: List[String]) = ss match {
+ case Nil => Nil
+ case s :: ss => s :: (ss map { case `s` => "(some other)"+s ; case x => x })
+ }
+
+ // todo: use also for other error messages
+ def existentialContext(tp: Type) = tp.existentialSkolems match {
+ case Nil => ""
+ case xs => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
+ }
+
+ def foundReqMsg(found: Type, req: Type): String =
+ withDisambiguation(found, req) {
+ ";\n found : " + found.toLongString + existentialContext(found) +
+ "\n required: " + req + existentialContext(req)
+ }
+
+ /** If two given types contain different type variables with the same name
+ * differentiate the names by including owner information. Also, if the
+ * type error is because of a conflict between two identically named
+ * classes and one is in package scala, fully qualify the name so one
+ * need not deduce why "java.util.Iterator" and "Iterator" don't match.
+ * Another disambiguation performed is to address the confusion present
+ * in the following snippet:
+ * def f[Int](x: Int) = x + 5.
+ */
+ def withDisambiguation[T](types: Type*)(op: => T): T = {
+ object SymExtractor {
+ def unapply(x: Any) = x match {
+ case t @ TypeRef(_, sym, _) => Some(t -> sym)
+ case t @ ConstantType(value) => Some(t -> t.underlying.typeSymbol)
+ case _ => None
+ }
+ }
+ val typerefs =
+ for (tp <- types.toList ; SymExtractor(t, sym) <- tp) yield
+ t -> sym
+
+ val savedNames = typerefs map { case (_, sym) => sym -> sym.name } toMap
+ def restoreNames = savedNames foreach { case (sym, name) => sym.name = name }
+
+ def isAlreadyAltered(sym: Symbol) = sym.name != savedNames(sym)
+
+ def modifyName(sym: Symbol)(f: String => String): Unit =
+ sym.name = newTypeName(f(sym.name.toString))
+
+ def scalaQualify(sym: Symbol) =
+ if (sym.owner.isScalaPackageClass)
+ modifyName(sym)("scala." + _)
+
+ def explainName(sym: Symbol) = {
+ scalaQualify(sym)
+
+ if (!isAlreadyAltered(sym))
+ modifyName(sym)(_ + "(in " + sym.owner + ")")
+ }
+
+ ultimately(restoreNames) {
+ for ((t1, sym1) <- typerefs ; (t2, sym2) <- typerefs ; if sym1 != sym2 && (sym1 isLess sym2)) {
+
+ if (t1.toString == t2.toString) { // type variable collisions
+ List(sym1, sym2) foreach explainName
+ if (sym1.owner == sym2.owner)
+ sym2.name = newTypeName("(some other)"+sym2.name)
+ }
+ else if (sym1.name == sym2.name) { // symbol name collisions
+ List(sym1, sym2) foreach { x =>
+ if (x.owner.isScalaPackageClass)
+ modifyName(x)("scala." + _)
+ else if (x.isTypeParameterOrSkolem)
+ explainName(x)
+ }
+ }
+ }
+
+ // performing the actual operation
+ op
+ }
+ }
+
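A user-level sketch of the name collisions this disambiguation untangles (DisambiguationDemo is an illustrative name; the quoted message wording is approximate):

    object DisambiguationDemo {
      class List[A]                                   // same simple name as scala.List
      def describe(xs: List[Int]): String = xs.toString
      // describe(scala.List(1, 2)) does not type check; with owner and prefix
      // information the mismatch distinguishes this List from scala.List instead
      // of printing two identical-looking "List[Int]" types.
    }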
+ trait TyperDiagnostics {
+ self: Typer =>
+
+ private def contextError(pos: Position, msg: String) = context.error(pos, msg)
+ private def contextError(pos: Position, err: Throwable) = context.error(pos, err)
+
+ def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
+ def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive"
+
+ /** Returns Some(msg) if the given tree is untyped apparently due
+ * to a cyclic reference, and None otherwise.
+ */
+ def cyclicReferenceMessage(sym: Symbol, tree: Tree) = condOpt(tree) {
+ case ValDef(_, _, tpt, _) if tpt.tpe == null => "recursive "+sym+" needs type"
+ case DefDef(_, _, _, _, tpt, _) if tpt.tpe == null => List(cyclicAdjective(sym), sym, "needs result type") mkString " "
+ }
+
+ /** Report a type error.
+ *
+ * @param pos0 The position where to report the error
+ * @param ex The exception that caused the error
+ */
+ def reportTypeError(pos: Position, ex: TypeError) {
+ if (ex.pos == NoPosition) ex.pos = pos
+ if (!context.reportGeneralErrors) throw ex
+ if (settings.debug.value) ex.printStackTrace()
+
+ ex match {
+ case CyclicReference(sym, info: TypeCompleter) =>
+ contextError(ex.pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
+
+ if (sym == ObjectClass)
+ throw new FatalError("cannot redefine root "+sym)
+ case _ =>
+ contextError(ex.pos, ex)
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 50627268c8..ebfd10e39b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
//todo: rewrite or disllow new T where T is a mixin (currently: <init> not a member of T)
//todo: use inherited type info also for vars and values
@@ -12,12 +11,14 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable.{HashMap, ListBuffer}
-import scala.util.control.ControlException
-import scala.compat.Platform.currentTime
+import scala.util.control.ControlThrowable
import scala.tools.nsc.interactive.RangePositions
-import scala.tools.nsc.util.{ Position, Set, NoPosition, SourceFile }
+import scala.tools.nsc.util.{Set, SourceFile, BatchSourceFile}
import symtab.Flags._
+import util.Statistics
+import util.Statistics._
+
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
/** This trait provides methods to assign types to trees.
@@ -29,23 +30,15 @@ trait Typers { self: Analyzer =>
import global._
import definitions._
- var appcnt = 0
- var idcnt = 0
- var selcnt = 0
- var implcnt = 0
- var impltime = 0l
-
- var failedApplies = 0L
- var failedOpEqs = 0L
- var failedSilent = 0L
-
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
- private val transformed = new HashMap[Tree, Tree]
+ val transformed = new HashMap[Tree, Tree]
// currently not used at all (March 09)
private val superDefs = new HashMap[Symbol, ListBuffer[Tree]]
+ final val shortenImports = false
+
def resetTyper() {
resetContexts
resetNamer()
@@ -76,6 +69,10 @@ trait Typers { self: Analyzer =>
private class NormalTyper(context : Context) extends Typer(context)
// hooks for auto completion
+ // A transient flag to mark members of anonymous classes
+ // that are turned private by typedBlock
+ private final val SYNTHETIC_PRIVATE = TRANS_FLAG
+
// Mode constants
/** The three mode <code>NOmode</code>, <code>EXPRmode</code>
@@ -141,23 +138,33 @@ trait Typers { self: Analyzer =>
*/
val HKmode = 0x4000 // @M: could also use POLYmode | TAPPmode
+ /** The mode <code>BYVALmode</code> is set when we are typing an expression
+ * that occurs in a by-value position. An expression e1 is in by-value
+ * position within expression e2 iff it will be reduced to a value at that
+ * position during the evaluation of e2. Examples are by-value function
+ * arguments or the conditional of an if-then-else clause.
+ * This mode has been added to support continuations.
+ */
+ val BYVALmode = 0x8000
+
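For readers unfamiliar with the terminology, a small user-level sketch of by-value versus by-name positions (illustrative names, unrelated to compiler internals):

    object ByValueDemo {
      def log(msg: => String) { }                  // msg occupies a by-name position
      def add(x: Int, y: Int) = x + y              // x and y occupy by-value positions
      def main(args: Array[String]) {
        add(1 + 2, 4)                              // 1 + 2 is reduced to 3 before the call
        log("costly " + "message")                 // the argument need not be evaluated
        if (args.nonEmpty) println(add(1, 2))      // an if-condition is a by-value position
      }
    }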
/** The mode <code>TYPEPATmode</code> is set when we are typing a type in a pattern
*/
val TYPEPATmode = 0x10000
private val stickyModes: Int = EXPRmode | PATTERNmode | TYPEmode | ALTmode
- private def funMode(mode: Int) = mode & (stickyModes | SCCmode) | FUNmode | POLYmode
+ private def funMode(mode: Int) = mode & (stickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
private def typeMode(mode: Int) =
if ((mode & (PATTERNmode | TYPEPATmode)) != 0) TYPEmode | TYPEPATmode
else TYPEmode
- private def argMode(fun: Tree, mode: Int) =
+ private def argMode(fun: Tree, mode: Int) = {
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
else mode
+ }
- abstract class Typer(context0: Context) {
+ abstract class Typer(context0: Context) extends TyperDiagnostics {
import context0.unit
val infer = new Inferencer(context0) {
@@ -171,9 +178,24 @@ trait Typers { self: Analyzer =>
*/
def applyImplicitArgs(fun: Tree): Tree = fun.tpe match {
case MethodType(params, _) =>
- var positional = true
val argResultsBuff = new ListBuffer[SearchResult]()
+ val argBuff = new ListBuffer[Tree]()
+ def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
+ def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
+ var mkArg: (Tree, Name) => Tree = mkPositionalArg
+
+ def errorMessage(paramName: Name, paramTp: Type) =
+ paramTp.typeSymbol match {
+ case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
+ case _ =>
+ "could not find implicit value for "+
+ (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
+ else "parameter "+paramName+": ")+paramTp
+ }
+
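The ImplicitNotFoundMsg extractor consulted by errorMessage reads the @implicitNotFound annotation; a user-level sketch of the feature (JsonCodec and CodecDemo are hypothetical, purely for illustration):

    import scala.annotation.implicitNotFound

    @implicitNotFound(msg = "No JsonCodec in scope for ${T}; define or import one.")
    trait JsonCodec[T] { def write(value: T): String }

    object CodecDemo {
      def encode[T](value: T)(implicit codec: JsonCodec[T]): String = codec.write(value)
      // encode(42) now reports the custom message instead of the generic
      // "could not find implicit value for parameter codec: JsonCodec[Int]".
    }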
+ // DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
+ //
// apply the substitutions (undet type param -> type) that were determined
// by implicit resolution of implicit arguments on the left of this argument
for(param <- params) {
@@ -181,30 +203,32 @@ trait Typers { self: Analyzer =>
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- argResultsBuff += inferImplicit(fun, paramTp, true, false, context)
- }
+ val res = inferImplicit(fun, paramTp, true, false, context)
+ argResultsBuff += res
- val argResults = argResultsBuff.toList
- val args = argResults.zip(params) flatMap {
- case (arg, param) =>
- if (arg != SearchFailure) {
- if (positional) List(arg.tree)
- else List(atPos(arg.tree.pos)(new AssignOrNamedArg(Ident(param.name), (arg.tree))))
- } else {
- if (!param.hasFlag(DEFAULTPARAM))
- context.error(
- fun.pos, "could not find implicit value for "+
- (if (param.name startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
- else "parameter "+param.name+": ")+param.tpe)
- positional = false
- Nil
- }
+ if (res != SearchFailure) {
+ argBuff += mkArg(res.tree, param.name)
+ } else {
+ mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
+ if (!param.hasFlag(DEFAULTPARAM))
+ context.error(fun.pos, errorMessage(param.name, param.tpe))
+ /* else {
+ TODO: alternative (to expose implicit search failure more) -->
+ resolve argument, do type inference, keep emitting positional args, infer type params based on default value for arg
+ for (ar <- argResultsBuff) ar.subst traverse defaultVal
+ val targs = exprTypeArgs(context.undetparams, defaultVal.tpe, paramTp)
+ substExpr(tree, tparams, targs, pt)
+ }*/
+ }
}
- for (s <- argResults map (_.subst)) {
- s traverse fun
- for (arg <- args) s traverse arg
+
+ val args = argBuff.toList
+ for (ar <- argResultsBuff) {
+ ar.subst traverse fun
+ for (arg <- args) ar.subst traverse arg
}
- Apply(fun, args) setPos fun.pos
+
+ new ApplyToImplicitArgs(fun, args) setPos fun.pos
case ErrorType =>
fun
}
@@ -248,35 +272,6 @@ trait Typers { self: Analyzer =>
private[typechecker] var context = context0
def context1 = context
- /** Report a type error.
- *
- * @param pos0 The position where to report the error
- * @param ex The exception that caused the error
- */
- def reportTypeError(pos: Position, ex: TypeError) {
- if (ex.pos == NoPosition) ex.pos = pos
- if (!context.reportGeneralErrors) throw ex
- if (settings.debug.value) ex.printStackTrace()
- ex match {
- case CyclicReference(sym, info: TypeCompleter) =>
- val msg =
- info.tree match {
- case ValDef(_, _, tpt, _) if (tpt.tpe eq null) =>
- "recursive "+sym+" needs type"
- case DefDef(_, _, _, _, tpt, _) if (tpt.tpe eq null) =>
- (if (sym.owner.isClass && sym.owner.info.member(sym.name).hasFlag(OVERLOADED)) "overloaded "
- else "recursive ")+sym+" needs result type"
- case _ =>
- ex.getMessage()
- }
- context.error(ex.pos, msg)
- if (sym == ObjectClass)
- throw new FatalError("cannot redefine root "+sym)
- case _ =>
- context.error(ex.pos, ex)
- }
- }
-
/** Check that <code>tree</code> is a stable expression.
*
* @param tree ...
@@ -304,7 +299,7 @@ trait Typers { self: Analyzer =>
val savedSTABLE = tree.symbol getFlag STABLE
tree.symbol setInfo AnyRefClass.tpe
tree.symbol setFlag STABLE
- val result = treeInfo.isPureExpr(tree)
+ val result = treeInfo.isPureExpr(tree)
tree.symbol setInfo savedTpe
tree.symbol setFlag savedSTABLE
result
@@ -340,7 +335,7 @@ trait Typers { self: Analyzer =>
tp match {
case TypeRef(pre, sym, args) =>
(checkNotLocked(sym)) && (
- !sym.isTypeMember ||
+ !sym.isNonClassType ||
checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym) // @M! info for a type ref to a type parameter now returns a polytype
// @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
)
@@ -363,13 +358,13 @@ trait Typers { self: Analyzer =>
}
}
- def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = {
+ def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
lockedSym.lock {
throw new TypeError("illegal cyclic reference involving " + lockedSym)
}
- val result = checkNonCyclic(pos, tp)
+ checkNonCyclic(pos, tp)
+ } finally {
lockedSym.unlock()
- result
}
def checkNonCyclic(sym: Symbol) {
@@ -393,7 +388,7 @@ trait Typers { self: Analyzer =>
error(pos, "methods with `*'-parameters cannot be converted to function values");
*/
if (restpe.isDependent)
- error(pos, "method with dependent type "+tpe+" cannot be converted to function value");
+ error(pos, "method with dependent type "+tpe+" cannot be converted to function value")
checkParamsConvertible(pos, restpe)
case _ =>
}
@@ -445,13 +440,13 @@ trait Typers { self: Analyzer =>
check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
else if (owner == NoSymbol)
tree setType packSymbols(hiddenSymbols.reverse, tp1)
- else { // privates
+ else if (!phase.erasedTypes) { // privates
val badSymbol = hiddenSymbols.head
error(tree.pos,
(if (badSymbol hasFlag PRIVATE) "private " else "") + badSymbol +
" escapes its defining scope as part of type "+tree.tpe)
setError(tree)
- }
+ } else tree
}
def addHidden(sym: Symbol) =
@@ -459,13 +454,14 @@ trait Typers { self: Analyzer =>
override def apply(t: Type): Type = {
def checkNoEscape(sym: Symbol) {
- if (sym.hasFlag(PRIVATE)) {
+ if (sym.hasFlag(PRIVATE) && !sym.hasFlag(SYNTHETIC_PRIVATE)) {
var o = owner
while (o != NoSymbol && o != sym.owner &&
!o.isLocal && !o.hasFlag(PRIVATE) &&
!o.privateWithin.hasTransOwner(sym.owner))
o = o.owner
- if (o == sym.owner) addHidden(sym)
+ if (o == sym.owner || o == sym.owner.linkedClassOfClass)
+ addHidden(sym)
} else if (sym.owner.isTerm && !sym.isTypeParameterOrSkolem) {
var e = scope.lookupEntry(sym.name)
var found = false
@@ -545,6 +541,24 @@ trait Typers { self: Analyzer =>
final val xtypes = false
+ /** Is symbol defined and not stale?
+ */
+ def reallyExists(sym: Symbol) = {
+ if (isStale(sym)) sym.setInfo(NoType)
+ sym.exists
+ }
+
+ /** A symbol is stale if it is toplevel, to be loaded from a classfile, and
+ * the classfile is produced from a sourcefile which is compiled in the current run.
+ */
+ def isStale(sym: Symbol): Boolean = {
+ sym.rawInfo.isInstanceOf[loaders.ClassfileLoader] && {
+ sym.rawInfo.load(sym)
+ (sym.sourceFile ne null) &&
+ (currentRun.compiledFiles contains sym.sourceFile.path)
+ }
+ }
+
/** Does the context of tree <code>tree</code> require a stable type?
*/
private def isStableContext(tree: Tree, mode: Int, pt: Type) =
@@ -559,6 +573,8 @@ trait Typers { self: Analyzer =>
* If symbol refers to package object, insert `.package` as second to last selector.
* (exception for some symbols in scala package which are dealiased immediately)
* Call checkAccessible, which sets tree's attributes.
+ * Also note that checkAccessible looks up sym on pre without checking that pre is well-formed
+ * (illegal type applications in pre will be skipped -- that's why typedSelect wraps the resulting tree in a TreeWithDeferredChecks)
* @return modified tree and new prefix type
*/
private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
@@ -617,19 +633,17 @@ trait Typers { self: Analyzer =>
* 1. Check that non-function pattern expressions are stable
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
- * </ol>
*/
private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
- def isNotAValue(sym: Symbol) = // bug #1392
- !sym.isValue || (sym.isModule && isValueClass(sym.linkedClassOfModule))
-
if (tree.symbol.hasFlag(OVERLOADED) && (mode & FUNmode) == 0)
inferExprAlternative(tree, pt)
val sym = tree.symbol
+
if (tree.tpe.isError) tree
else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
- checkStable(tree)
- } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && isNotAValue(sym) && !phase.erasedTypes) { // (2)
+ if (sym.isValue) checkStable(tree)
+ else errorTree(tree, sym+" is not a value")
+ } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
errorTree(tree, sym+" is not a value")
} else {
if (sym.isStable && pre.isStable && tree.tpe.typeSymbol != ByNameParamClass &&
@@ -674,44 +688,58 @@ trait Typers { self: Analyzer =>
case Select(qual, _) => qual.tpe
case _ => NoPrefix
}
- if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.paramTypes.isEmpty &&
+ if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
(isStableContext(tree, mode, pt) || sym.isModule))
tree.setType(MethodType(List(), singleType(pre, sym)))
else tree
}
/** The member with given name of given qualifier tree */
- def member(qual: Tree, name: Name) = qual.tpe match {
- case ThisType(clazz) if (context.enclClass.owner.hasTransOwner(clazz)) =>
- qual.tpe.member(name)
- case _ =>
- if (phase.next.erasedTypes) qual.tpe.member(name)
- else qual.tpe.nonLocalMember(name)
+ def member(qual: Tree, name: Name) = {
+ def callSiteWithinClass(clazz: Symbol) = context.enclClass.owner hasTransOwner clazz
+ val includeLocals = qual.tpe match {
+ case ThisType(clazz) if callSiteWithinClass(clazz) => true
+ case SuperType(clazz, _) if callSiteWithinClass(clazz.typeSymbol) => true
+ case _ => phase.next.erasedTypes
+ }
+ if (includeLocals) qual.tpe member name
+ else qual.tpe nonLocalMember name
}
- def silent(op: Typer => Tree): AnyRef /* in fact, TypeError or Tree */ = {
- val start = System.nanoTime()
+ def silent[T](op: Typer => T,
+ reportAmbiguousErrors: Boolean = context.reportAmbiguousErrors,
+ newtree: Tree = context.tree): Any /* in fact, TypeError or T */ = {
+ val rawTypeStart = startCounter(rawTypeFailed)
+ val findMemberStart = startCounter(findMemberFailed)
+ val subtypeStart = startCounter(subtypeFailed)
+ val failedSilentStart = startTimer(failedSilentNanos)
try {
- if (context.reportGeneralErrors) {
- val context1 = context.makeSilent(context.reportAmbiguousErrors)
- context1.undetparams = context.undetparams
- context1.savedTypeBounds = context.savedTypeBounds
- context1.namedApplyBlockInfo = context.namedApplyBlockInfo
- val typer1 = newTyper(context1)
- val result = op(typer1)
- context.undetparams = context1.undetparams
- context.savedTypeBounds = context1.savedTypeBounds
- context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- result
- } else {
- op(this)
+ if (context.reportGeneralErrors ||
+ reportAmbiguousErrors != context.reportAmbiguousErrors ||
+ newtree != context.tree) {
+ val context1 = context.makeSilent(reportAmbiguousErrors, newtree)
+ context1.undetparams = context.undetparams
+ context1.savedTypeBounds = context.savedTypeBounds
+ context1.namedApplyBlockInfo = context.namedApplyBlockInfo
+ val typer1 = newTyper(context1)
+ val result = op(typer1)
+ context.undetparams = context1.undetparams
+ context.savedTypeBounds = context1.savedTypeBounds
+ context.namedApplyBlockInfo = context1.namedApplyBlockInfo
+ result
+ } else {
+ op(this)
+ }
+ } catch {
+ case ex: CyclicReference => throw ex
+ case ex: TypeError =>
+ stopCounter(rawTypeFailed, rawTypeStart)
+ stopCounter(findMemberFailed, findMemberStart)
+ stopCounter(subtypeFailed, subtypeStart)
+ stopTimer(failedSilentNanos, failedSilentStart)
+ ex
}
- } catch {
- case ex: CyclicReference => throw ex
- case ex: TypeError =>
- failedSilent += System.nanoTime() - start
- ex
- }}
+ }
/** Utility method: Try op1 on tree. If that gives an error try op2 instead.
*/
@@ -733,7 +761,7 @@ trait Typers { self: Analyzer =>
/** Perform the following adaptations of expression, pattern or type `tree' wrt to
* given mode `mode' and given prototype `pt':
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
- * (0) Convert expressions with constant types to literals
+ * (0) Convert expressions with constant types to literals (unless in interactive/scaladoc mode)
* (1) Resolve overloading, unless mode contains FUNmode
* (2) Apply parameterless functions
* (3) Apply polymorphic types to fresh instances of their type parameters and
@@ -749,7 +777,7 @@ trait Typers { self: Analyzer =>
* (5) Convert constructors in a pattern as follows:
* (5.1) If constructor refers to a case class factory, set tree's type to the unique
* instance of its primary constructor that is a subtype of the expected type.
- * (5.2) If constructor refers to an exractor, convert to application of
+ * (5.2) If constructor refers to an extractor, convert to application of
* unapply or unapplySeq method.
*
* (6) Convert all other types to TypeTree nodes.
@@ -762,13 +790,14 @@ trait Typers { self: Analyzer =>
* is an integer fitting in the range of that type, convert it to that type.
* (11) Widen numeric literals to their expected type, if necessary
* (12) When in mode EXPRmode, convert E to { E; () } if expected type is scala.Unit.
- * (13) When in mode EXPRmode, apply a view
+ * (13) When in mode EXPRmode, apply AnnotationChecker conversion if expected type is annotated.
+ * (14) When in mode EXPRmode, apply a view
* If all this fails, error
*/
protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = tree.tpe match {
case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
adaptAnnotations(tree, mode, pt)
- case ct @ ConstantType(value) if ((mode & (TYPEmode | FUNmode)) == 0 && (ct <:< pt) && !onlyPresentation) => // (0)
+ case ct @ ConstantType(value) if ((mode & (TYPEmode | FUNmode)) == 0 && (ct <:< pt) && !forScaladoc && !forInteractive) => // (0)
treeCopy.Literal(tree, value)
case OverloadedType(pre, alts) if ((mode & FUNmode) == 0) => // (1)
inferExprAlternative(tree, pt)
@@ -798,16 +827,21 @@ trait Typers { self: Analyzer =>
TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos //@M/tcpolyinfer: changed tparam.tpe to tparam.tpeHK
context.undetparams = context.undetparams ::: tparams1
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
- case mt: ImplicitMethodType if ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
- if (!context.undetparams.isEmpty/* && (mode & POLYmode) == 0 disabled to make implicits in new collection work; we should revisit this. */) { // (9)
- // println("adapt IMT: "+(context.undetparams, pt)) //@MDEBUG
- context.undetparams = inferExprInstance(
- tree, context.extractUndetparams(), pt, mt.paramTypes exists isManifest)
- // if we are looking for a manifest, instantiate type to Nothing anyway,
- // as we would get amnbiguity errors otherwise. Example
- // Looking for a manifest of Nil: This mas many potential types,
- // so we need to instantiate to minimal type List[Nothing].
+ case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
+ if (context.undetparams nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+ // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
+ // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
+ context.undetparams =
+ inferExprInstance(tree, context.extractUndetparams(), pt,
+ // approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter
+ if(settings.YdepMethTpes.value) mt.approximate else mt,
+ // if we are looking for a manifest, instantiate type to Nothing anyway,
+ // as we would get ambiguity errors otherwise. Example
+ // Looking for a manifest of Nil: This has many potential types,
+ // so we need to instantiate to minimal type List[Nothing].
+ false) // false: retract Nothing's that indicate failure, ambiguities in manifests are dealt with in manifestOfType
}
+
val typer1 = constrTyperIf(treeInfo.isSelfOrSuperConstrCall(tree))
if (original != EmptyTree && pt != WildcardType)
typer1.silent(tpr => tpr.typed(tpr.applyImplicitArgs(tree), mode, pt)) match {
@@ -829,15 +863,22 @@ trait Typers { self: Analyzer =>
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor &&
- //isCompatible(tparamsToWildcards(mt, context.undetparams), pt) &&
- isFunctionType(pt))/* &&
- (pt <:< functionType(mt.paramTypes map (t => WildcardType), WildcardType)))*/ { // (4.2)
+ if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
if (settings.debug.value) log("eta-expanding "+tree+":"+tree.tpe+" to "+pt)
checkParamsConvertible(tree.pos, tree.tpe)
- val tree1 = etaExpand(context.unit, tree)
- //println("eta "+tree+" ---> "+tree1+":"+tree1.tpe)
- typed(tree1, mode, pt)
+ val tree0 = etaExpand(context.unit, tree)
+ // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
+
+ if(meth.typeParams.nonEmpty) {
+ // #2624: need to infer type arguments for eta expansion of a polymorphic method
+ // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
+ // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
+ // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
+ // (note that (3) does not call typed to do the polymorphic type instantiation --
+ // it is called after the tree has been typed with a polymorphic expected result type)
+ instantiate(typed(tree0, mode, WildcardType), mode, pt)
+ } else
+ typed(tree0, mode, pt)
} else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3)
adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original)
} else if (context.implicitsEnabled) {
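A user-level sketch of the polymorphic eta-expansion case that the #2624 comment above describes (EtaDemo and wrap are illustrative names):

    object EtaDemo {
      def wrap[A](a: A): List[A] = List(a)
      // Eta-expanding a polymorphic method against an expected function type:
      // the type argument (Int) must be inferred as part of the expansion.
      val f: Int => List[Int] = wrap
      def main(args: Array[String]) { println(f(3)) }   // prints List(3)
    }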
@@ -862,7 +903,7 @@ trait Typers { self: Analyzer =>
!(tree.symbol.hasFlag(JAVA) && context.unit.isJava)) { // (7)
// @M When not typing a higher-kinded type ((mode & HKmode) == 0)
// or raw type (tree.symbol.hasFlag(JAVA) && context.unit.isJava), types must be of kind *,
- // and thus parameterised types must be applied to their type arguments
+ // and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
errorTree(tree, tree.symbol+" takes type parameters")
tree setType tree.tpe
@@ -888,7 +929,7 @@ trait Typers { self: Analyzer =>
case _ => TypeTree(tree.tpe) setOriginal(tree)
}
} else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode)) { // (5)
- val extractor = tree.symbol.filter(sym => unapplyMember(sym.tpe).exists)
+ val extractor = tree.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe)))
if (extractor != NoSymbol) {
tree setSymbol extractor
val unapply = unapplyMember(extractor.tpe)
@@ -951,13 +992,15 @@ trait Typers { self: Analyzer =>
return typed(atPos(tree.pos)(Block(List(tree), Literal(()))), mode, pt)
else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt))
return typed(atPos(tree.pos)(Select(tree, "to"+sym.name)), mode, pt)
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, mode, pt), mode, pt)
case _ =>
}
if (!context.undetparams.isEmpty) {
return instantiate(tree, mode, pt)
}
if (context.implicitsEnabled && !tree.tpe.isError && !pt.isError) {
- // (13); the condition prevents chains of views
+ // (14); the condition prevents chains of views
if (settings.debug.value) log("inferring view from "+tree.tpe+" to "+pt)
val coercion = inferView(tree, tree.tpe, pt, true)
// convert forward views of delegate types into closures wrapped around
@@ -971,7 +1014,7 @@ trait Typers { self: Analyzer =>
if (coercion != EmptyTree) {
if (settings.debug.value) log("inferred view from "+tree.tpe+" to "+pt+" = "+coercion+":"+coercion.tpe)
return newTyper(context.makeImplicit(context.reportAmbiguousErrors)).typed(
- Apply(coercion, List(tree)) setPos tree.pos, mode, pt)
+ new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
}
}
}
@@ -1001,18 +1044,19 @@ trait Typers { self: Analyzer =>
((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue) &&
phase.id <= currentRun.typerPhase.id && !qtpe.isError &&
qtpe.typeSymbol != NullClass && qtpe.typeSymbol != NothingClass && qtpe != WildcardType &&
+ !qual.isInstanceOf[ApplyImplicitView] && // don't chain views
context.implicitsEnabled) { // don't try to adapt a top-level type that's the subject of an implicit search
// this happens because, if isView, typedImplicit tries to apply the "current" implicit value to
// a value that needs to be coerced, so we check whether the implicit value has an `apply` method
// (if we allow this, we get divergence, e.g., starting at `conforms` during ant quick.bin)
// note: implicit arguments are still inferred (this kind of "chaining" is allowed)
if (qtpe.normalize.isInstanceOf[ExistentialType]) {
- qtpe = qtpe.normalize.skolemizeExistential(context.owner, qual)
+ qtpe = qtpe.normalize.skolemizeExistential(context.owner, qual) // open the existential
qual setType qtpe
}
val coercion = inferView(qual, qtpe, searchTemplate, true)
if (coercion != EmptyTree)
- typedQualifier(atPos(qual.pos)(Apply(coercion, List(qual))))
+ typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
else
qual
} else {
@@ -1141,7 +1185,7 @@ trait Typers { self: Analyzer =>
if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments")
}
- List.map2(cstats1, treeInfo.preSuperFields(templ.body)) {
+ (cstats1, treeInfo.preSuperFields(templ.body)).zipped map {
(ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe
}
case _ =>
@@ -1191,7 +1235,7 @@ trait Typers { self: Analyzer =>
if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol))
error(parent.pos, "illegal inheritance; super"+superclazz+
"\n is not a subclass of the super"+ps.head.typeSymbol+
- "\n of the mixin " + psym);
+ "\n of the mixin " + psym)
} else {
error(parent.pos, psym+" needs to be a trait to be mixed in")
}
@@ -1208,7 +1252,8 @@ trait Typers { self: Analyzer =>
if (!(selfType <:< parent.tpe.typeOfThis) &&
!phase.erasedTypes &&
!(context.owner hasFlag SYNTHETIC) && // don't do this check for synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
- !(settings.suppressVTWarn.value))
+ !(settings.suppressVTWarn.value) &&
+ !selfType.isErroneous && !parent.tpe.isErroneous)
{
//Console.println(context.owner);//DEBUG
//Console.println(context.owner.unsafeTypeParams);//DEBUG
@@ -1228,9 +1273,9 @@ trait Typers { self: Analyzer =>
/*
if (settings.Xshowcls.value != "" &&
- settings.Xshowcls.value == context.owner.fullNameString)
+ settings.Xshowcls.value == context.owner.fullName)
println("INFO "+context.owner+
- ", baseclasses = "+(context.owner.info.baseClasses map (_.fullNameString))+
+ ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
*/
}
@@ -1274,6 +1319,14 @@ trait Typers { self: Analyzer =>
"implementation restriction: subclassing Classfile does not\n"+
"make your annotation visible at runtime. If that is what\n"+
"you want, you must write the annotation class in Java.")
+ if (phase.id <= currentRun.typerPhase.id) {
+ for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
+ val m = companionModuleOf(clazz, context)
+ if (m != NoSymbol) {
+ m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List()))
+ }
+ }
+ }
treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2)
.setType(NoType)
}
@@ -1287,7 +1340,7 @@ trait Typers { self: Analyzer =>
// attributes(mdef)
// initialize all constructors of the linked class: the type completer (Namer.methodSig)
// might add default getters to this object. example: "object T; class T(x: Int = 1)"
- val linkedClass = mdef.symbol.linkedClassOfModule
+ val linkedClass = companionClassOf(mdef.symbol, context)
if (linkedClass != NoSymbol)
for (c <- linkedClass.info.decl(nme.CONSTRUCTOR).alternatives)
c.initialize
@@ -1312,32 +1365,22 @@ trait Typers { self: Analyzer =>
*/
def addGetterSetter(stat: Tree): List[Tree] = stat match {
case ValDef(mods, name, tpt, rhs)
+ // PRIVATE | LOCAL are fields generated for primary constructor arguments
if (mods.flags & (PRIVATE | LOCAL)) != (PRIVATE | LOCAL).toLong && !stat.symbol.isModuleVar =>
- def memberAnnots(annots: List[AnnotationInfo], memberClass: Symbol) = {
- annots.filter(ann => ann.atp match {
- case AnnotatedType(annots, _, _) =>
- annots.exists(_.atp.typeSymbol == memberClass) ||
- (memberClass == FieldClass && annots.forall(ann => {
- val annClass = ann.atp.typeSymbol
- annClass != GetterClass && annClass != SetterClass &&
- annClass != BeanGetterClass && annClass != BeanSetterClass
- }))
- case _ => memberClass == FieldClass
- })
- }
-
val isDeferred = mods hasFlag DEFERRED
val value = stat.symbol
val allAnnots = value.annotations
if (!isDeferred)
- value.setAnnotations(memberAnnots(allAnnots, FieldClass))
+ // keepClean: by default annotations go to the field, except if the field is
+ // generated for a class parameter (PARAMACCESSOR).
+ value.setAnnotations(memberAnnots(allAnnots, FieldTargetClass, keepClean = !mods.hasFlag(PARAMACCESSOR)))
val getter = if (isDeferred) value else value.getter(value.owner)
assert(getter != NoSymbol, stat)
if (getter hasFlag OVERLOADED)
error(getter.pos, getter+" is defined twice")
- getter.setAnnotations(memberAnnots(allAnnots, GetterClass))
+ getter.setAnnotations(memberAnnots(allAnnots, GetterTargetClass))
if (value.hasFlag(LAZY)) List(stat)
else {
@@ -1360,7 +1403,7 @@ trait Typers { self: Analyzer =>
}
checkNoEscaping.privates(getter, getterDef.tpt)
def setterDef(setter: Symbol, isBean: Boolean = false): DefDef = {
- setter.setAnnotations(memberAnnots(allAnnots, if (isBean) BeanSetterClass else SetterClass))
+ setter.setAnnotations(memberAnnots(allAnnots, if (isBean) BeanSetterTargetClass else SetterTargetClass))
val result = typed {
atPos(vdef.pos.focus) {
DefDef(
@@ -1392,10 +1435,16 @@ trait Typers { self: Analyzer =>
(if (value.hasAnnotation(BooleanBeanPropertyAttr)) "is" else "get") +
nameSuffix
val beanGetter = value.owner.info.decl(beanGetterName)
- beanGetter.setAnnotations(memberAnnots(allAnnots, BeanGetterClass))
- if (mods hasFlag MUTABLE) {
+ if (beanGetter == NoSymbol) {
+ // the namer decides whether to generate these symbols or not. At that point, we don't
+ // have symbolic information yet, so we only look for annotations named "BeanProperty".
+ unit.error(stat.pos, "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import")
+ }
+ beanGetter.setAnnotations(memberAnnots(allAnnots, BeanGetterTargetClass))
+ if (mods.hasFlag(MUTABLE) && beanGetter != NoSymbol) {
val beanSetterName = "set" + nameSuffix
val beanSetter = value.owner.info.decl(beanSetterName)
+ // unlike for the beanGetter, the beanSetter body is generated here. see comment in Namers.
gs.append(setterDef(beanSetter, isBean = true))
}
}
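The new error message above targets uses like the following, where the namer matches the annotation purely by the name BeanProperty (a user-level sketch; Person is an illustrative name):

    import scala.reflect.BeanProperty

    class Person {
      // The namer generates getAge/setAge when it sees an annotation literally
      // named BeanProperty; hiding that name behind a type alias or a renamed
      // import defeats the check, which is what the new error reports.
      @BeanProperty var age: Int = 0
    }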
@@ -1411,6 +1460,40 @@ trait Typers { self: Analyzer =>
List(stat)
}
+ /**
+ * The annotations amongst `annots` that should go on a member of class
+ * `memberClass` (field, getter, setter, beanGetter, beanSetter, param)
+ * If 'keepClean' is true, annotations without any meta-annotation are kept
+ */
+ protected def memberAnnots(annots: List[AnnotationInfo], memberClass: Symbol, keepClean: Boolean = false) = {
+
+ def hasMatching(metaAnnots: List[AnnotationInfo], orElse: => Boolean) = {
+ // either one of the meta-annotations matches the `memberClass`
+ metaAnnots.exists(_.atp.typeSymbol == memberClass) ||
+ // else, if there is no `target` meta-annotation at all, use the default case
+ (metaAnnots.forall(ann => {
+ val annClass = ann.atp.typeSymbol
+ annClass != FieldTargetClass && annClass != GetterTargetClass &&
+ annClass != SetterTargetClass && annClass != BeanGetterTargetClass &&
+ annClass != BeanSetterTargetClass && annClass != ParamTargetClass
+ }) && orElse)
+ }
+
+ // There was no meta-annotation on `ann`. Check whether the class annotations of
+ // `ann` include a `target` annotation; otherwise put `ann` only on fields.
+ def noMetaAnnot(ann: AnnotationInfo) = {
+ hasMatching(ann.atp.typeSymbol.annotations, keepClean)
+ }
+
+ annots.filter(ann => ann.atp match {
+ // the annotation type has meta-annotations, e.g. @(foo @getter)
+ case AnnotatedType(metaAnnots, _, _) =>
+ hasMatching(metaAnnots, noMetaAnnot(ann))
+ // there are no meta-annotations, e.g. @foo
+ case _ => noMetaAnnot(ann)
+ })
+ }
+
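A user-level sketch of the annotation targeting that memberAnnots implements, assuming the meta-annotations under scala.annotation.target available in this era (the audited annotation is hypothetical, for illustration only):

    import scala.annotation.StaticAnnotation
    import scala.annotation.target.getter

    class audited extends StaticAnnotation   // hypothetical marker annotation

    class Account {
      // With the @getter meta-annotation the audited annotation is copied to the
      // generated getter; with no target meta-annotation at all it would default
      // to the underlying field (the keepClean case above).
      @(audited @getter) var balance: Int = 0
    }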
protected def enterSyms(txt: Context, trees: List[Tree]) = {
var txt0 = txt
for (tree <- trees) txt0 = enterSym(txt0, tree)
@@ -1430,6 +1513,8 @@ trait Typers { self: Analyzer =>
*/
def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
val clazz = context.owner
+ // complete lazy annotations
+ val annots = clazz.annotations
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
val self1 = templ.self match {
@@ -1444,7 +1529,7 @@ trait Typers { self: Analyzer =>
// val tpt1 = checkNoEscaping.privates(clazz.thisSym, typedType(tpt))
// treeCopy.ValDef(vd, mods, name, tpt1, EmptyTree) setType NoType
// but this leads to cycles for existential self types ==> #2545
- if (self1.name != nme.WILDCARD) context.scope enter self1.symbol
+ if (self1.name != nme.WILDCARD) context.scope enter self1.symbol
val selfType =
if (clazz.isAnonymousClass && !phase.erasedTypes)
intersectionType(clazz.info.parents, clazz.owner)
@@ -1484,10 +1569,17 @@ trait Typers { self: Analyzer =>
val typer1 = constrTyperIf(sym.hasFlag(PARAM) && sym.owner.isConstructor)
val typedMods = removeAnnotations(vdef.mods)
+ // complete lazy annotations
+ val annots = sym.annotations
+
var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
- if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.hasFlag(MUTABLE))
- error(vdef.pos, "values cannot be volatile")
+ if (sym.hasAnnotation(definitions.VolatileAttr)) {
+ if (!sym.hasFlag(MUTABLE))
+ error(vdef.pos, "values cannot be volatile")
+ else if (sym.hasFlag(FINAL))
+ error(vdef.pos, "final vars cannot be volatile")
+ }
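A minimal sketch of what the extended check now accepts and rejects (illustrative code, not part of the patch):

    class Flags {
      @volatile var running = true        // ok: a mutable, non-final var may be volatile
      // @volatile val done = false       // error: values cannot be volatile
      // @volatile final var count = 0    // error: final vars cannot be volatile
    }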
val rhs1 =
if (vdef.rhs.isEmpty) {
if (sym.isVariable && sym.owner.isTerm && phase.id <= currentRun.typerPhase.id)
@@ -1495,7 +1587,7 @@ trait Typers { self: Analyzer =>
vdef.rhs
} else {
val tpt2 = if (sym hasFlag DEFAULTPARAM) {
- // When typechecking default parameter, replace all type parameters in the expected type by Wildcarad.
+ // When typechecking default parameter, replace all type parameters in the expected type by Wildcard.
// This allows defining "def foo[T](a: T = 1)"
val tparams =
if (sym.owner.isConstructor) sym.owner.owner.info.typeParams
@@ -1508,11 +1600,11 @@ trait Typers { self: Analyzer =>
}
// allow defaults on by-name parameters
if (sym hasFlag BYNAMEPARAM)
- if (tpt1.tpe.typeArgs.isEmpty) WildcardType // during erasure tpt1 is Funciton0
+ if (tpt1.tpe.typeArgs.isEmpty) WildcardType // during erasure tpt1 is Function0
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, tpt2)
+ newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
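A sketch of the two situations handled above (hypothetical definitions): replacing the type parameters of the expected type by wildcards lets a default value be checked independently of later instantiations, and by-name parameters may carry defaults as well.

    object Defaults {
      // T is replaced by a wildcard while checking the default, so the literal 1 is accepted here
      def foo[T](a: T = 1): T = a

      // defaults are also allowed on by-name parameters
      def orElse(x: Int)(alt: => Int = 0): Int = if (x != 0) x else alt
    }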
@@ -1524,15 +1616,15 @@ trait Typers { self: Analyzer =>
* @param rhs ...
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
- if (settings.debug.value) log("computing param aliases for "+clazz+":"+clazz.primaryConstructor.tpe+":"+rhs);//debug
+ if (settings.debug.value) log("computing param aliases for "+clazz+":"+clazz.primaryConstructor.tpe+":"+rhs)//debug
def decompose(call: Tree): (Tree, List[Tree]) = call match {
case Apply(fn, args) =>
val (superConstr, args1) = decompose(fn)
- val formals = fn.tpe.paramTypes
- val args2 = if (formals.isEmpty || !isRepeatedParamType(formals.last)) args
- else args.take(formals.length - 1) ::: List(EmptyTree)
- if (args2.length != formals.length)
- assert(false, "mismatch " + clazz + " " + formals + " " + args2);//debug
+ val params = fn.tpe.params
+ val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
+ else args.take(params.length - 1) ::: List(EmptyTree)
+ if (args2.length != params.length)
+ assert(false, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
(superConstr, args1 ::: args2)
case Block(stats, expr) if !stats.isEmpty =>
decompose(stats.last)
@@ -1555,7 +1647,7 @@ trait Typers { self: Analyzer =>
if (!superClazz.hasFlag(JAVA)) {
val superParamAccessors = superClazz.constrParamAccessors
if (superParamAccessors.length == superArgs.length) {
- List.map2(superParamAccessors, superArgs) { (superAcc, superArg) =>
+ (superParamAccessors, superArgs).zipped map { (superAcc, superArg) =>
superArg match {
case Ident(name) =>
if (vparamss.exists(_.exists(_.symbol == superArg.symbol))) {
@@ -1571,7 +1663,7 @@ trait Typers { self: Analyzer =>
ownAcc = ownAcc.accessed
if (!ownAcc.isVariable && !alias.accessed.isVariable) {
if (settings.debug.value)
- log("" + ownAcc + " has alias "+alias + alias.locationString);//debug
+ log("" + ownAcc + " has alias "+alias + alias.locationString) //debug
ownAcc.asInstanceOf[TermSymbol].setAlias(alias)
}
}
@@ -1608,75 +1700,43 @@ trait Typers { self: Analyzer =>
}
}
- /** does given name name an identifier visible at this point?
- *
- * @param name the given name
- * @return <code>true</code> if an identifier with the given name is visible.
- */
- def namesSomeIdent(name: Name): Boolean = namesWhatIdent(name).isDefined
-
- /** If this name returns a visible identifier, return its symbol.
- *
- * @param name the given name
- * @return <code>Some(sym)</code> if an ident is visible, None otherwise.
- */
- def namesWhatIdent(name: Name): Option[Symbol] = {
- var cx = context
- while (cx != NoContext) {
- val pre = cx.enclClass.prefix
- val defEntry = cx.scope.lookupEntry(name)
- if ((defEntry ne null) && defEntry.sym.exists)
- return Some(defEntry.sym)
-
- cx = cx.enclClass
- (pre member name filter (sym => sym.exists && context.isAccessible(sym, pre, false))) match {
- case NoSymbol => cx = cx.outer
- case other => return Some(other)
+ def typedUseCase(useCase: UseCase) {
+ def stringParser(str: String): syntaxAnalyzer.Parser = {
+ val file = new BatchSourceFile(context.unit.source.file, str) {
+ override def positionInUltimateSource(pos: Position) = {
+ pos.withSource(context.unit.source, useCase.pos.start)
+ }
}
+ val unit = new CompilationUnit(file)
+ new syntaxAnalyzer.UnitParser(unit)
}
- context.imports map (_ importedSymbol name) find (_ != NoSymbol)
- }
-
- /** Does this tree declare a val or def with the same name as one in scope?
- * This only catches identifiers in the same file, so more work is needed.
- *
- * @param tree the given tree
- * @param filt filter for any conflicting symbols found -- false means ignore
- */
- def checkShadowings(tree: Tree, filt: (Symbol) => Boolean = _ => true) {
- def sameFile(other: Symbol) =
- (tree.symbol != null) && tree.symbol.sourceFile == other.sourceFile
- def inFile(other: Symbol) =
- if (sameFile(other)) ""
- else if (other.sourceFile != null) "in %s ".format(other.sourceFile)
- else ""
-
- def positionStr(other: Symbol) = other.pos match {
- case NoPosition => inFile(other) match { case "" => "(location unknown) " ; case x => x }
- case pos => "%sat line %s\n%s".format(inFile(other), pos.line, pos.lineContent) + """ /* is shadowed by */"""
- }
- def include(v: ValOrDefDef, other: Symbol) = {
- // shadowing on the same line is a good bet for noise
- (v.pos == NoPosition || other.pos == NoPosition || v.pos.line != other.pos.line) &&
- // not likely we'll shadow a whole package without realizing it
- !other.isPackage &&
- // (v.symbol == null || !v.symbol.hasTransOwner(other)) &&
- filt(other)
- }
-
- tree match {
- // while I try to figure out how to limit the noise far enough to make this
- // genuinely useful, I'm setting minimum identifier length to 3 to omit all
- // those x's and i's we so enjoy reusing.
- case v: ValOrDefDef if v.name.toString.length > 2 =>
- namesWhatIdent(v.name) map { other =>
- if (include(v, other) && unit != null) {
- val fstr = "%s (%s) shadows usage %s"
- unit.warning(v.pos, fstr.format(v.name, v.tpt, positionStr(other)))
- }
+ val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
+ val enclClass = context.enclClass.owner
+ def defineAlias(name: Name) =
+ if (context.scope.lookup(name) == NoSymbol) {
+ lookupVariable(name.toString.substring(1), enclClass) match {
+ case Some(repl) =>
+ silent(_.typedTypeConstructor(stringParser(repl).typ())) match {
+ case tpt: Tree =>
+ val alias = enclClass.newAliasType(useCase.pos, name)
+ val tparams = cloneSymbols(tpt.tpe.typeSymbol.typeParams, alias)
+ alias setInfo polyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
+ context.scope.enter(alias)
+ case _ =>
+ }
+ case _ =>
}
- case _ =>
- }
+ }
+ for (tree <- trees; t <- tree)
+ t match {
+ case Ident(name) if (name.length > 0 && name(0) == '$') => defineAlias(name)
+ case _ =>
+ }
+ useCase.aliases = context.scope.toList
+ namer.enterSyms(trees)
+ typedStats(trees, NoSymbol)
+ useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
+// println("defined use cases: "+(useCase.defined map (sym => sym+":"+sym.tpe)))
}
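For orientation, a hedged sketch of the scaladoc feature this method serves (the class and `@define` name are invented): a `$`-prefixed identifier in a `@usecase` body is resolved against the enclosing class's `@define` variables and entered as a type alias before the use case is type checked.

    /** @define Coll MyColl */
    abstract class MyColl[A] {
      /** Applies `f` to every element of this $Coll.
       *  @usecase def map[B](f: A => B): $Coll[B]
       */
      def map[B](f: A => B): MyColl[B]
    }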
/**
@@ -1686,26 +1746,31 @@ trait Typers { self: Analyzer =>
def typedDefDef(ddef: DefDef): DefDef = {
val meth = ddef.symbol
- // If warnings are enabled, attempt to alert about variable shadowing. This only
- // catches method parameters shadowing identifiers declared in the same file, so more
- // work is needed. Most of the code here is to filter out false positives.
- def isAuxConstructor(sym: Symbol) = sym.isConstructor && !sym.isPrimaryConstructor
- if (settings.YwarnShadow.value && !isAuxConstructor(ddef.symbol)) {
- for (v <- ddef.vparamss.flatten ; if v.symbol != null && !(v.symbol hasFlag SYNTHETIC))
- checkShadowings(v, (sym => !sym.isDeferred && !sym.isMethod))
- }
-
reenterTypeParams(ddef.tparams)
reenterValueParams(ddef.vparamss)
+
+ // for `val` and `var` parameters, look at the `target` meta-annotation
+ if (phase.id <= currentRun.typerPhase.id && meth.isPrimaryConstructor) {
+ for (vparams <- ddef.vparamss; vd <- vparams) {
+ if (vd hasFlag PARAMACCESSOR) {
+ val sym = vd.symbol
+ sym.setAnnotations(memberAnnots(sym.annotations, ParamTargetClass, keepClean = true))
+ }
+ }
+ }
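A hedged sketch of the effect of this filtering (the annotation `note` is invented; the meta-annotations are assumed to be those from `scala.annotation.target`): with `keepClean = true`, a plain annotation on a `val` constructor parameter stays on the parameter, while meta-annotations can redirect it to the field or accessor.

    import scala.annotation.target.{field, getter}

    class note extends StaticAnnotation

    class Person(@note val name: String,                 // @note remains on the constructor parameter
                 @(note @field @getter) val age: Int)    // carried by the field and getter instead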
+
val tparams1 = ddef.tparams mapConserve typedTypeDef
val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
+ // complete lazy annotations
+ val annots = meth.annotations
+
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
if (isRepeatedParamType(vparam1.symbol.tpe))
error(vparam1.pos, "*-parameter must come last")
var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
- if (!settings.Xexperimental.value) {
+ if (!settings.YdepMethTpes.value) {
for (vparams <- vparamss1; vparam <- vparams) {
checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
}
@@ -1715,7 +1780,7 @@ trait Typers { self: Analyzer =>
ddef.tpt.setType(tpt1.tpe)
val typedMods = removeAnnotations(ddef.mods)
var rhs1 =
- if (ddef.name == nme.CONSTRUCTOR) {
+ if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasFlag(STATIC)) { // need this to make it possible to generate static ctors
if (!meth.isPrimaryConstructor &&
(!meth.owner.isClass ||
meth.owner.isModuleClass ||
@@ -1724,11 +1789,9 @@ trait Typers { self: Analyzer =>
error(ddef.pos, "constructor definition not allowed here")
typed(ddef.rhs)
} else {
- transformedOrTyped(ddef.rhs, tpt1.tpe)
+ transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- checkMethodStructuralCompatible(meth)
-
if (meth.isPrimaryConstructor && meth.isClassConstructor &&
phase.id <= currentRun.typerPhase.id && !reporter.hasErrors)
computeParamAliases(meth.owner, vparamss1, rhs1)
@@ -1744,9 +1807,29 @@ trait Typers { self: Analyzer =>
}
def typedTypeDef(tdef: TypeDef): TypeDef = {
- reenterTypeParams(tdef.tparams) // @M!
- val tparams1 = tdef.tparams mapConserve (typedTypeDef) // @M!
+ def typeDefTyper = {
+ if(tdef.tparams isEmpty) Typer.this
+ else newTyper(context.makeNewScope(tdef, tdef.symbol))
+ }
+ typeDefTyper.typedTypeDef0(tdef)
+ }
+
+ // call typedTypeDef instead
+ // a TypeDef with type parameters must always be type checked in a new scope
+ private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+ tdef.symbol.initialize
+ reenterTypeParams(tdef.tparams)
+ val tparams1 = tdef.tparams mapConserve {typedTypeDef(_)}
val typedMods = removeAnnotations(tdef.mods)
+ // complete lazy annotations
+ val annots = tdef.symbol.annotations
+
+ // @specialized should not be pickled when compiling with -no-specialize
+ if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) {
+ tdef.symbol.removeAnnotation(definitions.SpecializedClass)
+ tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass)
+ }
+
val rhs1 = checkNoEscaping.privates(tdef.symbol, typedType(tdef.rhs))
checkNonCyclic(tdef.symbol)
if (tdef.symbol.owner.isType)
@@ -1769,15 +1852,33 @@ trait Typers { self: Analyzer =>
}
}
+ private def isLoopHeaderLabel(name: Name): Boolean =
+ name.startsWith("while$") || name.startsWith("doWhile$")
+
def typedLabelDef(ldef: LabelDef): LabelDef = {
- val restpe = ldef.symbol.tpe.resultType
- val rhs1 = typed(ldef.rhs, restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
- treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
+ if (!isLoopHeaderLabel(ldef.symbol.name) || phase.id > currentRun.typerPhase.id) {
+ val restpe = ldef.symbol.tpe.resultType
+ val rhs1 = typed(ldef.rhs, restpe)
+ ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
+ } else {
+ val initpe = ldef.symbol.tpe.resultType
+ val rhs1 = typed(ldef.rhs)
+ val restpe = rhs1.tpe
+ if (restpe == initpe) { // stable result, no need to check again
+ ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
+ } else {
+ context.scope.unlink(ldef.symbol)
+ val sym2 = namer.enterInScope(
+ context.owner.newLabel(ldef.pos, ldef.name) setInfo MethodType(List(), restpe))
+ val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
+ ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs2) setSymbol sym2 setType restpe
+ }
+ }
}
- protected def typedFunctionIDE(fun : Function, txt : Context) = {}
-
/**
* @param block ...
* @param mode ...
@@ -1785,49 +1886,69 @@ trait Typers { self: Analyzer =>
* @return ...
*/
def typedBlock(block: Block, mode: Int, pt: Type): Block = {
- namer.enterSyms(block.stats)
- for (stat <- block.stats) {
- if (onlyPresentation && stat.isDef) {
- var e = context.scope.lookupEntry(stat.symbol.name)
- while ((e ne null) && (e.sym ne stat.symbol)) e = e.tail
- if (e eq null) context.scope.enter(stat.symbol)
- }
- if (settings.YwarnShadow.value) checkShadowings(stat)
- enterLabelDef(stat)
- }
- if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
- block match {
- case block @ Block(List(classDef @ ClassDef(_, _, _, _)), newInst @ Apply(Select(New(_), _), _)) =>
- // The block is an anonymous class definitions/instantiation pair
- // -> members that are hidden by the type of the block are made private
- val visibleMembers = pt match {
- case WildcardType => classDef.symbol.info.decls.toList
- case BoundedWildcardType(TypeBounds(lo, hi)) => lo.members
- case _ => pt.members
- }
- for (member <- classDef.symbol.info.decls.toList
- if member.isTerm && !member.isConstructor &&
- member.allOverriddenSymbols.isEmpty &&
- (!member.hasFlag(PRIVATE) && member.privateWithin == NoSymbol) &&
- !(visibleMembers exists { visible =>
- visible.name == member.name &&
- member.tpe <:< visible.tpe.substThis(visible.owner, ThisType(classDef.symbol))
- })
- ) {
- member.resetFlag(PROTECTED)
- member.resetFlag(LOCAL)
- member.setFlag(PRIVATE)
- member.privateWithin = NoSymbol
- }
- case _ =>
+ val syntheticPrivates = new ListBuffer[Symbol]
+ try {
+ namer.enterSyms(block.stats)
+ for (stat <- block.stats) enterLabelDef(stat)
+
+ if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
+ // This is very tricky stuff, because we are navigating
+ // the Scylla and Charybdis of anonymous classes and what to return
+ // from them here. On the one hand, we cannot admit
+ // every non-private member of an anonymous class as a part of
+ // the structural type of the enclosing block. This runs afoul of
+ // the restriction that a structural type may not refer to an enclosing
+ // type parameter or abstract types (which in turn is necessitated
+ // by what can be done in Java reflection). On the other hand,
+ // making every term member private conflicts with private escape checking
+ // see ticket #3174 for an example.
+ // The cleanest way forward is if we would find a way to suppress
+ // structural type checking for these members and maybe defer
+ // type errors to the places where members are called. But that would
+ // be a big refactoring and also a big departure from existing code.
+ // Probably the safest fix for 2.8 is to keep members of an anonymous
+ // class that are not mentioned in a parent type private (as before)
+ // but to disable escape checking for code that's in the same anonymous class.
+ // That's what's done here.
+ // We really should go back and think hard about whether we can find a better
+ // way to address the problem of escaping idents on the one hand and well-formed
+ // structural types on the other.
+ block match {
+ case block @ Block(List(classDef @ ClassDef(_, _, _, _)), newInst @ Apply(Select(New(_), _), _)) =>
+ // The block is an anonymous class definitions/instantiation pair
+ // -> members that are hidden by the type of the block are made private
+ val visibleMembers = pt match {
+ case WildcardType => classDef.symbol.info.decls.toList
+ case BoundedWildcardType(TypeBounds(lo, hi)) => lo.members
+ case _ => pt.members
+ }
+ for (member <- classDef.symbol.info.decls.toList
+ if member.isTerm && !member.isConstructor &&
+ member.allOverriddenSymbols.isEmpty &&
+ (!member.hasFlag(PRIVATE) && member.privateWithin == NoSymbol) &&
+ !(visibleMembers exists { visible =>
+ visible.name == member.name &&
+ member.tpe <:< visible.tpe.substThis(visible.owner, ThisType(classDef.symbol))
+ })
+ ) {
+ member.resetFlag(PROTECTED)
+ member.resetFlag(LOCAL)
+ member.setFlag(PRIVATE | SYNTHETIC_PRIVATE)
+ syntheticPrivates += member
+ member.privateWithin = NoSymbol
+ }
+ case _ =>
+ }
}
+ val stats1 = typedStats(block.stats, context.owner)
+ val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt)
+ treeCopy.Block(block, stats1, expr1)
+ .setType(if (treeInfo.isPureExpr(block)) expr1.tpe else expr1.tpe.deconst)
+ } finally {
+ // re-enable checking of escaping privates from the outside and recycle the
+ // transient flag
+ for (sym <- syntheticPrivates) sym resetFlag SYNTHETIC_PRIVATE
}
- val stats1 = typedStats(block.stats, context.owner)
- val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt)
- val block1 = treeCopy.Block(block, stats1, expr1)
- .setType(if (treeInfo.isPureExpr(block)) expr1.tpe else expr1.tpe.deconst)
- //checkNoEscaping.locals(context.scope, pt, block1)
- block1
}
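An illustrative sketch of the tension the long comment describes (names invented): a helper defined only inside the anonymous class must not become part of the block's structural type, yet uses of it from within the same anonymous class have to keep typechecking, hence the transient SYNTHETIC_PRIVATE flag.

    object Demo {
      trait Greeter { def greet(name: String): String }

      val g: Greeter = new Greeter {
        // not a member of Greeter: made synthetically private so it cannot leak into
        // the structural type of the block, but still visible to greet below
        def prefix = "Hello, "
        def greet(name: String) = prefix + name
      }
    }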
/**
@@ -1842,6 +1963,13 @@ trait Typers { self: Analyzer =>
error(x.pos, "_* may only come last")
val pat1: Tree = typedPattern(cdef.pat, pattpe)
+
+ if (forInteractive) {
+ for (bind @ Bind(name, _) <- cdef.pat)
+ if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
+ namer.enterIfNotThere(bind.symbol)
+ }
+
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
else typed(cdef.guard, BooleanClass.tpe)
var body1: Tree = typed(cdef.body, pt)
@@ -1893,7 +2021,7 @@ trait Typers { self: Analyzer =>
||
pt.typeSymbol == PartialFunctionClass &&
fun.vparams.length == 1 && fun.body.isInstanceOf[Match])
- && // see bug901 for a reason why next conditions are neeed
+ && // see bug901 for a reason why next conditions are needed
(pt.normalize.typeArgs.length - 1 == fun.vparams.length
||
fun.vparams.exists(_.tpt.isEmpty)))
@@ -1906,7 +2034,7 @@ trait Typers { self: Analyzer =>
if (fun.vparams.length != argpts.length)
errorTree(fun, "wrong number of parameters; expected = " + argpts.length)
else {
- val vparamSyms = List.map2(fun.vparams, argpts) { (vparam, argpt) =>
+ val vparamSyms = (fun.vparams, argpts).zipped map { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe =
if (isFullyDefined(argpt)) argpt
@@ -1960,7 +2088,10 @@ trait Typers { self: Analyzer =>
namer.enterSyms(stats)
// need to delay rest of typedRefinement to avoid cyclic reference errors
unit.toCheck += { () =>
- val stats1 = typedStats(stats, NoSymbol)
+ // go to next outer context which is not silent, see #3614
+ var c = context
+ while (!c.reportGeneralErrors) c = c.outer
+ val stats1 = newTyper(c).typedStats(stats, NoSymbol)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -1971,7 +2102,10 @@ trait Typers { self: Analyzer =>
}
}
- def typedImport(imp : Import) : Import = imp
+ def typedImport(imp : Import) : Import = (transformed remove imp) match {
+ case Some(imp1: Import) => imp1
+ case None => log("unhandled import: "+imp+" in "+unit); imp
+ }
def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val inBlock = exprOwner == context.owner
@@ -1984,20 +2118,18 @@ trait Typers { self: Analyzer =>
else
stat match {
case imp @ Import(_, _) =>
- val imp0 = typedImport(imp)
- if (imp0 ne null) {
- context = context.makeNewImport(imp0)
- imp0.symbol.initialize
- imp0
- } else
- EmptyTree
+ context = context.makeNewImport(imp)
+ imp.symbol.initialize
+ typedImport(imp)
case _ =>
if (localTarget && !includesTargetPos(stat)) {
+ // skip typechecking of statements in a sequence where some other statement includes
+ // the target position
stat
} else {
val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this
else newTyper(context.make(stat, exprOwner))
- val result = checkDead(localTyper.typed(stat))
+ val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
if (treeInfo.isSelfOrSuperConstrCall(result)) {
context.inConstructorSuffix = true
if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
@@ -2014,27 +2146,29 @@ trait Typers { self: Analyzer =>
!(accessed hasFlag ACCESSOR) && accessed.isPrivateLocal
def checkNoDoubleDefsAndAddSynthetics(stats: List[Tree]): List[Tree] = {
- val scope = if (inBlock) context.scope else context.owner.info.decls;
+ val scope = if (inBlock) context.scope else context.owner.info.decls
val newStats = new ListBuffer[Tree]
var needsCheck = true
var moreToAdd = true
while (moreToAdd) {
val initSize = scope.size
- var e = scope.elems;
+ var e = scope.elems
while ((e ne null) && e.owner == scope) {
// check no double def
if (needsCheck) {
- var e1 = scope.lookupNextEntry(e);
+ var e1 = scope.lookupNextEntry(e)
while ((e1 ne null) && e1.owner == scope) {
if (!accesses(e.sym, e1.sym) && !accesses(e1.sym, e.sym) &&
(e.sym.isType || inBlock || (e.sym.tpe matches e1.sym.tpe)))
// default getters are defined twice when multiple overloads have defaults. an
// error for this is issued in RefChecks.checkDefaultsInOverloaded
- if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasFlag(DEFAULTPARAM))
+ if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasFlag(DEFAULTPARAM)) {
error(e.sym.pos, e1.sym+" is defined twice"+
{if(!settings.debug.value) "" else " in "+unit.toString})
- e1 = scope.lookupNextEntry(e1);
+ scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
+ }
+ e1 = scope.lookupNextEntry(e1)
}
}
@@ -2054,7 +2188,14 @@ trait Typers { self: Analyzer =>
moreToAdd = initSize != scope.size
}
if (newStats.isEmpty) stats
- else stats ::: newStats.toList
+ else {
+ val (defaultGetters, others) = newStats.toList.partition {
+ case DefDef(mods, _, _, _, _, _) => mods.hasFlag(DEFAULTPARAM)
+ case _ => false
+ }
+ // default getters first: see #2489
+ defaultGetters ::: stats ::: others
+ }
}
val result = stats mapConserve (typedStat)
if (phase.erasedTypes) result
@@ -2068,11 +2209,11 @@ trait Typers { self: Analyzer =>
args mapConserve (arg => typedArg(arg, mode, 0, WildcardType))
def typedArgs(args: List[Tree], mode: Int, originalFormals: List[Type], adaptedFormals: List[Type]) = {
- def newmode(i: Int) =
- if (isVarArgs(originalFormals) && i >= originalFormals.length - 1) STARmode else 0
-
- for (((arg, formal), i) <- (args zip adaptedFormals).zipWithIndex) yield
- typedArg(arg, mode, newmode(i), formal)
+ var newmodes = originalFormals map ((tp: Type) => if (tp.typeSymbol != ByNameParamClass) BYVALmode else 0)
+ if (isVarArgTpes(originalFormals)) // TR check really necessary?
+ newmodes = newmodes.take(newmodes.length-1) ::: List.fill(args.length - originalFormals.length + 1)(STARmode | BYVALmode)
+ for (((arg, formal), m) <- ((args zip adaptedFormals) zip newmodes)) yield
+ typedArg(arg, mode, m, formal)
}
/** Does function need to be instantiated, because a missing parameter
@@ -2083,13 +2224,13 @@ trait Typers { self: Analyzer =>
val losym = tparam.info.bounds.lo.typeSymbol
losym != NothingClass && losym != NullClass
}
- List.exists2(formals, args) {
+ (formals, args).zipped exists {
case (formal, Function(vparams, _)) =>
(vparams exists (_.tpt.isEmpty)) &&
vparams.length <= MaxFunctionArity &&
(formal baseType FunctionClass(vparams.length) match {
case TypeRef(_, _, formalargs) =>
- List.exists2(formalargs, vparams) ((formalarg, vparam) =>
+ (formalargs, vparams).zipped.exists ((formalarg, vparam) =>
vparam.tpt.isEmpty && (tparams exists (formalarg contains))) &&
(tparams forall isLowerBounded)
case _ =>
@@ -2105,15 +2246,21 @@ trait Typers { self: Analyzer =>
def isNamedApplyBlock(tree: Tree) =
context.namedApplyBlockInfo exists (_._1 == tree)
- /**
- * @param tree ...
- * @param fun0 ...
- * @param args ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
+ def callToCompanionConstr(context: Context, calledFun: Symbol) = {
+ if (calledFun.isConstructor) {
+ val methCtx = context.enclMethod
+ if (methCtx != NoContext) {
+ val contextFun = methCtx.tree.symbol
+ contextFun.isPrimaryConstructor && contextFun.owner.isModuleClass &&
+ companionModuleOf(calledFun.owner, context).moduleClass == contextFun.owner
+ } else false
+ } else false
+ }
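A minimal sketch of the pattern this guards against (illustrative class): the module's primary constructor would have to call default getters defined on its companion class, which is rejected with the error added below.

    class Host(val port: Int = 8080)

    // error: module extending its companion class cannot use default constructor arguments
    object Host extends Host()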
+
def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def errTree = setError(treeCopy.Apply(tree, fun0, args))
+ def errorTree(msg: String) = { error(tree.pos, msg); errTree }
+
var fun = fun0
if (fun.hasSymbol && (fun.symbol hasFlag OVERLOADED)) {
// remove alternatives with wrong number of parameters without looking at types.
@@ -2130,6 +2277,10 @@ trait Typers { self: Analyzer =>
val pre = fun.symbol.tpe.prefix
var sym = fun.symbol filter { alt =>
+ // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
+ // now fixed by using isWeaklyCompatible in exprTypeArgs
+ // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed)
+ // (I had expected inferMethodAlternative to pick up the slack introduced by using WildcardType here)
isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
}
if (sym hasFlag OVERLOADED) {
@@ -2154,22 +2305,24 @@ trait Typers { self: Analyzer =>
val args1 = args map {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
// named args: only type the righthand sides ("unknown identifier" errors otherwise)
- val rhs1 = typedArg(rhs, amode, 0, WildcardType)
+ val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
argtpes += NamedType(name, rhs1.tpe.deconst)
// the assign is untyped; that's ok because we call doTypedApply
atPos(arg.pos) { new AssignOrNamedArg(arg.lhs , rhs1) }
case arg =>
- val arg1 = typedArg(arg, amode, 0, WildcardType)
+ val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
argtpes += arg1.tpe.deconst
arg1
}
context.undetparams = undetparams
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt)
+ inferMethodAlternative(fun, undetparams, argtpes.toList, pt,
+ varArgsOnly = args.nonEmpty && treeInfo.isWildcardStarArg(args.last))
doTypedApply(tree, adapt(fun, funMode(mode), WildcardType), args1, mode, pt)
case mt @ MethodType(params, _) =>
+ val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
- val formals = formalTypes(mt.paramTypes, args.length)
+ val formals = formalTypes(paramTypes, args.length)
/** Try packing all arguments into a Tuple and apply `fun'
* to that. This is the last thing which is tried (after
@@ -2179,12 +2332,14 @@ trait Typers { self: Analyzer =>
// if 1 formal, 1 arg (a tuple), otherwise unmodified args
val tupleArgs = actualArgs(tree.pos.makeTransparent, args, formals.length)
- if (tupleArgs.length != args.length) {
+ if (tupleArgs.length != args.length && !isUnitForVarArgs(args, params)) {
// expected one argument, but got 0 or >1 ==> try applying to tuple
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match {
- case t: Tree => Some(t)
+ case t: Tree =>
+// println("tuple conversion to "+t+" for "+mt)//DEBUG
+ Some(t)
case ex =>
context.undetparams = savedUndetparams
None
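For context, a small sketch of the adaptation being retried here (illustrative method): when a method expects a single parameter but several arguments are given, the arguments are packed into a tuple and the application is tried again.

    object TupleApplyDemo {
      def sum(p: (Int, Int)) = p._1 + p._2

      val a = sum(1, 2)     // retried as sum((1, 2)) by the tuple-apply fallback
      val b = sum((1, 2))   // the explicit form
    }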
@@ -2198,24 +2353,23 @@ trait Typers { self: Analyzer =>
* and defaults is ruled out by typedDefDef.
*/
def tryNamesDefaults: Tree = {
- if (mt.isErroneous) setError(tree)
+ if (mt.isErroneous) errTree
else if ((mode & PATTERNmode) != 0)
// #2064
- errorTree(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
+ errorTree("wrong number of arguments for "+ treeSymTypeMsg(fun))
else if (args.length > formals.length) {
tryTupleApply.getOrElse {
- errorTree(tree, "too many arguments for "+treeSymTypeMsg(fun))
+ errorTree("too many arguments for "+treeSymTypeMsg(fun))
}
} else if (args.length == formals.length) {
// we don't need defaults. names were used, so this application is transformed
// into a block (@see transformNamedApplication in NamesDefaults)
val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
if (namelessArgs exists (_.isErroneous)) {
- setError(tree)
+ errTree
} else if (!isIdentity(argPos) && (formals.length != params.length))
// !isIdentity indicates that named arguments are used to re-order arguments
- errorTree(tree, "when using named arguments, the vararg parameter "+
- "has to be specified exactly once")
+ errorTree("when using named arguments, the vararg parameter has to be specified exactly once")
else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) {
// if there's no re-ordering, and fun is not transformed, no need to transform
// more than an optimization, e.g. important in "synchronized { x = update-x }"
@@ -2229,7 +2383,7 @@ trait Typers { self: Analyzer =>
// calls to the default getters. Example:
// foo[Int](a)() ==> foo[Int](a)(b = foo$qual.foo$default$2[Int](a))
val fun1 = transformNamedApplication(Typer.this, mode, pt)(fun, x => x)
- if (fun1.isErroneous) setError(tree)
+ if (fun1.isErroneous) errTree
else {
assert(isNamedApplyBlock(fun1), fun1)
val NamedApplyInfo(qual, targs, previousArgss, _) = context.namedApplyBlockInfo.get._2
@@ -2241,10 +2395,14 @@ trait Typers { self: Analyzer =>
true
case _ => false
}
- val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus)
- if (allArgs.length == formals.length) {
+ val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus, context)
+ val funSym = fun1 match { case Block(_, expr) => expr.symbol }
+ if (allArgs.length != args.length && callToCompanionConstr(context, funSym)) {
+ errorTree("module extending its companion class cannot use default constructor arguments")
+ } else if (allArgs.length == formals.length) {
// useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
- context.diagnostic = "Error occured in an application involving default arguments." :: context.diagnostic
+ val note = "Error occurred in an application involving default arguments."
+ if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
} else {
tryTupleApply.getOrElse {
@@ -2255,7 +2413,7 @@ trait Typers { self: Analyzer =>
val sOpt = if (missing.length > 1) "s" else ""
".\nUnspecified value parameter"+ sOpt +" "+ missingStr
}
- errorTree(tree, "not enough arguments for "+treeSymTypeMsg(fun) + suffix)
+ errorTree("not enough arguments for "+treeSymTypeMsg(fun) + suffix)
}
}
}
@@ -2270,8 +2428,11 @@ trait Typers { self: Analyzer =>
} else {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
- val args1 = typedArgs(args, argMode(fun, mode), mt.paramTypes, formals)
- val restpe = mt.resultType(args1 map (_.tpe)) // instantiate dependent method types
+ val args1 = typedArgs(args, argMode(fun, mode), paramTypes, formals)
+ // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
+ // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
+ // precise(foo) : foo.type => foo.type
+ val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
case MethodType(_, rtp) if ((mode & PATTERNmode) != 0) => rtp
case _ => tp
@@ -2298,7 +2459,7 @@ trait Typers { self: Analyzer =>
}
}
- if (fun.symbol == List_apply && args.isEmpty && !onlyPresentation) {
+ if (fun.symbol == List_apply && args.isEmpty && !forInteractive) {
atPos(tree.pos) { gen.mkNil setType restpe }
} else {
constfold(treeCopy.Apply(tree, fun, args1).setType(ifPatternSkipFormals(restpe)))
@@ -2316,24 +2477,31 @@ trait Typers { self: Analyzer =>
inferExprInstance(fun, tparams, WildcardType, true)
doTypedApply(tree, fun, args, mode, pt)
} else {
- assert((mode & PATTERNmode) == 0); // this case cannot arise for patterns
+ assert((mode & PATTERNmode) == 0) // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
- val strictTargs = List.map2(lenientTargs, tparams)((targ, tparam) =>
+ val strictTargs = (lenientTargs, tparams).zipped map ((targ, tparam) =>
if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK
- def typedArgToPoly(arg: Tree, formal: Type): Tree = {
+ var remainingParams = paramTypes
+ def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
- val arg1 = typedArg(arg, argMode(fun, mode), POLYmode, lenientPt)
+ val newmode =
+ if (remainingParams.head.typeSymbol == ByNameParamClass) POLYmode
+ else POLYmode | BYVALmode
+ if (remainingParams.tail.nonEmpty) remainingParams = remainingParams.tail
+ // println("typedArgToPoly(arg, formal): "+(arg, formal))
+ val arg1 = typedArg(arg, argMode(fun, mode), newmode, lenientPt)
val argtparams = context.extractUndetparams()
+ // println("typedArgToPoly(arg1, argtparams): "+(arg1, argtparams))
if (!argtparams.isEmpty) {
val strictPt = formal.instantiateTypeParams(tparams, strictTargs)
inferArgumentInstance(arg1, argtparams, strictPt, lenientPt)
}
arg1
}
- val args1 = List.map2(args, formals)(typedArgToPoly)
- if (args1 exists (_.tpe.isError)) setError(tree)
+ val args1 = (args, formals).zipped map typedArgToPoly
+ if (args1 exists (_.tpe.isError)) errTree
else {
- if (settings.debug.value) log("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info));//debug
+ if (settings.debug.value) log("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info)) //debug
// define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
// returns those undetparams which have not been instantiated.
val undetparams = inferMethodInstance(fun, tparams, args1, pt)
@@ -2376,11 +2544,19 @@ trait Typers { self: Analyzer =>
error(fun.pos, "cannot resolve overloaded unapply")
(ErrorType, List())
}
- val (unappFormal, freeVars) = freshArgType(unappType)
+ val (unappFormal, freeVars) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
val context1 = context.makeNewScope(context.tree, context.owner)
- freeVars foreach(sym => context1.scope.enter(sym))
+ freeVars foreach context1.scope.enter
val typer1 = newTyper(context1)
- arg.tpe = typer1.infer.inferTypedPattern(tree.pos, unappFormal, arg.tpe)
+ val pattp = typer1.infer.inferTypedPattern(tree.pos, unappFormal, arg.tpe)
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map { fv =>
+ val skolem = new TypeSkolem(context1.owner, fun.pos, fv.name, fv)
+ skolem.setInfo(fv.info.cloneInfo(skolem))
+ .setFlag(fv.flags | EXISTENTIAL).resetFlag(PARAM)
+ skolem
+ }
+ arg.tpe = pattp.substSym(freeVars, skolems)
//todo: replace arg with arg.asInstanceOf[inferTypedPattern(unappFormal, arg.tpe)] instead.
argDummy.setInfo(arg.tpe) // bq: this line fixed #1281. w.r.t. comment ^^^, maybe good enough?
}
@@ -2392,7 +2568,7 @@ trait Typers { self: Analyzer =>
//Console.println(" contains?"+sym.tpe.decls.lookup(fun.symbol.name));
if(sym != fun.symbol.owner && (sym.isPackageClass||sym.isModuleClass) /*(1)*/ ) { // (1) see 'files/pos/unapplyVal.scala'
if(fun.symbol.owner.isClass) {
- mkThisType(fun.symbol.owner)
+ ThisType(fun.symbol.owner)
} else {
//Console.println("2 ThisType("+fun.symbol.owner+")")
NoPrefix // see 'files/run/unapplyComplex.scala'
@@ -2420,40 +2596,25 @@ trait Typers { self: Analyzer =>
}
val fun1 = typed(fun1untyped)
- if (fun1.tpe.isErroneous) setError(tree)
+ if (fun1.tpe.isErroneous) errTree
else {
val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
val formals1 = formalTypes(formals0, args.length)
if (formals1.length == args.length) {
val args1 = typedArgs(args, mode, formals0, formals1)
if (!isFullyDefined(pt)) assert(false, tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // <pending-change>
- // this would be a better choice (from #1196), but fails due to (broken?) refinements
val itype = glb(List(pt, arg.tpe))
- // </pending-change>
// restore old type (arg is a dummy tree, just needs to pass typechecking)
arg.tpe = oldArgType
- UnApply(fun1, args1) setPos tree.pos setType itype //pt
- //
- // if you use the better itype, then the following happens.
- // the required type looks wrong...
- //
- ///files/pos/bug0646.scala [FAILED]
- //
- //failed with type mismatch;
- // found : scala.xml.NodeSeq{ ... }
- // required: scala.xml.NodeSeq{ ... } with scala.xml.NodeSeq{ ... } with scala.xml.Node on: temp3._data().==("Blabla").&&({
- // exit(temp0);
- // true
- //})
+ UnApply(fun1, args1) setPos tree.pos setType itype
} else {
- errorTree(tree, "wrong number of arguments for "+treeSymTypeMsg(fun))
+ errorTree("wrong number of arguments for "+treeSymTypeMsg(fun))
}
}
/* --- end unapply --- */
case _ =>
- errorTree(tree, fun+" of type "+fun.tpe+" does not take parameters")
+ errorTree(fun+" of type "+fun.tpe+" does not take parameters")
}
}
@@ -2470,15 +2631,27 @@ trait Typers { self: Analyzer =>
hasError = true
annotationError
}
- def needConst(tr: Tree): None.type = {
- error(tr.pos, "annotation argument needs to be a constant; found: "+tr)
- None
+
+ /** Calling constfold right here is necessary because some trees (negated
+ * floats and literals in particular) are not yet folded.
+ */
+ def tryConst(tr: Tree, pt: Type) = typed(constfold(tr), EXPRmode, pt) match {
+ // null cannot be used as constant value for classfile annotations
+ case l @ Literal(c) if !(l.isErroneous || c.value == null) =>
+ Some(LiteralAnnotArg(c))
+ case _ =>
+ error(tr.pos, "annotation argument needs to be a constant; found: "+tr)
+ None
}
/** Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
- * an error message is reporded and None is returned.
+ * an error message is reported and None is returned.
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
+ case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
+ error(tree.pos, "Array constants have to be specified using the `Array(...)' factory method")
+ None
+
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
if (annInfo.atp.isErroneous) {
@@ -2487,27 +2660,29 @@ trait Typers { self: Analyzer =>
None
} else Some(NestedAnnotArg(annInfo))
- // use of: object Array.apply[A <: AnyRef](args: A*): Array[A] = ...
- // and object Array.apply(args: Int*): Array[Int] = ... (and similar)
- case Apply(fun, members) =>
+ // use of Array.apply[T: ClassManifest](xs: T*): Array[T]
+ // and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
+ case Apply(fun, args) =>
val typedFun = typed(fun, funMode(mode), WildcardType)
- if (typedFun.symbol.owner == ArrayModule.moduleClass &&
- typedFun.symbol.name == nme.apply &&
- pt.typeSymbol == ArrayClass &&
- !pt.typeArgs.isEmpty)
- trees2ConstArg(members, pt.typeArgs.head)
+ if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
+ pt match {
+ case TypeRef(_, ArrayClass, targ :: _) =>
+ trees2ConstArg(args, targ)
+ case _ =>
+ // For classfile annotations, pt can only be T:
+ // BT = Int, .., String, Class[_], JavaAnnotClass
+ // T = BT | Array[BT]
+ // So an array literal as argument can only be valid if pt is Array[_]
+ error(tree.pos, "found array constant, expected argument of type "+ pt)
+ None
+ }
else
- needConst(tree)
+ tryConst(tree, pt)
case Typed(t, _) => tree2ConstArg(t, pt)
- case tree => typed(tree, EXPRmode, pt) match {
- // null cannot be used as constant value for classfile annotations
- case l @ Literal(c) if !(l.isErroneous || c.value == null) =>
- Some(LiteralAnnotArg(c))
- case _ =>
- needConst(tree)
- }
+ case tree =>
+ tryConst(tree, pt)
}
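A hedged sketch using a standard Java annotation (`java.lang.SuppressWarnings`, whose `value` element is a `String[]`): classfile-annotation arguments must be compile-time constants, and array-valued elements must be written with the `Array(...)` factory.

    class Demo {
      @SuppressWarnings(Array("unchecked"))      // ok: Array(...) of string constants
      def f(xs: List[_]) = xs.length

      // val msg = "unchecked"
      // @SuppressWarnings(Array(msg))           // error: annotation argument needs to be a constant
      // @SuppressWarnings(new Array[String](1)) // error: Array constants have to be specified
      //                                         //        using the `Array(...)' factory method
      // def g(xs: List[_]) = xs.length
    }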
def trees2ConstArg(trees: List[Tree], pt: Type): Option[ArrayAnnotArg] = {
val args = trees.map(tree2ConstArg(_, pt))
@@ -2524,7 +2699,7 @@ trait Typers { self: Analyzer =>
case Select(New(tpt), nme.CONSTRUCTOR) =>
(fun, outerArgss)
case _ =>
- error(fun.pos, "unexpected tree in annotationn: "+ fun)
+ error(fun.pos, "unexpected tree in annotation: "+ fun)
(setError(fun), outerArgss)
}
extract(ann, List())
@@ -2561,10 +2736,11 @@ trait Typers { self: Analyzer =>
error(arg.pos, "unknown annotation argument name: " + name)
(nme.ERROR, None)
} else if (!names.contains(sym)) {
- error(arg.pos, "duplicate value for anontation argument " + name)
+ error(arg.pos, "duplicate value for annotation argument " + name)
(nme.ERROR, None)
} else {
names -= sym
+ if(isJava) sym.cookJavaRawInfo() // #3429
val annArg = tree2ConstArg(rhs, sym.tpe.resultType)
(sym.name, annArg)
}
@@ -2576,7 +2752,7 @@ trait Typers { self: Analyzer =>
for (name <- names) {
if (!name.annotations.contains(AnnotationInfo(AnnotationDefaultAttr.tpe, List(), List())) &&
!name.hasFlag(DEFAULTPARAM))
- error(ann.pos, "annotation " + annType.typeSymbol.fullNameString + " is missing argument " + name.name)
+ error(ann.pos, "annotation " + annType.typeSymbol.fullName + " is missing argument " + name.name)
}
if (hasError) annotationError
@@ -2674,10 +2850,12 @@ trait Typers { self: Analyzer =>
val name = if (sym.isType) sym.name else newTypeName(sym.name+".type")
val bound = sym.existentialBound
val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified: Symbol = sowner.newAbstractType(sym.pos, name)
- trackSetInfo(quantified setFlag EXISTENTIAL)(bound.cloneInfo(quantified))
+ val quantified: Symbol = sowner.newAbstractType(sym.pos, name).setFlag(EXISTENTIAL)
+
+ quantified.setInfo(bound.cloneInfo(quantified))
+ quantified
}
- val typeParamTypes = typeParams map (_.tpe) // don't trackSetInfo here, since type already set!
+ val typeParamTypes = typeParams map (_.tpe)
//println("ex trans "+rawSyms+" . "+tp+" "+typeParamTypes+" "+(typeParams map (_.info)))//DEBUG
for (tparam <- typeParams) tparam.setInfo(tparam.info.subst(rawSyms, typeParamTypes))
(typeParams, tp.subst(rawSyms, typeParamTypes))
@@ -2697,16 +2875,6 @@ trait Typers { self: Analyzer =>
res
}
- class SymInstance(val sym: Symbol, val tp: Type) {
- override def equals(other: Any): Boolean = other match {
- case that: SymInstance =>
- this.sym == that.sym && this.tp =:= that.tp
- case _ =>
- false
- }
- override def hashCode: Int = sym.hashCode * 41 + tp.hashCode
- }
-
/** convert skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
def defines(tree: Tree, sym: Symbol) =
@@ -2742,7 +2910,7 @@ trait Typers { self: Analyzer =>
}
}
// add all local symbols of `tp' to `localSyms'
- // expanding higher-kinded types into individual copies for each instance.
+ // TODO: expand higher-kinded types into individual copies for each instance.
def addLocals(tp: Type) {
val remainingSyms = new ListBuffer[Symbol]
def addIfLocal(sym: Symbol, tp: Type) {
@@ -2801,6 +2969,61 @@ trait Typers { self: Analyzer =>
TypeTree(ExistentialType(typeParams, tpe)) setOriginal tree
}
+ // lifted out of typed1 because it's needed in typedImplicit0
+ protected def typedTypeApply(tree: Tree, mode: Int, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
+ case OverloadedType(pre, alts) =>
+ inferPolyAlternatives(fun, args map (_.tpe))
+ val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
+ val args1 = if(args.length == tparams.length) {
+ //@M: in case TypeApply we can't check the kind-arities of the type arguments,
+ // as we don't know which alternative to choose... here we do
+ map2Conserve(args, tparams) {
+ //@M! the polytype denotes the expected kind
+ (arg, tparam) => typedHigherKindedType(arg, mode, polyType(tparam.typeParams, AnyClass.tpe))
+ }
+ } else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
+ // Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
+ // inferPolyAlternatives loops... -- I have no idea why :-(
+ // ...actually this was looping anyway, see bug #278.
+ return errorTree(fun, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+
+ typedTypeApply(tree, mode, fun, args1)
+ case SingleType(_, _) =>
+ typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
+ case PolyType(tparams, restpe) if (tparams.length != 0) =>
+ if (tparams.length == args.length) {
+ val targs = args map (_.tpe)
+ checkBounds(tree.pos, NoPrefix, NoSymbol, tparams, targs, "")
+ if (fun.symbol == Predef_classOf) {
+ checkClassType(args.head, true, false)
+ atPos(tree.pos) { gen.mkClassOf(targs.head) }
+ } else {
+ if (phase.id <= currentRun.typerPhase.id &&
+ fun.symbol == Any_isInstanceOf && !targs.isEmpty)
+ checkCheckable(tree.pos, targs.head, "")
+ val resultpe = restpe.instantiateTypeParams(tparams, targs)
+ //@M substitution in instantiateParams needs to be careful!
+ //@M example: class Foo[a] { def foo[m[x]]: m[a] = error("") } (new Foo[Int]).foo[List] : List[Int]
+ //@M --> first, m[a] gets changed to m[Int], then m gets substituted for List,
+ // this must preserve m's type argument, so that we end up with List[Int], and not List[a]
+ //@M related bug: #1438
+ //println("instantiating type params "+restpe+" "+tparams+" "+targs+" = "+resultpe)
+ treeCopy.TypeApply(tree, fun, args) setType resultpe
+ }
+ } else {
+ errorTree(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+ }
+ case ErrorType =>
+ setError(treeCopy.TypeApply(tree, fun, args))
+ case _ =>
+ errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
+ }
+
+ private[this] var typingIndent: String = ""
+ @inline final def deindentTyping() = if (printTypings) typingIndent = typingIndent.substring(0, typingIndent.length() - 2)
+ @inline final def indentTyping() = if (printTypings) typingIndent += " "
+ @inline final def printTyping(s: => String) = if (printTypings) println(typingIndent+s)
+
/**
* @param tree ...
* @param mode ...
@@ -2808,8 +3031,10 @@ trait Typers { self: Analyzer =>
* @return ...
*/
protected def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
+ def isPatternMode = (mode & PATTERNmode) != 0
+
//Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
- def ptOrLub(tps: List[Type]) = if (isFullyDefined(pt)) pt else weakLub(tps map (_.deconst))
+ def ptOrLub(tps: List[Type]) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
//@M! get the type of the qualifier in a Select tree, otherwise: NoType
def prefixType(fun: Tree): Type = fun match {
@@ -2843,7 +3068,7 @@ trait Typers { self: Analyzer =>
* class NPE[T <: NPE[T] @peer]
*
* (Note: -Yself-in-annots must be on to see the problem)
- **/
+ * */
val sym =
context.owner.newLocalDummy(ann.pos)
.newValue(ann.pos, nme.self)
@@ -2874,7 +3099,7 @@ trait Typers { self: Analyzer =>
} else {
// An annotated term, created with annotation ascription
// term : @annot()
- def annotTypeTree(ainfo: AnnotationInfo): Tree =
+ def annotTypeTree(ainfo: AnnotationInfo): Tree = //TR: function not used ??
TypeTree(arg1.tpe.withAnnotation(ainfo)) setOriginal tree
if (ann.tpe == null) {
@@ -2896,7 +3121,7 @@ trait Typers { self: Analyzer =>
context.owner.newAliasType(tree.pos, name) setInfo pt
else
context.owner.newAbstractType(tree.pos, name) setInfo
- mkTypeBounds(NothingClass.tpe, AnyClass.tpe)
+ TypeBounds(NothingClass.tpe, AnyClass.tpe)
val rawInfo = vble.rawInfo
vble = if (vble.name == nme.WILDCARD.toTypeName) context.scope.enter(vble)
else namer.enterInScope(vble)
@@ -2906,22 +3131,15 @@ trait Typers { self: Analyzer =>
if (vble == NoSymbol)
vble = context.owner.newValue(tree.pos, name)
if (vble.name.toTermName != nme.WILDCARD) {
-/*
- if (namesSomeIdent(vble.name))
- context.warning(tree.pos,
- "pattern variable"+vble.name+" shadows a value visible in the environment;\n"+
- "use backquotes `"+vble.name+"` if you mean to match against that value;\n" +
- "or rename the variable or use an explicit bind "+vble.name+"@_ to avoid this warning.")
-*/
if ((mode & ALTmode) != 0)
error(tree.pos, "illegal variable in pattern alternative")
vble = namer.enterInScope(vble)
}
val body1 = typed(body, mode, pt)
- trackSetInfo(vble)(
+ vble.setInfo(
if (treeInfo.isSequenceValued(body)) seqType(body1.tpe)
else body1.tpe)
- treeCopy.Bind(tree, name, body1) setSymbol vble setType body1.tpe // buraq, was: pt
+ treeCopy.Bind(tree, name, body1) setSymbol vble setType body1.tpe // burak, was: pt
}
}
@@ -2935,49 +3153,48 @@ trait Typers { self: Analyzer =>
}
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
- def mayBeVarGetter(sym: Symbol) = sym.info match {
- case PolyType(List(), _) => sym.owner.isClass && !sym.isStable
- case _: ImplicitMethodType => sym.owner.isClass && !sym.isStable
- case _ => false
+ val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ val varsym = lhs1.symbol
+ def failMsg =
+ if (varsym != null && varsym.isValue) "reassignment to val"
+ else "assignment to non variable"
+
+ def fail = {
+ if (!lhs1.tpe.isError)
+ error(tree.pos, failMsg)
+
+ setError(tree)
}
- val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
- val varsym = lhs1.symbol
- if ((varsym ne null) && mayBeVarGetter(varsym))
+ if (varsym == null)
+ return fail
+
+ if (treeInfo.mayBeVarGetter(varsym)) {
lhs1 match {
case Select(qual, name) =>
- return typed(
- Apply(
- Select(qual, nme.getterToSetter(name)) setPos lhs.pos,
- List(rhs)) setPos tree.pos,
- mode, pt)
+ val sel = Select(qual, nme.getterToSetter(name)) setPos lhs.pos
+ val app = Apply(sel, List(rhs)) setPos tree.pos
+ return typed(app, mode, pt)
case _ =>
-
}
- if ((varsym ne null) && (varsym.isVariable || varsym.isValue && phase.erasedTypes)) {
- val rhs1 = typed(rhs, lhs1.tpe)
+ }
+ if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
+ val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
- } else {
- if (!lhs1.tpe.isError) {
- //println(lhs1+" = "+rhs+" "+varsym+" "+mayBeVarGetter(varsym)+" "+varsym.ownerChain+" "+varsym.info)// DEBUG
- error(tree.pos,
- if ((varsym ne null) && varsym.isValue) "reassignment to val"
- else "assignment to non variable")
- }
- setError(tree)
}
+ else fail
}
def typedIf(cond: Tree, thenp: Tree, elsep: Tree) = {
- val cond1 = checkDead(typed(cond, BooleanClass.tpe))
- if (elsep.isEmpty) { // in the future, should be unecessary
+ val cond1 = checkDead(typed(cond, EXPRmode | BYVALmode, BooleanClass.tpe))
+ if (elsep.isEmpty) { // in the future, should be unnecessary
val thenp1 = typed(thenp, UnitClass.tpe)
treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
} else {
var thenp1 = typed(thenp, pt)
var elsep1 = typed(elsep, pt)
- val owntype = ptOrLub(List(thenp1.tpe, elsep1.tpe))
- if (isNumericValueType(owntype)) {
+ val (owntype, needAdapt) = ptOrLub(List(thenp1.tpe, elsep1.tpe))
+ if (needAdapt) { //isNumericValueType(owntype)) {
thenp1 = adapt(thenp1, mode, owntype)
elsep1 = adapt(elsep1, mode, owntype)
}
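A small sketch of why the branches may need adapting (illustrative method): when the branch types differ only up to numeric widening, the weak lub becomes the type of the conditional and both branches are adapted to it.

    object IfDemo {
      // branch types Int and Double: the weak lub is Double, so both branches are
      // adapted and the conditional as a whole gets type Double
      def pick(flag: Boolean): Double = if (flag) 1 else 2.0
    }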
@@ -3000,51 +3217,50 @@ trait Typers { self: Analyzer =>
" has return statement; needs result type")
} else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, restpt0.tpe)
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt0.tpe)
treeCopy.Return(tree, checkDead(expr1)) setSymbol enclMethod.owner setType NothingClass.tpe
}
}
}
def typedNew(tpt: Tree) = {
- var tpt1 = typedTypeConstructor(tpt)
- checkClassType(tpt1, false, true)
- if (tpt1.hasSymbol && !tpt1.symbol.typeParams.isEmpty) {
- context.undetparams = cloneSymbols(tpt1.symbol.typeParams)
- tpt1 = TypeTree()
- .setOriginal(tpt1)
- .setType(appliedType(tpt1.tpe, context.undetparams map (_.tpe)))
+ val tpt1 = {
+ val tpt0 = typedTypeConstructor(tpt)
+ checkClassType(tpt0, false, true)
+ if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
+ context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
+ TypeTree().setOriginal(tpt0)
+ .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpe)))
+ } else tpt0
}
/** If current tree <tree> appears in <val x(: T)? = <tree>>
* return `tp with x.type' else return `tp'.
*/
- def narrowRhs(tp: Type) = {
- var sym = context.tree.symbol
- if (sym != null && sym != NoSymbol)
- if (sym.owner.isClass) {
- if (sym.getter(sym.owner) != NoSymbol) sym = sym.getter(sym.owner)
- } else if (sym hasFlag LAZY) {
- if (sym.lazyAccessor != NoSymbol) sym = sym.lazyAccessor
- }
+ def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
- case ValDef(mods, _, _, Apply(Select(`tree`, _), _)) if !(mods hasFlag MUTABLE) =>
- val pre = if (sym.owner.isClass) sym.owner.thisType else NoPrefix
- intersectionType(List(tp, singleType(pre, sym)))
- case _ =>
- tp
- }
- }
- if (tpt1.tpe.typeSymbol.isAbstractType || (tpt1.tpe.typeSymbol hasFlag ABSTRACT))
- error(tree.pos, tpt1.tpe.typeSymbol + " is abstract; cannot be instantiated")
- else if (tpt1.tpe.typeSymbol.initialize.thisSym != tpt1.tpe.typeSymbol &&
- !(narrowRhs(tpt1.tpe) <:< tpt1.tpe.typeOfThis) &&
- !phase.erasedTypes) {
- error(tree.pos, tpt1.tpe.typeSymbol +
+ case ValDef(mods, _, _, Apply(Select(`tree`, _), _)) if !(mods hasFlag MUTABLE) && sym != null && sym != NoSymbol =>
+ val sym1 = if (sym.owner.isClass && sym.getter(sym.owner) != NoSymbol) sym.getter(sym.owner)
+ else sym.lazyAccessorOrSelf
+ val pre = if (sym1.owner.isClass) sym1.owner.thisType else NoPrefix
+ intersectionType(List(tp, singleType(pre, sym1)))
+ case _ => tp
+ }}
+
+ val tp = tpt1.tpe
+ val sym = tp.typeSymbol
+ if (sym.isAbstractType || (sym hasFlag ABSTRACT))
+ error(tree.pos, sym + " is abstract; cannot be instantiated")
+ else if (!( tp == sym.initialize.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
+ // sym.thisSym.tpe == tp.typeOfThis (except for objects)
+ || narrowRhs(tp) <:< tp.typeOfThis
+ || phase.erasedTypes
+ )) {
+ error(tree.pos, sym +
" cannot be instantiated because it does not conform to its self-type "+
- tpt1.tpe.typeOfThis)
+ tp.typeOfThis)
}
- treeCopy.New(tree, tpt1).setType(tpt1.tpe)
+ treeCopy.New(tree, tpt1).setType(tp)
}
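An illustrative sketch of the self-type conformance check above (names invented): instantiating a class whose self type is not satisfied by the instance being created is rejected, while mixing in the required trait makes it acceptable.

    trait Logging { def log(msg: String): Unit = println(msg) }

    class Service { self: Logging =>
      def run() { log("running") }
    }

    object Boot {
      val ok = new Service with Logging   // the mixin satisfies Service's self type
      // val bad = new Service            // error: Service cannot be instantiated because it
      //                                  // does not conform to its self-type Service with Logging
    }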
def typedEta(expr1: Tree): Tree = expr1.tpe match {
@@ -3066,7 +3282,7 @@ trait Typers { self: Analyzer =>
pt != WildcardType &&
pt != ErrorType &&
isSubType(pt, DelegateClass.tpe)) =>
- val scalaCaller = newScalaCaller(pt);
+ val scalaCaller = newScalaCaller(pt)
addScalaCallerInfo(scalaCaller, expr1.symbol)
val n: Name = scalaCaller.name
val del = Ident(DelegateClass) setType DelegateClass.tpe
@@ -3074,7 +3290,7 @@ trait Typers { self: Analyzer =>
//val f1 = TypeApply(f, List(Ident(pt.symbol) setType pt))
val args: List[Tree] = if(expr1.symbol.isStatic) List(Literal(Constant(null)))
else List(qual) // where the scala-method is located
- val rhs = Apply(f, args);
+ val rhs = Apply(f, args)
typed(rhs)
case _ =>
adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
@@ -3085,55 +3301,6 @@ trait Typers { self: Analyzer =>
errorTree(expr1, "_ must follow method; cannot follow " + expr1.tpe)
}
- def typedTypeApply(fun: Tree, args: List[Tree]): Tree = fun.tpe match {
- case OverloadedType(pre, alts) =>
- inferPolyAlternatives(fun, args map (_.tpe))
- val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
- val args1 = if(args.length == tparams.length) {
- //@M: in case TypeApply we can't check the kind-arities of the type arguments,
- // as we don't know which alternative to choose... here we do
- map2Conserve(args, tparams) {
- //@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, polyType(tparam.typeParams, AnyClass.tpe))
- }
- } else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
- // Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
- // inferPolyAlternatives loops... -- I have no idea why :-(
- // ...actually this was looping anyway, see bug #278.
- return errorTree(fun, "wrong number of type parameters for "+treeSymTypeMsg(fun))
-
- typedTypeApply(fun, args1)
- case SingleType(_, _) =>
- typedTypeApply(fun setType fun.tpe.widen, args)
- case PolyType(tparams, restpe) if (tparams.length != 0) =>
- if (tparams.length == args.length) {
- val targs = args map (_.tpe)
- checkBounds(tree.pos, NoPrefix, NoSymbol, tparams, targs, "")
- if (fun.symbol == Predef_classOf) {
- checkClassType(args.head, true, false)
- atPos(tree.pos) { gen.mkClassOf(targs.head) }
- } else {
- if (phase.id <= currentRun.typerPhase.id &&
- fun.symbol == Any_isInstanceOf && !targs.isEmpty)
- checkCheckable(tree.pos, targs.head, "")
- val resultpe = restpe.instantiateTypeParams(tparams, targs)
- //@M substitution in instantiateParams needs to be careful!
- //@M example: class Foo[a] { def foo[m[x]]: m[a] = error("") } (new Foo[Int]).foo[List] : List[Int]
- //@M --> first, m[a] gets changed to m[Int], then m gets substituted for List,
- // this must preserve m's type argument, so that we end up with List[Int], and not List[a]
- //@M related bug: #1438
- //println("instantiating type params "+restpe+" "+tparams+" "+targs+" = "+resultpe)
- treeCopy.TypeApply(tree, fun, args) setType resultpe
- }
- } else {
- errorTree(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun))
- }
- case ErrorType =>
- setError(tree)
- case _ =>
- errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
- }
-
/**
* @param args ...
* @return ...
@@ -3154,26 +3321,26 @@ trait Typers { self: Analyzer =>
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
- val start = System.nanoTime()
+ val start = startTimer(failedApplyNanos)
silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
case t: Tree =>
t
case ex: TypeError =>
- failedApplies += System.nanoTime() - start
- def errorInResult(tree: Tree): Boolean = tree.pos == ex.pos || {
- tree match {
- case Block(_, r) => errorInResult(r)
- case Match(_, cases) => cases exists errorInResult
- case CaseDef(_, _, r) => errorInResult(r)
- case Annotated(_, r) => errorInResult(r)
- case If(_, t, e) => errorInResult(t) || errorInResult(e)
- case Try(b, catches, _) => errorInResult(b) || (catches exists errorInResult)
- case Typed(r, Function(List(), EmptyTree)) => errorInResult(r)
- case _ => false
- }
- }
- if (errorInResult(fun) || (args exists errorInResult)) {
- if (printTypings) println("second try for: "+fun+" and "+args)
+ stopTimer(failedApplyNanos, start)
+ def treesInResult(tree: Tree): List[Tree] = tree :: (tree match {
+ case Block(_, r) => treesInResult(r)
+ case Match(_, cases) => cases
+ case CaseDef(_, _, r) => treesInResult(r)
+ case Annotated(_, r) => treesInResult(r)
+ case If(_, t, e) => treesInResult(t) ++ treesInResult(e)
+ case Try(b, catches, _) => treesInResult(b) ++ catches
+ case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
+ case _ => Nil
+ })
+ def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == ex.pos)
+
+ if (fun :: tree :: args exists errorInResult) {
+ printTyping("second try for: "+fun+" and "+args)
val Select(qual, name) = fun
val args1 = tryTypedArgs(args, argMode(fun, mode), ex)
val qual1 =
@@ -3183,30 +3350,36 @@ trait Typers { self: Analyzer =>
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
return typed1(tree1, mode | SNDTRYmode, pt)
}
- } else if (printTypings) {
- println("no second try for "+fun+" and "+args+" because error not in result:"+ex.pos+"!="+tree.pos)
- }
+ } else printTyping("no second try for "+fun+" and "+args+" because error not in result:"+ex.pos+"!="+tree.pos)
+
reportTypeError(tree.pos, ex)
- setError(tree)
+ setError(treeCopy.Apply(tree, fun, args))
}
}
def typedApply(fun: Tree, args: List[Tree]) = {
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (stableApplication && (mode & PATTERNmode) != 0) {
+ if (stableApplication && isPatternMode) {
// treat stable function applications f() as expressions.
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
- val funpt = if ((mode & PATTERNmode) != 0) pt else WildcardType
- val start = System.nanoTime()
- silent(_.typed(fun, funMode(mode), funpt)) match {
+ val funpt = if (isPatternMode) pt else WildcardType
+ val appStart = startTimer(failedApplyNanos)
+ val opeqStart = startTimer(failedOpEqNanos)
+ silent(_.typed(fun, funMode(mode), funpt),
+ if ((mode & EXPRmode) != 0) false else context.reportAmbiguousErrors,
+ if ((mode & EXPRmode) != 0) tree else context.tree) match {
case fun1: Tree =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- if (util.Statistics.enabled) appcnt += 1
+ incCounter(typedApplyCount)
+ def isImplicitMethod(tpe: Type) = tpe match {
+ case mt: MethodType => mt.isImplicit
+ case _ => false
+ }
val res =
if (phase.id <= currentRun.typerPhase.id &&
fun2.isInstanceOf[Select] &&
- !fun2.tpe.isInstanceOf[ImplicitMethodType] &&
+ !isImplicitMethod(fun2.tpe) &&
((fun2.symbol eq null) || !fun2.symbol.isConstructor) &&
(mode & (EXPRmode | SNDTRYmode)) == EXPRmode) {
tryTypedApply(fun2, args)
@@ -3223,7 +3396,7 @@ trait Typers { self: Analyzer =>
// this check is needed to avoid infinite recursion in Duplicators
// (calling typed1 more than once for the same tree)
if (checked ne res) typed { atPos(tree.pos)(checked) }
- else res
+ else res
} else res
/* Would like to do the following instead, but curiously this fails; todo: investigate
if (fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass)
@@ -3231,14 +3404,15 @@ trait Typers { self: Analyzer =>
else res
*/
case ex: TypeError =>
- failedOpEqs += System.nanoTime() - start
fun match {
case Select(qual, name)
- if (mode & PATTERNmode) == 0 && nme.isOpAssignmentName(name.decode) =>
+ if !isPatternMode && nme.isOpAssignmentName(name.decode) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
+ stopTimer(failedOpEqNanos, opeqStart)
convertToAssignment(fun, qual1, name, args, ex)
} else {
+ stopTimer(failedApplyNanos, appStart)
if ((qual1.symbol ne null) && qual1.symbol.isValue)
error(tree.pos, "reassignment to val")
else
@@ -3246,6 +3420,7 @@ trait Typers { self: Analyzer =>
setError(tree)
}
case _ =>
+ stopTimer(failedApplyNanos, appStart)
reportTypeError(fun.pos, ex)
setError(tree)
}
@@ -3261,31 +3436,41 @@ trait Typers { self: Analyzer =>
Apply(
Select(vble.duplicate, prefix) setPos fun.pos.focus, args) setPos tree.pos.makeTransparent
) setPos tree.pos
+
+ def mkUpdate(table: Tree, indices: List[Tree]) = {
+ gen.evalOnceAll(table :: indices, context.owner, context.unit) { ts =>
+ val tab = ts.head
+ val is = ts.tail
+ Apply(
+ Select(tab(), nme.update) setPos table.pos,
+ ((is map (i => i())) ::: List(
+ Apply(
+ Select(
+ Apply(
+ Select(tab(), nme.apply) setPos table.pos,
+ is map (i => i())) setPos qual.pos,
+ prefix) setPos fun.pos,
+ args) setPos tree.pos)
+ )
+ ) setPos tree.pos
+ }
+ }
+
val tree1 = qual match {
+ case Ident(_) =>
+ mkAssign(qual)
+
case Select(qualqual, vname) =>
gen.evalOnce(qualqual, context.owner, context.unit) { qq =>
val qq1 = qq()
mkAssign(Select(qq1, vname) setPos qual.pos)
}
- case Apply(Select(table, nme.apply), indices) =>
- gen.evalOnceAll(table :: indices, context.owner, context.unit) { ts =>
- val tab = ts.head
- val is = ts.tail
- Apply(
- Select(tab(), nme.update) setPos table.pos,
- ((is map (i => i())) ::: List(
- Apply(
- Select(
- Apply(
- Select(tab(), nme.apply) setPos table.pos,
- is map (i => i())) setPos qual.pos,
- prefix) setPos fun.pos,
- args) setPos tree.pos)
- )
- ) setPos tree.pos
- }
- case Ident(_) =>
- mkAssign(qual)
+
+ case Apply(fn, indices) =>
+ treeInfo.methPart(fn) match {
+ case Select(table, nme.apply) => mkUpdate(table, indices)
+ case _ => errorTree(qual, "Unexpected tree during assignment conversion.")
+ }
}
typed1(tree1, mode, pt)
/*
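A rough illustration of the rewriting performed by convertToAssignment and mkUpdate above; the real transformation uses evalOnce/evalOnceAll so the table and the indices are evaluated only once.

  val counts = scala.collection.mutable.Map("a" -> 0)
  counts("a") += 1
  // Int has no += method, so the failed application is retyped roughly as
  //   { val tab = counts; val i = "a"; tab.update(i, tab.apply(i) + 1) }

  var n = 0
  n += 1        // an Ident qualifier goes through mkAssign instead: n = n + 1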
@@ -3335,7 +3520,7 @@ trait Typers { self: Analyzer =>
} else {
findMixinSuper(clazz.info)
}
- tree setSymbol clazz setType mkSuperType(clazz.thisType, owntype)
+ tree setSymbol clazz setType SuperType(clazz.thisType, owntype)
}
}
@@ -3375,45 +3560,60 @@ trait Typers { self: Analyzer =>
member(qual, name)
}
if (sym == NoSymbol && name != nme.CONSTRUCTOR && (mode & EXPRmode) != 0) {
- val qual1 = adaptToName(qual, name)
+ val qual1 = try {
+ adaptToName(qual, name)
+ } catch {
+ case ex: TypeError =>
+ // this happens if implicits are ambiguous; try again with more context info.
+ // println("last ditch effort: "+qual+" . "+name) // DEBUG
+ context.tree match {
+ case Apply(tree1, args) if tree1 eq tree => // try handling the arguments
+ // println("typing args: "+args) // DEBUG
+ silent(_.typedArgs(args, mode)) match {
+ case args: List[_] =>
+ adaptToArguments(qual, name, args.asInstanceOf[List[Tree]], WildcardType)
+ case _ =>
+ throw ex
+ }
+ case _ =>
+ // println("not in an apply: "+context.tree+"/"+tree) // DEBUG
+ throw ex
+ }
+ }
if (qual1 ne qual) return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
- if (!sym.exists) {
- if (settings.debug.value) Console.err.println("qual = "+qual+":"+qual.tpe+"\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+"\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+"\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner)
- if (!qual.tpe.widen.isErroneous) {
- error(tree.pos,
- if (name == nme.CONSTRUCTOR)
- qual.tpe.widen+" does not have a constructor"
- else
- decode(name)+" is not a member of "+qual.tpe.widen +
- (if ((context.unit ne null) && // Martin: why is this condition needed?
- qual.pos.isDefined && tree.pos.isDefined && qual.pos.line < tree.pos.line)
- "\npossible cause: maybe a semicolon is missing before `"+decode(name)+"'?"
- else ""))
+
+ if (!reallyExists(sym)) {
+ if (context.owner.toplevelClass.hasFlag(JAVA) && name.isTypeName) {
+ val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
+ if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
}
- // Temporary workaround to retain type information for qual so that askTypeCompletion has something to
- // work with. This appears to work in the context of the IDE, but is incorrect and needs to be
- // revisited.
- if (onlyPresentation) {
- // Nb. this appears to throw away the effects of setError, but some appear to be
- // retained across the copy.
- setError(tree)
+ if (settings.debug.value) Console.err.println("qual = "+qual+":"+qual.tpe+"\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+"\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+"\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner)
+
+ def makeErrorTree = {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
- tree1
- } else
- setError(tree)
+ setError(tree1)
+ }
+
+ if (name == nme.ERROR && forInteractive)
+ return makeErrorTree
+
+ if (!qual.tpe.widen.isErroneous)
+ notAMember(tree, qual, name)
+
+ if (forInteractive) makeErrorTree else setError(tree)
} else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
- //if (name.toString == "Elem") println("typedSelect "+qual+":"+qual.tpe+" "+sym+"/"+tree1+":"+tree1.tpe)
val (tree2, pre2) = makeAccessible(tree1, sym, qual.tpe, qual)
val result = stabilize(tree2, pre2, mode, pt)
+
def isPotentialNullDeference() = {
phase.id <= currentRun.typerPhase.id &&
!sym.isConstructor &&
@@ -3424,7 +3624,20 @@ trait Typers { self: Analyzer =>
if (settings.Xchecknull.value && isPotentialNullDeference && unit != null)
unit.warning(tree.pos, "potential null pointer dereference: "+tree)
- result
+ result match {
+ // could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
+ case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs nonEmpty => // TODO: somehow the new qual is not checked in refchecks
+ treeCopy.SelectFromTypeTree(
+ result,
+ (TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect
+ // will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one?
+ checkBounds(qual.pos, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
+ qual // you only get to see the wrapped tree after running this check :-p
+ }) setType qual.tpe,
+ name)
+ case _ =>
+ result
+ }
}
}
@@ -3441,14 +3654,17 @@ trait Typers { self: Analyzer =>
var defSym: Symbol = tree.symbol // the directly found symbol
var pre: Type = NoPrefix // the prefix type of defSym, if a class member
- var qual: Tree = EmptyTree // the qualififier tree if transformed tree is a select
+ var qual: Tree = EmptyTree // the qualifier tree if transformed tree is a select
- // if we are in a constructor of a pattern, ignore all definitions
+ // A symbol qualifies if it exists and is not stale. Stale symbols
+ // are made to disappear here. In addition,
+ // if we are in a constructor of a pattern, we ignore all definitions
// which are methods (note: if we don't do that
// case x :: xs in class List would return the :: method).
- def qualifies(sym: Symbol): Boolean =
- sym.exists &&
- ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod)
+ def qualifies(sym: Symbol): Boolean = {
+ reallyExists(sym) &&
+ ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
+ }
if (defSym == NoSymbol) {
var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
@@ -3461,6 +3677,7 @@ trait Typers { self: Analyzer =>
}
while (defSym == NoSymbol && cx != NoContext) {
+ currentRun.compileSourceFor(context.asInstanceOf[analyzer.Context], name)
pre = cx.enclClass.prefix
defEntry = cx.scope.lookupEntry(name)
if ((defEntry ne null) && qualifies(defEntry.sym)) {
@@ -3476,9 +3693,9 @@ trait Typers { self: Analyzer =>
val symDepth = if (defEntry eq null) cx.depth
else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
- var impSym: Symbol = NoSymbol; // the imported symbol
- var imports = context.imports; // impSym != NoSymbol => it is imported from imports.head
- while (!impSym.exists && !imports.isEmpty && imports.head.depth > symDepth) {
+ var impSym: Symbol = NoSymbol // the imported symbol
+ var imports = context.imports // impSym != NoSymbol => it is imported from imports.head
+ while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) {
impSym = imports.head.importedSymbol(name)
if (!impSym.exists) imports = imports.tail
}
@@ -3490,7 +3707,7 @@ trait Typers { self: Analyzer =>
if (defSym.exists && impSym.exists) {
// imported symbols take precedence over package-owned symbols in different
- // compilation units. Defined symbols take precedence over errenous imports.
+ // compilation units. Defined symbols take precedence over erroneous imports.
if (defSym.definedInPackage &&
(!currentRun.compiles(defSym) ||
(context.unit ne null) && defSym.sourceFile != context.unit.source.file))
@@ -3520,7 +3737,7 @@ trait Typers { self: Analyzer =>
(!imports.head.isExplicitImport(name) ||
imports1.head.depth == imports.head.depth)) {
var impSym1 = imports1.head.importedSymbol(name)
- if (impSym1.exists) {
+ if (reallyExists(impSym1)) {
if (imports1.head.isExplicitImport(name)) {
if (imports.head.isExplicitImport(name) ||
imports1.head.depth != imports.head.depth) ambiguousImport()
@@ -3532,13 +3749,15 @@ trait Typers { self: Analyzer =>
imports1 = imports1.tail
}
defSym = impSym
- qual = atPos(tree.pos.focusStart)(resetPos(imports.head.qual.duplicate))
+ val qual0 = imports.head.qual
+ if (!(shortenImports && qual0.symbol.isPackage)) // optimization: don't write out package prefixes
+ qual = atPos(tree.pos.focusStart)(resetPos(qual0.duplicate))
pre = qual.tpe
} else {
if (settings.debug.value) {
log(context.imports)//debug
}
- error(tree.pos, "not found: "+decode(name))
+ error(tree.pos, "not found: "+decodeWithNamespace(name))
defSym = context.owner.newErrorSymbol(name)
}
}
@@ -3551,6 +3770,7 @@ trait Typers { self: Analyzer =>
else atPos(tree.pos)(Select(qual, name))
// atPos necessary because qualifier might come from startContext
val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
+ // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
stabilize(tree2, pre2, mode, pt)
}
}
@@ -3576,7 +3796,7 @@ trait Typers { self: Analyzer =>
} else {
val tparams = tpt1.symbol.typeParams
if (tparams.length == args.length) {
- // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
+ // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
val args1 =
if(!tpt1.symbol.rawInfo.isComplete)
args mapConserve (typedHigherKindedType(_, mode))
@@ -3587,11 +3807,8 @@ trait Typers { self: Analyzer =>
//@M! the polytype denotes the expected kind
}
val argtypes = args1 map (_.tpe)
- val owntype = if (tpt1.symbol.isClass || tpt1.symbol.isTypeMember)
- // @M! added the latter condition
- appliedType(tpt1.tpe, argtypes)
- else tpt1.tpe.instantiateTypeParams(tparams, argtypes)
- List.map2(args, tparams) { (arg, tparam) => arg match {
+
+ (args, tparams).zipped foreach { (arg, tparam) => arg match {
// note: can't use args1 in selector, because Bind's got replaced
case Bind(_, _) =>
if (arg.symbol.isAbstractType)
@@ -3601,12 +3818,22 @@ trait Typers { self: Analyzer =>
glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
case _ =>
}}
- TypeTree(owntype) setOriginal(tree) // setPos tree.pos
+ val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
+ val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
+ if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
+ (TypeTreeWithDeferredRefCheck(){ () =>
+ // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
+          // we can't simply use original in refchecks because it does not contain types
+ // (and the only typed trees we have have been mangled so they're not quite the original tree anymore)
+ checkBounds(result.pos, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
+ result // you only get to see the wrapped tree after running this check :-p
+ }).setType(result.tpe)
+ else result
} else if (tparams.length == 0) {
errorTree(tree, tpt1.tpe+" does not take type parameters")
} else {
//Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}")
- if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info);//debug
+ if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
errorTree(tree, "wrong number of type arguments for "+tpt1.tpe+", should be "+tparams.length)
}
}
@@ -3623,6 +3850,8 @@ trait Typers { self: Analyzer =>
case PackageDef(pid, stats) =>
val pid1 = typedQualifier(pid).asInstanceOf[RefTree]
assert(sym.moduleClass ne NoSymbol, sym)
+ // complete lazy annotations
+ val annots = sym.annotations
val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
.typedStats(stats, NoSymbol)
treeCopy.PackageDef(tree, pid1, stats1) setType NoType
@@ -3640,18 +3869,29 @@ trait Typers { self: Analyzer =>
newTyper(context.makeNewScope(tree, sym)).typedDefDef(ddef)
case tdef @ TypeDef(_, _, _, _) =>
- newTyper(context.makeNewScope(tree, sym)).typedTypeDef(tdef)
+ typedTypeDef(tdef)
case ldef @ LabelDef(_, _, _) =>
labelTyper(ldef).typedLabelDef(ldef)
case ddef @ DocDef(comment, defn) =>
- val ret = typed(defn, mode, pt)
- if ((comments ne null) && (defn.symbol ne null) && (defn.symbol ne NoSymbol)) {
- comments(defn.symbol) = comment
- commentOffsets(defn.symbol) = ddef.pos.startOrPoint
+ if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ val typer1 = newTyper(context.makeNewScope(tree, context.owner))
+ for (useCase <- comment.useCases) {
+ typer1.silent(_.typedUseCase(useCase)) match {
+ case ex: TypeError =>
+ unit.warning(useCase.pos, ex.msg)
+ case _ =>
+ }
+ for (useCaseSym <- useCase.defined) {
+ if (sym.name != useCaseSym.name)
+ unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
+ }
+ }
}
- ret
+ typed(defn, mode, pt)
case Annotated(constr, arg) =>
typedAnnotated(constr, typed(arg, mode, pt))
@@ -3667,7 +3907,7 @@ trait Typers { self: Analyzer =>
case Star(elem) =>
checkStarPatOK(tree.pos, mode)
val elem1 = typed(elem, mode, pt)
- treeCopy.Star(tree, elem1) setType pt
+ treeCopy.Star(tree, elem1) setType makeFullyDefined(pt)
case Bind(name, body) =>
typedBind(name, body)
@@ -3675,7 +3915,7 @@ trait Typers { self: Analyzer =>
case UnApply(fun, args) =>
val fun1 = typed(fun)
val tpes = formalTypes(unapplyTypeList(fun.symbol, fun1.tpe), args.length)
- val args1 = List.map2(args, tpes)(typedPattern(_, _))
+ val args1 = (args, tpes).zipped map (typedPattern(_, _))
treeCopy.UnApply(tree, fun1, args1) setType pt
case ArrayValue(elemtpt, elems) =>
@@ -3709,10 +3949,10 @@ trait Typers { self: Analyzer =>
val body = treeCopy.Match(tree, selector1, cases)
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
} else {
- val selector1 = checkDead(typed(selector))
+ val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
var cases1 = typedCases(tree, cases, selector1.tpe.widen, pt)
- val owntype = ptOrLub(cases1 map (_.tpe))
- if (isNumericValueType(owntype)) {
+ val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
+ if (needAdapt) {
cases1 = cases1 map (adaptCase(_, owntype))
}
treeCopy.Match(tree, selector1, cases1) setType owntype
@@ -3726,15 +3966,15 @@ trait Typers { self: Analyzer =>
var catches1 = typedCases(tree, catches, ThrowableClass.tpe, pt)
val finalizer1 = if (finalizer.isEmpty) finalizer
else typed(finalizer, UnitClass.tpe)
- val owntype = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
- if (isNumericValueType(owntype)) {
+ val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
+ if (needAdapt) {
block1 = adapt(block1, mode, owntype)
catches1 = catches1 map (adaptCase(_, owntype))
}
treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
case Throw(expr) =>
- val expr1 = typed(expr, ThrowableClass.tpe)
+ val expr1 = typed(expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
treeCopy.Throw(tree, expr1) setType NothingClass.tpe
case New(tpt: Tree) =>
@@ -3767,7 +4007,7 @@ trait Typers { self: Analyzer =>
val tpt1 = typedType(tpt, mode)
val expr1 = typed(expr, mode & stickyModes, tpt1.tpe.deconst)
val owntype =
- if ((mode & PATTERNmode) != 0) inferTypedPattern(tpt1.pos, tpt1.tpe, pt)
+ if (isPatternMode) inferTypedPattern(tpt1.pos, tpt1.tpe, pt)
else tpt1.tpe
//Console.println(typed pattern: "+tree+":"+", tp = "+tpt1.tpe+", pt = "+pt+" ==> "+owntype)//DEBUG
treeCopy.Typed(tree, expr1, tpt1) setType owntype
@@ -3807,7 +4047,7 @@ trait Typers { self: Analyzer =>
}
//@M TODO: context.undetparams = undets_fun ?
- typedTypeApply(fun1, args1)
+ typedTypeApply(tree, mode, fun1, args1)
case Apply(Block(stats, expr), args) =>
typed1(atPos(tree.pos)(Block(stats, Apply(expr, args))), mode, pt)
@@ -3826,7 +4066,7 @@ trait Typers { self: Analyzer =>
error(tree.pos, "cannot create a generic multi-dimensional array of more than "+MaxArrayDims+" dimensions")
val newArrayApp = atPos(tree.pos) {
val manif = getManifestTree(tree.pos, manifType, false)
- Apply(Select(manif, if (level == 1) "newArray" else "newArray"+level), args)
+ new ApplyToImplicitArgs(Select(manif, if (level == 1) "newArray" else "newArray"+level), args)
}
typed(newArrayApp, mode, pt)
case tree1 =>
@@ -3852,7 +4092,7 @@ trait Typers { self: Analyzer =>
typedSelect(qual1, nme.CONSTRUCTOR)
case Select(qual, name) =>
- if (util.Statistics.enabled) selcnt += 1
+ incCounter(typedSelectCount)
var qual1 = checkDead(typedQualifier(qual, mode))
if (name.isTypeName) qual1 = checkStable(qual1)
@@ -3880,7 +4120,7 @@ trait Typers { self: Analyzer =>
else tree1
case Ident(name) =>
- if (util.Statistics.enabled) idcnt += 1
+ incCounter(typedIdentCount)
if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
(name == nme.WILDCARD.toTypeName && (mode & TYPEmode) != 0))
tree setType makeFullyDefined(pt)
@@ -3890,7 +4130,7 @@ trait Typers { self: Analyzer =>
case Literal(value) =>
tree setType (
if (value.tag == UnitTag) UnitClass.tpe
- else mkConstantType(value))
+ else ConstantType(value))
case SingletonTypeTree(ref) =>
val ref1 = checkStable(
@@ -3900,7 +4140,7 @@ trait Typers { self: Analyzer =>
case SelectFromTypeTree(qual, selector) =>
val qual1 = typedType(qual, mode)
if (qual1.tpe.isVolatile) error(tree.pos, "illegal type selection from volatile type "+qual.tpe)
- typedSelect(typedType(qual, mode), selector)
+ typedSelect(qual1, selector)
case CompoundTypeTree(templ) =>
typedCompoundTypeTree(templ)
@@ -3911,11 +4151,12 @@ trait Typers { self: Analyzer =>
case TypeBoundsTree(lo, hi) =>
val lo1 = typedType(lo, mode)
val hi1 = typedType(hi, mode)
- treeCopy.TypeBoundsTree(tree, lo1, hi1) setType mkTypeBounds(lo1.tpe, hi1.tpe)
+ treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
case etpt @ ExistentialTypeTree(_, _) =>
newTyper(context.makeNewScope(tree, context.owner)).typedExistentialTypeTree(etpt, mode)
+ case dc@TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
case tpt @ TypeTree() =>
if (tpt.original != null)
tree setType typedType(tpt.original, mode).tpe
@@ -3924,8 +4165,11 @@ trait Typers { self: Analyzer =>
// and we try again (@see tryTypedApply). In that case we can assign
// whatever type to tree; we just have to survive until a real error message is issued.
tree setType AnyClass.tpe
+ case Import(expr, selectors) =>
+ assert(forInteractive) // should not happen in normal circumstances.
+ tree setType tree.symbol.tpe
case _ =>
- throw new Error("unexpected tree: " + tree.getClass + "\n" + tree)//debug
+ abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug
}
}
@@ -3935,11 +4179,11 @@ trait Typers { self: Analyzer =>
* @param pt ...
* @return ...
*/
- def typed(tree: Tree, mode: Int, pt: Type): Tree = {
-
+ def typed(tree: Tree, mode: Int, pt: Type): Tree = { indentTyping()
def dropExistential(tp: Type): Type = tp match {
case ExistentialType(tparams, tpe) =>
- if (settings.debug.value) println("drop ex "+tree+" "+tp)
+ if (settings.debug.value)
+ log("Dropping existential: " + tree + " " + tp)
new SubstWildcardMap(tparams).apply(tp)
case TypeRef(_, sym, _) if sym.isAliasType =>
val tp0 = tp.normalize
@@ -3949,29 +4193,36 @@ trait Typers { self: Analyzer =>
}
try {
+ if (Statistics.enabled) {
+ val t = currentTime()
+ if (pendingTreeTypes.nonEmpty) {
+ microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
+ }
+ typerTime = t
+ pendingTreeTypes = tree.getClass :: pendingTreeTypes
+ }
if (context.retyping &&
(tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- if (printTypings) println("typing "+tree+", pt = "+pt+", undetparams = "+context.undetparams+", implicits-enabled = "+context.implicitsEnabled+", silent = "+context.reportGeneralErrors); //DEBUG
+ printTyping("typing "+tree+", pt = "+pt+", undetparams = "+context.undetparams+", implicits-enabled = "+context.implicitsEnabled+", silent = "+context.reportGeneralErrors) //DEBUG
var tree1 = if (tree.tpe ne null) tree else typed1(tree, mode, dropExistential(pt))
- if (printTypings) println("typed "+tree1+":"+tree1.tpe+(if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")+", undetparams = "+context.undetparams+", pt = "+pt); //DEBUG
+ printTyping("typed "+tree1+":"+tree1.tpe+(if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")+", undetparams = "+context.undetparams+", pt = "+pt) //DEBUG
tree1.tpe = addAnnotations(tree1, tree1.tpe)
val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
- if (printTypings) println("adapted "+tree1+":"+tree1.tpe.widen+" to "+pt+", "+context.undetparams); //DEBUG
+ printTyping("adapted "+tree1+":"+tree1.tpe.widen+" to "+pt+", "+context.undetparams) //DEBUG
// for (t <- tree1.tpe) assert(t != WildcardType)
// if ((mode & TYPEmode) != 0) println("type: "+tree1+" has type "+tree1.tpe)
if (phase.id <= currentRun.typerPhase.id) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
result
} catch {
- case ex: ControlException => throw ex
case ex: TypeError =>
tree.tpe = null
- if (printTypings) println("caught "+ex+" in typed: "+tree);//DEBUG
+ printTyping("caught "+ex+" in typed: "+tree) //DEBUG
reportTypeError(tree.pos, ex)
setError(tree)
case ex: Exception =>
@@ -3979,13 +4230,18 @@ trait Typers { self: Analyzer =>
Console.println("exception when typing "+tree+", pt = "+pt)
if ((context ne null) && (context.unit ne null) &&
(context.unit.source ne null) && (tree ne null))
- logError("AT: " + (tree.pos).dbgString, ex);
- throw(ex)
-/*
- case ex: java.lang.Error =>
- Console.println("exception when typing "+tree+", pt = "+pt)
+ logError("AT: " + (tree.pos).dbgString, ex)
throw ex
-*/ //debug
+ }
+ finally {
+ deindentTyping()
+ if (Statistics.enabled) {
+ val t = currentTime()
+ microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
+ visitsByType(pendingTreeTypes.head) += 1
+ typerTime = t
+ pendingTreeTypes = pendingTreeTypes.tail
+ }
}
}
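A standalone sketch (illustrative names, not the compiler's own members) of the statistics bookkeeping used above: time that elapses while a nested typed call is pending is charged to the innermost pending tree class, using a stack and a running timestamp.

  import scala.collection.mutable

  val microsByKind = mutable.Map[Class[_], Long]()
  var pending: List[Class[_]] = Nil   // head = tree class currently being typed
  var mark = 0L                       // microsecond timestamp of the last enter/exit

  def charge(kind: Class[_], delta: Long) {
    microsByKind(kind) = microsByKind.getOrElse(kind, 0L) + delta
  }
  def enterTyped(kind: Class[_]) {
    val t = System.nanoTime / 1000
    if (pending.nonEmpty) charge(pending.head, t - mark)   // elapsed time belongs to the enclosing call
    mark = t
    pending = kind :: pending
  }
  def exitTyped() {
    val t = System.nanoTime / 1000
    charge(pending.head, t - mark)                         // the tail of this call's own time
    mark = t
    pending = pending.tail
  }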
@@ -4018,14 +4274,17 @@ trait Typers { self: Analyzer =>
/** Types qualifier <code>tree</code> of a select node.
   * E.g. if tree occurs in a context like <code>tree.m</code>.
- *
- * @param tree ...
- * @return ...
+ */
+ def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree =
+ typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
+
+ /** Types qualifier <code>tree</code> of a select node.
+   * E.g. if tree occurs in a context like <code>tree.m</code>.
*/
def typedQualifier(tree: Tree, mode: Int): Tree =
- typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, WildcardType)
+ typedQualifier(tree, mode, WildcardType)
- def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode)
+ def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)
/** Types function part of an application */
def typedOperator(tree: Tree): Tree =
@@ -4033,20 +4292,13 @@ trait Typers { self: Analyzer =>
/** Types a pattern with prototype <code>pt</code> */
def typedPattern(tree: Tree, pt: Type): Tree = {
- // The commented out code stems from investigation into whether
- // "abc" match { case Seq('a', 'b', 'c') => true }
- // can be ruled out statically. At present this is a runtime
- // error both because there is an implicit from String to Seq
- // (even though such implicits are not used by the matcher) and
- // because the typer is fine with concluding that "abc" might
- // be of type "String with Seq[T]" and thus eligible for a call
- // to unapplySeq.
- //
- // val savedImplicitsEnabled = context.implicitsEnabled
- // context.implicitsEnabled = false
- // try
- typed(tree, PATTERNmode, pt)
- // finally context.implicitsEnabled = savedImplicitsEnabled
+ // We disable implicits because otherwise some constructs will
+ // type check which should not. The pattern matcher does not
+ // perform implicit conversions in an attempt to consummate a match.
+ val savedImplicitsEnabled = context.implicitsEnabled
+ context.implicitsEnabled = false
+ try typed(tree, PATTERNmode, pt)
+ finally context.implicitsEnabled = savedImplicitsEnabled
}
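On the construct mentioned in the removed comment: with implicitsEnabled switched off during pattern typing, the typer no longer consults implicit views (such as the standard String => Seq[Char] conversion) to relate a pattern to its scrutinee, which is meant to keep patterns like the following from type checking only to fail at run time.

  def startsAbc(s: String) = s match {
    case Seq('a', 'b', 'c') => true   // relies on a view the pattern matcher never applies
    case _                  => false
  }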
/** Types a (fully parameterized) type tree */
@@ -4099,9 +4351,9 @@ trait Typers { self: Analyzer =>
packedType(tree1, context.owner)
}
- def transformedOrTyped(tree: Tree, pt: Type): Tree = transformed.get(tree) match {
+ def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
case Some(tree1) => transformed -= tree; tree1
- case None => typed(tree, pt)
+ case None => typed(tree, mode, pt)
}
def findManifest(tp: Type, full: Boolean) = atPhase(currentRun.typerPhase) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 262e648e39..9b78243910 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
@@ -94,19 +93,19 @@ trait Unapplies extends ast.TreeDSL
}
/** returns unapply member's parameter type. */
def unapplyParameterType(extractor: Symbol) = {
- val tps = extractor.tpe.paramTypes
- if (tps.length == 1) tps.head.typeSymbol
+ val ps = extractor.tpe.params
+ if (ps.length == 1) ps.head.tpe.typeSymbol
else NoSymbol
}
def copyUntyped[T <: Tree](tree: T): T =
- returning[T](UnTyper traverse _)(tree.duplicate)
+ returning[T](tree.duplicate)(UnTyper traverse _)
- def copyUntypedInvariant(td: TypeDef): TypeDef =
- returning[TypeDef](UnTyper traverse _)(
- treeCopy.TypeDef(td, td.mods &~ (COVARIANT | CONTRAVARIANT), td.name,
- td.tparams, td.rhs).duplicate
- )
+ def copyUntypedInvariant(td: TypeDef): TypeDef = {
+ val copy = treeCopy.TypeDef(td, td.mods &~ (COVARIANT | CONTRAVARIANT), td.name, td.tparams, td.rhs)
+
+ returning[TypeDef](copy.duplicate)(UnTyper traverse _)
+ }
private def classType(cdef: ClassDef, tparams: List[TypeDef]): Tree = {
val tycon = REF(cdef.symbol)
@@ -134,18 +133,22 @@ trait Unapplies extends ast.TreeDSL
/** The module corresponding to a case class; without any member definitions
*/
def caseModuleDef(cdef: ClassDef): ModuleDef = {
- def inheritFromFun1 = !(cdef.mods hasFlag ABSTRACT) && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
- def createFun1 = gen.scalaFunctionConstr(constrParamss(cdef).head map (_.tpt), toIdent(cdef))
- def parents = if (inheritFromFun1) List(createFun1) else Nil
-
- companionModuleDef(cdef, parents ::: List(gen.scalaScalaObjectConstr))
+ // > MaxFunctionArity is caught in Namers, but for nice error reporting instead of
+ // an abrupt crash we trim the list here.
+ def primaries = constrParamss(cdef).head take MaxFunctionArity map (_.tpt)
+ def inheritFromFun = !cdef.mods.isAbstract && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
+ def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+ def parents = if (inheritFromFun) List(createFun) else Nil
+
+ companionModuleDef(cdef, parents)
}
- def companionModuleDef(cdef: ClassDef, parents: List[Tree]): ModuleDef = atPos(cdef.pos.focus) {
+ def companionModuleDef(cdef: ClassDef, parents: List[Tree] = Nil): ModuleDef = atPos(cdef.pos.focus) {
+ val allParents = parents ::: List( gen.scalaScalaObjectConstr)
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, List(Nil), Nil, cdef.impl.pos.focus))
+ Template(allParents, emptyValDef, NoMods, Nil, List(Nil), Nil, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
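A hedged illustration of caseModuleDef above: for a case class with one parameter list (trimmed to at most MaxFunctionArity parameters) and no type parameters, the synthesized companion extends the corresponding abstract function type, so it can be used where a function value is expected.

  case class Celsius(degrees: Double)

  // the companion object conforms to Double => Celsius
  val readings = List(21.5, 23.0) map Celsius   // List(Celsius(21.5), Celsius(23.0))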
@@ -171,10 +174,11 @@ trait Unapplies extends ast.TreeDSL
case _ => nme.unapply
}
val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), paramName, classType(cdef, tparams), EmptyTree))
+ val ifNull = if (constrParamss(cdef).head.size == 0) FALSE else REF(NoneModule)
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(paramName))
atPos(cdef.pos.focus)(
- DefDef(caseMods, method, tparams, List(cparams), TypeTree(),
- caseClassUnapplyReturnValue(paramName, cdef.symbol))
+ DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
)
}
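Roughly the shape of the synthesized extractor after the nullSafe change above, written out by hand for a hypothetical class; the generated method uses compiler-chosen parameter names.

  class Person(val name: String, val age: Int)

  object Person {
    def apply(name: String, age: Int) = new Person(name, age)
    // a null scrutinee now yields None (or false, for a case class with an empty
    // parameter list) instead of throwing a NullPointerException
    def unapply(p: Person): Option[(String, Int)] =
      if (p == null) None else Some((p.name, p.age))
  }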
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index 6e2d61b94c..7d0500d598 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -1,8 +1,7 @@
/* NSC -- new scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package typechecker
diff --git a/src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala b/src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala
deleted file mode 100644
index 2d8fc8c502..0000000000
--- a/src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.tools.nsc
-package util
-
-import java.io.{FileReader, BufferedReader, StreamTokenizer, FileNotFoundException}
-import scala.tools.nsc.io.AbstractFile
-import scala.collection.mutable.ListBuffer
-
-/**
- * Expands all arguments starting with @ to the contents of the
- * file named like each argument.
- */
-object ArgumentsExpander {
-
- def expandArg(arg: String): List[String] =
- expandFromFile(arg.substring(1))
-
- /*
- * Extracts all the arguments in a specified file.
- * Throws FileNotFoundException if the file does not exist.
- */
- private def expandFromFile(fileName: String): List[String] = {
- val f = AbstractFile.getFile(fileName)
- if (f eq null) throw new FileNotFoundException(
- "argument file "+ fileName +" could not be found")
-
- val in = new BufferedReader(new FileReader(f.file))
-
- val tokenizer = new StreamTokenizer( in )
- tokenizer.resetSyntax
- tokenizer.wordChars(' ', 255)
- tokenizer.whitespaceChars(0, ' ')
- tokenizer.commentChar('#')
- tokenizer.quoteChar('"')
- tokenizer.quoteChar('\'')
-
- val ts = new ListBuffer[String]
- while (tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
- ts += tokenizer.sval
- }
- in.close()
- ts.toList
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
index ca3eadbf87..907622e31f 100644
--- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
-import scala.tools.nsc.util.SourceFile.{LF, FF, CR, SU}
+import Chars._
abstract class CharArrayReader { self =>
@@ -35,7 +34,7 @@ abstract class CharArrayReader { self =>
/** Is last character a unicode escape \\uxxxx? */
def isUnicodeEscape = charOffset == lastUnicodeOffset
- /** Advance one character */
+ /** Advance one character; reducing CR;LF pairs to just LF */
final def nextChar() {
if (charOffset >= buf.length) {
ch = SU
@@ -44,8 +43,20 @@ abstract class CharArrayReader { self =>
ch = c
charOffset += 1
if (c == '\\') potentialUnicode()
+ else if (c < ' ') { skipCR(); potentialLineEnd() }
+ }
+ }
+
+ /** Advance one character, leaving CR;LF pairs intact */
+ final def nextRawChar() {
+ if (charOffset >= buf.length) {
+ ch = SU
+ } else {
+ val c = buf(charOffset)
+ ch = c
+ charOffset += 1
+ if (c == '\\') potentialUnicode()
else if (c < ' ') potentialLineEnd()
-// print("`"+ch+"'")
}
}
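A standalone sketch (not the reader class itself) of the CR;LF normalization that nextChar now performs and nextRawChar deliberately leaves out:

  def normalizeLineEnds(s: String): String = {
    val sb = new StringBuilder
    var i = 0
    while (i < s.length) {
      val c = s.charAt(i)
      if (c == '\r' && i + 1 < s.length && s.charAt(i + 1) == '\n') { sb += '\n'; i += 2 }   // CR;LF -> LF
      else { sb += c; i += 1 }
    }
    sb.toString
  }

  normalizeLineEnds("a\r\nb")   // "a\nb" -- what nextChar delivers; nextRawChar keeps the CR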
@@ -71,32 +82,23 @@ abstract class CharArrayReader { self =>
}
}
- /** Handle line ends, replace CR+LF by LF */
- private def potentialLineEnd() {
+ /** replace CR;LF by LF */
+ private def skipCR() {
if (ch == CR)
if (charOffset < buf.length && buf(charOffset) == LF) {
charOffset += 1
ch = LF
}
+ }
+
+ /** Handle line ends */
+ private def potentialLineEnd() {
if (ch == LF || ch == FF) {
lastLineStartOffset = lineStartOffset
lineStartOffset = charOffset
}
}
- /** Convert a character digit to an Int according to given base,
- * -1 if no success */
- def digit2int(ch: Char, base: Int): Int = {
- if ('0' <= ch && ch <= '9' && ch < '0' + base)
- ch - '0'
- else if ('A' <= ch && ch < 'A' + base - 10)
- ch - 'A' + 10
- else if ('a' <= ch && ch < 'a' + base - 10)
- ch - 'a' + 10
- else
- -1
- }
-
/** A new reader that takes off at the current character position */
def lookaheadReader = new CharArrayReader {
val buf = self.buf
diff --git a/src/compiler/scala/tools/nsc/util/Chars.scala b/src/compiler/scala/tools/nsc/util/Chars.scala
new file mode 100755
index 0000000000..562806b6eb
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/Chars.scala
@@ -0,0 +1,85 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package util
+
+import annotation.{ tailrec, switch }
+
+/** Contains constants and classifier methods for characters */
+object Chars {
+ // Be very careful touching these.
+ // Apparently trivial changes to the way you write these constants
+ // will cause Scanners.scala to go from a nice efficient switch to
+ // a ghastly nested if statement which will bring the type checker
+ // to its knees. See ticket #1456
+ // Martin: (this should be verified now that the pattern rules have been redesigned).
+ final val LF = '\u000A'
+ final val FF = '\u000C'
+ final val CR = '\u000D'
+ final val SU = '\u001A'
+
+ /** Convert a character digit to an Int according to given base,
+ * -1 if no success */
+ def digit2int(ch: Char, base: Int): Int = {
+ if ('0' <= ch && ch <= '9' && ch < '0' + base)
+ ch - '0'
+ else if ('A' <= ch && ch < 'A' + base - 10)
+ ch - 'A' + 10
+ else if ('a' <= ch && ch < 'a' + base - 10)
+ ch - 'a' + 10
+ else
+ -1
+ }
+
+ /** Convert a character to a backslash-u escape */
+ def char2uescape(c: Char): String = {
+ var rest = c.toInt
+ val buf = new StringBuilder
+ for (i <- 1 to 4) {
+ buf ++= (rest % 16).toHexString
+ rest = rest / 16
+ }
+ "\\u" + buf.toString.reverse
+ }
+
+ /** Is character a line break? */
+ @inline def isLineBreakChar(c: Char) = (c: @switch) match {
+ case LF|FF|CR|SU => true
+ case _ => false
+ }
+
+ /** Is character a whitespace character (but not a new line)? */
+ def isWhitespace(c: Char) =
+ c == ' ' || c == '\t' || c == CR
+
+ /** Can character form part of a doc comment variable $xxx? */
+ def isVarPart(c: Char) =
+ '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
+
+ /** Can character start an alphanumeric Scala identifier? */
+ def isIdentifierStart(c: Char): Boolean =
+ (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)
+
+ /** Can character form part of an alphanumeric Scala identifier? */
+ def isIdentifierPart(c: Char) =
+ (c == '$') || Character.isUnicodeIdentifierPart(c)
+
+ /** Is character a math or other symbol in Unicode? */
+ def isSpecial(c: Char) = {
+ val chtp = Character.getType(c)
+ chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
+ }
+
+ /** Can character form part of a Scala operator name? */
+ def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '/' | '\\' => true
+ case c => isSpecial(c)
+ }
+}
+
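A short usage sketch for the new Chars helpers (assuming they are imported from scala.tools.nsc.util.Chars):

  import scala.tools.nsc.util.Chars._

  digit2int('f', 16)      // 15
  digit2int('9', 8)       // -1: '9' is not a digit in base 8
  char2uescape('A')       // the six-character string \\u0041
  isLineBreakChar('\n')   // true
  isWhitespace('\t')      // true
  isOperatorPart('+')     // true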
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index aab74d8c02..4994542fdb 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -1,23 +1,21 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2009 LAMP/EPFL
+ * Copyright 2006-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
-import java.io.File
+import java.io.{ File => JFile }
import java.net.URL
-import java.util.StringTokenizer
-import scala.util.Sorting
import scala.collection.mutable.{ListBuffer, ArrayBuffer, HashSet => MutHashSet}
-import scala.tools.nsc.io.AbstractFile
-
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Assembly}
-
+import io.{ File, Directory, Path, AbstractFile }
+import scala.tools.util.StringOps.splitWhere
+import Path.isJarOrZip
+import scala.tools.util.PathResolver
+import File.pathSeparator
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -29,255 +27,321 @@ import ch.epfl.lamp.compiler.msil.{Type => MSILType, Assembly}
object ClassPath {
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
- def isJar(name: String) = name.toLowerCase endsWith ".jar"
-
- /** Get all jars in directory */
- def lsJars(f: File, filt: String => Boolean = _ => true) = {
- val list = f.listFiles()
- if (list eq null) Nil
- else list.filter(f => f.isFile() && filt(f.getName) && isJar(f.getName())).map(_.getPath()).toList
- }
+ val wildSuffix = File.separator + "*"
- val suffix = File.separator + "*"
+ /** Get all subdirectories, jars, zips out of a directory. */
+ def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
+ dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
def basedir(s: String) =
if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
else "."
- if (pattern == "*") lsJars(new File("."))
- else if (pattern endsWith suffix) lsJars(new File(pattern dropRight 2))
+ if (pattern == "*") lsDir(Directory("."))
+ else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
val regexp = ("^%s$" format pattern.replaceAll("""\*""", """.*""")).r
- lsJars(new File(basedir(pattern)), regexp findFirstIn _ isDefined)
+ lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
}
else List(pattern)
}
- /** Split path using platform-dependent path separator */
- private def splitPath(path: String): List[String] =
- path split File.pathSeparator toList
+ /** Return duplicated classpath entries as
+ * (name, list of origins)
+ * in the order they occur on the path.
+ */
+ def findDuplicates(cp: ClassPath[_]) = {
+ def toFullName(x: (String, _, cp.AnyClassRep)) = x._1 + "." + x._3.name
+ def toOriginString(x: ClassPath[_]) = x.origin getOrElse x.name
+
+ /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
+ val flattened = (
+ for ((pkgName, pkg) <- cp.allPackagesWithNames ; clazz <- pkg.classes) yield
+ (pkgName, pkg, clazz)
+ )
+ val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
+
+ /** Extract results. */
+ for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
+ (name, dups map { case (_, cp, _) => toOriginString(cp) })
+ }
+
+ /** Split classpath using platform-dependent path separator */
+ def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
+
+ /** Join classpath using platform-dependent path separator */
+ def join(paths: String*): String = paths filterNot (_ == "") mkString pathSeparator
+
+ /** Split the classpath, apply a transformation function, and reassemble it. */
+ def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
+
+ /** Split the classpath, filter according to predicate, and reassemble. */
+ def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
+
+ /** Split the classpath and map them into Paths */
+ def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
+
+ /** Join the paths as a classpath */
+ def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
+
+ /** Split the classpath and map them into URLs */
+ def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
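Illustrative uses of the new classpath string helpers (results shown assuming ':' as the platform path separator):

  import scala.tools.nsc.util.ClassPath

  ClassPath.split("a.jar::b.jar:a.jar")                  // List("a.jar", "b.jar") -- empties and duplicates dropped
  ClassPath.join("a.jar", "", "b.jar")                   // "a.jar:b.jar"
  ClassPath.map("a.jar:b.jar", _.toUpperCase)            // "A.JAR:B.JAR"
  ClassPath.filter("a.jar:b.txt", _ endsWith ".jar")     // "a.jar"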
/** Expand path and possibly expanding stars */
def expandPath(path: String, expandStar: Boolean = true): List[String] =
- if (expandStar) splitPath(path).flatMap(expandS(_))
- else splitPath(path)
-
+ if (expandStar) split(path) flatMap expandS
+ else split(path)
- def validPackage(name: String) =
- !(name.equals("META-INF") || name.startsWith("."))
+ /** Expand dir out to contents, a la extdir */
+ def expandDir(extdir: String): List[String] = {
+ val dir = Option(AbstractFile getDirectory extdir) getOrElse (return Nil)
+ dir filter (_.isClassContainer) map (dir.sfile.get / _.name path) toList
+ }
- def validSourceFile(name: String) =
- (name.endsWith(".scala") || name.endsWith(".java"))
+ /** A useful name filter. */
+ def isTraitImplementation(name: String) = name endsWith "$class.class"
- var XO = false
- def validClassFile(name: String) =
- if (name.endsWith(".class")) {
- val className = name.substring(0, name.length - 6)
- (!className.endsWith("$class") || XO)
- } else false
+ import java.net.MalformedURLException
+ def specToURL(spec: String): Option[URL] =
+ try Some(new URL(spec))
+ catch { case _: MalformedURLException => None }
+ /** A class modeling aspects of a ClassPath which should be
+ * propagated to any classpaths it creates.
+ */
+ abstract class ClassPathContext[T] {
+ /** A filter which can be used to exclude entities from the classpath
+ * based on their name.
+ */
+ def isValidName(name: String): Boolean = true
+
+ /** From the representation to its identifier.
+ */
+ def toBinaryName(rep: T): String
+
+ /** Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile): ClassPath[T]
+
+ /** Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String): List[ClassPath[T]] =
+ for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
+ new SourcePath[T](dir, this)
+
+ def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
+ for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
+ newClassPath(entry)
+
+ def classesAtAllURLS(path: String): List[ClassPath[T]] =
+ (path split " ").toList flatMap classesAtURL
+
+ def classesAtURL(spec: String) =
+ for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
+ newClassPath(location)
+
+ def classesInExpandedPath(path: String) = classesInPathImpl(path, true)
+ def classesInPath(path: String) = classesInPathImpl(path, false)
+
+ // Internal
+ private def classesInPathImpl(path: String, expand: Boolean) =
+ for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
+ newClassPath(dir)
+ }
- def collectTypes(assemFile: AbstractFile) = {
- var res: Array[MSILType] = MSILType.EmptyTypes
- val assem = Assembly.LoadFrom(assemFile.path)
- if (assem != null) {
- // DeclaringType == null: true for non-inner classes
- res = assem.GetTypes().filter((typ: MSILType) => typ.DeclaringType == null)
- Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
+ class JavaContext extends ClassPathContext[AbstractFile] {
+ def toBinaryName(rep: AbstractFile) = {
+ assert(rep.name endsWith ".class", rep.name)
+ rep.name dropRight 6
}
- res
+ def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
}
-}
-/**
- * A represents classes which can be loaded with a ClassfileLoader/MSILTypeLoader
- * and / or a SourcefileLoader.
- */
-case class ClassRep[T](binary: Option[T], source: Option[AbstractFile]) {
- def name = {
- if (binary.isDefined) binary.get match {
- case f: AbstractFile =>
- assert(f.name.endsWith(".class"), f.name)
- f.name.substring(0, f.name.length - 6)
- case t: MSILType =>
- t.Name
- case c =>
- throw new FatalError("Unexpected binary class representation: "+ c)
- } else {
- assert(source.isDefined)
- val nme = source.get.name
- if (nme.endsWith(".scala"))
- nme.substring(0, nme.length - 6)
- else if (nme.endsWith(".java"))
- nme.substring(0, nme.length - 5)
- else
- throw new FatalError("Unexpected source file ending: "+ nme)
- }
+ object DefaultJavaContext extends JavaContext {
+ override def isValidName(name: String) = !isTraitImplementation(name)
+ }
+
+ /** From the source file to its identifier.
+ */
+ def toSourceName(f: AbstractFile): String = {
+ val nme = f.name
+ if (nme.endsWith(".scala"))
+ nme dropRight 6
+ else if (nme.endsWith(".java"))
+ nme dropRight 5
+ else
+ throw new FatalError("Unexpected source file ending: " + nme)
}
}
+import ClassPath._
/**
* Represents a package which contains classes and other packages
*/
abstract class ClassPath[T] {
+ type AnyClassRep = ClassPath[T]#ClassRep
+
/**
* The short name of the package (without prefix)
*/
def name: String
- def classes: List[ClassRep[T]]
+
+ /**
+ * A String representing the origin of this classpath element, if known.
+ * For example, the path of the directory or jar.
+ */
+ def origin: Option[String] = None
+
+ /** A list of URLs representing this classpath.
+ */
+ def asURLs: List[URL]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClasspathString: String
+
+ /** Info which should be propagated to any sub-classpaths.
+ */
+ def context: ClassPathContext[T]
+
+ /** Lists of entities.
+ */
+ def classes: List[AnyClassRep]
def packages: List[ClassPath[T]]
def sourcepaths: List[AbstractFile]
- /**
- * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
- * Does not support nested classes on .NET
+ /** Information which entails walking the tree. This is probably only
+ * necessary for tracking down problems - it's normally not used.
*/
- def findClass(name: String): Option[ClassRep[T]] = {
- val i = name.indexOf('.')
- if (i < 0) {
- classes.find(c => c.name == name)
- } else {
- val pkg = name.substring(0, i)
- val rest = name.substring(i + 1, name.length)
- packages.find(p => p.name == pkg).flatMap(_.findClass(rest))
- }
+ def allPackages: List[ClassPath[T]] = packages ::: (packages flatMap (_.allPackages))
+ def allPackageNames: List[String] = {
+ def subpackages(prefix: String, cp: ClassPath[T]): List[String] = (
+ (cp.packages map (prefix + _.name)) :::
+ (cp.packages flatMap (x => subpackages(prefix + x.name + ".", x)))
+ )
+ subpackages("", this)
}
-}
+ def allPackagesWithNames: List[(String, ClassPath[T])] = {
+ val root = packages map (p => p.name -> p)
+ val subs =
+ for ((prefix, p) <- root ; (k, v) <- p.allPackagesWithNames) yield
+ (prefix + "." + k, v)
-/**
- * A Classpath containing source files
- */
-class SourcePath[T](dir: AbstractFile) extends ClassPath[T] {
- def name = dir.name
+ root ::: subs
+ }
- def classes = {
- val cls = new ListBuffer[ClassRep[T]]
- for (f <- dir.iterator) {
- if (!f.isDirectory && ClassPath.validSourceFile(f.name))
- cls += ClassRep[T](None, Some(f))
+ /**
+ * Represents classes which can be loaded with a ClassfileLoader/MSILTypeLoader
+ * and / or a SourcefileLoader.
+ */
+ case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
+ def name: String = binary match {
+ case Some(x) => context.toBinaryName(x)
+ case _ =>
+ assert(source.isDefined)
+ toSourceName(source.get)
}
- cls.toList
}
- def packages = {
- val pkg = new ListBuffer[SourcePath[T]]
- for (f <- dir.iterator) {
- if (f.isDirectory && ClassPath.validPackage(f.name))
- pkg += new SourcePath[T](f)
+ /** Filters for assessing validity of various entities.
+ */
+ def validClassFile(name: String) = (name endsWith ".class") && context.isValidName(name)
+ def validPackage(name: String) = (name != "META-INF") && (name != "") && (name(0) != '.')
+ def validSourceFile(name: String) = validSourceExtensions exists (name endsWith _)
+ def validSourceExtensions = List(".scala", ".java")
+
+ /**
+ * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
+ * Does not support nested classes on .NET
+ */
+ def findClass(name: String): Option[AnyClassRep] =
+ splitWhere(name, _ == '.', true) match {
+ case Some((pkg, rest)) =>
+ val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
+ rep map {
+ case x: ClassRep => x
+ case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
+ }
+ case _ =>
+ classes find (_.name == name)
}
- pkg.toList
- }
- def sourcepaths: List[AbstractFile] = List(dir)
+ def findSourceFile(name: String): Option[AbstractFile] =
+ findClass(name) match {
+ case Some(ClassRep(Some(x: AbstractFile), _)) => Some(x)
+ case _ => None
+ }
- override def toString() = "sourcepath: "+ dir.toString()
+ def sortString = asURLs map (_.toString) sorted
+ override def equals(that: Any) = that match {
+ case x: ClassPath[_] => this.sortString == x.sortString
+ case _ => false
+ }
+ override def hashCode = sortString.hashCode
}
/**
- * A directory (or a .jar file) containing classfiles and packages
+ * A Classpath containing source files
*/
-class DirectoryClassPath(dir: AbstractFile) extends ClassPath[AbstractFile] {
+class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
def name = dir.name
+ override def origin = dir.underlyingSource map (_.path)
+ def asURLs = dir.sfile.toList map (_.toURL)
+ def asClasspathString = dir.path
+ val sourcepaths: List[AbstractFile] = List(dir)
- def classes = {
- val cls = new ListBuffer[ClassRep[AbstractFile]]
- for (f <- dir.iterator) {
- if (!f.isDirectory && ClassPath.validClassFile(f.name))
- cls += ClassRep(Some(f), None)
- }
- cls.toList
- }
+ lazy val classes: List[ClassRep] = dir collect {
+ case f if !f.isDirectory && validSourceFile(f.name) => ClassRep(None, Some(f))
+ } toList
- def packages = {
- val pkg = new ListBuffer[DirectoryClassPath]
- for (f <- dir.iterator) {
- if (f.isDirectory && ClassPath.validPackage(f.name))
- pkg += new DirectoryClassPath(f)
- }
- pkg.toList
- }
+ lazy val packages: List[SourcePath[T]] = dir collect {
+ case f if f.isDirectory && validPackage(f.name) => new SourcePath[T](f, context)
+ } toList
- def sourcepaths: List[AbstractFile] = Nil
- override def toString() = "directory classpath: "+ dir.toString()
+ override def toString() = "sourcepath: "+ dir.toString()
}
-
-
/**
- * A assembly file (dll / exe) containing classes and namespaces
+ * A directory (or a .jar file) containing classfiles and packages
*/
-class AssemblyClassPath(types: Array[MSILType], namespace: String) extends ClassPath[MSILType] {
- def name = {
- val i = namespace.lastIndexOf('.')
- if (i < 0) namespace
- else namespace.substring(i + 1, namespace.length)
- }
-
- def this(assemFile: AbstractFile) {
- this(ClassPath.collectTypes(assemFile), "")
- }
-
- private lazy val first: Int = {
- var m = 0
- var n = types.length - 1
- while (m < n) {
- val l = (m + n) / 2
- val res = types(l).FullName.compareTo(namespace)
- if (res < 0) m = l + 1
- else n = l
- }
- if (types(m).FullName.startsWith(namespace)) m else types.length
- }
+class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
+ def name = dir.name
+ override def origin = dir.underlyingSource map (_.path)
+ def asURLs = dir.sfile.toList map (_.toURL)
+ def asClasspathString = dir.path
+ val sourcepaths: List[AbstractFile] = Nil
- def classes = {
- val cls = new ListBuffer[ClassRep[MSILType]]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
- if (types(i).Namespace == namespace)
- cls += ClassRep(Some(types(i)), None)
- i += 1
- }
- cls.toList
- }
+ lazy val classes: List[ClassRep] = dir collect {
+ case f if !f.isDirectory && validClassFile(f.name) => ClassRep(Some(f), None)
+ } toList
- def packages = {
- val nsSet = new MutHashSet[String]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- val subns = types(i).Namespace
- if (subns.length > namespace.length) {
- // example: namespace = "System", subns = "System.Reflection.Emit"
- // => find second "." and "System.Reflection" to nsSet.
- val end = subns.indexOf('.', namespace.length + 1)
- nsSet += (if (end < 0) subns
- else subns.substring(0, end))
- }
- i += 1
- }
- for (ns <- nsSet.toList)
- yield new AssemblyClassPath(types, ns)
- }
-
- def sourcepaths: List[AbstractFile] = Nil
+ lazy val packages: List[DirectoryClassPath] = dir collect {
+ case f if f.isDirectory && validPackage(f.name) => new DirectoryClassPath(f, context)
+ } toList
- override def toString() = "assembly classpath "+ namespace
+ override def toString() = "directory classpath: "+ dir
}
/**
* A classpath unifying multiple class- and sourcepath entries.
*/
-abstract class MergedClassPath[T] extends ClassPath[T] {
- protected val entries: List[ClassPath[T]]
-
+class MergedClassPath[T](
+ val entries: List[ClassPath[T]],
+ val context: ClassPathContext[T])
+extends ClassPath[T] {
def name = entries.head.name
+ def asURLs = entries flatMap (_.asURLs)
+ lazy val sourcepaths: List[AbstractFile] = entries flatMap (_.sourcepaths)
+
+ override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
+ override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
- def classes: List[ClassRep[T]] = {
- val cls = new ListBuffer[ClassRep[T]]
+ lazy val classes: List[AnyClassRep] = {
+ val cls = new ListBuffer[AnyClassRep]
for (e <- entries; c <- e.classes) {
val name = c.name
- val idx = cls.indexWhere(cl => cl.name == name)
+ val idx = cls.indexWhere(_.name == name)
if (idx >= 0) {
val existing = cls(idx)
if (existing.binary.isEmpty && c.binary.isDefined)
@@ -291,11 +355,11 @@ abstract class MergedClassPath[T] extends ClassPath[T] {
cls.toList
}
- def packages: List[ClassPath[T]] = {
+ lazy val packages: List[ClassPath[T]] = {
val pkg = new ListBuffer[ClassPath[T]]
for (e <- entries; p <- e.packages) {
val name = p.name
- val idx = pkg.indexWhere(pk => pk.name == name)
+ val idx = pkg.indexWhere(_.name == name)
if (idx >= 0) {
pkg(idx) = addPackage(pkg(idx), p)
} else {
@@ -305,18 +369,46 @@ abstract class MergedClassPath[T] extends ClassPath[T] {
pkg.toList
}
- def sourcepaths: List[AbstractFile] = entries.flatMap(_.sourcepaths)
+ private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = {
+ val newEntries = to match {
+ case cp: MergedClassPath[_] => cp.entries :+ pkg
+ case _ => List(to, pkg)
+ }
+ new MergedClassPath[T](newEntries, context)
+ }
- private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = to match {
- case cp: MergedClassPath[T] =>
- newMergedClassPath(cp.entries ::: List(pkg))
- case _ =>
- newMergedClassPath(List(to, pkg))
+ override def allPackages: List[ClassPath[T]] = entries flatMap (_.allPackages)
+ override def allPackageNames = entries flatMap (_.allPackageNames)
+ override def allPackagesWithNames = entries flatMap (_.allPackagesWithNames)
+
+ def duplicatedClasses = {
+ def toFullName(x: (String, _, AnyClassRep)) = x._1 + "." + x._3.name
+
+ /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
+ val flattened = (
+ for ((pkgName, pkg) <- allPackagesWithNames ; clazz <- pkg.classes) yield
+ (pkgName, pkg, clazz)
+ )
+ val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
+
+ /** Using original name list as reference point, return duplicated entries as
+ * (name, list of origins)
+ * in the order they occur on the path.
+ */
+ for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
+ (name, dups map {
+ case (_, cp, _) if cp.origin.isDefined => cp.origin.get
+ case (_, cp, _) => cp.asURLs.mkString
+ })
}
- private def newMergedClassPath(entrs: List[ClassPath[T]]): MergedClassPath[T] =
- new MergedClassPath[T] {
- protected val entries = entrs
+ def show {
+ println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
+ asClasspathString split ':' foreach (x => println(" " + x))
+ }
+ def showDuplicates =
+ ClassPath findDuplicates this foreach {
+ case (name, xs) => println(xs.mkString(name + ":\n ", "\n ", "\n"))
}
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
@@ -326,118 +418,8 @@ abstract class MergedClassPath[T] extends ClassPath[T] {
* The classpath when compiling with target:jvm. Binary files (classfiles) are represented
* as AbstractFile. nsc.io.ZipArchive is used to view zip/jar archives as directories.
*/
-class JavaClassPath(boot: String, ext: String, user: String, source: String, Xcodebase: String)
-extends MergedClassPath[AbstractFile] {
-
- protected val entries: List[ClassPath[AbstractFile]] = assembleEntries()
- private def assembleEntries(): List[ClassPath[AbstractFile]] = {
- import ClassPath._
- val etr = new ListBuffer[ClassPath[AbstractFile]]
-
- def addFilesInPath(path: String, expand: Boolean,
- ctr: AbstractFile => ClassPath[AbstractFile] = x => new DirectoryClassPath(x)) {
- for (fileName <- expandPath(path, expandStar = expand)) {
- val file = AbstractFile.getDirectory(fileName)
- if (file ne null) etr += ctr(file)
- }
- }
-
- // 1. Boot classpath
- addFilesInPath(boot, false)
-
- // 2. Ext classpath
- for (fileName <- expandPath(ext, expandStar = false)) {
- val dir = AbstractFile.getDirectory(fileName)
- if (dir ne null) {
- for (file <- dir) {
- val name = file.name.toLowerCase
- if (name.endsWith(".jar") || name.endsWith(".zip") || file.isDirectory) {
- val archive = AbstractFile.getDirectory(new File(dir.file, name))
- if (archive ne null) etr += new DirectoryClassPath(archive)
- }
- }
- }
- }
-
- // 3. User classpath
- addFilesInPath(user, true)
-
- // 4. Codebase entries (URLs)
- {
- val urlSeparator = " "
- val urlStrtok = new StringTokenizer(Xcodebase, urlSeparator)
- while (urlStrtok.hasMoreTokens()) try {
- val url = new URL(urlStrtok.nextToken())
- val archive = AbstractFile.getURL(url)
- if (archive ne null) etr += new DirectoryClassPath(archive)
- }
- catch {
- case e =>
- Console.println("error adding classpath form URL: " + e.getMessage)//debug
- throw e
- }
- }
-
- // 5. Source path
- if (source != "")
- addFilesInPath(source, false, x => new SourcePath[AbstractFile](x))
-
- etr.toList
- }
-}
-
-/**
- * The classpath when compiling with target:msil. Binary files are represented as
- * MSILType values.
- */
-class MsilClassPath(ext: String, user: String, source: String) extends MergedClassPath[MSILType] {
- protected val entries: List[ClassPath[MSILType]] = assembleEntries()
-
- private def assembleEntries(): List[ClassPath[MSILType]] = {
- import ClassPath._
- val etr = new ListBuffer[ClassPath[MSILType]]
- val names = new MutHashSet[String]
-
- // 1. Assemblies from -Xassem-extdirs
- for (dirName <- expandPath(ext, expandStar = false)) {
- val dir = AbstractFile.getDirectory(dirName)
- if (dir ne null) {
- for (file <- dir) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += new AssemblyClassPath(file)
- }
- }
- }
- }
-
- // 2. Assemblies from -Xassem-path
- for (fileName <- expandPath(user, expandStar = false)) {
- val file = AbstractFile.getFile(fileName)
- if (file ne null) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += new AssemblyClassPath(file)
- }
- }
- }
-
- def check(n: String) {
- if (!names.contains(n))
- throw new AssertionError("Cannot find assembly "+ n +
- ". Use -Xassem-extdirs or -Xassem-path to specify its location")
- }
- check("mscorlib.dll")
- check("scalaruntime.dll")
-
- // 3. Source path
- for (dirName <- expandPath(source, expandStar = false)) {
- val file = AbstractFile.getDirectory(dirName)
- if (file ne null) etr += new SourcePath[MSILType](file)
- }
-
- etr.toList
- }
+class JavaClassPath(
+ containers: List[ClassPath[AbstractFile]],
+ context: JavaContext)
+extends MergedClassPath[AbstractFile](containers, context) {
}
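
The findClass implementation above peels the leading package name off a dotted class name and recurses into the matching sub-classpath. A minimal, self-contained sketch of that lookup strategy, using a hypothetical Node type in place of ClassPath (names and demo values are illustrative only):

    // Hypothetical miniature of ClassPath.findClass: split at the first dot,
    // descend into the matching package, look the simple name up at the leaf.
    object FindClassSketch {
      case class Node(name: String, classes: List[String], packages: List[Node]) {
        def findClass(fqName: String): Option[String] =
          fqName.split('.').toList match {
            case cls :: Nil  => classes.find(_ == cls)
            case pkg :: rest => packages.find(_.name == pkg).flatMap(_.findClass(rest.mkString(".")))
            case Nil         => None
          }
      }
      def main(args: Array[String]): Unit = {
        val root = Node("<root>", List("Top"), List(Node("scala", List("Predef"), Nil)))
        println(root.findClass("scala.Predef"))   // expected Some(Predef)
        println(root.findClass("Top"))            // expected Some(Top)
        println(root.findClass("scala.Missing"))  // expected None
      }
    }
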
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
new file mode 100644
index 0000000000..16d79d5776
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
@@ -0,0 +1,145 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package util
+
+import scala.util.parsing.combinator._
+import scala.util.parsing.input.{ Reader }
+import scala.util.parsing.input.CharArrayReader.EofCh
+import scala.collection.mutable.ListBuffer
+
+/** A simple command line parser to replace the several different
+ * simple ones spread around trunk.
+ *
+ * XXX Note this has been completely obsolesced by scala.tools.cmd.
+ * I checked it back in as part of rolling partest back a month
+ * rather than go down the rabbit hole of unravelling dependencies.
+ */
+
+trait ParserUtil extends Parsers {
+ class ParserPlus[+T](underlying: Parser[T]) {
+ def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
+ def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
+ }
+ protected implicit def parser2parserPlus[T](p: Parser[T]): ParserPlus[T] = new ParserPlus(p)
+}
+
+case class CommandLine(
+ args: List[String],
+ unaryArguments: List[String],
+ binaryArguments: List[String]
+) {
+ def this(args: List[String]) = this(args, Nil, Nil)
+ def this(args: Array[String]) = this(args.toList, Nil, Nil)
+ def this(line: String) = this(CommandLineParser tokenize line, Nil, Nil)
+
+ def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs)
+ def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs)
+
+ def originalArgs = args
+ def assumeBinary = true
+ def enforceArity = true
+ def onlyKnownOptions = false
+
+ val Terminator = "--"
+ val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
+
+ def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
+ def errorFn(msg: String) = println(msg)
+
+ /** argMap is option -> argument (or "true" if it is a unary argument)
+ * residualArgs are what is left after removing the options and their args.
+ */
+ lazy val (argMap, residualArgs) = {
+ val residualBuffer = new ListBuffer[String]
+
+ def stripQuotes(s: String) = {
+ def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
+ if (List('"', '\'') exists isQuotedBy) s.tail.init else s
+ }
+
+ def isValidOption(s: String) = !onlyKnownOptions || (unaryArguments contains s) || (binaryArguments contains s)
+ def isOption(s: String) = (s startsWith "-") && (isValidOption(s) || { unknownOption(s) ; false })
+ def isUnary(s: String) = isOption(s) && (unaryArguments contains s)
+ def isBinary(s: String) = isOption(s) && !isUnary(s) && (assumeBinary || (binaryArguments contains s))
+
+ def unknownOption(opt: String) =
+ errorFn("Option '%s' not recognized.".format(opt))
+ def missingArg(opt: String, what: String) =
+ errorFn("Option '%s' requires argument, found %s instead.".format(opt, what))
+
+ def loop(args: List[String]): Map[String, String] = {
+ def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
+ if (args.isEmpty) return Map()
+ val hd :: rest = args
+ if (rest.isEmpty) {
+ if (isBinary(hd) && enforceArity)
+ missingArg(hd, "EOF")
+
+ if (isOption(hd)) mapForUnary(hd) else residual(args)
+ }
+ else
+ if (hd == Terminator) residual(rest)
+ else {
+ val hd1 :: hd2 :: rest = args
+
+ if (hd2 == Terminator) mapForUnary(hd1) ++ residual(rest)
+ else if (isUnary(hd1)) mapForUnary(hd1) ++ loop(hd2 :: rest)
+ else if (isBinary(hd1)) {
+ // Disabling this check so
+ // --scalacopts "-verbose" works. We can't tell if it's quoted,
+ // the shell does us in.
+ //
+ // if (isOption(hd2) && enforceArity)
+ // missingArg(hd1, hd2)
+
+ Map(hd1 -> hd2) ++ loop(rest)
+ }
+ else { residual(List(hd1)) ++ loop(hd2 :: rest) }
+ }
+ }
+
+ (loop(args), residualBuffer map stripQuotes toList)
+ }
+
+ def isSet(arg: String) = args contains arg
+ def get(arg: String) = argMap get arg
+ def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
+ def apply(arg: String) = argMap(arg)
+
+ override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString)
+}
+
+object CommandLineParser extends RegexParsers with ParserUtil {
+ override def skipWhitespace = false
+
+ def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
+ def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
+ def escaped(ch: Char): Parser[String] = "\\" + ch
+ def mkQuoted(ch: Char): Parser[String] = (
+ elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
+ | failure("Unmatched %s in input." format ch)
+ )
+
+ /** Apparently windows can't deal with the quotes sticking around. */
+ lazy val squoted: Parser[String] = mkQuoted('\'') // ^^ (x => "'%s'" format x)
+ lazy val dquoted: Parser[String] = mkQuoted('"') // ^^ (x => "\"" + x + "\"")
+ lazy val token: Parser[String] = """\S+""".r
+
+ lazy val argument: Parser[String] = squoted | dquoted | token
+ lazy val commandLine: Parser[List[String]] = phrase(repsep(argument, whiteSpace))
+
+ class ParseException(msg: String) extends RuntimeException(msg)
+
+ def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
+ def tokenize(line: String, errorFn: String => Unit): List[String] = {
+ parse(commandLine, line.trim) match {
+ case Success(args, _) => args
+ case NoSuccess(msg, rest) => errorFn(msg) ; Nil
+ }
+ }
+ def apply(line: String) = new CommandLine(tokenize(line))
+}
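
A rough usage sketch of the tokenizer and CommandLine wrapper above, assuming the compiler classes are on the classpath; the option names and the expected values are illustrative, inferred from the code rather than verified against this revision:

    import scala.tools.nsc.util.{ CommandLine, CommandLineParser }

    object CommandLineDemo {
      def main(args: Array[String]): Unit = {
        // The tokenizer understands single and double quotes, so "-verbose" stays one token.
        val tokens = CommandLineParser.tokenize("""--debug --scalacopts "-verbose" foo.scala""")
        // Declare --debug unary so it maps to "true" instead of eating the next token.
        val cmd = new CommandLine(tokens).withUnaryArgs(List("--debug"))
        println(cmd.get("--debug"))        // expected Some(true)
        println(cmd.get("--scalacopts"))   // expected Some(-verbose)
        println(cmd.residualArgs)          // expected List(foo.scala)
      }
    }
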
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
new file mode 100755
index 0000000000..06045daf3b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -0,0 +1,138 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala.tools.nsc
+package util
+
+import Chars._
+import scala.collection.mutable.{HashMap, ListBuffer, StringBuilder}
+
+/** Utility methods for doc comment strings
+ */
+object DocStrings {
+
+ /** Returns index of string `str` following `start` skipping longest
+ * sequence of whitespace characters (but no newlines)
+ */
+ def skipWhitespace(str: String, start: Int): Int =
+ if (start < str.length && isWhitespace(str charAt start)) skipWhitespace(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping
+ * sequence of identifier characters.
+ */
+ def skipIdent(str: String, start: Int): Int =
+ if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
+ else start
+
+ /** Returns index of string `str` after `start` skipping longest
+ * sequence of space and tab characters, possibly also containing
+ * a single `*' character or the `/``**` sequence.
+ * @pre start == str.length || str(start) == `\n'
+ */
+ def skipLineLead(str: String, start: Int): Int =
+ if (start == str.length) start
+ else {
+ val idx = skipWhitespace(str, start + 1)
+ if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
+ else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
+ skipWhitespace(str, idx + 3)
+ else idx
+ }
+
+ /** Skips to next occurrence of `\n' or to the position after the `/``**` sequence following index `start`.
+ */
+ def skipToEol(str: String, start: Int): Int =
+ if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
+ else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
+ else start
+
+ /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
+ * which satisfies predicate `p'.
+ */
+ def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
+ val idx = skipLineLead(str, skipToEol(str, start))
+ if (idx < str.length && !p(idx)) findNext(str, idx)(p)
+ else idx
+ }
+
+ /** Returns all indices following `start` and starting a line (i.e. after skipLineLead)
+ * which satisfy predicate `p'.
+ */
+ def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
+ val idx = findNext(str, start)(p)
+ if (idx == str.length) List()
+ else idx :: findAll(str, idx)(p)
+ }
+
+ /** Produces a string index, which is a list of ``sections'', i.e.
+ * pairs of start/end positions of all tagged sections in the string.
+ * Every section starts with a `@' and extends to the next `@', or
+ * to the end of the comment string, but excluding the final two
+ * characters which terminate the comment.
+ */
+ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] =
+ findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match {
+ case List() => List()
+ case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ }
+
+ /** Does the interval `section` start with the given `tag`?
+ */
+ def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
+ startsWithTag(str, section._1, tag)
+
+ def startsWithTag(str: String, start: Int, tag: String): Boolean =
+ str.startsWith(tag, start) && !isIdentifierPart(str charAt (start + tag.length))
+
+
+ /** The first start tag of a list of tag intervals,
+ * or the end of the whole comment string - 2 if list is empty
+ */
+ def startTag(str: String, sections: List[(Int, Int)]) = sections match {
+ case List() => str.length - 2
+ case (start, _) :: _ => start
+ }
+
+ /** A map from parameter names to start/end indices describing all parameter
+ * sections in `str` tagged with `tag`, where `sections` is the index of `str`.
+ */
+ def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections if startsWithTag(str, section, tag)) yield {
+ val start = skipWhitespace(str, section._1 + tag.length)
+ str.substring(start, skipIdent(str, start)) -> section
+ }
+ }
+
+ /** Optionally start and end index of return section in `str`, or `None`
+ * if `str` does not have a @return.
+ */
+ def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@return"))
+
+ /** Extracts variable name from a string, stripping any pair of surrounding braces */
+ def variableName(str: String): String =
+ if (str.length >= 2 && (str charAt 0) == '{' && (str charAt (str.length - 1)) == '}')
+ str.substring(1, str.length - 1)
+ else
+ str
+
+ /** Returns index following variable, or start index if no variable was recognized
+ */
+ def skipVariable(str: String, start: Int): Int = {
+ var idx = start
+ if (idx < str.length && (str charAt idx) == '{') {
+ do idx += 1
+ while (idx < str.length && (str charAt idx) != '}')
+ if (idx < str.length) idx + 1 else start
+ } else {
+ while (idx < str.length && isVarPart(str charAt idx))
+ idx += 1
+ idx
+ }
+ }
+}
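
A small usage sketch of the DocStrings helpers above on a hand-written doc comment; the comment text and the expected results are illustrative:

    import scala.tools.nsc.util.DocStrings._

    object DocStringsDemo {
      def main(args: Array[String]): Unit = {
        val comment  = "/** Adds things.\n *  @param x the left operand\n *  @return the sum\n */"
        val sections = tagIndex(comment)                       // (start, end) of every @-tagged section
        val params   = paramDocs(comment, "@param", sections)  // parameter name -> its section
        println(params.keySet)                                 // expected Set(x)
        println(returnDoc(comment, sections).isDefined)        // expected true
      }
    }
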
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
index ac325042da..714e731891 100644
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
diff --git a/src/compiler/scala/tools/nsc/util/HashSet.scala b/src/compiler/scala/tools/nsc/util/HashSet.scala
index ebc517266b..8e0c2e2e59 100644
--- a/src/compiler/scala/tools/nsc/util/HashSet.scala
+++ b/src/compiler/scala/tools/nsc/util/HashSet.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
@@ -12,22 +11,36 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
def this(label: String) = this(label, 16)
def this() = this(16)
- private var capacity = initialCapacity
private var used = 0
- private var table = new Array[AnyRef](capacity)
+ private var table = new Array[AnyRef](initialCapacity)
// System.err.println("Created: " + this)
def size: Int = used
def clear() {
- capacity = initialCapacity
used = 0
- table = new Array[AnyRef](capacity)
+ table = new Array[AnyRef](initialCapacity)
}
- private def index(x: Int): Int = Math.abs(x % capacity)
+ private def index(x: Int): Int = math.abs(x % table.length)
+
+ def findEntryOrUpdate(x: T): T = {
+ var h = index(x.##)
+ var entry = table(h)
+ while (entry ne null) {
+ if (x == entry)
+ return entry.asInstanceOf[T]
+
+ h = index(h + 1)
+ entry = table(h)
+ }
+ table(h) = x
+ used += 1
+ if (used > (table.length >> 2)) growTable()
+ x
+ }
def findEntry(x: T): T = {
- var h = index(x.hashCode())
+ var h = index(x.##)
var entry = table(h)
while ((entry ne null) && entry != x) {
h = index(h + 1)
@@ -37,46 +50,53 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
}
def addEntry(x: T) {
- var h = index(x.hashCode())
+ var h = index(x.##)
var entry = table(h)
while (entry ne null) {
if (entry == x) return
- h = index((h + 1))
+ h = index(h + 1)
entry = table(h)
}
table(h) = x
used += 1
- if (used > (capacity >> 2)) growTable()
+ if (used > (table.length >> 2)) growTable()
}
def iterator = new Iterator[T] {
private var i = 0
def hasNext: Boolean = {
- while (i < capacity && (table(i) eq null)) i += 1
- i < capacity
+ while (i < table.length && (table(i) eq null)) i += 1
+ i < table.length
}
def next: T =
if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] }
else null
}
+ private def addOldEntry(x: T) {
+ var h = index(x.##)
+ var entry = table(h)
+ while (entry ne null) {
+ h = index(h + 1)
+ entry = table(h)
+ }
+ table(h) = x
+ }
+
private def growTable() {
val oldtable = table
val growthFactor =
- if (capacity <= initialCapacity) 8
- else if (capacity <= (initialCapacity * 8)) 4
+ if (table.length <= initialCapacity) 8
+ else if (table.length <= (initialCapacity * 8)) 4
else 2
- capacity *= growthFactor
- table = new Array[AnyRef](capacity)
+ table = new Array[AnyRef](table.length * growthFactor)
var i = 0
- used = 0
while (i < oldtable.length) {
val entry = oldtable(i)
- if (entry ne null) addEntry(entry.asInstanceOf[T])
+ if (entry ne null) addOldEntry(entry.asInstanceOf[T])
i += 1
}
- // System.err.println("Grown: " + this)
}
- override def toString() = "HashSet %s(%d / %d)".format(label, used, capacity)
+ override def toString() = "HashSet %s(%d / %d)".format(label, used, table.length)
}
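
The reworked HashSet is an open-addressing set that compares with == but hands back the stored reference, which is what findEntryOrUpdate leans on for interning. A hedged sketch of that behaviour; the expected values are inferred from the code above:

    import scala.tools.nsc.util.HashSet

    object HashSetDemo {
      def main(args: Array[String]): Unit = {
        val set = new HashSet[String]("demo", 16)
        val a   = new String("foo")              // two distinct but equal instances
        val b   = new String("foo")
        set.addEntry(a)
        println(set.findEntry(b) eq a)           // expected true: equal key, stored reference returned
        println(set.findEntryOrUpdate(b) eq a)   // expected true: already present, b is not inserted
        println(set.size)                        // expected 1
      }
    }
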
diff --git a/src/compiler/scala/tools/nsc/util/InterruptReq.scala b/src/compiler/scala/tools/nsc/util/InterruptReq.scala
new file mode 100644
index 0000000000..72aedde4a8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/InterruptReq.scala
@@ -0,0 +1,37 @@
+package scala.tools.nsc
+package util
+
+/** A class of work items to be used in interrupt requests.
+ */
+abstract class InterruptReq {
+ /** The result type of the operation
+ */
+ type R
+
+ /** The operation to be performed */
+ protected val todo: () => R
+
+ /** The result provided */
+ private var result: Option[Either[R, Throwable]] = None
+
+ /** To be called from interrupted server to execute demanded task */
+ def execute(): Unit = synchronized {
+ try {
+ result = Some(Left(todo()))
+ } catch {
+ case t => result = Some(Right(t))
+ }
+ notify()
+ }
+
+ /** To be called from interrupting client to get result for interrupt */
+ def getResult(): R = synchronized {
+ while (result.isEmpty) wait()
+ result.get match {
+ case Left(res) => res
+ case Right(t) => throw new FailedInterrupt(t)
+ }
+ }
+}
+
+class FailedInterrupt(cause: Throwable) extends Exception("Compiler exception during call to 'ask'", cause)
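
A hedged sketch of the InterruptReq handshake above: the client constructs the request, another thread (standing in for the interrupted server) calls execute(), and the client blocks in getResult() until the value is available. The computed value is illustrative:

    import scala.tools.nsc.util.InterruptReq

    object InterruptReqDemo {
      def main(args: Array[String]): Unit = {
        // Client side: wrap the work to be performed on the server thread.
        val req = new InterruptReq {
          type R = Int
          protected val todo = () => 21 * 2
        }
        // Server side, modelled here by a plain thread: run the task and notify the waiter.
        new Thread(new Runnable { def run() = req.execute() }).start()
        // Client side: blocks until execute() has stored a result.
        println(req.getResult())   // expected 42
      }
    }
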
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index 9dce09ad7e..fa377a294d 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
-import scala.tools.nsc.util.SourceFile.{LF, FF, CR, SU}
+import Chars._
class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, startcol: int, */
decodeUni: Boolean, error: String => Unit) extends Iterator[Char] with Cloneable {
@@ -121,15 +120,4 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
def copy: JavaCharArrayReader =
new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error)
-
- def digit2int(ch: Char, base: Int): Int = {
- if ('0' <= ch && ch <= '9' && ch < '0' + base)
- ch - '0'
- else if ('A' <= ch && ch < 'A' + base - 10)
- ch - 'A' + 10
- else if ('a' <= ch && ch < 'a' + base - 10)
- ch - 'a' + 10
- else
- -1
- }
}
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
new file mode 100644
index 0000000000..5511326a6b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
@@ -0,0 +1,169 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+// $Id$
+
+package scala.tools.nsc
+package util
+
+import java.io.File
+import java.net.URL
+import java.util.StringTokenizer
+import scala.util.Sorting
+
+import scala.collection.mutable.{ ListBuffer, HashSet => MutHashSet }
+import scala.tools.nsc.io.AbstractFile
+
+import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
+import ClassPath.{ ClassPathContext, isTraitImplementation }
+
+/** Keeping the MSIL classpath code in its own file is important to make sure
+ * we don't accidentally introduce a dependency on msil.jar in the jvm.
+ */
+
+object MsilClassPath {
+ def collectTypes(assemFile: AbstractFile) = {
+ var res: Array[MSILType] = MSILType.EmptyTypes
+ val assem = Assembly.LoadFrom(assemFile.path)
+ if (assem != null) {
+ // DeclaringType == null: true for non-inner classes
+ res = assem.GetTypes() filter (_.DeclaringType == null)
+ Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
+ }
+ res
+ }
+
+ /** On the java side this logic is in PathResolver, but as I'm not really
+ * up to folding MSIL into that, I am encapsulating it here.
+ */
+ def fromSettings(settings: Settings): MsilClassPath = {
+ val context =
+ if (settings.inline.value) new MsilContext
+ else new MsilContext { override def isValidName(name: String) = !isTraitImplementation(name) }
+
+ import settings._
+ new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context)
+ }
+
+ class MsilContext extends ClassPathContext[MSILType] {
+ def toBinaryName(rep: MSILType) = rep.Name
+ def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this)
+ }
+
+ private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MSILType]] = {
+ import ClassPath._
+ val etr = new ListBuffer[ClassPath[MSILType]]
+ val names = new MutHashSet[String]
+
+ // 1. Assemblies from -Xassem-extdirs
+ for (dirName <- expandPath(ext, expandStar = false)) {
+ val dir = AbstractFile.getDirectory(dirName)
+ if (dir ne null) {
+ for (file <- dir) {
+ val name = file.name.toLowerCase
+ if (name.endsWith(".dll") || name.endsWith(".exe")) {
+ names += name
+ etr += context.newClassPath(file)
+ }
+ }
+ }
+ }
+
+ // 2. Assemblies from -Xassem-path
+ for (fileName <- expandPath(user, expandStar = false)) {
+ val file = AbstractFile.getFile(fileName)
+ if (file ne null) {
+ val name = file.name.toLowerCase
+ if (name.endsWith(".dll") || name.endsWith(".exe")) {
+ names += name
+ etr += context.newClassPath(file)
+ }
+ }
+ }
+
+ def check(n: String) {
+ if (!names.contains(n))
+ throw new AssertionError("Cannot find assembly "+ n +
+ ". Use -Xassem-extdirs or -Xassem-path to specify its location")
+ }
+ check("mscorlib.dll")
+ check("scalaruntime.dll")
+
+ // 3. Source path
+ for (dirName <- expandPath(source, expandStar = false)) {
+ val file = AbstractFile.getDirectory(dirName)
+ if (file ne null) etr += new SourcePath[MSILType](file, context)
+ }
+
+ etr.toList
+ }
+}
+import MsilClassPath._
+
+/**
+ * An assembly file (dll / exe) containing classes and namespaces
+ */
+class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MSILType] {
+ def name = {
+ val i = namespace.lastIndexOf('.')
+ if (i < 0) namespace
+ else namespace drop (i + 1)
+ }
+ def asURLs = List(new java.net.URL(name))
+ def asClasspathString = error("Unknown") // I don't know what if anything makes sense here?
+
+ private lazy val first: Int = {
+ var m = 0
+ var n = types.length - 1
+ while (m < n) {
+ val l = (m + n) / 2
+ val res = types(l).FullName.compareTo(namespace)
+ if (res < 0) m = l + 1
+ else n = l
+ }
+ if (types(m).FullName.startsWith(namespace)) m else types.length
+ }
+
+ lazy val classes = {
+ val cls = new ListBuffer[ClassRep]
+ var i = first
+ while (i < types.length && types(i).Namespace.startsWith(namespace)) {
+ // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
+ if (types(i).Namespace == namespace)
+ cls += ClassRep(Some(types(i)), None)
+ i += 1
+ }
+ cls.toList
+ }
+
+ lazy val packages = {
+ val nsSet = new MutHashSet[String]
+ var i = first
+ while (i < types.length && types(i).Namespace.startsWith(namespace)) {
+ val subns = types(i).Namespace
+ if (subns.length > namespace.length) {
+ // example: namespace = "System", subns = "System.Reflection.Emit"
+ // => find second "." and add "System.Reflection" to nsSet.
+ val end = subns.indexOf('.', namespace.length + 1)
+ nsSet += (if (end < 0) subns
+ else subns.substring(0, end))
+ }
+ i += 1
+ }
+ for (ns <- nsSet.toList)
+ yield new AssemblyClassPath(types, ns, context)
+ }
+
+ val sourcepaths: List[AbstractFile] = Nil
+
+ override def toString() = "assembly classpath "+ namespace
+}
+
+/**
+ * The classpath when compiling with target:msil. Binary files are represented as
+ * MSILType values.
+ */
+class MsilClassPath(ext: String, user: String, source: String, context: MsilContext)
+extends MergedClassPath[MSILType](MsilClassPath.assembleEntries(ext, user, source, context), context) { } \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
new file mode 100644
index 0000000000..719d18cd2e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
@@ -0,0 +1,10 @@
+package scala.tools.nsc.util
+
+import collection.mutable.HashMap
+import collection.immutable
+
+/** A hashmap with set-valued values, and an empty set as default value
+ */
+class MultiHashMap[K, V] extends HashMap[K, immutable.Set[V]] {
+ override def default(key: K): immutable.Set[V] = Set()
+}
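
A tiny usage sketch of MultiHashMap: a missing key reads as the empty set, so the first insertion needs no special case. The key and values are illustrative:

    import scala.tools.nsc.util.MultiHashMap

    object MultiHashMapDemo {
      def main(args: Array[String]): Unit = {
        val deps = new MultiHashMap[String, String]
        println(deps("Predef"))                     // expected Set(): the default, nothing stored yet
        deps("Predef") = deps("Predef") + "List"    // read-modify-write works uniformly thanks to the default
        deps("Predef") = deps("Predef") + "Map"
        println(deps("Predef"))                     // expected Set(List, Map)
      }
    }
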
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
index 50cd0be675..c97dc930ab 100644
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ b/src/compiler/scala/tools/nsc/util/Position.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*
*/
-// $Id$
package scala.tools.nsc
package util
@@ -109,6 +108,9 @@ trait Position {
/** The same position with a different point value (if a range or offset) */
def withPoint(off: Int) = this
+ /** The same position with a different source value, and its values shifted by given offset */
+ def withSource(source: SourceFile, shift: Int) = this
+
/** If this is a range, the union with the other range, with the point of this position.
* Otherwise, this position
*/
@@ -130,7 +132,7 @@ trait Position {
def focusEnd = this
/** Does this position include the given position `pos`.
- * This holds this is a range position and its range [start..end]
+ * This holds if this is a range position and its range [start..end]
* is the same or covers the range of the given position.
*/
def includes(pos: Position) = false
@@ -152,7 +154,7 @@ trait Position {
* do not share a common point).
*/
def properlyPrecedes(pos: Position) =
- isDefined && pos.isDefined && startOrPoint < pos.endOrPoint
+ isDefined && pos.isDefined && endOrPoint < pos.startOrPoint
/** Does this position overlap with that position?
* This holds if both positions are ranges and there is an interval of
@@ -172,6 +174,9 @@ trait Position {
def column: Int = throw new UnsupportedOperationException("Position.column")
+ /** Convert this to a position around `point` that spans a single source line */
+ def toSingleLine: Position = this
+
def lineContent: String =
if (isDefined) source.lineToString(line - 1)
else "NO_LINE"
@@ -200,6 +205,7 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e
override def isDefined = true
override def pointOrElse(default: Int): Int = point
override def withPoint(off: Int) = new OffsetPosition(source, off)
+ override def withSource(source: SourceFile, shift: Int) = new OffsetPosition(source, point + shift)
override def line: Int = source.offsetToLine(point) + 1
@@ -240,6 +246,7 @@ extends OffsetPosition(source, point) {
override def withStart(off: Int) = new RangePosition(source, off, point, end)
override def withEnd(off: Int) = new RangePosition(source, start, point, off)
override def withPoint(off: Int) = new RangePosition(source, start, off, end)
+ override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
override def focusStart = new OffsetPosition(source, start)
override def focus = {
if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
@@ -250,6 +257,15 @@ extends OffsetPosition(source, point) {
override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
override def union(pos: Position) =
if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
+
+ override def toSingleLine: Position = source match {
+ case bs: BatchSourceFile
+ if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
+ val pointLine = bs.offsetToLine(point)
+ new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
+ case _ => this
+ }
+
override def toString = "RangePosition("+source+", "+start+", "+point+", "+end+")"
override def show = "["+start+":"+end+"]"
private var focusCache: Position = NoPosition
diff --git a/src/compiler/scala/tools/nsc/util/RegexCache.scala b/src/compiler/scala/tools/nsc/util/RegexCache.scala
index c3db44d623..5c3197afa5 100644
--- a/src/compiler/scala/tools/nsc/util/RegexCache.scala
+++ b/src/compiler/scala/tools/nsc/util/RegexCache.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Lex Spoon
*/
-// $Id$
package scala.tools.nsc
package util
diff --git a/src/library/scala/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index 3393f0413f..5b1471c90d 100644
--- a/src/library/scala/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -1,9 +1,10 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Paul Phillips
*/
-package scala.util
+package scala.tools.nsc
+package util
import java.lang.{ ClassLoader => JavaClassLoader }
import java.lang.reflect.{ Constructor, Modifier, Method }
@@ -11,8 +12,10 @@ import java.net.URL
import ScalaClassLoader._
import scala.util.control.Exception.{ catching }
-trait ScalaClassLoader extends JavaClassLoader
-{
+trait ScalaClassLoader extends JavaClassLoader {
+ /** Override to see classloader activity traced */
+ protected def trace: Boolean = false
+
/** Executing an action with this classloader as context classloader */
def asContext[T](action: => T): T = {
val oldLoader = getContextLoader
@@ -29,6 +32,9 @@ trait ScalaClassLoader extends JavaClassLoader
/** Load, link and initialize a class with this classloader */
def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, true)
+ private def tryBody[T <: AnyRef](body: => Any): Option[T] =
+ catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt body.asInstanceOf[T]
+
private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
Class.forName(path, initialize, this).asInstanceOf[Class[T]]
@@ -41,6 +47,27 @@ trait ScalaClassLoader extends JavaClassLoader
}
}
+ override def findClass(name: String) = {
+ val result = super.findClass(name)
+ if (trace) println("findClass(%s) = %s".format(name, result))
+ result
+ }
+
+ override def loadClass(name: String, resolve: Boolean) = {
+ val result = super.loadClass(name, resolve)
+ if (trace) println("loadClass(%s, %s) = %s".format(name, resolve, result))
+ result
+ }
+
+ /** The actual bytes for a class file, or an empty array if it can't be found. */
+ def findBytesForClassName(s: String): Array[Byte] = {
+ val name = s.replaceAll("""\.""", "/") + ".class"
+ val url = this.getResource(name)
+
+ if (url == null) Array()
+ else new io.Streamable.Bytes { def inputStream() = url.openStream } . toByteArray()
+ }
+
/** Run the main method of a class to be loaded by this classloader */
def run(objectName: String, arguments: Seq[String]) {
val clsToRun = tryToInitializeClass(objectName) getOrElse (
@@ -55,22 +82,29 @@ trait ScalaClassLoader extends JavaClassLoader
}
}
-class URLClassLoader(urls: Seq[URL], parent: JavaClassLoader)
- extends java.net.URLClassLoader(urls.toArray, parent)
- with ScalaClassLoader
-{
- /** Override to widen to public */
- override def addURL(url: URL) = super.addURL(url)
-}
-
object ScalaClassLoader {
+ class URLClassLoader(urls: Seq[URL], parent: JavaClassLoader)
+ extends java.net.URLClassLoader(urls.toArray, parent)
+ with ScalaClassLoader {
+
+ private var classloaderURLs = urls.toList
+
+ /** Override to widen to public */
+ override def addURL(url: URL) = {
+ classloaderURLs +:= url
+ super.addURL(url)
+ }
+
+ override def toString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
+ }
+
def setContextLoader(cl: JavaClassLoader) = Thread.currentThread.setContextClassLoader(cl)
def getContextLoader() = Thread.currentThread.getContextClassLoader()
- def getSystemLoader() = JavaClassLoader.getSystemClassLoader()
+ def getSystemLoader(): ScalaClassLoader = new JavaClassLoader(JavaClassLoader.getSystemClassLoader()) with ScalaClassLoader
def defaultParentClassLoader() = findExtClassLoader()
- def fromURLs(urls: Seq[URL]): URLClassLoader =
- new URLClassLoader(urls.toList, defaultParentClassLoader())
+ def fromURLs(urls: Seq[URL], parent: ClassLoader = defaultParentClassLoader()): URLClassLoader =
+ new URLClassLoader(urls.toList, parent)
/** True if supplied class exists in supplied path */
def classExists(urls: Seq[URL], name: String): Boolean =
@@ -89,4 +123,9 @@ object ScalaClassLoader {
search(getContextLoader())
}
+
+ /** Finding what jar a clazz or instance came from */
+ def origin(x: Any): Option[URL] = originOfClass(x.asInstanceOf[AnyRef].getClass)
+ def originOfClass(x: Class[_]): Option[URL] =
+ Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
}
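
A hedged sketch of the relocated ScalaClassLoader helpers: build a loader from URLs, probe for a class without exceptions, and ask where an already-loaded class came from. The jar path is hypothetical, and the bootstrap-class answer is typical JVM behaviour rather than a guarantee:

    import java.net.URL
    import scala.tools.nsc.util.ScalaClassLoader

    object ClassLoaderDemo {
      def main(args: Array[String]): Unit = {
        // Hypothetical jar location; substitute a jar that actually contains the class you probe for.
        val loader = ScalaClassLoader.fromURLs(Seq(new URL("file:///tmp/scala-library.jar")))

        // Option-valued lookup instead of a ClassNotFoundException.
        println(loader.tryToInitializeClass[AnyRef]("scala.Predef$").isDefined)

        // Which jar (if any) an already-loaded class came from; bootstrap classes typically yield None.
        println(ScalaClassLoader.originOfClass(classOf[String]))
      }
    }
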
diff --git a/src/compiler/scala/tools/nsc/util/Set.scala b/src/compiler/scala/tools/nsc/util/Set.scala
index f30db0b1b6..c8ba3d27c9 100644
--- a/src/compiler/scala/tools/nsc/util/Set.scala
+++ b/src/compiler/scala/tools/nsc/util/Set.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index dcb87a38b5..458116845d 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -1,10 +1,10 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
-package scala.tools.nsc
+package scala.tools
+package nsc
package util
import java.io.{File, FileInputStream, PrintStream, IOException}
@@ -12,13 +12,48 @@ import java.lang.Long.toHexString
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
-import symtab.{Flags, Names}
-import symtab.classfile.{PickleBuffer, PickleFormat}
+import cmd.program.Simple
+import symtab.{ Flags, Names }
+import scala.reflect.generic.{ PickleBuffer, PickleFormat }
+import interpreter.ByteCode.scalaSigBytesForPath
object ShowPickled extends Names {
-
import PickleFormat._
+ case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) {
+ def isName = tag == TERMname || tag == TYPEname
+ def hasName = tag match {
+ case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym | EXTref | EXTMODCLASSref => true
+ case _ => false
+ }
+ def readName =
+ if (isName) new String(bytes, "UTF-8")
+ else error("%s is no name" format tagName)
+ def nameIndex =
+ if (hasName) readNat(bytes, 0)
+ else error("%s has no name" format tagName)
+
+ def tagName = tag2string(tag)
+ override def toString = "%d,%d: %s".format(num, startIndex, tagName)
+ }
+
+ case class PickleBufferEntryList(entries: IndexedSeq[PickleBufferEntry]) {
+ def nameAt(idx: Int) = {
+ val entry = entries(idx)
+ if (entry.isName) entry.readName
+ else if (entry.hasName) entries(entry.nameIndex).readName
+ else "?"
+ }
+ }
+
+ def makeEntryList(buf: PickleBuffer, index: Array[Int]) = {
+ val entries = buf.toIndexedSeq.zipWithIndex map {
+ case ((tag, data), num) => PickleBufferEntry(num, index(num), tag, data)
+ }
+
+ PickleBufferEntryList(entries)
+ }
+
def tag2string(tag: Int): String = tag match {
case TERMname => "TERMname"
case TYPEname => "TYPEname"
@@ -41,6 +76,8 @@ object ShowPickled extends Names {
case CLASSINFOtpe => "CLASSINFOtpe"
case METHODtpe => "METHODtpe"
case POLYtpe => "POLYtpe"
+ case IMPLICITMETHODtpe => "IMPLICITMETHODtpe"
+ case SUPERtpe => "SUPERtpe"
case LITERALunit => "LITERALunit"
case LITERALboolean => "LITERALboolean"
case LITERALbyte => "LITERALbyte"
@@ -53,46 +90,110 @@ object ShowPickled extends Names {
case LITERALstring => "LITERALstring"
case LITERALnull => "LITERALnull"
case LITERALclass => "LITERALclass"
+ case LITERALenum => "LITERALenum"
+ case SYMANNOT => "SYMANNOT"
case CHILDREN => "CHILDREN"
+ case ANNOTATEDtpe => "ANNOTATEDtpe"
+ case ANNOTINFO => "ANNOTINFO"
+ case ANNOTARGARRAY => "ANNOTARGARRAY"
+ // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
+ case EXISTENTIALtpe => "EXISTENTIALtpe"
+ case TREE => "TREE"
+ case MODIFIERS => "MODIFIERS"
+
case _ => "***BAD TAG***(" + tag + ")"
}
- def printFile(buf: PickleBuffer, out: PrintStream) {
+ /** Extremely regrettably, essentially copied from PickleBuffer.
+ */
+ def readNat(data: Array[Byte], index: Int): Int = {
+ var idx = index
+ var result = 0L
+ var b = 0L
+ do {
+ b = data(idx)
+ idx += 1
+ result = (result << 7) + (b & 0x7f)
+ } while((b & 0x80) != 0L)
+
+ result.toInt
+ }
+
+ def printFile(buf: PickleBuffer, out: PrintStream): Unit = printFile(buf, out, false)
+ def printFile(buf: PickleBuffer, out: PrintStream, bare: Boolean) {
out.println("Version " + buf.readNat() + "." + buf.readNat())
val index = buf.createIndex
+ val entryList = makeEntryList(buf, index)
+ buf.readIndex = 0
+
+ /** A print wrapper which discards everything if bare is true.
+ */
+ def p(s: String) = if (!bare) out print s
def printNameRef() {
- val x = buf.readNat()
- val savedIndex = buf.readIndex
- buf.readIndex = index(x)
- val tag = buf.readByte()
- val len = buf.readNat()
- out.print(" " + x + "(" + newTermName(buf.bytes, buf.readIndex, len) + ")")
- buf.readIndex = savedIndex
+ val idx = buf.readNat()
+ val name = entryList nameAt idx
+ val toPrint = if (bare) " " + name else " %s(%s)".format(idx, name)
+
+ out print toPrint
}
- def printNat() = out.print(" " + buf.readNat())
+ def printNat() = p(" " + buf.readNat())
+ def printReadNat(x: Int) = p(" " + x)
+
def printSymbolRef() = printNat()
def printTypeRef() = printNat()
def printConstantRef() = printNat()
+ def printAnnotInfoRef() = printNat()
+ def printConstAnnotArgRef() = printNat()
+ def printAnnotArgRef() = printNat()
- def printSymInfo() {
+ def printSymInfo(end: Int) {
printNameRef()
printSymbolRef()
val pflags = buf.readLongNat()
- out.print(" " + toHexString(pflags) +
- "[" + Flags.flagsToString(Flags.pickledToRawFlags(pflags)) + "] ")
- printTypeRef()
+ def printFlags(privateWithin: Option[Int]) = {
+ val accessBoundary = (
+ for (idx <- privateWithin) yield {
+ val s = entryList nameAt idx
+ if (bare) s else idx + "(" + s + ")"
+ }
+ )
+ val flagString = {
+ val arg1 = Flags.pickledToRawFlags(pflags)
+ accessBoundary match {
+ case Some(pw) => Flags.flagsToString(arg1, pw)
+ case _ => Flags.flagsToString(arg1)
+ }
+ }
+
+ out.print(" %s[%s]".format(toHexString(pflags), flagString))
+ }
+
+ /** Might be info or privateWithin */
+ val x = buf.readNat()
+ if (buf.readIndex == end) {
+ printFlags(None)
+ printReadNat(x)
+ }
+ else {
+ printFlags(Some(x))
+ printTypeRef()
+ }
}
+ /** Note: the entries which require some semantic analysis to be correctly
+ * interpreted are for the most part going to tell you the wrong thing.
+ * It's not so easy to duplicate the logic applied in the UnPickler.
+ */
def printEntry(i: Int) {
buf.readIndex = index(i)
- out.print(i + "," + buf.readIndex + ": ")
+ p(i + "," + buf.readIndex + ": ")
val tag = buf.readByte()
out.print(tag2string(tag))
val len = buf.readNat()
val end = len + buf.readIndex
- out.print(" " + len + ":")
+ p(" " + len + ":")
tag match {
case TERMname =>
out.print(" ")
@@ -103,7 +204,7 @@ object ShowPickled extends Names {
out.print(newTypeName(buf.bytes, buf.readIndex, len))
buf.readIndex = end
case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym =>
- printSymInfo()
+ printSymInfo(end)
if (tag == CLASSsym && (buf.readIndex < end)) printTypeRef()
case EXTref | EXTMODCLASSref =>
printNameRef()
@@ -122,7 +223,7 @@ object ShowPickled extends Names {
printSymbolRef(); buf.until(end, printTypeRef)
case CLASSINFOtpe =>
printSymbolRef(); buf.until(end, printTypeRef)
- case METHODtpe =>
+ case METHODtpe | IMPLICITMETHODtpe =>
printTypeRef(); buf.until(end, printTypeRef)
case POLYtpe =>
printTypeRef(); buf.until(end, printSymbolRef)
@@ -144,34 +245,68 @@ object ShowPickled extends Names {
out.print(" " + longBitsToDouble(buf.readLong(len)))
case LITERALstring =>
printNameRef()
+ case LITERALenum =>
+ printSymbolRef()
case LITERALnull =>
out.print(" <null>")
case LITERALclass =>
printTypeRef()
case CHILDREN =>
printSymbolRef(); buf.until(end, printSymbolRef)
+ case SYMANNOT =>
+ printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
+ case ANNOTATEDtpe =>
+ printTypeRef(); buf.until(end, printAnnotInfoRef);
+ case ANNOTINFO =>
+ printTypeRef(); buf.until(end, printAnnotArgRef)
+ case ANNOTARGARRAY =>
+ buf.until(end, printConstAnnotArgRef)
+ case EXISTENTIALtpe =>
+ printTypeRef(); buf.until(end, printSymbolRef)
+
case _ =>
}
out.println()
- if (buf.readIndex != end)
- out.println("BAD ENTRY END: , computed = " + end +
- ", factual = " + buf.readIndex)
+ if (buf.readIndex != end) {
+ out.println("BAD ENTRY END: computed = %d, actual = %d, bytes = %s".format(
+ end, buf.readIndex, buf.bytes.slice(index(i), (end max buf.readIndex)).mkString(", ")
+ ))
+ }
}
for (i <- 0 until index.length) printEntry(i)
}
+ def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
+ def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
+ def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
+ try Some(new PickleBuffer(data, 0, data.length))
+ catch { case _: Exception => None }
+
+ def show(what: String, pickle: PickleBuffer, bare: Boolean) = {
+ Console.println(what + ": ")
+ printFile(pickle, Console.out, bare)
+ }
+
+ private lazy val ShowPickledSpec =
+ Simple(
+ Simple.scalaProgramInfo("showPickled", "Usage: showPickled [--bare] <classname>"),
+ List("--bare" -> "suppress numbers in output"),
+ Nil,
+ null
+ )
+
+ /** Option --bare suppresses numbers so the output can be diffed.
+ */
def main(args: Array[String]) {
- val file = new File(args(0))
- try {
- val stream = new FileInputStream(file)
- val data = new Array[Byte](stream.available())
- stream.read(data)
- val pickle = new PickleBuffer(data, 0, data.length)
- printFile(pickle, Console.out)
- } catch {
- case ex: IOException =>
- Console.println("cannot read " + file + ": " + ex.getMessage())
+ val runner = ShowPickledSpec instance args
+ import runner._
+
+ residualArgs foreach { arg =>
+ (fromFile(arg) orElse fromName(arg)) match {
+ case Some(pb) => show(arg, pb, parsed isSet "--bare")
+ case _ => Console.println("Cannot read " + arg)
+ }
}
}
}
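
The readNat helper added above decodes the pickle format's variable-length naturals: base-128 digits stored most significant first, with the high bit set on every byte except the last. A hedged sketch decoding one hand-constructed value:

    import scala.tools.nsc.util.ShowPickled

    object ReadNatDemo {
      def main(args: Array[String]): Unit = {
        // 300 = 2 * 128 + 44, so its encoding is the two bytes 0x82 0x2C.
        println(ShowPickled.readNat(Array(0x82.toByte, 0x2c.toByte), 0))   // expected 300
      }
    }
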
diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala
index 90b82dab76..90a9057f01 100644
--- a/src/compiler/scala/tools/nsc/util/SourceFile.scala
+++ b/src/compiler/scala/tools/nsc/util/SourceFile.scala
@@ -1,38 +1,25 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
-import scala.tools.nsc.io.{AbstractFile, VirtualFile}
-import scala.collection.mutable.ArrayBuffer
-import annotation.{ tailrec, switch }
-object SourceFile {
- // Be very careful touching these.
- // Apparently trivial changes to the way you write these constants
- // will cause Scanners.scala to go from a nice efficient switch to
- // a ghastly nested if statement which will bring the type checker
- // to its knees. See ticket #1456
- final val LF = '\u000A'
- final val FF = '\u000C'
- final val CR = '\u000D'
- final val SU = '\u001A'
+import io.{ AbstractFile, VirtualFile }
+import scala.collection.mutable.ArrayBuffer
+import annotation.tailrec
+import java.util.regex.Pattern
+import java.io.IOException
+import Chars._
- @inline def isLineBreakChar(c: Char) = (c: @switch) match {
- case LF|FF|CR|SU => true
- case _ => false
- }
-}
/** abstract base class of a source file used in the compiler */
abstract class SourceFile {
- import SourceFile._
def content : Array[Char] // normalized, must end in SU
def file : AbstractFile
def isLineBreak(idx : Int) : Boolean
+ def isSelfContained: Boolean
def length : Int
def position(offset: Int) : Position = {
assert(offset < length)
@@ -59,38 +46,67 @@ abstract class SourceFile {
final def skipWhitespace(offset: Int): Int =
if (content(offset).isWhitespace) skipWhitespace(offset + 1) else offset
- def identifier(pos: Position, compiler: Global): Option[String] = None
+ def identifier(pos: Position): Option[String] = None
+}
+
+object ScriptSourceFile {
+ /** Length of the script header from the given content, if there is one.
+ * The header begins with "#!" or "::#!" and ends with a line starting
+ * with "!#" or "::!#".
+ */
+ def headerLength(cs: Array[Char]): Int = {
+ val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE)
+ val headerStarts = List("#!", "::#!")
+
+ if (headerStarts exists (cs startsWith _)) {
+ val matcher = headerPattern matcher cs.mkString
+ if (matcher.find) matcher.end
+ else throw new IOException("script file does not close its header with !# or ::!#")
+ }
+ else 0
+ }
+ def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs)
+
+ def apply(file: AbstractFile, content: Array[Char]) = {
+ val underlying = new BatchSourceFile(file, content)
+ val headerLen = headerLength(content)
+ val stripped = new ScriptSourceFile(underlying, content drop headerLen, headerLen)
+
+ stripped
+ }
+}
+import ScriptSourceFile._
+
+class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) {
+ override def isSelfContained = false
+
+ override def positionInUltimateSource(pos: Position) =
+ if (!pos.isDefined) super.positionInUltimateSource(pos)
+ else new OffsetPosition(underlying, pos.point + start)
}
/** a file whose contents do not change over time */
class BatchSourceFile(val file : AbstractFile, val content: Array[Char]) extends SourceFile {
- import SourceFile._
def this(_file: AbstractFile) = this(_file, _file.toCharArray)
def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
override def equals(that : Any) = that match {
- case that : BatchSourceFile => file == that.file
+ case that : BatchSourceFile => file.path == that.file.path && start == that.start
case _ => false
}
- override def hashCode = file.hashCode
+ override def hashCode = file.path.## + start.##
val length = content.length
-
- // in SourceFileFragments, these are overridden to compensate during offset calculation
- // Invariant: length + start = underlyingLength
- def underlyingLength = length
def start = 0
+ def isSelfContained = true
- override def identifier(pos: Position, compiler: Global) =
+ override def identifier(pos: Position) =
if (pos.isDefined && pos.source == this && pos.point != -1) {
- def isOK(c: Char) = {
- import compiler.syntaxAnalyzer.{ isOperatorPart, isIdentifierPart }
- isIdentifierPart(c) || isOperatorPart(c)
- }
+ def isOK(c: Char) = isIdentifierPart(c) || isOperatorPart(c)
Some(new String(content drop pos.point takeWhile isOK))
} else {
- super.identifier(pos, compiler)
+ super.identifier(pos)
}
def isLineBreak(idx: Int) =
@@ -101,13 +117,14 @@ class BatchSourceFile(val file : AbstractFile, val content: Array[Char]) extends
else isLineBreakChar(ch)
}
- private lazy val lineIndices: Array[Int] = {
+ def calculateLineIndices(cs: Array[Char]) = {
val buf = new ArrayBuffer[Int]
buf += 0
- for (i <- 0 until content.length) if (isLineBreak(i)) buf += i + 1
- buf += content.length // sentinel, so that findLine below works smoother
+ for (i <- 0 until cs.length) if (isLineBreak(i)) buf += i + 1
+ buf += cs.length // sentinel, so that findLine below works smoother
buf.toArray
}
+ private lazy val lineIndices: Array[Int] = calculateLineIndices(content)
def lineToOffset(index : Int): Int = lineIndices(index)
@@ -125,133 +142,4 @@ class BatchSourceFile(val file : AbstractFile, val content: Array[Char]) extends
lastLine = findLine(0, lines.length, lastLine)
lastLine
}
-
-/**
-
- // An array which maps line numbers (counting from 0) to char offset into content
- private lazy val lineIndices: Array[Int] = {
-
- val xs = content.indices filter isLineBreak map (_ + 1) toArray
- val arr = new Array[Int](xs.length + 1)
- arr(0) = 0
- System.arraycopy(xs, 0, arr, 1, xs.length)
-
- arr
- }
- // A reverse map which also hunts down the right answer on non-exact lookups
- private class SparseReverser() {
- val revMap = Map(lineIndices.zipWithIndex: _*)
-
- def apply(x: Int): Int = revMap.get(x) match {
- case Some(res) => res
- case _ =>
- var candidate = x - 1
- while (!revMap.contains(candidate))
- candidate -= 1
-
- revMap(candidate)
- }
- }
- private lazy val lineIndicesRev = new SparseReverser()
-
- def lineToOffset(index : Int): Int = lineIndices(index)
- def offsetToLine(offset: Int): Int = lineIndicesRev(offset)
-
- */
-}
-
-/** A source file composed of multiple other source files.
- *
- * @version 1.0
- */
-class CompoundSourceFile(
- name: String,
- components: List[BatchSourceFile],
- contents: Array[Char])
-extends BatchSourceFile(name, contents)
-{
- /** The usual constructor. Specify a name for the compound file and
- * a list of component sources.
- */
- def this(name: String, components: BatchSourceFile*) = {
- this(
- name,
- components.toList,
- Array.concat(components.map(comp =>
- CompoundSourceFile.stripSU(comp.content).toArray):_*))
- }
-
- /** Create an instance with the specified components and a generic name. */
- def this(components: BatchSourceFile*) =
- this("(virtual file)", components.toList:_*)
-
- override def positionInUltimateSource(position: Position) = {
- if (!position.isDefined) super.positionInUltimateSource(position)
- else {
- println("!!!")
- var off = position.point
- var compsLeft = components
- // the search here has to be against the length of the files underlying the
- // components, not their advertised length (which in the case of a fragment is
- // less than the underlying length.) Otherwise we can and will overshoot the
- // correct component and return a garbage position.
- while (compsLeft.head.underlyingLength-1 <= off && !compsLeft.tail.isEmpty) {
- println("discarding "+compsLeft.head)
- off = off - compsLeft.head.underlyingLength + 1
- compsLeft = compsLeft.tail
- }
- // now that we've identified the correct component, we have to adjust the
- // position we report since it is expected relative to the fragment, not the
- // underlying file. Thus, off - comp.start.
- val comp = compsLeft.head
- comp.positionInUltimateSource(new OffsetPosition(this, off - comp.start))
- }
- }
-}
-
-object CompoundSourceFile {
- private[util] def stripSU(chars: Array[Char]) =
- if (chars.length > 0 && chars.last == SourceFile.SU)
- chars.slice(0, chars.length-1)
- else
- chars
-}
-
-
-/** One portion of an underlying file. The fragment includes
- * the indices from the specified start (inclusively) to stop
- * (not inclusively).
- */
-class SourceFileFragment private (
- name: String,
- underlyingFile: BatchSourceFile,
- override val start: Int,
- stop: Int,
- contents: Array[Char])
-extends BatchSourceFile(name, contents) {
- override def underlyingLength = underlyingFile.length
- def this(name: String, underlyingFile: BatchSourceFile, start: Int, stop: Int) =
- this(
- name,
- underlyingFile,
- start,
- stop,
- { assert(start >= 0)
- assert(start <= stop)
- assert(start <= underlyingFile.length)
- assert(stop <= underlyingFile.length)
- underlyingFile.content.slice(start, stop).toArray })
-
- def this(underlyingFile: BatchSourceFile, start: Int, stop: Int) =
- this(
- "(fragment of " + underlyingFile.file.name + ")",
- underlyingFile,
- start,
- stop)
-
- override def positionInUltimateSource(position: Position) =
- super.positionInUltimateSource(
- if (position.isDefined) new OffsetPosition(this, position.point)
- else position
- )
}
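
The ScriptSourceFile object above strips script headers so that a self-executing script can be compiled as ordinary Scala, and the wrapper class shifts positions back by the header length when mapping to the original file. A minimal sketch of how the two entry points fit together, assuming the scala.tools.nsc.util package of the surrounding file:

    object ScriptHeaderSketch {
      import scala.tools.nsc.util.ScriptSourceFile

      def main(args: Array[String]) {
        // a typical self-executing script: everything up to and including the "!#" line is header
        val script = "#!/bin/sh\nexec scala \"$0\" \"$@\"\n!#\nprintln(42)\n".toCharArray
        println(ScriptSourceFile.headerLength(script))            // offset just past the "!#" line
        println(new String(ScriptSourceFile.stripHeader(script))) // prints: println(42)
      }
    }
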
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index 3f22669e6f..de51c2778c 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -1,65 +1,278 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
object Statistics {
- var enabled = false
+
+ private var _enabled = false
+
+ def enabled = _enabled
+ def enabled_=(cond: Boolean) = {
+ if (cond && !_enabled) {
+ val test = new Timer()
+ val start = System.nanoTime()
+ var total = 0L
+ for (i <- 1 to 10000) {
+ val time = System.nanoTime()
+ total += System.nanoTime() - time
+ }
+ val total2 = System.nanoTime() - start
+ println("Enabling statistics, measuring overhead = "+
+ total/10000.0+"ns to "+total2/10000.0+"ns per timer")
+ _enabled = true
+ }
+ }
+
+ var phasesShown = List("parser", "typer", "erasure", "cleanup")
+
+ def currentTime() =
+ if (_enabled) System.nanoTime() else 0L
+
+ private def showPercent(x: Double, base: Double) =
+ if (base == 0) "" else " ("+"%2.1f".format(x / base * 100)+"%)"
+
+ def incCounter(c: Counter) {
+ if (_enabled) c.value += 1
+ }
+
+ def incCounter(c: Counter, delta: Int) {
+ if (_enabled) c.value += delta
+ }
+
+ def startCounter(sc: SubCounter): IntPair =
+ if (_enabled) sc.start() else null
+
+ def stopCounter(sc: SubCounter, start: IntPair) {
+ if (_enabled) sc.stop(start)
+ }
+
+ def startTimer(tm: Timer): LongPair =
+ if (_enabled) tm.start() else null
+
+ def stopTimer(tm: Timer, start: LongPair) {
+ if (_enabled) tm.stop(start)
+ }
+
+ case class IntPair(x: Int, y: Int)
+ case class LongPair(x: Long, y: Long)
+
+ class Counter {
+ var value: Int = 0
+ override def toString = value.toString
+ }
+
+ class SubCounter(c: Counter) {
+ var value: Int = 0
+ def start(): IntPair =
+ if (_enabled) IntPair(value, c.value) else null
+ def stop(prev: IntPair) {
+ if (_enabled) {
+ val IntPair(value0, cvalue0) = prev
+ value = value0 + c.value - cvalue0
+ }
+ }
+ override def toString =
+ value+showPercent(value, c.value)
+ }
+
+ class Timer {
+ var nanos: Long = 0
+ var timings = 0
+ def start(): LongPair =
+ if (_enabled) {
+ timings += 1
+ LongPair(nanos, System.nanoTime())
+ } else null
+ def stop(prev: LongPair) {
+ if (_enabled) {
+ val LongPair(nanos0, start) = prev
+ nanos = nanos0 + System.nanoTime() - start
+ timings += 1
+ }
+ }
+ override def toString = (timings/2)+" spans, "+nanos.toString+"ns"
+ }
+
+ class ClassCounts extends scala.collection.mutable.HashMap[Class[_], Int] {
+ override def default(key: Class[_]) = 0
+ }
+
+ var nodeByType = new ClassCounts
+
+ var microsByType = new ClassCounts
+ var visitsByType = new ClassCounts
+ var pendingTreeTypes: List[Class[_]] = List()
+ var typerTime: Long = 0L
+
+ val singletonBaseTypeSeqCount = new Counter
+ val compoundBaseTypeSeqCount = new Counter
+ val typerefBaseTypeSeqCount = new Counter
+ val findMemberCount = new Counter
+ val noMemberCount = new Counter
+ val multMemberCount = new Counter
+ val findMemberNanos = new Timer
+ val asSeenFromCount = new Counter
+ val asSeenFromNanos = new Timer
+ val subtypeCount = new Counter
+ val subtypeNanos = new Timer
+ val sametypeCount = new Counter
+ val rawTypeCount = new Counter
+ val rawTypeFailed = new SubCounter(rawTypeCount)
+ val findMemberFailed = new SubCounter(findMemberCount)
+ val subtypeFailed = new SubCounter(subtypeCount)
+ val rawTypeImpl = new SubCounter(rawTypeCount)
+ val findMemberImpl = new SubCounter(findMemberCount)
+ val subtypeImpl = new SubCounter(subtypeCount)
+ val baseTypeSeqCount = new Counter
+ val baseTypeSeqLenTotal = new Counter
+ val typeSymbolCount = new Counter
+ val classSymbolCount = new Counter
+ val typedApplyCount = new Counter
+ val typedIdentCount = new Counter
+ val typedSelectCount = new Counter
+ val typerNanos = new Timer
+ val classReadNanos = new Timer
+
+ val failedApplyNanos = new Timer
+ val failedOpEqNanos = new Timer
+ val failedSilentNanos = new Timer
+
+ val implicitSearchCount = new Counter
+ val implicitNanos = new Timer
+ val oftypeImplicitHits = new Counter
+ val inscopeImplicitHits = new Counter
+
+ val triedImplicits = new Counter
+ val plausiblyCompatibleImplicits = new Counter
+ val matchingImplicits = new Counter
+ val typedImplicits = new Counter
+ val foundImplicits = new Counter
+
+ val inscopeSucceedNanos = new Timer
+ val inscopeFailNanos = new Timer
+ val oftypeSucceedNanos = new Timer
+ val oftypeFailNanos = new Timer
+ val implicitCacheHits = new Counter
+ val implicitCacheMisses = new Counter
+ val improvesCount = new Counter
+ val subtypeAppInfos = new SubCounter(subtypeCount)
+ val subtypeImprovCount = new SubCounter(subtypeCount)
+ val subtypeETNanos = new Timer
+ val matchesPtNanos = new Timer
+ val ctr1 = new Counter
+ val ctr2 = new Counter
+ val ctr3 = new Counter
+ val ctr4 = new Counter
+ val counter1: SubCounter = new SubCounter(subtypeCount)
+ val counter2: SubCounter = new SubCounter(subtypeCount)
+ val timer1: Timer = new Timer
+ val timer2: Timer = new Timer
}
abstract class Statistics {
+ import Statistics._
+
val global: Global
import global._
- def showRelative(base: Long)(time: Long) = "%2.1f".format(time.toDouble / base * 100)+" / "+time+"ns"
- def showRelTyper(time: Long) = showRelative(analyzer.typerTime)(time)
-
- def print(phase: Phase) = {
- if (List("typer", "erasure", "cleanup") contains phase.name) {
- inform("*** Cumulative statistics at phase " + phase)
- inform("#tree nodes : " + nodeCount)
- inform("#identifiers : " + analyzer.idcnt)
- inform("#selections : " + analyzer.selcnt)
- inform("#applications: " + analyzer.appcnt)
- inform("#implicits : " + analyzer.implcnt)
- inform("#uniquetypes : " + uniqueTypeCount)
- inform("#symbols : " + symbolCount)
- inform("#type symbols: " + typeSymbolCount)
- inform("#class symbols: " + classSymbolCount)
- inform("#singleton closures: " + singletonBaseTypeSeqCount)
- inform("#compound closures : " + compoundBaseTypeSeqCount)
- inform("#typeref closures : " + typerefBaseTypeSeqCount)
- inform("#findMember : " + findMemberCount)
- inform("#notfound member: " + noMemberCount)
- inform("#multiple member: " + multMemberCount)
- inform("time findMember: " + findMemberNanos)
- inform("#norm meth : " + analyzer.normM)
- inform("#norm poly : " + analyzer.normP)
- inform("#norm other : " + analyzer.normO)
- inform("#subtype : " + subtypeCount)
- inform("ns subtype : " + subtypeNanos)
- inform("#sametype : " + sametypeCount)
+  def countNodes(tree: Tree, counts: ClassCounts): ClassCounts = {
+    for (t <- tree) counts(t.getClass) += 1
+    counts
+  }
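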
+
+ def showRelative(base: Long)(value: Long) =
+ value+showPercent(value, base)
+
+ def showRelTyper(timer: Timer) =
+ timer+showPercent(timer.nanos, typerNanos.nanos)
+
+ def showCounts(counts: ClassCounts) =
+ counts.toSeq.sortWith(_._2 > _._2).map {
+ case (cls, cnt) =>
+ cls.toString.substring(cls.toString.lastIndexOf("$") + 1)+": "+cnt
+ }
+
+ def print(phase: Phase) = if (phasesShown contains phase.name) {
+ inform("*** Cumulative statistics at phase " + phase)
+ inform("#created tree nodes : " + nodeCount)
+ inform("#created tree nodes by type: "+showCounts(nodeByType))
+ if (phase.name != "parser") {
+ val counts = new ClassCounts
+ for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
+ inform("#retained nodes : " + counts.values.sum)
+ inform("#retained nodes by type : " + showCounts(counts))
+ inform("#typechecked identifiers : " + typedIdentCount)
+ inform("#typechecked selections : " + typedSelectCount)
+ inform("#typechecked applications: " + typedApplyCount)
+ inform("#raw type creations : " + rawTypeCount)
+ inform(" of which in failed : " + rawTypeFailed)
+ inform(" of which in implicits : " + rawTypeImpl)
+ inform("#unique types : " + uniqueTypeCount)
+ inform("#symbols : " + symbolCount)
+ inform(" of which type symbols : " + typeSymbolCount)
+ inform(" of which class symbols : " + classSymbolCount)
+ inform("#base type seqs : " + baseTypeSeqCount)
+ inform("avg base type seq length : " + baseTypeSeqLenTotal.value.toFloat / baseTypeSeqCount.value)
+ inform("#singleton base type seqs: " + singletonBaseTypeSeqCount)
+ inform("#compound base type seqs : " + compoundBaseTypeSeqCount)
+ inform("#typeref base type seqs : " + typerefBaseTypeSeqCount)
+ inform("#findMember ops : " + findMemberCount)
+ inform(" of which in failed : " + findMemberFailed)
+ inform(" of which in implicits : " + findMemberImpl)
+ inform("#notfound member : " + noMemberCount)
+ inform("#multiple member : " + multMemberCount)
+ inform("#asSeenFrom ops : " + asSeenFromCount)
+ inform("#subtype : " + subtypeCount)
+ inform(" of which in failed : " + subtypeFailed)
+ inform(" of which in implicits : " + subtypeImpl)
+ inform(" of which in app impl : " + subtypeAppInfos)
+ inform(" of which in improv : " + subtypeImprovCount)
+ inform("#sametype : " + sametypeCount)
inform("ms type-flow-analysis: " + analysis.timer.millis)
+
if (phase.name == "typer") {
- inform("time spent typechecking: "+showRelTyper(analyzer.typerTime))
- inform("time spent in implicits: "+showRelTyper(analyzer.implicitTime))
- inform(" successful in scope: "+showRelTyper(analyzer.inscopeSucceed))
- inform(" failed in scope: "+showRelTyper(analyzer.inscopeFail))
- inform(" successful of type: "+showRelTyper(analyzer.oftypeSucceed))
- inform(" failed of type: "+showRelTyper(analyzer.oftypeFail))
- inform(" successful manifest: "+showRelTyper(analyzer.manifSucceed))
- inform(" failed manifest: "+showRelTyper(analyzer.manifFail))
- inform("implicit cache hitratio: "+"%2.1f".format(analyzer.hits.toDouble / (analyzer.hits + analyzer.misses) * 100))
- inform("time spent in failed : "+showRelTyper(analyzer.failedSilent))
- inform(" failed op= : "+showRelTyper(analyzer.failedOpEqs))
- inform(" failed apply : "+showRelTyper(analyzer.failedApplies))
+ inform("time spent typechecking : "+showRelTyper(typerNanos))
+ inform("time classfilereading : "+showRelTyper(classReadNanos))
+ inform("time spent in implicits : "+showRelTyper(implicitNanos))
+ inform(" successful in scope : "+showRelTyper(inscopeSucceedNanos))
+ inform(" failed in scope : "+showRelTyper(inscopeFailNanos))
+ inform(" successful of type : "+showRelTyper(oftypeSucceedNanos))
+ inform(" failed of type : "+showRelTyper(oftypeFailNanos))
+ inform(" assembling parts : "+showRelTyper(subtypeETNanos))
+ inform(" matchesPT : "+showRelTyper(matchesPtNanos))
+ inform("implicit cache hits : "+showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
+ inform("time spent in failed : "+showRelTyper(failedSilentNanos))
+ inform(" failed apply : "+showRelTyper(failedApplyNanos))
+ inform(" failed op= : "+showRelTyper(failedOpEqNanos))
+ inform("micros by tree node : "+showCounts(microsByType))
+ inform("#visits by tree node : "+showCounts(visitsByType))
+ val average = new ClassCounts
+ for (c <- microsByType.keysIterator) average(c) = microsByType(c)/visitsByType(c)
+ inform("avg micros by tree node : "+showCounts(average))
+ inform("time spent in <:< : "+showRelTyper(subtypeNanos))
+ inform("time spent in findmember : "+showRelTyper(findMemberNanos))
+ inform("time spent in asSeenFrom : "+showRelTyper(asSeenFromNanos))
+ inform("#implicit searches : " + implicitSearchCount)
+ inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
+ inform("#implicit improves tests : " + improvesCount)
+ inform("#implicit inscope hits : " + inscopeImplicitHits)
+ inform("#implicit oftype hits : " + oftypeImplicitHits)
}
+
+ if (ctr1 != null) inform("#ctr1 : " + ctr1)
+ if (ctr2 != null) inform("#ctr2 : " + ctr2)
+ if (ctr3 != null) inform("#ctr3 : " + ctr3)
+ if (ctr4 != null) inform("#ctr4 : " + ctr4)
+ if (counter1 != null) inform("#counter1 : " + counter1)
+ if (counter2 != null) inform("#counter2 : " + counter2)
+ if (timer1 != null) inform("#timer1 : " + timer1)
+ if (timer2 != null) inform("#timer2 : " + timer2)
//for (t <- uniques.iterator) println("unique: "+t)
}
}
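
The counters and timers above are deliberately cheap no-ops until Statistics.enabled is set, so call sites can stay in hot paths. A hedged sketch of the intended usage pattern; the wrapper method is hypothetical, and only the names imported from Statistics come from the object above:

    object StatisticsUsageSketch {
      import scala.tools.nsc.util.Statistics
      import Statistics._

      // hypothetical wrapper around some typechecking step
      def timedTypedApply[T](body: => T): T = {
        incCounter(typedApplyCount)          // no-op unless Statistics.enabled
        val start = startTimer(typerNanos)   // returns null while statistics are disabled
        try body
        finally stopTimer(typerNanos, start) // ignored while disabled
      }
    }
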
diff --git a/src/compiler/scala/tools/nsc/util/Tracer.scala b/src/compiler/scala/tools/nsc/util/Tracer.scala
new file mode 100644
index 0000000000..c5d3fd3753
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/Tracer.scala
@@ -0,0 +1,37 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package util
+
+class Tracer(enabled: () => Boolean) {
+ private var indentLevel = 0
+ private def ind(s: String) = (" " * (indentLevel*2)) + s
+ private def indented[T](body: => T): T = {
+ indentLevel += 1
+ try body
+ finally indentLevel -= 1
+ }
+ private def p(s: String) = {
+ System.out.print(s)
+ System.out.flush()
+ }
+ private def pin[T](x: T): T = {
+ p(ind("" + x))
+ x
+ }
+ def apply[T](name: String, args: Any*)(body: => T): T = {
+ if (enabled()) {
+ p(ind("%s(%s) = {\n".format(name, args mkString ", ")))
+ try indented(pin(body))
+ finally println("\n" + ind("}"))
+ }
+ else body
+ }
+}
+
+object Tracer {
+ def apply(enabled: => Boolean): Tracer = new Tracer(() => enabled)
+}
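
Tracer prints an indented call/result trace only while its by-name condition is true, which makes it safe to leave in place. A small usage sketch; the fib example is arbitrary:

    object TracerUsageSketch {
      import scala.tools.nsc.util.Tracer

      val trace = Tracer(true)   // any by-name Boolean condition can be supplied here
      def fib(n: Int): Int = trace("fib", n) {
        if (n < 2) n else fib(n - 1) + fib(n - 2)
      }
      // fib(3) prints a nested "fib(...) = { ... }" call trace and returns 2
    }
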
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
index 1862212467..522c99c4e0 100644
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
package scala.tools.nsc
package util
@@ -43,14 +42,10 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
def iterator = {
def elems(t: Tree): Iterator[T] = {
- var it = Iterator single t.elem
- if (t.l ne null) it = elems(t.l) append it
- if (t.r ne null) it = it append elems(t.r)
- // if (t.l ne null) it = elems(t.l) ++ it
- // if (t.r ne null) it = it ++ elems(t.r)
- it
+ if (t eq null) Iterator.empty
+ else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r)
}
- if (tree eq null) Iterator.empty else elems(tree)
+ elems(tree)
}
override def toString(): String = {
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index b4ecbf8a71..aa1bb734ea 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -8,39 +8,59 @@ class WorkScheduler {
type Action = () => Unit
private var todo = new Queue[Action]
- private var except = new Queue[Exception]
+ private var throwables = new Queue[Throwable]
+ private var interruptReqs = new Queue[InterruptReq]
- /** Called from server: block until todo list is nonempty */
+ /** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
def waitForMoreWork() = synchronized {
- while (todo.isEmpty) { wait() }
+ while (todo.isEmpty && throwables.isEmpty && interruptReqs.isEmpty) { wait() }
}
- /** called from Server: test whether todo list is nonempty */
- def moreWork(): Boolean = synchronized {
- todo.nonEmpty
+  /** Called from server: test whether one of todo list, throwables, or interruptReqs is nonempty */
+ def moreWork: Boolean = synchronized {
+ todo.nonEmpty || throwables.nonEmpty || interruptReqs.nonEmpty
}
/** Called from server: get first action in todo list, and pop it off */
def nextWorkItem(): Option[Action] = synchronized {
- if (!todo.isEmpty) {
- Some(todo.dequeue())
- } else None
+ if (todo.isEmpty) None else Some(todo.dequeue())
+ }
+
+ def dequeueAll[T](f: Action => Option[T]): Seq[T] = synchronized {
+ todo.dequeueAll(a => f(a).isDefined).map(a => f(a).get)
}
/** Called from server: return optional exception posted by client
* Reset to no exception.
*/
- def pollException(): Option[Exception] = synchronized {
- if (except.isEmpty)
+ def pollThrowable(): Option[Throwable] = synchronized {
+ if (throwables.isEmpty)
None
else {
- val result = Some(except.dequeue())
- if (!except.isEmpty)
+ val result = Some(throwables.dequeue())
+ if (!throwables.isEmpty)
postWorkItem { () => }
result
}
}
+ def pollInterrupt(): Option[InterruptReq] = synchronized {
+ if (interruptReqs.isEmpty) None else Some(interruptReqs.dequeue())
+ }
+
+ /** Called from client: have interrupt executed by server and return result */
+ def doQuickly[A](op: () => A): A = {
+ val ir = new InterruptReq {
+ type R = A
+ val todo = op
+ }
+ synchronized {
+ interruptReqs enqueue ir
+ notify()
+ }
+ ir.getResult()
+ }
+
/** Called from client: have action executed by server */
def postWorkItem(action: Action) = synchronized {
todo enqueue action
@@ -55,8 +75,13 @@ class WorkScheduler {
/** Called from client:
* Require an exception to be thrown on next poll.
*/
- def raise(exc: Exception) = synchronized {
- except enqueue exc
- postWorkItem { () => }
+ def raise(exc: Throwable) = synchronized {
+ throwables enqueue exc
+ postWorkItem { new EmptyAction }
}
}
+
+class EmptyAction extends (() => Unit) {
+ def apply() {}
+}
+
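
doQuickly lets a client thread run a small operation on the server thread and block for its result, while the server keeps polling throwables and interrupt requests alongside ordinary work items. A hedged sketch of the two sides; the server loop is hypothetical, and InterruptReq.execute() is assumed rather than shown in this hunk:

    object WorkSchedulerSketch {
      import scala.tools.nsc.util.WorkScheduler

      val scheduler = new WorkScheduler

      // client thread: run the closure on the server and wait for its result
      def askLength(s: String): Int = scheduler.doQuickly(() => s.length)

      // server thread (sketch):
      //   while (true) {
      //     scheduler.waitForMoreWork()
      //     scheduler.pollInterrupt() foreach (_.execute())   // assumed API
      //     scheduler.nextWorkItem() foreach (_())
      //   }
    }
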
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
new file mode 100644
index 0000000000..92d4eab54f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -0,0 +1,29 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+
+import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
+
+package object util {
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+
+ /** Generate a string using a routine that wants to write on a stream. */
+ def stringFromWriter(writer: PrintWriter => Unit): String = {
+ val stringWriter = new StringWriter()
+ val stream = new NewLinePrintWriter(stringWriter)
+ writer(stream)
+ stream.close()
+ stringWriter.toString
+ }
+ def stringFromStream(stream: OutputStream => Unit): String = {
+ val bs = new ByteArrayOutputStream()
+ val ps = new PrintStream(bs)
+ stream(ps)
+ ps.close()
+ bs.toString()
+ }
+}
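
The new package object collects small capture-and-return helpers. A quick sketch of each, with arbitrary example values:

    object UtilPackageSketch {
      import scala.tools.nsc.util.{ stringFromWriter, stringFromStream, returning }

      val viaWriter = stringFromWriter(w => w.println("captured from a PrintWriter"))
      val viaStream = stringFromStream(s => s.write("captured from an OutputStream".getBytes))
      val buf = returning(new scala.collection.mutable.ListBuffer[Int])(_ += 1) // returns the buffer itself
    }
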
diff --git a/src/compiler/scala/tools/util/AbstractTimer.scala b/src/compiler/scala/tools/util/AbstractTimer.scala
index ab86533ba1..b0ea663c47 100644
--- a/src/compiler/scala/tools/util/AbstractTimer.scala
+++ b/src/compiler/scala/tools/util/AbstractTimer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.util
diff --git a/src/compiler/scala/tools/util/ClassPathSettings.scala b/src/compiler/scala/tools/util/ClassPathSettings.scala
new file mode 100644
index 0000000000..ec2e1c3c5a
--- /dev/null
+++ b/src/compiler/scala/tools/util/ClassPathSettings.scala
@@ -0,0 +1,32 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package util
+
+trait ClassPathSettings {
+ def javabootclasspath: String // -javabootclasspath
+ def javaextdirs: String // -javaextdirs
+ def bootclasspath: String // -bootclasspath
+ def extdirs: String // -extdirs
+ def classpath: String // -classpath
+ def sourcepath: String // -sourcepath
+}
+
+// val debugLogger = {
+// val f = File("/tmp/path-resolve-log.txt")
+// if (f.exists) f.truncate()
+// else f.createFile()
+//
+// val res = f.bufferedWriter()
+// res write ("Started debug log: %s\n".format(new java.util.Date))
+// res
+// }
+// def log(msg: Any) = {
+// Console println msg
+// debugLogger.write(msg.toString + "\n")
+// debugLogger flush
+// }
+
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
new file mode 100644
index 0000000000..d2cb5e74c0
--- /dev/null
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -0,0 +1,253 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package util
+
+import java.net.{ URL, MalformedURLException }
+import scala.util.Properties._
+import nsc.{ Settings, GenericRunnerSettings }
+import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
+import nsc.io.{ File, Directory, Path }
+import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
+import PartialFunction.condOpt
+
+// Loosely based on the draft specification at:
+// https://lampsvn.epfl.ch/trac/scala/wiki/Classpath
+
+object PathResolver {
+ def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse ""
+
+ private def fileOpt(f: Path): Option[String] = f ifFile (_.path)
+ private def dirOpt(d: Path): Option[String] = d ifDirectory (_.path)
+ private def expandToPath(p: Path) = join(ClassPath.expandPath(p.path, true): _*)
+ private def expandToContents(p: Path) = join(ClassPath.expandDir(p.path): _*)
+
+ /** Map all classpath elements to absolute paths and reconstruct the classpath.
+ */
+ def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path)
+
+ /** pretty print class path */
+ def ppcp(s: String) = split(s) match {
+ case Nil => ""
+ case Seq(x) => x
+ case xs => xs map ("\n" + _) mkString
+ }
+
+ /** Values found solely by inspecting environment or property variables.
+ */
+ object Environment {
+ private def searchForBootClasspath = {
+ import scala.collection.JavaConversions._
+ System.getProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
+ }
+ private def searchForScalaHome = {
+ for (url <- ScalaClassLoader originOfClass classOf[ScalaObject] ; if url.getProtocol == "file") yield
+ File(url.getFile).parent.path
+ } getOrElse ""
+
+  /** Environment variables which java pays attention to, so it
+   *  seems we should as well.
+ */
+ def classPathEnv = envOrElse("CLASSPATH", "")
+ def sourcePathEnv = envOrElse("SOURCEPATH", "")
+
+ def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
+ def javaExtDirs = propOrEmpty("java.ext.dirs")
+ def scalaHome = propOrEmpty("scala.home")
+ def scalaExtDirs = propOrEmpty("scala.ext.dirs")
+
+ /** The java classpath and whether to use it. */
+ def javaUserClassPath = propOrElse("java.class.path", "")
+ def useJavaClassPath = propOrFalse("scala.usejavacp")
+
+ override def toString = """
+ |object Environment {
+ | scalaHome = %s (useJavaClassPath = %s)
+ | javaBootClassPath = <%d chars>
+ | javaExtDirs = %s
+ | javaUserClassPath = %s
+ | scalaExtDirs = %s
+ |}""".trim.stripMargin.format(
+ scalaHome, useJavaClassPath,
+ javaBootClassPath.length,
+ ppcp(javaExtDirs),
+ ppcp(javaUserClassPath),
+ ppcp(scalaExtDirs)
+ )
+ }
+
+ /** Default values based on those in Environment as interpreted according
+ * to the path resolution specification.
+ */
+ object Defaults {
+ /* Against my better judgment, giving in to martin here and allowing
+ * CLASSPATH as the default if no -cp is given. Only if there is no
+ * command line option or environment variable is "." used.
+ */
+ def scalaUserClassPath = firstNonEmpty(Environment.classPathEnv, ".")
+ def scalaSourcePath = Environment.sourcePathEnv
+
+ def javaBootClassPath = Environment.javaBootClassPath
+ def javaUserClassPath = Environment.javaUserClassPath
+ def javaExtDirs = Environment.javaExtDirs
+ def useJavaClassPath = Environment.useJavaClassPath
+
+ def scalaHome = Environment.scalaHome
+ def scalaHomeDir = Directory(scalaHome)
+ def scalaHomeExists = scalaHomeDir.isDirectory
+ def scalaLibDir = Directory(scalaHomeDir / "lib")
+ def scalaClassesDir = Directory(scalaHomeDir / "classes")
+
+ def scalaLibAsJar = File(scalaLibDir / "scala-library.jar")
+ def scalaLibAsDir = Directory(scalaClassesDir / "library")
+
+ def scalaLibDirFound: Option[Directory] =
+ if (scalaLibAsJar.isFile) Some(scalaLibDir)
+ else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir)
+ else None
+
+ def scalaLibFound =
+ if (scalaLibAsJar.isFile) scalaLibAsJar.path
+ else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
+ else ""
+
+ def scalaBootClassPath = scalaLibDirFound match {
+ case Some(dir) if scalaHomeExists => join(ClassPath expandDir dir.path: _*)
+ case _ => ""
+ }
+
+ def scalaExtDirs = Environment.scalaExtDirs
+
+ def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
+
+ override def toString = """
+ |object Defaults {
+ | scalaHome = %s
+ | javaBootClassPath = %s
+ | scalaLibDirFound = %s
+ | scalaLibFound = %s
+ | scalaBootClassPath = %s
+ | scalaPluginPath = %s
+ |}""".trim.stripMargin.format(
+ scalaHome,
+ ppcp(javaBootClassPath),
+ scalaLibDirFound, scalaLibFound,
+ ppcp(scalaBootClassPath), ppcp(scalaPluginPath)
+ )
+ }
+
+ def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
+ val s = new Settings()
+ s.classpath.value = path
+ new PathResolver(s, context) result
+ }
+
+ /** With no arguments, show the interesting values in Environment and Defaults.
+ * If there are arguments, show those in Calculated as if those options had been
+ * given to a scala runner.
+ */
+ def main(args: Array[String]): Unit = {
+ if (args.isEmpty) {
+ println(Environment)
+ println(Defaults)
+ }
+ else {
+ val settings = new Settings()
+ val rest = settings.processArguments(args.toList, false)._2
+ val pr = new PathResolver(settings)
+ println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
+ println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
+ pr.result.show
+ }
+ }
+}
+import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
+
+class PathResolver(settings: Settings, context: JavaContext) {
+ def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext)
+
+ private def cmdLineOrElse(name: String, alt: String) = {
+ (commandLineFor(name) match {
+ case Some("") => None
+ case x => x
+ }) getOrElse alt
+ }
+
+ private def commandLineFor(s: String): Option[String] = condOpt(s) {
+ case "javabootclasspath" => settings.javabootclasspath.value
+ case "javaextdirs" => settings.javaextdirs.value
+ case "bootclasspath" => settings.bootclasspath.value
+ case "extdirs" => settings.extdirs.value
+ case "classpath" | "cp" => settings.classpath.value
+ case "sourcepath" => settings.sourcepath.value
+ }
+
+ /** Calculated values based on any given command line options, falling back on
+ * those in Defaults.
+ */
+ object Calculated {
+ def scalaHome = Defaults.scalaHome
+ def useJavaClassPath = settings.usejavacp.value || Defaults.useJavaClassPath
+ def javaBootClassPath = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
+ def javaExtDirs = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
+ def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
+ def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
+ def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
+ def userClassPath = cmdLineOrElse("classpath", Defaults.scalaUserClassPath)
+ def sourcePath = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath)
+
+ import context._
+
+ // Assemble the elements!
+ def basis = List(
+ classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
+ contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
+ classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
+ classesInPath(scalaBootClassPath), // 4. The Scala boot class path.
+ contentsOfDirsInPath(scalaExtDirs), // 5. The Scala extension class path.
+ classesInExpandedPath(userClassPath), // 6. The Scala application class path.
+ sourcesInPath(sourcePath) // 7. The Scala source path.
+ )
+
+ lazy val containers = basis.flatten.distinct
+
+ override def toString = """
+ |object Calculated {
+ | scalaHome = %s
+ | javaBootClassPath = %s
+ | javaExtDirs = %s
+ | javaUserClassPath = %s
+ | useJavaClassPath = %s
+ | scalaBootClassPath = %s
+ | scalaExtDirs = %s
+ | userClassPath = %s
+ | sourcePath = %s
+ |}""".trim.stripMargin.format(
+ scalaHome,
+ ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
+ useJavaClassPath,
+ ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
+ ppcp(sourcePath)
+ )
+ }
+
+ def containers = Calculated.containers
+
+ lazy val result = {
+ val cp = new JavaClassPath(containers, context)
+ if (settings.Ylogcp.value) {
+ Console.println("Classpath built from " + settings.toConciseString)
+ Console.println("Defaults: " + PathResolver.Defaults)
+
+ val xs = (Calculated.basis drop 2).flatten.distinct
+ println("After java boot/extdirs classpath has %d entries:" format xs.size)
+ xs foreach (x => println(" " + x))
+ }
+ cp
+ }
+
+ def asURLs = result.asURLs
+}
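
PathResolver assembles the classpath from the seven layers listed in Calculated.basis, falling back from command-line settings to Defaults and Environment. A hedged sketch of programmatic use; the example path string is arbitrary (and would use ';' on Windows):

    object PathResolverSketch {
      import scala.tools.util.PathResolver

      val cp = PathResolver.fromPathString("build/classes:lib/scala-library.jar")
      cp.asURLs foreach println   // the resolved classpath entries as URLs

      // with no arguments, the diagnostic main above prints Environment and Defaults:
      //   scala scala.tools.util.PathResolver
    }
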
diff --git a/src/compiler/scala/tools/util/SocketConnection.scala b/src/compiler/scala/tools/util/SocketConnection.scala
index 57b2b63100..040f2b2392 100644
--- a/src/compiler/scala/tools/util/SocketConnection.scala
+++ b/src/compiler/scala/tools/util/SocketConnection.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.util
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 00180f7659..88d7013f36 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.util
diff --git a/src/compiler/scala/tools/util/StringOps.scala b/src/compiler/scala/tools/util/StringOps.scala
index c955b6b506..1a42c32fc8 100644
--- a/src/compiler/scala/tools/util/StringOps.scala
+++ b/src/compiler/scala/tools/util/StringOps.scala
@@ -1,23 +1,22 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala.tools.util
+package scala.tools
+package util
-/** This objects provides methods to extract elements from
- * a string according to some defined character separator.
+/** This object provides utility methods to extract elements
+ * from Strings.
*
* @author Martin Odersky
* @version 1.0
*/
object StringOps {
-
def decompose(str: String, sep: Char): List[String] = {
def ws(start: Int): List[String] =
if (start == str.length) List()
@@ -31,4 +30,19 @@ object StringOps {
}
def words(str: String): List[String] = decompose(str, ' ')
+
+ def stripPrefixOpt(str: String, prefix: String): Option[String] =
+ if (str startsWith prefix) Some(str drop prefix.length)
+ else None
+
+ def stripSuffixOpt(str: String, suffix: String): Option[String] =
+ if (str endsWith suffix) Some(str dropRight suffix.length)
+ else None
+
+ def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
+ splitAt(str, str indexWhere f, doDropIndex)
+
+ def splitAt(str: String, idx: Int, doDropIndex: Boolean = false): Option[(String, String)] =
+ if (idx == -1) None
+ else Some(str take idx, str drop (if (doDropIndex) idx + 1 else idx))
}
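
The added helpers return Option instead of sentinel values, so callers can pattern match on success. A short sketch with illustrative inputs:

    object StringOpsSketch {
      import scala.tools.util.StringOps._

      val a = stripPrefixOpt("scala.Predef", "scala.")               // Some("Predef")
      val b = stripSuffixOpt("Predef.scala", ".java")                // None
      val c = splitWhere("key=value", _ == '=', doDropIndex = true)  // Some(("key", "value"))
      val d = words("foo bar baz")                                   // List("foo", "bar", "baz")
    }
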
diff --git a/src/compiler/scala/tools/util/Which.scala b/src/compiler/scala/tools/util/Which.scala
new file mode 100644
index 0000000000..b331416f3d
--- /dev/null
+++ b/src/compiler/scala/tools/util/Which.scala
@@ -0,0 +1,39 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package util
+
+import scala.tools.nsc._
+
+/** A tool for identifying which classfile is being used
+ *  under the given conditions.
+ */
+object Which
+{
+ def main(args: Array[String]): Unit = {
+ val settings = new Settings()
+ val names = settings.processArguments(args.toList, true)._2
+ val global = new Global(settings)
+ val cp = global.classPath
+
+ import cp._
+
+ for (name <- names) {
+ def fail = println("Could not find: %s".format(name))
+ (cp findClass name) match {
+ case Some(classRep) => classRep.binary match {
+ case Some(f) => println("%s is %s".format(name, f))
+ case _ => fail
+ }
+ case _ => fail
+ }
+ }
+ }
+}
+
+
+
+
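
Which is a small command-line diagnostic: it builds a Global from the given arguments and reports the classfile backing each remaining name. A hypothetical invocation; the exact output shape depends on the installation and on how the binary AbstractFile prints itself:

    // $ scala scala.tools.util.Which scala.collection.immutable.List
    // scala.collection.immutable.List is /path/to/scala-library.jar(scala/collection/immutable/List.class)
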
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
new file mode 100644
index 0000000000..87a9bf1fc5
--- /dev/null
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -0,0 +1,161 @@
+// $Id$
+
+package scala.util.continuations
+
+
+class cpsParam[-B,+C] extends StaticAnnotation with TypeConstraint
+
+private class cpsSym[B] extends Annotation // implementation detail
+
+private class cpsSynth extends Annotation // implementation detail
+
+private class cpsPlus extends StaticAnnotation with TypeConstraint // implementation detail
+private class cpsMinus extends Annotation // implementation detail
+
+
+
+@serializable final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val x: A) {
+
+ /*
+ final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
+ new ControlContext((k:(A1 => B)) => fun((x:A) => k(f(x))), null.asInstanceOf[A1])
+ }
+
+ final def flatMap[A1,B1<:B](f: (A => ControlContext[A1,B1,B])): ControlContext[A1,B1,C] = {
+ new ControlContext((k:(A1 => B1)) => fun((x:A) => f(x).fun(k)))
+ }
+ */
+
+
+ @noinline final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
+ if (fun eq null)
+ try {
+ new ControlContext(null, f(x)) // TODO: only alloc if f(x) != x
+ } catch {
+ case ex: Exception =>
+ new ControlContext((k: A1 => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
+ }
+ else
+ new ControlContext({ (k: A1 => B, thr: Exception => B) =>
+ fun( { (x:A) =>
+ var done = false
+ try {
+ val res = f(x)
+ done = true
+ k(res)
+ } catch {
+ case ex: Exception if !done =>
+ thr(ex)
+ }
+ }, thr)
+ }, null.asInstanceOf[A1])
+ }
+
+
+ // it would be nice if @inline would turn the trivial path into a tail call.
+ // unfortunately it doesn't, so we do it ourselves in SelectiveCPSTransform
+
+ @noinline final def flatMap[A1,B1,C1<:B](f: (A => ControlContext[A1,B1,C1])): ControlContext[A1,B1,C] = {
+ if (fun eq null)
+ try {
+ f(x).asInstanceOf[ControlContext[A1,B1,C]]
+ } catch {
+ case ex: Exception =>
+ new ControlContext((k: A1 => B1, thr: Exception => B1) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
+ }
+ else
+ new ControlContext({ (k: A1 => B1, thr: Exception => B1) =>
+ fun( { (x:A) =>
+ var done = false
+ try {
+ val ctxR = f(x)
+ done = true
+ val res: C1 = ctxR.foreachFull(k, thr) // => B1
+ res
+ } catch {
+ case ex: Exception if !done =>
+ thr(ex).asInstanceOf[B] // => B NOTE: in general this is unsafe!
+ } // However, the plugin will not generate offending code
+ }, thr.asInstanceOf[Exception=>B]) // => B
+ }, null.asInstanceOf[A1])
+ }
+
+ final def foreach(f: A => B) = foreachFull(f, throw _)
+
+ def foreachFull(f: A => B, g: Exception => B): C = {
+ if (fun eq null)
+ f(x).asInstanceOf[C]
+ else
+ fun(f, g)
+ }
+
+
+ final def isTrivial = fun eq null
+ final def getTrivialValue = x.asInstanceOf[A]
+
+ // need filter or other functions?
+
+ final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = {
+ if (fun eq null)
+ this
+ else {
+ val fun1 = (ret1: A1 => B1, thr1: Exception => B1) => {
+ val thr: Exception => B1 = { t: Exception =>
+ var captureExceptions = true
+ try {
+ if (pf.isDefinedAt(t)) {
+ val cc1 = pf(t)
+ captureExceptions = false
+ cc1.foreachFull(ret1, thr1) // Throw => B
+ } else {
+ captureExceptions = false
+ thr1(t) // Throw => B1
+ }
+ } catch {
+ case t1: Exception if captureExceptions => thr1(t1) // => E2
+ }
+ }
+ fun(ret1, thr)// fun(ret1, thr) // => B
+ }
+ new ControlContext(fun1, null.asInstanceOf[A1])
+ }
+ }
+
+ final def mapFinally(f: () => Unit): ControlContext[A,B,C] = {
+ if (fun eq null) {
+ try {
+ f()
+ this
+ } catch {
+ case ex: Exception =>
+ new ControlContext((k: A => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A])
+ }
+ } else {
+ val fun1 = (ret1: A => B, thr1: Exception => B) => {
+ val ret: A => B = { x: A =>
+ var captureExceptions = true
+ try {
+ f()
+ captureExceptions = false
+ ret1(x)
+ } catch {
+ case t1: Exception if captureExceptions => thr1(t1)
+ }
+ }
+ val thr: Exception => B = { t: Exception =>
+ var captureExceptions = true
+ try {
+ f()
+ captureExceptions = false
+ thr1(t)
+ } catch {
+ case t1: Exception if captureExceptions => thr1(t1)
+ }
+ }
+ fun(ret, thr1)
+ }
+ new ControlContext(fun1, null.asInstanceOf[A])
+ }
+ }
+
+}
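
A ControlContext either carries a value directly (fun == null, the "trivial" case) or stores a function expecting a success continuation and an exception continuation; map and flatMap compose further steps onto whichever case applies. A small sketch using only constructors and methods from this file, with arbitrary example continuations:

    object ControlContextSketch {
      import scala.util.continuations.ControlContext

      // trivial context: no stored function, just a value
      val trivial = new ControlContext[Int, Any, Any](null, 21)
      val doubled = trivial map (_ * 2)
      println(doubled.getTrivialValue)                              // 42

      // non-trivial context: invokes its stored function with both continuations
      val ctx = new ControlContext[Int, String, String]((k, thr) => k(5), 0)
      println(ctx.foreachFull(n => "got " + n, e => "error: " + e)) // got 5
    }
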
diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala
new file mode 100644
index 0000000000..aa4681a0cc
--- /dev/null
+++ b/src/continuations/library/scala/util/continuations/package.scala
@@ -0,0 +1,65 @@
+// $Id$
+
+
+// TODO: scaladoc
+
+package scala.util
+
+package object continuations {
+
+ type cps[A] = cpsParam[A,A]
+
+ type suspendable = cps[Unit]
+
+
+ def shift[A,B,C](fun: (A => B) => C): A @cpsParam[B,C] = {
+ throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
+ }
+
+ def reset[A,C](ctx: =>(A @cpsParam[A,C])): C = {
+ val ctxR = reify[A,A,C](ctx)
+ if (ctxR.isTrivial)
+ ctxR.getTrivialValue.asInstanceOf[C]
+ else
+ ctxR.foreach((x:A) => x)
+ }
+
+ def reset0[A](ctx: =>(A @cpsParam[A,A])): A = reset(ctx)
+
+ def run[A](ctx: =>(Any @cpsParam[Unit,A])): A = {
+ val ctxR = reify[Any,Unit,A](ctx)
+ if (ctxR.isTrivial)
+ ctxR.getTrivialValue.asInstanceOf[A]
+ else
+ ctxR.foreach((x:Any) => ())
+ }
+
+
+ // methods below are primarily implementation details and are not
+ // needed frequently in client code
+
+ def shiftUnit0[A,B](x: A): A @cpsParam[B,B] = {
+ shiftUnit[A,B,B](x)
+ }
+
+ def shiftUnit[A,B,C>:B](x: A): A @cpsParam[B,C] = {
+ throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
+ }
+
+ def reify[A,B,C](ctx: =>(A @cpsParam[B,C])): ControlContext[A,B,C] = {
+ throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
+ }
+
+ def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = {
+ new ControlContext(null, x)
+ }
+
+ def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = {
+ new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A])
+ }
+
+ def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = {
+ ctx
+ }
+
+}
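
shift and reset are placeholders that the selective CPS plugin rewrites into calls on ControlContext; without the plugin they throw the NoSuchMethodException above. The classic example, assuming the plugin is enabled (the -P:continuations:enable flag is an assumption here):

    import scala.util.continuations._

    object ShiftResetSketch {
      def main(args: Array[String]) {
        val n = reset {
          1 + shift { (k: Int => Int) => k(k(10)) }
        }
        println(n)   // 12: the captured continuation is (_ + 1), applied to 10, then to 11
      }
    }
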
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
new file mode 100644
index 0000000000..0c124c9c19
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -0,0 +1,462 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc.Global
+
+import scala.collection.mutable.{Map, HashMap}
+
+import java.io.{StringWriter, PrintWriter}
+
+abstract class CPSAnnotationChecker extends CPSUtils {
+ val global: Global
+ import global._
+ import definitions._
+
+ //override val verbose = true
+
+ /**
+ * Checks whether @cps annotations conform
+ */
+ object checker extends AnnotationChecker {
+
+ /** Check annotations to decide whether tpe1 <:< tpe2 */
+ def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
+ if (!cpsEnabled) return true
+
+ vprintln("check annotations: " + tpe1 + " <:< " + tpe2)
+
+ // Nothing is least element, but Any is not the greatest
+ if (tpe1.typeSymbol eq NothingClass)
+ return true
+
+ val annots1 = filterAttribs(tpe1,MarkerCPSTypes)
+ val annots2 = filterAttribs(tpe2,MarkerCPSTypes)
+
+ // @plus and @minus should only occur at the left, and never together
+ // TODO: insert check
+ val adaptPlusAnnots1 = filterAttribs(tpe1,MarkerCPSAdaptPlus)
+ val adaptMinusAnnots1 = filterAttribs(tpe1,MarkerCPSAdaptMinus)
+
+ // @minus @cps is the same as no annotations
+ if (!adaptMinusAnnots1.isEmpty)
+ return annots2.isEmpty
+
+ // to handle answer type modification, we must make @plus <:< @cps
+ if (!adaptPlusAnnots1.isEmpty && annots1.isEmpty)
+ return true
+
+ // @plus @cps will fall through and compare the @cps type args
+
+ // @cps parameters must match exactly
+ if ((annots1 corresponds annots2) { _.atp <:< _.atp })
+ return true
+
+ false
+ }
+
+
+ /** Refine the computed least upper bound of a list of types.
+ * All this should do is add annotations. */
+ override def annotationsLub(tpe: Type, ts: List[Type]): Type = {
+ if (!cpsEnabled) return tpe
+
+ val annots1 = filterAttribs(tpe, MarkerCPSTypes)
+ val annots2 = ts flatMap (filterAttribs(_, MarkerCPSTypes))
+
+ if (annots2.nonEmpty) {
+ val cpsLub = AnnotationInfo(global.lub(annots1:::annots2 map (_.atp)), Nil, Nil)
+ val tpe1 = if (annots1.nonEmpty) removeAttribs(tpe, MarkerCPSTypes) else tpe
+ tpe1.withAnnotation(cpsLub)
+ } else tpe
+ }
+
+ /** Refine the bounds on type parameters to the given type arguments. */
+ override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
+ if (!cpsEnabled) return bounds
+
+ val anyAtCPS = AnnotationInfo(appliedType(MarkerCPSTypes.tpe, List(NothingClass.tpe, AnyClass.tpe)), Nil, Nil)
+ if (isFunctionType(tparams.head.owner.tpe) || tparams.head.owner == PartialFunctionClass) {
+ vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
+ if (targs.last.hasAnnotation(MarkerCPSTypes))
+ bounds.reverse match {
+ case res::b if !res.hi.hasAnnotation(MarkerCPSTypes) =>
+ (TypeBounds(res.lo, res.hi.withAnnotation(anyAtCPS))::b).reverse
+ case _ => bounds
+ }
+ else
+ bounds
+ } else if (tparams.head.owner == ByNameParamClass) {
+ vprintln("byname bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
+ if (targs.head.hasAnnotation(MarkerCPSTypes) && !bounds.head.hi.hasAnnotation(MarkerCPSTypes))
+ TypeBounds(bounds.head.lo, bounds.head.hi.withAnnotation(anyAtCPS))::Nil
+ else bounds
+ } else
+ bounds
+ }
+
+
+ override def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
+ if (!cpsEnabled) return false
+ vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ val annots1 = filterAttribs(tree.tpe,MarkerCPSTypes)
+ val annots2 = filterAttribs(pt,MarkerCPSTypes)
+
+ if ((mode & global.analyzer.PATTERNmode) != 0) {
+ //println("can adapt pattern annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ if (!annots1.isEmpty) {
+ return true
+ }
+ }
+
+/*
+ // not precise enough -- still relying on addAnnotations to remove things from ValDef symbols
+ if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
+ if (!annots1.isEmpty) {
+ return true
+ }
+ }
+*/
+
+/*
+ this interferes with overloading resolution
+ if ((mode & global.analyzer.BYVALmode) != 0 && tree.tpe <:< pt) {
+ vprintln("already compatible, can't adapt further")
+ return false
+ }
+*/
+ if ((mode & global.analyzer.EXPRmode) != 0) {
+ if ((annots1 corresponds annots2) { case (a1,a2) => a1.atp <:< a2.atp }) {
+ vprintln("already same, can't adapt further")
+ return false
+ }
+
+ if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) {
+ //println("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ val adapt = AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil)
+ if (!tree.tpe.annotations.contains(adapt)) {
+ // val base = tree.tpe <:< removeAllCPSAnnotations(pt)
+ // val known = global.analyzer.isFullyDefined(pt)
+ // println(same + "/" + base + "/" + known)
+ //val same = annots2 forall { case AnnotationInfo(atp: TypeRef, _, _) => atp.typeArgs(0) =:= atp.typeArgs(1) }
+ // TBD: use same or not?
+ //if (same) {
+ vprintln("yes we can!! (unit)")
+ return true
+ //}
+ }
+ } else if (!annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) {
+ if (!tree.tpe.hasAnnotation(MarkerCPSAdaptMinus)) {
+ vprintln("yes we can!! (byval)")
+ return true
+ }
+ }
+ }
+ false
+ }
+
+
+ override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
+ if (!cpsEnabled) return tree
+
+ vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ val annots1 = filterAttribs(tree.tpe,MarkerCPSTypes)
+ val annots2 = filterAttribs(pt,MarkerCPSTypes)
+
+ if ((mode & global.analyzer.PATTERNmode) != 0) {
+ if (!annots1.isEmpty) {
+ return tree.setType(removeAllCPSAnnotations(tree.tpe))
+ }
+ }
+
+/*
+ // doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
+ if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
+ if (!annots1.isEmpty) {
+ println("removing annotation from " + tree + "/" + tree.tpe)
+ val s = tree.setType(removeAllCPSAnnotations(tree.tpe))
+ println(s)
+ s
+ }
+ }
+*/
+
+ if ((mode & global.analyzer.EXPRmode) != 0) {
+ if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) { // shiftUnit
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having any possible annotation
+ //println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ val adapt = AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil)
+ //val same = annots2 forall { case AnnotationInfo(atp: TypeRef, _, _) => atp.typeArgs(0) =:= atp.typeArgs(1) }
+ // TBD: use same or not? see infer0.scala/infer1.scala
+
+ // CAVEAT:
+ // for monomorphic answer types we want to have @plus @cps (for better checking)
+ // for answer type modification we want to have only @plus (because actual answer type may differ from pt)
+
+ //val known = global.analyzer.isFullyDefined(pt)
+
+ if (/*same &&*/ !tree.tpe.annotations.contains(adapt)) {
+ //if (known)
+ return tree.setType(tree.tpe.withAnnotations(adapt::annots2)) // needed for #1807
+ //else
+ // return tree.setType(tree.tpe.withAnnotations(adapt::Nil))
+ }
+ tree
+ } else if (!annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) { // dropping annotation
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having no annotation
+ if (!tree.tpe.hasAnnotation(MarkerCPSAdaptMinus)) {
+ val adapt = AnnotationInfo(MarkerCPSAdaptMinus.tpe, Nil, Nil)
+ return tree.setType(tree.tpe.withAnnotations(adapt::Nil))
+ }
+ }
+ }
+ tree
+ }
+
+
+ def updateAttributesFromChildren(tpe: Type, childAnnots: List[AnnotationInfo], byName: List[Tree]): Type = {
+ tpe match {
+ // Would need to push annots into each alternative of overloaded type
+ // But we can't, since alternatives aren't types but symbols, which we
+ // can't change (we'd be affecting symbols globally)
+ /*
+ case OverloadedType(pre, alts) =>
+ OverloadedType(pre, alts.map((sym: Symbol) => updateAttributes(pre.memberType(sym), annots)))
+ */
+ case _ =>
+ assert(childAnnots forall (_.atp.typeSymbol == MarkerCPSTypes), childAnnots)
+ /*
+ [] + [] = []
+ plus + [] = plus
+ cps + [] = cps
+ plus cps + [] = plus cps
+      minus cps + [] = minus cps
+ synth cps + [] = synth cps // <- synth on left - does it happen?
+
+ [] + cps = cps
+ plus + cps = synth cps
+ cps + cps = cps! <- lin
+ plus cps + cps = synth cps! <- unify
+ minus cps + cps = minus cps! <- lin
+ synth cps + cps = synth cps! <- unify
+ */
+
+ val plus = tpe.hasAnnotation(MarkerCPSAdaptPlus) || (tpe.hasAnnotation(MarkerCPSTypes) &&
+ byName.nonEmpty && byName.forall(_.tpe.hasAnnotation(MarkerCPSAdaptPlus)))
+
+ // move @plus annotations outward from by-name children
+ if (childAnnots.isEmpty) {
+ if (plus) { // @plus or @plus @cps
+ for (t <- byName) {
+ //println("removeAnnotation " + t + " / " + t.tpe)
+ t.setType(removeAttribs(t.tpe, MarkerCPSAdaptPlus, MarkerCPSTypes))
+ }
+ return tpe.withAnnotation(AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil))
+ } else
+ return tpe
+ }
+
+ val annots1 = filterAttribs(tpe, MarkerCPSTypes)
+
+ if (annots1.isEmpty) { // nothing or @plus
+ val synth = MarkerCPSSynth.tpe
+ val annots2 = List(linearize(childAnnots))
+ removeAttribs(tpe,MarkerCPSAdaptPlus).withAnnotations(AnnotationInfo(synth, Nil, Nil)::annots2)
+ } else {
+ val annot1 = single(annots1)
+ if (plus) { // @plus @cps
+ val synth = AnnotationInfo(MarkerCPSSynth.tpe, Nil, Nil)
+ val annot2 = linearize(childAnnots)
+ if (!(annot2.atp <:< annot1.atp))
+ throw new TypeError(annot2 + " is not a subtype of " + annot1)
+ val res = removeAttribs(tpe, MarkerCPSAdaptPlus, MarkerCPSTypes).withAnnotations(List(synth, annot2))
+ for (t <- byName) {
+ //println("removeAnnotation " + t + " / " + t.tpe)
+ t.setType(removeAttribs(t.tpe, MarkerCPSAdaptPlus, MarkerCPSTypes))
+ }
+ res
+ } else if (tpe.hasAnnotation(MarkerCPSSynth)) { // @synth @cps
+ val annot2 = linearize(childAnnots)
+ if (!(annot2.atp <:< annot1.atp))
+ throw new TypeError(annot2 + " is not a subtype of " + annot1)
+ removeAttribs(tpe, MarkerCPSTypes).withAnnotation(annot2)
+ } else { // @cps
+ removeAttribs(tpe, MarkerCPSTypes).withAnnotation(linearize(childAnnots:::annots1))
+ }
+ }
+ }
+ }
+
+
+
+
+
+ def transArgList(fun: Tree, args: List[Tree]): List[List[Tree]] = {
+ val formals = fun.tpe.paramTypes
+ val overshoot = args.length - formals.length
+
+ for ((a,tp) <- args.zip(formals ::: List.fill(overshoot)(NoType))) yield {
+ tp match {
+ case TypeRef(_, sym, List(elemtp)) if sym == ByNameParamClass =>
+ Nil // TODO: check conformance??
+ case _ =>
+ List(a)
+ }
+ }
+ }
+
+
+ def transStms(stms: List[Tree]): List[Tree] = stms match {
+ case ValDef(mods, name, tpt, rhs)::xs =>
+ rhs::transStms(xs)
+ case Assign(lhs, rhs)::xs =>
+ rhs::transStms(xs)
+ case x::xs =>
+ x::transStms(xs)
+ case Nil =>
+ Nil
+ }
+
+ def single(xs: List[AnnotationInfo]) = xs match {
+ case List(x) => x
+ case _ =>
+ global.error("not a single cps annotation: " + xs)// FIXME: error message
+ xs(0)
+ }
+
+ def transChildrenInOrder(tree: Tree, tpe: Type, childTrees: List[Tree], byName: List[Tree]) = {
+ val children = childTrees.flatMap { t =>
+ if (t.tpe eq null) Nil else {
+ val types = filterAttribs(t.tpe, MarkerCPSTypes)
+ // TODO: check that it has been adapted and if so correctly
+ if (types.isEmpty) Nil else List(single(types))
+ }
+ }
+
+ val newtpe = updateAttributesFromChildren(tpe, children, byName)
+
+ if (!newtpe.annotations.isEmpty)
+ vprintln("[checker] inferred " + tree + " / " + tpe + " ===> "+ newtpe)
+
+ newtpe
+ }
+
+ /** Modify the type that has thus far been inferred
+ * for a tree. All this should do is add annotations. */
+
+ override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ if (!cpsEnabled) {
+ if (tpe.annotations.nonEmpty && tpe.hasAnnotation(MarkerCPSTypes))
+ global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
+ return tpe
+ }
+
+// if (tree.tpe.hasAnnotation(MarkerCPSAdaptPlus))
+// println("addAnnotation " + tree + "/" + tpe)
+
+ tree match {
+
+ case Apply(fun @ Select(qual, name), args) if (fun.tpe ne null) && !fun.tpe.isErroneous =>
+
+ // HACK: With overloaded methods, fun will never get annotated. This is because
+ // the 'overloaded' type gets annotated, but not the alternatives (among which
+ // fun's type is chosen)
+
+ vprintln("[checker] checking select apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, qual::(transArgList(fun, args).flatten), Nil)
+
+ case TypeApply(fun @ Select(qual, name), args) if (fun.tpe ne null) && !fun.tpe.isErroneous =>
+ vprintln("[checker] checking select apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, List(qual, fun), Nil)
+
+ case Apply(fun, args) if (fun.tpe ne null) && !fun.tpe.isErroneous =>
+
+ vprintln("[checker] checking unknown apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, fun::(transArgList(fun, args).flatten), Nil)
+
+ case TypeApply(fun, args) =>
+
+ vprintln("[checker] checking type apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, List(fun), Nil)
+
+ case Select(qual, name) =>
+
+ vprintln("[checker] checking select " + tree + "/" + tpe)
+
+ // straightforward way is problematic (see select.scala and Test2.scala)
+ // transChildrenInOrder(tree, tpe, List(qual), Nil)
+
+ // the problem is that qual may be of type OverloadedType (or MethodType) and
+ // we cannot safely annotate these. so we just ignore these cases and
+ // clean up later in the Apply/TypeApply trees.
+
+ if (qual.tpe.hasAnnotation(MarkerCPSTypes)) {
+ // however there is one special case:
+ // if it's a method without parameters, just apply it. normally done in adapt, but
+ // we have to do it here so we don't lose the cps information (wouldn't trigger our
+ // adapt and there is no Apply/TypeApply created)
+ tpe match {
+ case PolyType(List(), restpe) =>
+ //println("yep: " + restpe + "," + restpe.getClass)
+ transChildrenInOrder(tree, restpe, List(qual), Nil)
+ case _ : PolyType => tpe
+ case _ : MethodType => tpe
+ case _ : OverloadedType => tpe
+ case _ =>
+ transChildrenInOrder(tree, tpe, List(qual), Nil)
+ }
+ } else
+ tpe
+
+ case If(cond, thenp, elsep) =>
+ transChildrenInOrder(tree, tpe, List(cond), List(thenp, elsep))
+
+ case Match(select, cases) =>
+ // TODO: can there be cases that are not CaseDefs?? check collect vs map!
+ transChildrenInOrder(tree, tpe, List(select), cases:::(cases collect { case CaseDef(_, _, body) => body }))
+
+ case Try(block, catches, finalizer) =>
+ val tpe1 = transChildrenInOrder(tree, tpe, Nil, block::catches:::(catches collect { case CaseDef(_, _, body) => body }))
+
+ val annots = filterAttribs(tpe1, MarkerCPSTypes)
+ if (annots.nonEmpty) {
+ val ann = single(annots)
+ val atp0::atp1::Nil = ann.atp.normalize.typeArgs
+ if (!(atp0 =:= atp1))
+ throw new TypeError("only simple cps types allowed in try/catch blocks (found: " + tpe1 + ")")
+ if (!finalizer.isEmpty) // no finalizers allowed. see explanation in SelectiveCPSTransform
+ reporter.error(tree.pos, "try/catch blocks that use continuations cannot have finalizers")
+ }
+ tpe1
+
+ case Block(stms, expr) =>
+ // if any stm has annotation, so does block
+ transChildrenInOrder(tree, tpe, transStms(stms), List(expr))
+
+ case ValDef(mods, name, tpt, rhs) =>
+ vprintln("[checker] checking valdef " + name + "/"+tpe+"/"+tpt+"/"+tree.symbol.tpe)
+ // ValDef symbols must *not* have annotations!
+ if (hasAnswerTypeAnn(tree.symbol.info)) { // is it okay to modify sym here?
+ vprintln("removing annotation from sym " + tree.symbol + "/" + tree.symbol.tpe + "/" + tpt)
+ tpt.setType(removeAllCPSAnnotations(tpt.tpe))
+ tree.symbol.setInfo(removeAllCPSAnnotations(tree.symbol.info))
+ }
+ tpe
+
+ case _ =>
+ tpe
+ }
+
+
+ }
+ }
+}
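A hedged illustration of what the checker above propagates (types and names are examples, not part of the patch): the If and Match cases hand their branches to transChildrenInOrder as by-name children, so a conditional whose branches both carry @cpsParam annotations is inferred to carry one as well.

    import scala.util.continuations._

    // both branches are Int @cpsParam[Int, Int]; the checker's If case merges the
    // children's annotations, so the whole conditional is cps-typed too
    def choose(flag: Boolean): Int @cpsParam[Int, Int] =
      if (flag) shift { (k: Int => Int) => k(1) }
      else      shift { (k: Int => Int) => k(2) }

    // reset(choose(true)) == 1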
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
new file mode 100644
index 0000000000..57cba6e829
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -0,0 +1,131 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc.Global
+
+trait CPSUtils {
+ val global: Global
+ import global._
+ import definitions._
+
+ var cpsEnabled = false
+ val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
+ @inline final def vprintln(x: =>Any): Unit = if (verbose) println(x)
+
+
+ lazy val MarkerCPSSym = definitions.getClass("scala.util.continuations.cpsSym")
+ lazy val MarkerCPSTypes = definitions.getClass("scala.util.continuations.cpsParam")
+ lazy val MarkerCPSSynth = definitions.getClass("scala.util.continuations.cpsSynth")
+
+ lazy val MarkerCPSAdaptPlus = definitions.getClass("scala.util.continuations.cpsPlus")
+ lazy val MarkerCPSAdaptMinus = definitions.getClass("scala.util.continuations.cpsMinus")
+
+
+ lazy val Context = definitions.getClass("scala.util.continuations.ControlContext")
+
+ lazy val ModCPS = definitions.getModule("scala.util.continuations")
+ lazy val MethShiftUnit = definitions.getMember(ModCPS, "shiftUnit")
+ lazy val MethShiftUnitR = definitions.getMember(ModCPS, "shiftUnitR")
+ lazy val MethShift = definitions.getMember(ModCPS, "shift")
+ lazy val MethShiftR = definitions.getMember(ModCPS, "shiftR")
+ lazy val MethReify = definitions.getMember(ModCPS, "reify")
+ lazy val MethReifyR = definitions.getMember(ModCPS, "reifyR")
+
+
+ lazy val allCPSAnnotations = List(MarkerCPSSym, MarkerCPSTypes, MarkerCPSSynth,
+ MarkerCPSAdaptPlus, MarkerCPSAdaptMinus)
+
+ // annotation checker
+
+ def filterAttribs(tpe:Type, cls:Symbol) =
+ tpe.annotations.filter(_.atp.typeSymbol == cls)
+
+ def removeAttribs(tpe:Type, cls:Symbol*) =
+ tpe.withoutAnnotations.withAnnotations(tpe.annotations.filterNot(cls contains _.atp.typeSymbol))
+
+ def removeAllCPSAnnotations(tpe: Type) = removeAttribs(tpe, allCPSAnnotations:_*)
+
+ def linearize(ann: List[AnnotationInfo]): AnnotationInfo = {
+ ann.reduceLeft { (a, b) =>
+ val atp0::atp1::Nil = a.atp.normalize.typeArgs
+ val btp0::btp1::Nil = b.atp.normalize.typeArgs
+ val (u0,v0) = (atp0, atp1)
+ val (u1,v1) = (btp0, btp1)
+/*
+ val (u0,v0) = (a.atp.typeArgs(0), a.atp.typeArgs(1))
+ val (u1,v1) = (b.atp.typeArgs(0), b.atp.typeArgs(1))
+ vprintln("check lin " + a + " andThen " + b)
+*/
+ vprintln("check lin " + a + " andThen " + b)
+ if (!(v1 <:< u0))
+ throw new TypeError("illegal answer type modification: " + a + " andThen " + b)
+ // TODO: improve error message (but it is not very common)
+ AnnotationInfo(appliedType(MarkerCPSTypes.tpe, List(u1,v0)),Nil,Nil)
+ }
+ }
+
+ // anf transform
+
+ def getExternalAnswerTypeAnn(tp: Type) = {
+ tp.annotations.find(a => a.atp.typeSymbol == MarkerCPSTypes) match {
+ case Some(AnnotationInfo(atp, _, _)) =>
+ val atp0::atp1::Nil = atp.normalize.typeArgs
+ Some((atp0, atp1))
+ case None =>
+ if (tp.hasAnnotation(MarkerCPSAdaptPlus))
+ global.warning("trying to instantiate type " + tp + " to unknown cps type")
+ None
+ }
+ }
+
+ def getAnswerTypeAnn(tp: Type) = {
+ tp.annotations.find(a => a.atp.typeSymbol == MarkerCPSTypes) match {
+ case Some(AnnotationInfo(atp, _, _)) =>
+ if (!tp.hasAnnotation(MarkerCPSAdaptPlus)) {//&& !tp.hasAnnotation(MarkerCPSAdaptMinus))
+ val atp0::atp1::Nil = atp.normalize.typeArgs
+ Some((atp0, atp1))
+ } else
+ None
+ case None => None
+ }
+ }
+
+ def hasAnswerTypeAnn(tp: Type) = {
+ tp.hasAnnotation(MarkerCPSTypes) && !tp.hasAnnotation(MarkerCPSAdaptPlus) /*&&
+ !tp.hasAnnotation(MarkerCPSAdaptMinus)*/
+ }
+
+ def hasSynthAnn(tp: Type) = {
+ tp.annotations.exists(a => a.atp.typeSymbol == MarkerCPSSynth)
+ }
+
+ def updateSynthFlag(tree: Tree) = { // remove annotations if *we* added them (@synth present)
+ if (hasSynthAnn(tree.tpe)) {
+ log("removing annotation from " + tree)
+ tree.setType(removeAllCPSAnnotations(tree.tpe))
+ } else
+ tree
+ }
+
+ type CPSInfo = Option[(Type,Type)]
+
+ def linearize(a: CPSInfo, b: CPSInfo)(implicit unit: CompilationUnit, pos: Position): CPSInfo = {
+ (a,b) match {
+ case (Some((u0,v0)), Some((u1,v1))) =>
+ vprintln("check lin " + a + " andThen " + b)
+ if (!(v1 <:< u0)) {
+ unit.error(pos,"cannot change answer type in composition of cps expressions " +
+ "from " + u1 + " to " + v0 + " because " + v1 + " is not a subtype of " + u0 + ".")
+ throw new Exception("check lin " + a + " andThen " + b)
+ }
+ Some((u1,v0))
+ case (Some(_), _) => a
+ case (_, Some(_)) => b
+ case _ => None
+ }
+ }
+
+ // cps transform
+
+} \ No newline at end of file
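A rough sketch of what linearize composes (types below are chosen only for illustration): sequencing a statement typed T1 @cpsParam[B, C] with a later one typed T2 @cpsParam[A, B] yields the modification @cpsParam[A, C]; when the middle types do not line up, the "illegal answer type modification" error above is reported.

    import scala.util.continuations._

    // statement 1: Int @cpsParam[Boolean, Int]
    // statement 2: Int @cpsParam[String, Boolean]
    // linearized block: String @cpsParam[String, Int], so reset yields an Int
    val m: Int = reset {
      val a = shift { (k: Int => Boolean) => if (k(1)) 10 else 20 }
      val b = shift { (k: Int => String)  => k(2).length > 0 }
      (a + b).toString
    }
    // m == 10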
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
new file mode 100644
index 0000000000..936b572caf
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -0,0 +1,414 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc._
+import scala.tools.nsc.transform._
+import scala.tools.nsc.symtab._
+import scala.tools.nsc.plugins._
+
+import scala.tools.nsc.ast._
+
+/**
+ * In methods marked @cps, explicitly name results of calls to other @cps methods
+ */
+abstract class SelectiveANFTransform extends PluginComponent with Transform with
+ TypingTransformers with CPSUtils {
+ // inherits abstract value `global' and class `Phase' from Transform
+
+ import global._ // the global environment
+ import definitions._ // standard classes and methods
+ import typer.atOwner // methods to type trees
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "selectiveanf"
+
+ protected def newTransformer(unit: CompilationUnit): Transformer =
+ new ANFTransformer(unit)
+
+
+ class ANFTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+
+ implicit val _unit = unit // allow code in CPSUtils.scala to report errors
+ var cpsAllowed: Boolean = false // detect cps code in places we do not handle (yet)
+
+ override def transform(tree: Tree): Tree = {
+ if (!cpsEnabled) return tree
+
+ tree match {
+
+ // Maybe we should further generalize the transform and move it over
+ // to the regular Transformer facility. But then, actual and required cps
+ // state would need more complicated (stateful!) tracking.
+
+ // Making the default case use transExpr(tree, None, None) instead of
+ // calling super.transform() would be a start, but at the moment,
+ // this would cause infinite recursion. But we could remove the
+ // ValDef case here.
+
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ log("transforming " + dd.symbol)
+
+ atOwner(dd.symbol) {
+ val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
+
+ log("result "+rhs1)
+ log("result is of type "+rhs1.tpe)
+
+ treeCopy.DefDef(dd, mods, name, transformTypeDefs(tparams), transformValDefss(vparamss),
+ transform(tpt), rhs1)
+ }
+
+ case ff @ Function(vparams, body) =>
+ log("transforming anon function " + ff.symbol)
+
+ atOwner(ff.symbol) {
+
+ //val body1 = transExpr(body, None, getExternalAnswerTypeAnn(body.tpe))
+
+ // need to special case partial functions: if expected type is @cps
+ // but all cases are pure, then we would transform
+ // { x => x match { case A => ... }} to
+ // { x => shiftUnit(x match { case A => ... })}
+ // which Uncurry cannot handle (see function6.scala)
+
+ val ext = getExternalAnswerTypeAnn(body.tpe)
+
+ val body1 = body match {
+ case Match(selector, cases) if (ext.isDefined && getAnswerTypeAnn(body.tpe).isEmpty) =>
+ val cases1 = for {
+ cd @ CaseDef(pat, guard, caseBody) <- cases
+ val caseBody1 = transExpr(body, None, ext)
+ } yield {
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), caseBody1)
+ }
+ treeCopy.Match(tree, transform(selector), cases1)
+
+ case _ =>
+ transExpr(body, None, ext)
+ }
+
+ log("result "+body1)
+ log("result is of type "+body1.tpe)
+
+ treeCopy.Function(ff, transformValDefs(vparams), body1)
+ }
+
+ case vd @ ValDef(mods, name, tpt, rhs) => // object-level valdefs
+ log("transforming valdef " + vd.symbol)
+
+ atOwner(vd.symbol) {
+
+ assert(getExternalAnswerTypeAnn(tpt.tpe) == None)
+
+ val rhs1 = transExpr(rhs, None, None)
+
+ treeCopy.ValDef(vd, mods, name, transform(tpt), rhs1)
+ }
+
+ case TypeTree() =>
+ // circumvent cpsAllowed here
+ super.transform(tree)
+
+ case Apply(_,_) =>
+ // this allows reset { ... } in object constructors
+ // it's kind of a hack to put it here (see note above)
+ transExpr(tree, None, None)
+
+ case _ =>
+
+ if (hasAnswerTypeAnn(tree.tpe)) {
+ if (!cpsAllowed)
+ unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree)
+
+ log(tree)
+ }
+
+ cpsAllowed = false
+ super.transform(tree)
+ }
+ }
+
+
+ def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
+ transTailValue(tree, cpsA, cpsR) match {
+ case (Nil, b) => b
+ case (a, b) =>
+ treeCopy.Block(tree, a,b)
+ }
+ }
+
+
+ def transArgList(fun: Tree, args: List[Tree], cpsA: CPSInfo): (List[List[Tree]], List[Tree], CPSInfo) = {
+ val formals = fun.tpe.paramTypes
+ val overshoot = args.length - formals.length
+
+ var spc: CPSInfo = cpsA
+
+ val (stm,expr) = (for ((a,tp) <- args.zip(formals ::: List.fill(overshoot)(NoType))) yield {
+ tp match {
+ case TypeRef(_, sym, List(elemtp)) if sym == ByNameParamClass =>
+ (Nil, transExpr(a, None, getAnswerTypeAnn(elemtp)))
+ case _ =>
+ val (valStm, valExpr, valSpc) = transInlineValue(a, spc)
+ spc = valSpc
+ (valStm, valExpr)
+ }
+ }).unzip
+
+ (stm,expr,spc)
+ }
+
+
+ def transValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree, CPSInfo) = {
+ // return value: (stms, expr, spc), where spc is CPSInfo after stms but *before* expr
+ implicit val pos = tree.pos
+ tree match {
+ case Block(stms, expr) =>
+ val (cpsA2, cpsR2) = (cpsA, linearize(cpsA, getAnswerTypeAnn(tree.tpe))) // tbd
+// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
+ val (a, b) = transBlock(stms, expr, cpsA2, cpsR2)
+
+ val tree1 = (treeCopy.Block(tree, a, b)) // no updateSynthFlag here!!!
+
+ (Nil, tree1, cpsA)
+
+ case If(cond, thenp, elsep) =>
+
+ val (condStats, condVal, spc) = transInlineValue(cond, cpsA)
+
+ val (cpsA2, cpsR2) = (spc, linearize(spc, getAnswerTypeAnn(tree.tpe)))
+// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
+ val thenVal = transExpr(thenp, cpsA2, cpsR2)
+ val elseVal = transExpr(elsep, cpsA2, cpsR2)
+
+ // check that then and else parts agree (not necessary any more, but left as sanity check)
+ if (cpsR.isDefined) {
+ if (elsep == EmptyTree)
+ unit.error(tree.pos, "always need else part in cps code")
+ }
+ if (hasAnswerTypeAnn(thenVal.tpe) != hasAnswerTypeAnn(elseVal.tpe)) {
+ unit.error(tree.pos, "then and else parts must both be cps code or neither of them")
+ }
+
+ (condStats, updateSynthFlag(treeCopy.If(tree, condVal, thenVal, elseVal)), spc)
+
+ case Match(selector, cases) =>
+
+ val (selStats, selVal, spc) = transInlineValue(selector, cpsA)
+ val (cpsA2, cpsR2) = (spc, linearize(spc, getAnswerTypeAnn(tree.tpe)))
+// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
+
+ val caseVals = for {
+ cd @ CaseDef(pat, guard, body) <- cases
+ val bodyVal = transExpr(body, cpsA2, cpsR2)
+ } yield {
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
+
+ (selStats, updateSynthFlag(treeCopy.Match(tree, selVal, caseVals)), spc)
+
+
+ case ldef @ LabelDef(name, params, rhs) =>
+ if (hasAnswerTypeAnn(tree.tpe)) {
+ val sym = currentOwner.newMethod(tree.pos, name)//unit.fresh.newName(tree.pos, "myloopvar")
+ .setInfo(ldef.symbol.info)
+ .setFlag(Flags.SYNTHETIC)
+
+ val rhs1 = new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
+ val rhsVal = transExpr(rhs1, None, getAnswerTypeAnn(tree.tpe))
+
+ val stm1 = localTyper.typed(DefDef(sym, rhsVal))
+ val expr = localTyper.typed(Apply(Ident(sym), List()))
+
+ (List(stm1), expr, cpsA)
+ } else {
+ val rhsVal = transExpr(rhs, None, None)
+ (Nil, updateSynthFlag(treeCopy.LabelDef(tree, name, params, rhsVal)), cpsA)
+ }
+
+
+ case Try(block, catches, finalizer) =>
+ val blockVal = transExpr(block, cpsA, cpsR)
+
+ val catchVals = for {
+ cd @ CaseDef(pat, guard, body) <- catches
+ val bodyVal = transExpr(body, cpsA, cpsR)
+ } yield {
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
+
+ val finallyVal = transExpr(finalizer, None, None) // for now, no cps in finally
+
+ (Nil, updateSynthFlag(treeCopy.Try(tree, blockVal, catchVals, finallyVal)), cpsA)
+
+ case Assign(lhs, rhs) =>
+ // allow cps code in rhs only
+ val (stms, expr, spc) = transInlineValue(rhs, cpsA)
+ (stms, updateSynthFlag(treeCopy.Assign(tree, transform(lhs), expr)), spc)
+
+ case Return(expr0) =>
+ val (stms, expr, spc) = transInlineValue(expr0, cpsA)
+ (stms, updateSynthFlag(treeCopy.Return(tree, expr)), spc)
+
+ case Throw(expr0) =>
+ val (stms, expr, spc) = transInlineValue(expr0, cpsA)
+ (stms, updateSynthFlag(treeCopy.Throw(tree, expr)), spc)
+
+ case Typed(expr0, tpt) =>
+ // TODO: should x: A @cps[B,C] have a special meaning?
+ // type casts used in different ways (see match2.scala, #3199)
+ val (stms, expr, spc) = transInlineValue(expr0, cpsA)
+ val tpt1 = if (treeInfo.isWildcardStarArg(tree)) tpt else
+ treeCopy.TypeTree(tpt).setType(removeAllCPSAnnotations(tpt.tpe))
+// (stms, updateSynthFlag(treeCopy.Typed(tree, expr, tpt1)), spc)
+ (stms, treeCopy.Typed(tree, expr, tpt1).setType(removeAllCPSAnnotations(tree.tpe)), spc)
+
+ case TypeApply(fun, args) =>
+ val (stms, expr, spc) = transInlineValue(fun, cpsA)
+ (stms, updateSynthFlag(treeCopy.TypeApply(tree, expr, args)), spc)
+
+ case Select(qual, name) =>
+ val (stms, expr, spc) = transInlineValue(qual, cpsA)
+ (stms, updateSynthFlag(treeCopy.Select(tree, expr, name)), spc)
+
+ case Apply(fun, args) =>
+ val (funStm, funExpr, funSpc) = transInlineValue(fun, cpsA)
+ val (argStm, argExpr, argSpc) = transArgList(fun, args, funSpc)
+
+ (funStm ::: (argStm.flatten), updateSynthFlag(treeCopy.Apply(tree, funExpr, argExpr)),
+ argSpc)
+
+ case _ =>
+ cpsAllowed = true
+ (Nil, transform(tree), cpsA)
+ }
+ }
+
+ def transTailValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree) = {
+
+ val (stms, expr, spc) = transValue(tree, cpsA, cpsR)
+
+ val bot = linearize(spc, getAnswerTypeAnn(expr.tpe))(unit, tree.pos)
+
+ val plainTpe = removeAllCPSAnnotations(expr.tpe)
+
+ if (cpsR.isDefined && !bot.isDefined) {
+
+ if (!expr.isEmpty && (expr.tpe.typeSymbol ne NothingClass)) {
+ // must convert!
+ log("cps type conversion (has: " + cpsA + "/" + spc + "/" + expr.tpe + ")")
+ log("cps type conversion (expected: " + cpsR.get + "): " + expr)
+
+ if (!expr.tpe.hasAnnotation(MarkerCPSAdaptPlus))
+ unit.warning(tree.pos, "expression " + tree + " is cps-transformed unexpectedly")
+
+ try {
+ val Some((a, b)) = cpsR
+
+ val res = localTyper.typed(atPos(tree.pos) {
+ Apply(TypeApply(gen.mkAttributedRef(MethShiftUnit),
+ List(TypeTree(plainTpe), TypeTree(a), TypeTree(b))),
+ List(expr))
+ })
+ return (stms, res)
+
+ } catch {
+ case ex:TypeError =>
+ unit.error(ex.pos, "cannot cps-transform expression " + tree + ": " + ex.msg)
+ }
+ }
+
+ } else if (!cpsR.isDefined && bot.isDefined) {
+ // error!
+ log("cps type error: " + expr)
+ //println("cps type error: " + expr + "/" + expr.tpe + "/" + getAnswerTypeAnn(expr.tpe))
+
+ println(cpsR + "/" + spc + "/" + bot)
+
+ unit.error(tree.pos, "found cps expression in non-cps position")
+ } else {
+ // all is well
+
+ if (expr.tpe.hasAnnotation(MarkerCPSAdaptPlus)) {
+ unit.warning(tree.pos, "expression " + expr + " of type " + expr.tpe + " is not expected to have a cps type")
+ expr.setType(removeAllCPSAnnotations(expr.tpe))
+ }
+
+ // TODO: sanity check that types agree
+ }
+
+ (stms, expr)
+ }
+
+ def transInlineValue(tree: Tree, cpsA: CPSInfo): (List[Tree], Tree, CPSInfo) = {
+
+ val (stms, expr, spc) = transValue(tree, cpsA, None) // never required to be cps
+
+ getAnswerTypeAnn(expr.tpe) match {
+ case spcVal @ Some(_) =>
+
+ val valueTpe = removeAllCPSAnnotations(expr.tpe)
+
+ val sym = currentOwner.newValue(tree.pos, unit.fresh.newName(tree.pos, "tmp"))
+ .setInfo(valueTpe)
+ .setFlag(Flags.SYNTHETIC)
+ .setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
+
+ (stms ::: List(ValDef(sym, expr) setType(NoType)),
+ Ident(sym) setType(valueTpe) setPos(tree.pos), linearize(spc, spcVal)(unit, tree.pos))
+
+ case _ =>
+ (stms, expr, spc)
+ }
+
+ }
+
+
+
+ def transInlineStm(stm: Tree, cpsA: CPSInfo): (List[Tree], CPSInfo) = {
+ stm match {
+
+ // TODO: what about DefDefs?
+ // TODO: relation to top-level val def?
+ // TODO: what about lazy vals?
+
+ case tree @ ValDef(mods, name, tpt, rhs) =>
+ val (stms, anfRhs, spc) = atOwner(tree.symbol) { transValue(rhs, cpsA, None) }
+
+ val tv = new ChangeOwnerTraverser(tree.symbol, currentOwner)
+ stms.foreach(tv.traverse(_))
+
+ // TODO: symbol might already have annotation. Should check conformance
+ // TODO: better yet: do without annotations on symbols
+
+ val spcVal = getAnswerTypeAnn(anfRhs.tpe)
+ if (spcVal.isDefined) {
+ tree.symbol.setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
+ }
+
+ (stms:::List(treeCopy.ValDef(tree, mods, name, tpt, anfRhs)), linearize(spc, spcVal)(unit, tree.pos))
+
+ case _ =>
+ val (headStms, headExpr, headSpc) = transInlineValue(stm, cpsA)
+ val valSpc = getAnswerTypeAnn(headExpr.tpe)
+ (headStms:::List(headExpr), linearize(headSpc, valSpc)(unit, stm.pos))
+ }
+ }
+
+ def transBlock(stms: List[Tree], expr: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree) = {
+ stms match {
+ case Nil =>
+ transTailValue(expr, cpsA, cpsR)
+
+ case stm::rest =>
+ var (rest2, expr2) = (rest, expr)
+ val (headStms, headSpc) = transInlineStm(stm, cpsA)
+ val (restStms, restExpr) = transBlock(rest2, expr2, headSpc, cpsR)
+ (headStms:::restStms, restExpr)
+ }
+ }
+
+
+ }
+}
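Roughly, the phase above gives every intermediate @cps value an explicit name so the later selectivecps phase can rewrite it into combinator calls; a hand-written before/after sketch (tmp is an invented name; the real phase marks such values with @cpsSym):

    // before selectiveanf, inside a reset block:
    //   println(shift { (k: Int => Unit) => k(42) } + 1)
    //
    // after, approximately:
    //   val tmp = shift { (k: Int => Unit) => k(42) }
    //   println(tmp + 1)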
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
new file mode 100644
index 0000000000..a16e9b9a4c
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -0,0 +1,60 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc
+import scala.tools.nsc.typechecker._
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class SelectiveCPSPlugin(val global: Global) extends Plugin {
+ import global._
+
+ val name = "continuations"
+ val description = "applies selective cps conversion"
+
+ val anfPhase = new SelectiveANFTransform() {
+ val global = SelectiveCPSPlugin.this.global
+ val runsAfter = List("pickler")
+ }
+
+ val cpsPhase = new SelectiveCPSTransform() {
+ val global = SelectiveCPSPlugin.this.global
+ val runsAfter = List("selectiveanf")
+ }
+
+
+ val components = List[PluginComponent](anfPhase, cpsPhase)
+
+ val checker = new CPSAnnotationChecker {
+ val global: SelectiveCPSPlugin.this.global.type = SelectiveCPSPlugin.this.global
+ }
+ global.addAnnotationChecker(checker.checker)
+
+ global.log("instantiated cps plugin: " + this)
+
+ def setEnabled(flag: Boolean) = {
+ checker.cpsEnabled = flag
+ anfPhase.cpsEnabled = flag
+ cpsPhase.cpsEnabled = flag
+ }
+
+ // TODO: require -enabled command-line flag
+
+ override def processOptions(options: List[String], error: String => Unit) = {
+ var enabled = false
+ for (option <- options) {
+ if (option == "enable") {
+ enabled = true
+ } else {
+ error("Option not understood: "+option)
+ }
+ }
+ setEnabled(enabled)
+ }
+
+ override val optionsHelp: Option[String] =
+ Some(" -P:continuations:enable Enable continuations")
+}
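The plugin is inert unless its enable option is passed; a possible invocation (the jar name is an assumption):

    scalac -Xplugin:selectivecps.jar -P:continuations:enable Test.scala

Without -P:continuations:enable, setEnabled(false) leaves cpsEnabled false, the transform phases return trees unchanged, and the annotation checker rejects code carrying @cpsParam types with the "must be compiled with the Scala continuations plugin enabled" error seen above.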
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
new file mode 100644
index 0000000000..07a9e5fed5
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -0,0 +1,384 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.collection._
+
+import scala.tools.nsc._
+import scala.tools.nsc.transform._
+import scala.tools.nsc.plugins._
+
+import scala.tools.nsc.ast.TreeBrowsers
+import scala.tools.nsc.ast._
+
+/**
+ * In methods marked @cps, CPS-transform assignments introduced by ANF-transform phase.
+ */
+abstract class SelectiveCPSTransform extends PluginComponent with
+ InfoTransform with TypingTransformers with CPSUtils {
+ // inherits abstract value `global' and class `Phase' from Transform
+
+ import global._ // the global environment
+ import definitions._ // standard classes and methods
+ import typer.atOwner // methods to type trees
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "selectivecps"
+
+ protected def newTransformer(unit: CompilationUnit): Transformer =
+ new CPSTransformer(unit)
+
+ /** This class does not change linearization */
+ override def changesBaseClasses = false
+
+ /** - return symbol's transformed type,
+ */
+ def transformInfo(sym: Symbol, tp: Type): Type = {
+ if (!cpsEnabled) return tp
+
+ val newtp = transformCPSType(tp)
+
+ if (newtp != tp)
+ log("transformInfo changed type for " + sym + " to " + newtp);
+
+ if (sym == MethReifyR)
+ log("transformInfo (not)changed type for " + sym + " to " + newtp);
+
+ newtp
+ }
+
+ def transformCPSType(tp: Type): Type = { // TODO: use a TypeMap? need to handle more cases?
+ tp match {
+ case PolyType(params,res) => PolyType(params, transformCPSType(res))
+ case MethodType(params,res) =>
+ MethodType(params, transformCPSType(res))
+ case TypeRef(pre, sym, args) => TypeRef(pre, sym, args.map(transformCPSType(_)))
+ case _ =>
+ getExternalAnswerTypeAnn(tp) match {
+ case Some((res, outer)) =>
+ appliedType(Context.tpe, List(removeAllCPSAnnotations(tp), res, outer))
+ case _ =>
+ removeAllCPSAnnotations(tp)
+ }
+ }
+ }
+
+
+ class CPSTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+
+ override def transform(tree: Tree): Tree = {
+ if (!cpsEnabled) return tree
+ postTransform(mainTransform(tree))
+ }
+
+ def postTransform(tree: Tree): Tree = {
+ tree.setType(transformCPSType(tree.tpe))
+ }
+
+
+ def mainTransform(tree: Tree): Tree = {
+ tree match {
+
+ // TODO: can we generalize this?
+
+ case Apply(TypeApply(fun, targs), args)
+ if (fun.symbol == MethShift) =>
+ log("found shift: " + tree)
+ atPos(tree.pos) {
+ val funR = gen.mkAttributedRef(MethShiftR) // TODO: correct?
+ //gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage),
+ //ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR)
+ //gen.mkAttributedRef(ModCPS.tpe, MethShiftR) // TODO: correct?
+ log(funR.tpe)
+ Apply(
+ TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
+ args.map(transform(_))
+ ).setType(transformCPSType(tree.tpe))
+ }
+
+ case Apply(TypeApply(fun, targs), args)
+ if (fun.symbol == MethShiftUnit) =>
+ log("found shiftUnit: " + tree)
+ atPos(tree.pos) {
+ val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct?
+ log(funR.tpe)
+ Apply(
+ TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe,
+ List(targs(0).tpe, targs(1).tpe))),
+ args.map(transform(_))
+ ).setType(appliedType(Context.tpe, List(targs(0).tpe,targs(1).tpe,targs(1).tpe)))
+ }
+
+ case Apply(TypeApply(fun, targs), args)
+ if (fun.symbol == MethReify) =>
+ log("found reify: " + tree)
+ atPos(tree.pos) {
+ val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct?
+ log(funR.tpe)
+ Apply(
+ TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
+ args.map(transform(_))
+ ).setType(transformCPSType(tree.tpe))
+ }
+
+ case Try(block, catches, finalizer) =>
+ // currently duplicates the catch block into a partial function.
+ // this is kinda risky, but we don't expect there will be lots
+ // of try/catches inside catch blocks (exponential blowup unlikely).
+
+ // CAVEAT: finalizers are surprisingly tricky!
+ // the problem is that they cannot easily be removed
+ // from the regular control path and hence will
+ // also be invoked after creating the Context object.
+
+ /*
+ object Test {
+ def foo1 = {
+ throw new Exception("in sub")
+ shift((k:Int=>Int) => k(1))
+ 10
+ }
+ def foo2 = {
+ shift((k:Int=>Int) => k(2))
+ 20
+ }
+ def foo3 = {
+ shift((k:Int=>Int) => k(3))
+ throw new Exception("in sub")
+ 30
+ }
+ def foo4 = {
+ shift((k:Int=>Int) => 4)
+ throw new Exception("in sub")
+ 40
+ }
+ def bar(x: Int) = try {
+ if (x == 1)
+ foo1
+ else if (x == 2)
+ foo2
+ else if (x == 3)
+ foo3
+ else //if (x == 4)
+ foo4
+ } catch {
+ case _ =>
+ println("exception")
+ 0
+ } finally {
+ println("done")
+ }
+ }
+
+ reset(Test.bar(1)) // should print: exception,done,0
+ reset(Test.bar(2)) // should print: done,20 <-- but prints: done,done,20
+ reset(Test.bar(3)) // should print: exception,done,0 <-- but prints: done,exception,done,0
+ reset(Test.bar(4)) // should print: 4 <-- but prints: done,4
+ */
+
+ val block1 = transform(block)
+ val catches1 = transformCaseDefs(catches)
+ val finalizer1 = transform(finalizer)
+
+ if (hasAnswerTypeAnn(tree.tpe)) {
+ //vprintln("CPS Transform: " + tree + "/" + tree.tpe + "/" + block1.tpe)
+
+ val (stms, expr1) = block1 match {
+ case Block(stms, expr) => (stms, expr)
+ case expr => (Nil, expr)
+ }
+
+ val targettp = transformCPSType(tree.tpe)
+
+// val expr2 = if (catches.nonEmpty) {
+ val pos = catches.head.pos
+ val argSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val rhs = Match(Ident(argSym), catches1)
+ val fun = Function(List(ValDef(argSym)), rhs)
+ val funSym = currentOwner.newValueParameter(pos, "$catches").setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funDef = localTyper.typed(atPos(pos) { ValDef(funSym, fun) })
+ val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member("flatMapCatch")), List(Ident(funSym))) })
+
+ argSym.owner = fun.symbol
+ val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
+ chown.traverse(rhs)
+
+ val exSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val catch2 = { localTyper.typedCases(tree, List(
+ CaseDef(Bind(exSym, Typed(Ident("_"), TypeTree(ThrowableClass.tpe))),
+ Apply(Select(Ident(funSym), "isDefinedAt"), List(Ident(exSym))),
+ Apply(Ident(funSym), List(Ident(exSym))))
+ ), ThrowableClass.tpe, targettp) }
+
+ //typedCases(tree, catches, ThrowableClass.tpe, pt)
+
+ localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1)))
+
+
+/*
+ disabled for now - see notes above
+
+ val expr3 = if (!finalizer.isEmpty) {
+ val pos = finalizer.pos
+ val finalizer2 = duplicateTree(finalizer1)
+ val fun = Function(List(), finalizer2)
+ val expr3 = localTyper.typed(atPos(pos) { Apply(Select(expr2, expr2.tpe.member("mapFinally")), List(fun)) })
+
+ val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
+ chown.traverse(finalizer2)
+
+ expr3
+ } else
+ expr2
+*/
+ } else {
+ treeCopy.Try(tree, block1, catches1, finalizer1)
+ }
+
+ case Block(stms, expr) =>
+
+ val (stms1, expr1) = transBlock(stms, expr)
+ treeCopy.Block(tree, stms1, expr1)
+
+ case _ =>
+ super.transform(tree)
+ }
+ }
+
+
+
+ def transBlock(stms: List[Tree], expr: Tree): (List[Tree], Tree) = {
+
+ stms match {
+ case Nil =>
+ (Nil, transform(expr))
+
+ case stm::rest =>
+
+ stm match {
+ case vd @ ValDef(mods, name, tpt, rhs)
+ if (vd.symbol.hasAnnotation(MarkerCPSSym)) =>
+
+ log("found marked ValDef "+name+" of type " + vd.symbol.tpe)
+
+ val tpe = vd.symbol.tpe
+ val rhs1 = atOwner(vd.symbol) { transform(rhs) }
+
+ new ChangeOwnerTraverser(vd.symbol, currentOwner).traverse(rhs1) // TODO: don't traverse twice
+
+ log("valdef symbol " + vd.symbol + " has type " + tpe)
+ log("right hand side " + rhs1 + " has type " + rhs1.tpe)
+
+ log("currentOwner: " + currentOwner)
+ log("currentMethod: " + currentMethod)
+
+ val (bodyStms, bodyExpr) = transBlock(rest, expr)
+ // FIXME: result will later be traversed again by TreeSymSubstituter and
+ // ChangeOwnerTraverser => exp. running time.
+ // Should be changed to fuse traversals into one.
+
+ val specialCaseTrivial = bodyExpr match {
+ case Apply(fun, args) =>
+ // for now, look for explicit tail calls only.
+ // are there other cases that could profit from specializing on
+ // trivial contexts as well?
+ (bodyExpr.tpe.typeSymbol == Context) && (currentMethod == fun.symbol)
+ case _ => false
+ }
+
+ def applyTrivial(ctxValSym: Symbol, body: Tree) = {
+
+ val body1 = (new TreeSymSubstituter(List(vd.symbol), List(ctxValSym)))(body)
+
+ val body2 = localTyper.typed(atPos(vd.symbol.pos) { body1 })
+
+ // in theory it would be nicer to look for an @cps annotation instead
+ // of testing for Context
+ if ((body2.tpe == null) || !(body2.tpe.typeSymbol == Context)) {
+ //println(body2 + "/" + body2.tpe)
+ unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
+ }
+ body2
+ }
+
+ def applyCombinatorFun(ctxR: Tree, body: Tree) = {
+ val arg = currentOwner.newValueParameter(ctxR.pos, name).setInfo(tpe)
+ val body1 = (new TreeSymSubstituter(List(vd.symbol), List(arg)))(body)
+ val fun = localTyper.typed(atPos(vd.symbol.pos) { Function(List(ValDef(arg)), body1) }) // types body as well
+ arg.owner = fun.symbol
+ new ChangeOwnerTraverser(currentOwner, fun.symbol).traverse(body1)
+
+ // see note about multiple traversals above
+
+ log("fun.symbol: "+fun.symbol)
+ log("fun.symbol.owner: "+fun.symbol.owner)
+ log("arg.owner: "+arg.owner)
+
+ log("fun.tpe:"+fun.tpe)
+ log("return type of fun:"+body1.tpe)
+
+ var methodName = "map"
+
+ if (body1.tpe != null) {
+ if (body1.tpe.typeSymbol == Context)
+ methodName = "flatMap"
+ }
+ else
+ unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
+
+ log("will use method:"+methodName)
+
+ localTyper.typed(atPos(vd.symbol.pos) {
+ Apply(Select(ctxR, ctxR.tpe.member(methodName)), List(fun))
+ })
+ }
+
+ def mkBlock(stms: List[Tree], expr: Tree) = if (stms.nonEmpty) Block(stms, expr) else expr
+
+ try {
+ if (specialCaseTrivial) {
+ log("will optimize possible tail call: " + bodyExpr)
+
+ // FIXME: flatMap impl has become more complicated due to
+ // exceptions. do we need to put a try/catch in the then part??
+
+ // val ctx = <rhs>
+ // if (ctx.isTrivial)
+ // val <lhs> = ctx.getTrivialValue; ... <--- TODO: try/catch ??? don't bother for the moment...
+ // else
+ // ctx.flatMap { <lhs> => ... }
+ val ctxSym = currentOwner.newValue(vd.symbol.name + "$shift").setInfo(rhs1.tpe)
+ val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
+ def ctxRef = localTyper.typed(Ident(ctxSym))
+ val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
+ val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member("getTrivialValue"))))
+ val switchExpr = localTyper.typed(atPos(vd.symbol.pos) {
+ val body2 = duplicateTree(mkBlock(bodyStms, bodyExpr)) // dup before typing!
+ If(Select(ctxRef, ctxSym.tpe.member("isTrivial")),
+ applyTrivial(argSym, mkBlock(argDef::bodyStms, bodyExpr)),
+ applyCombinatorFun(ctxRef, body2))
+ })
+ (List(ctxDef), switchExpr)
+ } else {
+ // ctx.flatMap { <lhs> => ... }
+ // or
+ // ctx.map { <lhs> => ... }
+ (Nil, applyCombinatorFun(rhs1, mkBlock(bodyStms, bodyExpr)))
+ }
+ } catch {
+ case ex:TypeError =>
+ unit.error(ex.pos, ex.msg)
+ (bodyStms, bodyExpr)
+ }
+
+ case _ =>
+ val stm1 = transform(stm)
+ val (a, b) = transBlock(rest, expr)
+ (stm1::a, b)
+ }
+ }
+ }
+
+
+ }
+}
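Schematically, the transBlock case above rewrites each @cpsSym-marked val into a combinator call on the ControlContext produced by its right-hand side (the shapes below are only a sketch of the generated code):

    // val x = rhs          // after the transform, rhs: ControlContext[A, B, C]
    // ...rest of block...
    //
    // becomes roughly:
    //   rhs.flatMap { x => ...rest of block... }   // when the rest is itself a ControlContext
    //   rhs.map     { x => ...rest of block... }   // when the rest is an ordinary value
    //
    // with the isTrivial / getTrivialValue fast path taken when the rest is a direct
    // tail call back into the current method (specialCaseTrivial above).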
diff --git a/src/continuations/plugin/scalac-plugin.xml b/src/continuations/plugin/scalac-plugin.xml
new file mode 100644
index 0000000000..04d42655c5
--- /dev/null
+++ b/src/continuations/plugin/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<!-- $Id$ -->
+<plugin>
+ <name>continuations</name>
+ <classname>scala.tools.selectivecps.SelectiveCPSPlugin</classname>
+</plugin>
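scalac locates the plugin by reading this descriptor from the root of the plugin jar, next to the compiled classes; a possible packaging step (the jar name is an assumption):

    jar cf selectivecps.jar scalac-plugin.xml scala/tools/selectivecps/*.class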
diff --git a/src/dbc/scala/dbc/DataType.scala b/src/dbc/scala/dbc/DataType.scala
index 9de4ed1285..8eb318d1c1 100644
--- a/src/dbc/scala/dbc/DataType.scala
+++ b/src/dbc/scala/dbc/DataType.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc;
diff --git a/src/dbc/scala/dbc/Database.scala b/src/dbc/scala/dbc/Database.scala
index ce768478d8..68afe0e51d 100644
--- a/src/dbc/scala/dbc/Database.scala
+++ b/src/dbc/scala/dbc/Database.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/Syntax.scala b/src/dbc/scala/dbc/Syntax.scala
index 40bdfb0e52..74959724a9 100644
--- a/src/dbc/scala/dbc/Syntax.scala
+++ b/src/dbc/scala/dbc/Syntax.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc;
diff --git a/src/dbc/scala/dbc/Utilities.scala b/src/dbc/scala/dbc/Utilities.scala
index 7de2f96e1a..9f16f895be 100644
--- a/src/dbc/scala/dbc/Utilities.scala
+++ b/src/dbc/scala/dbc/Utilities.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc;
diff --git a/src/dbc/scala/dbc/Value.scala b/src/dbc/scala/dbc/Value.scala
index 0ca22985ab..cf7b62c1cf 100644
--- a/src/dbc/scala/dbc/Value.scala
+++ b/src/dbc/scala/dbc/Value.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc;
diff --git a/src/dbc/scala/dbc/Vendor.scala b/src/dbc/scala/dbc/Vendor.scala
index 27dbd2a89f..6c840e4b15 100644
--- a/src/dbc/scala/dbc/Vendor.scala
+++ b/src/dbc/scala/dbc/Vendor.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc;
diff --git a/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala b/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala
index 8ff60f49eb..bbc77fc83e 100644
--- a/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala
+++ b/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/Boolean.scala b/src/dbc/scala/dbc/datatype/Boolean.scala
index f4b9251ab1..18aa5371c1 100644
--- a/src/dbc/scala/dbc/datatype/Boolean.scala
+++ b/src/dbc/scala/dbc/datatype/Boolean.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/Character.scala b/src/dbc/scala/dbc/datatype/Character.scala
index a4e99860d5..6873283c78 100644
--- a/src/dbc/scala/dbc/datatype/Character.scala
+++ b/src/dbc/scala/dbc/datatype/Character.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala b/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala
index b4a838aaa7..c0ce3a6c01 100644
--- a/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala
+++ b/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/CharacterString.scala b/src/dbc/scala/dbc/datatype/CharacterString.scala
index 1ad249f50f..90efcb887f 100644
--- a/src/dbc/scala/dbc/datatype/CharacterString.scala
+++ b/src/dbc/scala/dbc/datatype/CharacterString.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/CharacterVarying.scala b/src/dbc/scala/dbc/datatype/CharacterVarying.scala
index 52649b5e69..dda07de36b 100644
--- a/src/dbc/scala/dbc/datatype/CharacterVarying.scala
+++ b/src/dbc/scala/dbc/datatype/CharacterVarying.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/ExactNumeric.scala b/src/dbc/scala/dbc/datatype/ExactNumeric.scala
index 9cb739b736..dfbdf969e6 100644
--- a/src/dbc/scala/dbc/datatype/ExactNumeric.scala
+++ b/src/dbc/scala/dbc/datatype/ExactNumeric.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/Factory.scala b/src/dbc/scala/dbc/datatype/Factory.scala
index 07da0c8cb5..d652556722 100644
--- a/src/dbc/scala/dbc/datatype/Factory.scala
+++ b/src/dbc/scala/dbc/datatype/Factory.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
@@ -22,7 +21,7 @@ object Factory {
final val java_lang_Integer_SIZE = 32;
final val java_lang_Long_SIZE = 64;
- /** Returns a mullable property formated as a boolean option */
+ /** Returns a nullable property formatted as a boolean option */
def isNullable (metadata:java.sql.ResultSetMetaData, index:Int): Option[scala.Boolean] =
metadata.isNullable(index) match {
case java.sql.ResultSetMetaData.columnNoNulls => Some(false);
diff --git a/src/dbc/scala/dbc/datatype/Numeric.scala b/src/dbc/scala/dbc/datatype/Numeric.scala
index 597b01d348..4c39869f0b 100644
--- a/src/dbc/scala/dbc/datatype/Numeric.scala
+++ b/src/dbc/scala/dbc/datatype/Numeric.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/String.scala b/src/dbc/scala/dbc/datatype/String.scala
index a3eb944a3c..b149aec04b 100644
--- a/src/dbc/scala/dbc/datatype/String.scala
+++ b/src/dbc/scala/dbc/datatype/String.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/datatype/Unknown.scala b/src/dbc/scala/dbc/datatype/Unknown.scala
index 3dda39e111..ef8ab1036c 100644
--- a/src/dbc/scala/dbc/datatype/Unknown.scala
+++ b/src/dbc/scala/dbc/datatype/Unknown.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/exception/IncompatibleSchema.scala b/src/dbc/scala/dbc/exception/IncompatibleSchema.scala
index c5998323df..883a2369f5 100644
--- a/src/dbc/scala/dbc/exception/IncompatibleSchema.scala
+++ b/src/dbc/scala/dbc/exception/IncompatibleSchema.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/exception/UnsupportedFeature.scala b/src/dbc/scala/dbc/exception/UnsupportedFeature.scala
index 19c5190704..3e3d2f64ca 100644
--- a/src/dbc/scala/dbc/exception/UnsupportedFeature.scala
+++ b/src/dbc/scala/dbc/exception/UnsupportedFeature.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/result/Field.scala b/src/dbc/scala/dbc/result/Field.scala
index 26284bff55..b01dd5dcdb 100644
--- a/src/dbc/scala/dbc/result/Field.scala
+++ b/src/dbc/scala/dbc/result/Field.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/result/FieldMetadata.scala b/src/dbc/scala/dbc/result/FieldMetadata.scala
index ff419c91ce..8314eaae3e 100644
--- a/src/dbc/scala/dbc/result/FieldMetadata.scala
+++ b/src/dbc/scala/dbc/result/FieldMetadata.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/result/Relation.scala b/src/dbc/scala/dbc/result/Relation.scala
index 9ead4051db..45f3b625d8 100644
--- a/src/dbc/scala/dbc/result/Relation.scala
+++ b/src/dbc/scala/dbc/result/Relation.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/result/Status.scala b/src/dbc/scala/dbc/result/Status.scala
index f1241858a2..c8eebb9d98 100644
--- a/src/dbc/scala/dbc/result/Status.scala
+++ b/src/dbc/scala/dbc/result/Status.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/result/Tuple.scala b/src/dbc/scala/dbc/result/Tuple.scala
index 720731f86b..659fa98325 100644
--- a/src/dbc/scala/dbc/result/Tuple.scala
+++ b/src/dbc/scala/dbc/result/Tuple.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/AccessMode.scala b/src/dbc/scala/dbc/statement/AccessMode.scala
index 2f256865d9..d5304c446b 100644
--- a/src/dbc/scala/dbc/statement/AccessMode.scala
+++ b/src/dbc/scala/dbc/statement/AccessMode.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/DerivedColumn.scala b/src/dbc/scala/dbc/statement/DerivedColumn.scala
index 186f9b0390..d1f9eb5ba4 100644
--- a/src/dbc/scala/dbc/statement/DerivedColumn.scala
+++ b/src/dbc/scala/dbc/statement/DerivedColumn.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Expression.scala b/src/dbc/scala/dbc/statement/Expression.scala
index 21f20c831c..23b2e8e39c 100644
--- a/src/dbc/scala/dbc/statement/Expression.scala
+++ b/src/dbc/scala/dbc/statement/Expression.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Insert.scala b/src/dbc/scala/dbc/statement/Insert.scala
index 9772bcfb08..ec4850bcc8 100644
--- a/src/dbc/scala/dbc/statement/Insert.scala
+++ b/src/dbc/scala/dbc/statement/Insert.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/InsertionData.scala b/src/dbc/scala/dbc/statement/InsertionData.scala
index e7fa20be04..e0f9083a40 100644
--- a/src/dbc/scala/dbc/statement/InsertionData.scala
+++ b/src/dbc/scala/dbc/statement/InsertionData.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/IsolationLevel.scala b/src/dbc/scala/dbc/statement/IsolationLevel.scala
index a5647f55ec..444c03aa41 100644
--- a/src/dbc/scala/dbc/statement/IsolationLevel.scala
+++ b/src/dbc/scala/dbc/statement/IsolationLevel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/JoinType.scala b/src/dbc/scala/dbc/statement/JoinType.scala
index be62b682ae..4a9af824ca 100644
--- a/src/dbc/scala/dbc/statement/JoinType.scala
+++ b/src/dbc/scala/dbc/statement/JoinType.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Jointure.scala b/src/dbc/scala/dbc/statement/Jointure.scala
index d0ec168cf5..f66d37869c 100644
--- a/src/dbc/scala/dbc/statement/Jointure.scala
+++ b/src/dbc/scala/dbc/statement/Jointure.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Relation.scala b/src/dbc/scala/dbc/statement/Relation.scala
index a8eb350482..4f5f29eb53 100644
--- a/src/dbc/scala/dbc/statement/Relation.scala
+++ b/src/dbc/scala/dbc/statement/Relation.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Select.scala b/src/dbc/scala/dbc/statement/Select.scala
index f6c577f101..1e432e2277 100644
--- a/src/dbc/scala/dbc/statement/Select.scala
+++ b/src/dbc/scala/dbc/statement/Select.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/SetClause.scala b/src/dbc/scala/dbc/statement/SetClause.scala
index 1d1effd3bf..177c869a9a 100644
--- a/src/dbc/scala/dbc/statement/SetClause.scala
+++ b/src/dbc/scala/dbc/statement/SetClause.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/SetQuantifier.scala b/src/dbc/scala/dbc/statement/SetQuantifier.scala
index 55ac402ff6..1f224eae97 100644
--- a/src/dbc/scala/dbc/statement/SetQuantifier.scala
+++ b/src/dbc/scala/dbc/statement/SetQuantifier.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Statement.scala b/src/dbc/scala/dbc/statement/Statement.scala
index 9bc49e4b1e..a8d52ba333 100644
--- a/src/dbc/scala/dbc/statement/Statement.scala
+++ b/src/dbc/scala/dbc/statement/Statement.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Status.scala b/src/dbc/scala/dbc/statement/Status.scala
index 88a8f1feed..bea3a2600b 100644
--- a/src/dbc/scala/dbc/statement/Status.scala
+++ b/src/dbc/scala/dbc/statement/Status.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Table.scala b/src/dbc/scala/dbc/statement/Table.scala
index e59bd8a6b4..001b13279e 100644
--- a/src/dbc/scala/dbc/statement/Table.scala
+++ b/src/dbc/scala/dbc/statement/Table.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Transaction.scala b/src/dbc/scala/dbc/statement/Transaction.scala
index 731167dfe4..101f50f197 100644
--- a/src/dbc/scala/dbc/statement/Transaction.scala
+++ b/src/dbc/scala/dbc/statement/Transaction.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/Update.scala b/src/dbc/scala/dbc/statement/Update.scala
index 76c33909c4..7afb750183 100644
--- a/src/dbc/scala/dbc/statement/Update.scala
+++ b/src/dbc/scala/dbc/statement/Update.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/Aggregate.scala b/src/dbc/scala/dbc/statement/expression/Aggregate.scala
index b9642f0501..5411afde1f 100644
--- a/src/dbc/scala/dbc/statement/expression/Aggregate.scala
+++ b/src/dbc/scala/dbc/statement/expression/Aggregate.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala b/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala
index 8f33472296..0565ee81ff 100644
--- a/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala
+++ b/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/Constant.scala b/src/dbc/scala/dbc/statement/expression/Constant.scala
index 571bd57017..ca099eae5f 100644
--- a/src/dbc/scala/dbc/statement/expression/Constant.scala
+++ b/src/dbc/scala/dbc/statement/expression/Constant.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/Default.scala b/src/dbc/scala/dbc/statement/expression/Default.scala
index f337667740..5d629eea52 100644
--- a/src/dbc/scala/dbc/statement/expression/Default.scala
+++ b/src/dbc/scala/dbc/statement/expression/Default.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/Field.scala b/src/dbc/scala/dbc/statement/expression/Field.scala
index 86b35c0b22..7f050fb29b 100644
--- a/src/dbc/scala/dbc/statement/expression/Field.scala
+++ b/src/dbc/scala/dbc/statement/expression/Field.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/FunctionCall.scala b/src/dbc/scala/dbc/statement/expression/FunctionCall.scala
index 10ffff87e0..666bdc594c 100644
--- a/src/dbc/scala/dbc/statement/expression/FunctionCall.scala
+++ b/src/dbc/scala/dbc/statement/expression/FunctionCall.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/Select.scala b/src/dbc/scala/dbc/statement/expression/Select.scala
index ae69549be3..28f4f0ec54 100644
--- a/src/dbc/scala/dbc/statement/expression/Select.scala
+++ b/src/dbc/scala/dbc/statement/expression/Select.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/SetFunction.scala b/src/dbc/scala/dbc/statement/expression/SetFunction.scala
index f88bd22eb6..36061b013c 100644
--- a/src/dbc/scala/dbc/statement/expression/SetFunction.scala
+++ b/src/dbc/scala/dbc/statement/expression/SetFunction.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/TypeCast.scala b/src/dbc/scala/dbc/statement/expression/TypeCast.scala
index 1b27f0e046..ead392ecfc 100644
--- a/src/dbc/scala/dbc/statement/expression/TypeCast.scala
+++ b/src/dbc/scala/dbc/statement/expression/TypeCast.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala b/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala
index efe05cee64..6910de5b60 100644
--- a/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala
+++ b/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/syntax/DataTypeUtil.scala b/src/dbc/scala/dbc/syntax/DataTypeUtil.scala
index 9caca33eac..5882a08b63 100644
--- a/src/dbc/scala/dbc/syntax/DataTypeUtil.scala
+++ b/src/dbc/scala/dbc/syntax/DataTypeUtil.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/syntax/Database.scala b/src/dbc/scala/dbc/syntax/Database.scala
index 8675fa4a83..938c61b7f3 100644
--- a/src/dbc/scala/dbc/syntax/Database.scala
+++ b/src/dbc/scala/dbc/syntax/Database.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/syntax/Statement.scala b/src/dbc/scala/dbc/syntax/Statement.scala
index 8b43ca62eb..c78a718382 100644
--- a/src/dbc/scala/dbc/syntax/Statement.scala
+++ b/src/dbc/scala/dbc/syntax/Statement.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/syntax/StatementExpression.scala b/src/dbc/scala/dbc/syntax/StatementExpression.scala
index 8338372bdc..577c173241 100644
--- a/src/dbc/scala/dbc/syntax/StatementExpression.scala
+++ b/src/dbc/scala/dbc/syntax/StatementExpression.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/ApproximateNumeric.scala b/src/dbc/scala/dbc/value/ApproximateNumeric.scala
index f5969ad9fb..1d0c7ada15 100644
--- a/src/dbc/scala/dbc/value/ApproximateNumeric.scala
+++ b/src/dbc/scala/dbc/value/ApproximateNumeric.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/Boolean.scala b/src/dbc/scala/dbc/value/Boolean.scala
index 8a193f576e..9349eee28c 100644
--- a/src/dbc/scala/dbc/value/Boolean.scala
+++ b/src/dbc/scala/dbc/value/Boolean.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/Character.scala b/src/dbc/scala/dbc/value/Character.scala
index ace9ac90a0..57ad54ee89 100644
--- a/src/dbc/scala/dbc/value/Character.scala
+++ b/src/dbc/scala/dbc/value/Character.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/CharacterLargeObject.scala b/src/dbc/scala/dbc/value/CharacterLargeObject.scala
index cc49a13c3d..16a4ad0356 100644
--- a/src/dbc/scala/dbc/value/CharacterLargeObject.scala
+++ b/src/dbc/scala/dbc/value/CharacterLargeObject.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/CharacterVarying.scala b/src/dbc/scala/dbc/value/CharacterVarying.scala
index 40bf47955b..7991e4ffcd 100644
--- a/src/dbc/scala/dbc/value/CharacterVarying.scala
+++ b/src/dbc/scala/dbc/value/CharacterVarying.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/Conversion.scala b/src/dbc/scala/dbc/value/Conversion.scala
index 4f0adff7c6..dcd3e0ec0e 100644
--- a/src/dbc/scala/dbc/value/Conversion.scala
+++ b/src/dbc/scala/dbc/value/Conversion.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/ExactNumeric.scala b/src/dbc/scala/dbc/value/ExactNumeric.scala
index 877095274d..cee219e15a 100644
--- a/src/dbc/scala/dbc/value/ExactNumeric.scala
+++ b/src/dbc/scala/dbc/value/ExactNumeric.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/Factory.scala b/src/dbc/scala/dbc/value/Factory.scala
index 079a0ec07d..06e4bfb466 100644
--- a/src/dbc/scala/dbc/value/Factory.scala
+++ b/src/dbc/scala/dbc/value/Factory.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/value/Unknown.scala b/src/dbc/scala/dbc/value/Unknown.scala
index 5f3463851d..46e8a39e42 100644
--- a/src/dbc/scala/dbc/value/Unknown.scala
+++ b/src/dbc/scala/dbc/value/Unknown.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/dbc/scala/dbc/vendor/PostgreSQL.scala b/src/dbc/scala/dbc/vendor/PostgreSQL.scala
index 5b0cd32b3d..5962bb2fdd 100644
--- a/src/dbc/scala/dbc/vendor/PostgreSQL.scala
+++ b/src/dbc/scala/dbc/vendor/PostgreSQL.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.dbc
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
index a2eb6e7fa1..569a9ac272 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
index 8df4f9350e..7db047ee98 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
index 85662428b6..53aeee3ab6 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
index 981fbfc645..caaff778ec 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
index a0f575e7d0..92333dc4b4 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
index 1542bb1104..0fc604424a 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
index 252186a18e..f7d275c8a7 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
index 79afaded44..44a3d551aa 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
index d4effed412..34b38c828d 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
index 911acd18da..0df4498d8e 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
@@ -187,6 +186,10 @@ public class JConstantPool {
return addEntry(new Utf8Entry(value));
}
+ public int addUtf8(byte[] value) {
+ return addEntry(new Utf8Entry(value));
+ }
+
public String lookupUtf8(int index) {
Utf8Entry entry = (Utf8Entry)lookupEntry(index);
return entry.getValue();
@@ -344,22 +347,49 @@ public class JConstantPool {
public class Utf8Entry extends ChildlessEntry implements Entry {
private final String value;
- public Utf8Entry(String value) { this.value = value.intern(); }
+ private final byte[] bytes;
+ public Utf8Entry(String value) {
+ this.value = value.intern();
+ this.bytes = null;
+ }
public Utf8Entry(DataInputStream stream) throws IOException {
this(stream.readUTF());
}
+ public Utf8Entry(byte[] bytes) {
+ this.bytes = bytes;
+ this.value = null;
+ }
- public int hashCode() { return value.hashCode(); }
+ public int hashCode() {
+ if (bytes != null) return bytes.hashCode();
+ return value.hashCode();
+ }
public boolean equals(Object o) {
- return o instanceof Utf8Entry && ((Utf8Entry)o).value == value;
+ boolean isEqual = o instanceof Utf8Entry;
+ if (bytes != null) {
+ isEqual = isEqual && ((Utf8Entry)o).bytes == bytes;
+ }
+ else {
+ isEqual = isEqual && ((Utf8Entry)o).value == value;
+ }
+ return isEqual;
}
public int getTag() { return CONSTANT_Utf8; }
public String getValue() { return value; }
+ public byte[] getBytes() { return bytes; }
public int getSize() { return 1; }
public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeUTF(value);
+ if (bytes != null) {
+ if (bytes.length > 65535) {
+ throw new IOException("String literal of length " + bytes.length + " does not fit in Classfile");
+ }
+ stream.writeShort(bytes.length);
+ stream.write(bytes);
+ }
+ else
+ stream.writeUTF(value);
}
}
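
(A minimal sketch of what the byte[]-backed Utf8Entry introduced above writes out; the method name is illustrative and not part of the patch. The 65535 ceiling corresponds to the u2 length field of the class file's CONSTANT_Utf8_info structure, which the raw-bytes branch of writeContentsTo mirrors.)

    // Illustrative only: a u2 length followed by the raw payload,
    // which is assumed to already be valid modified UTF-8.
    static void writeRawUtf8(java.io.DataOutputStream out, byte[] bytes) throws java.io.IOException {
        if (bytes.length > 65535)          // a u2 length cannot represent more
            throw new java.io.IOException("constant too long: " + bytes.length);
        out.writeShort(bytes.length);      // 2-byte length
        out.write(bytes);                  // payload written verbatim
    }
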
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
index b8f29a6a2b..523f960b23 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
@@ -74,7 +73,7 @@ public class JExtendedCode extends JCode {
},
{
/* T_SHORT -> T_BOOLEAN */ forbidden,
- /* T_SHORT -> T_CHAR */ nothingToDo,
+ /* T_SHORT -> T_CHAR */ {JOpcode.I2C},
/* T_SHORT -> T_FLOAT */ {JOpcode.I2F},
/* T_SHORT -> T_DOUBLE */ {JOpcode.I2D},
/* T_SHORT -> T_BYTE */ {JOpcode.I2B},
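
(The hunk above replaces the former no-op short-to-char conversion with an explicit I2C: char is an unsigned 16-bit type, so a negative short left untouched on the operand stack would fall outside char's range. A plain-Java illustration of the semantics that i2c implements:)

    short s = -1;
    char  c = (char) s;            // javac emits i2c for this narrowing
    System.out.println((int) c);   // prints 65535, not -1
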
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JField.java b/src/fjbg/ch/epfl/lamp/fjbg/JField.java
index d94bf8c64f..2c1e3063bf 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JField.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JField.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
index bf82015c42..fec7310bdf 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
index a8ba479c7b..39d7147c42 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
index 2ca4a73252..1403c34cf2 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
index b708a37e3c..9cbc8bb08a 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
index f2378cf473..8d082fb90d 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
index dbddf6547d..804b4314f2 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
index de136bf53d..ec44967e27 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
index a1930f77ef..b4edb86fec 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
index 6b6d3b6a74..f7ee688784 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
index ad73540940..7a64f91ee9 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
index 99c6acff71..0ed6ef4dea 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
index 77d6783c87..e4478728ae 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JType.java b/src/fjbg/ch/epfl/lamp/fjbg/JType.java
index 2557d45b83..b926a59f5a 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JType.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.fjbg;
diff --git a/src/fjbg/ch/epfl/lamp/util/ByteArray.java b/src/fjbg/ch/epfl/lamp/util/ByteArray.java
index 800fc9d0d8..d6b70485bc 100644
--- a/src/fjbg/ch/epfl/lamp/util/ByteArray.java
+++ b/src/fjbg/ch/epfl/lamp/util/ByteArray.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.util;
diff --git a/src/library/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 92ee56c961..3fad92cbf1 100644
--- a/src/library/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -57,7 +57,7 @@ import java.lang.reflect.*;
* pools with greater than the maximum result in
* IllegalArgumentExceptions.
*/
-public class ForkJoinPool extends AbstractExecutorService {
+public class ForkJoinPool /*extends AbstractExecutorService*/ {
/*
* See the extended comments interspersed below for design,
@@ -70,6 +70,10 @@ public class ForkJoinPool extends AbstractExecutorService {
/** Max pool size -- must be a power of two minus 1 */
private static final int MAX_THREADS = 0x7FFF;
+ // placeholder for java.util.concurrent.RunnableFuture
+ interface RunnableFuture<T> extends Runnable {
+ }
+
/**
* Factory for creating new ForkJoinWorkerThreads. A
* ForkJoinWorkerThreadFactory must be defined and used for
diff --git a/src/library/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index e6c0fa7bb4..dc1a6bcccc 100644
--- a/src/library/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -1031,9 +1031,10 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
return (Unsafe) f.get(null);
}
- private static long fieldOffset(String fieldName)
+ private static long fieldOffset(String fieldName, Unsafe unsafe)
throws NoSuchFieldException {
- return _unsafe.objectFieldOffset
+ // do not use _unsafe to avoid NPE
+ return unsafe.objectFieldOffset
(ForkJoinTask.class.getDeclaredField(fieldName));
}
@@ -1041,12 +1042,22 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
static final long statusOffset;
static {
+ Unsafe tmpUnsafe = null;
+ long tmpStatusOffset = 0;
try {
- _unsafe = getUnsafe();
- statusOffset = fieldOffset("status");
+ tmpUnsafe = getUnsafe();
+ tmpStatusOffset = fieldOffset("status", tmpUnsafe);
} catch (Throwable e) {
- throw new RuntimeException("Could not initialize intrinsics", e);
+ // Ignore the failure to load sun.misc.Unsafe on Android so
+ // that platform can use the actor library without the
+ // fork/join scheduler.
+ String vmVendor = System.getProperty("java.vm.vendor");
+ if (!vmVendor.contains("Android")) {
+ throw new RuntimeException("Could not initialize intrinsics", e);
+ }
}
+ _unsafe = tmpUnsafe;
+ statusOffset = tmpStatusOffset;
}
}
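
(A sketch of the fallback pattern used in the hunk above; the method name is illustrative and not from the patch. The idea, as the inserted comment explains, is to tolerate a missing sun.misc.Unsafe on Android VMs so the actor library still loads there, and to fail hard everywhere else.)

    static boolean unsafeAvailable() {
        try {
            Class.forName("sun.misc.Unsafe");   // Android VMs may not provide this class
            return true;
        } catch (Throwable t) {
            String vendor = System.getProperty("java.vm.vendor", "");
            if (vendor.contains("Android")) return false;   // degrade gracefully on Android
            throw new RuntimeException("Could not initialize intrinsics", t);
        }
    }
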
diff --git a/src/library/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
index b4d889750c..b4d889750c 100644
--- a/src/library/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
diff --git a/src/library/scala/concurrent/forkjoin/LinkedTransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
index 3b46c176ff..3b46c176ff 100644
--- a/src/library/scala/concurrent/forkjoin/LinkedTransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
diff --git a/src/library/scala/concurrent/forkjoin/RecursiveAction.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
index 2d36f7eb33..2d36f7eb33 100644
--- a/src/library/scala/concurrent/forkjoin/RecursiveAction.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
diff --git a/src/library/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
index 1f3110580b..a526f75597 100644
--- a/src/library/scala/concurrent/forkjoin/RecursiveTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
@@ -37,7 +37,7 @@ package scala.concurrent.forkjoin;
public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
/**
- * Empty contructor for use by subclasses.
+ * Empty constructor for use by subclasses.
*/
protected RecursiveTask() {
}
diff --git a/src/library/scala/concurrent/forkjoin/ThreadLocalRandom.java b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
index 34e2e37f37..34e2e37f37 100644
--- a/src/library/scala/concurrent/forkjoin/ThreadLocalRandom.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
diff --git a/src/library/scala/concurrent/forkjoin/TransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
index 9c7b2289c4..9c7b2289c4 100644
--- a/src/library/scala/concurrent/forkjoin/TransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
diff --git a/src/library/scala/concurrent/forkjoin/package-info.java b/src/forkjoin/scala/concurrent/forkjoin/package-info.java
index b8fa0fad02..b8fa0fad02 100644
--- a/src/library/scala/concurrent/forkjoin/package-info.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/package-info.java
diff --git a/actors.iml b/src/intellij/actors.iml.SAMPLE
index b002792a0c..ace6ec7f62 100644
--- a/actors.iml
+++ b/src/intellij/actors.iml.SAMPLE
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/actors">
- <sourceFolder url="file://$MODULE_DIR$/src/actors" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../actors">
+ <sourceFolder url="file://$MODULE_DIR$/../actors" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/compiler.iml b/src/intellij/compiler.iml.SAMPLE
index 72b86fd446..cc0b64e735 100644
--- a/compiler.iml
+++ b/src/intellij/compiler.iml.SAMPLE
@@ -4,24 +4,24 @@
<facet type="Scala" name="Scala">
<configuration>
<option name="takeFromSettings" value="true" />
- <option name="myScalaCompilerJarPath" value="$MODULE_DIR$/build/locker/classes/compiler:/localhome/dragos/workspace-laptop/git/scala/lib/fjbg.jar" />
- <option name="myScalaSdkJarPath" value="$MODULE_DIR$/build/locker/classes/library" />
+ <option name="myScalaCompilerJarPath" value="build/locker/classes/compiler lib/fjbg.jar lib/msil.jar" />
+ <option name="myScalaSdkJarPath" value="build/locker/classes/library" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="false">
- <output url="file://$MODULE_DIR$/build/quick/classes/compiler" />
- <output-test url="file://$MODULE_DIR$/out/test/compiler" />
+ <output url="file://$MODULE_DIR$/../../build/quick/classes/compiler" />
+ <output-test url="file://$MODULE_DIR$/../../out/test/compiler" />
<exclude-output />
- <content url="file://$MODULE_DIR$/src/compiler">
- <sourceFolder url="file://$MODULE_DIR$/src/compiler" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../compiler">
+ <sourceFolder url="file://$MODULE_DIR$/../compiler" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="library" name="Scala SDK" level="application" />
<orderEntry type="library" name="Project ant library" level="project" />
- <orderEntry type="library" name="ant" level="application" />
<orderEntry type="library" name="Project Scala SDK" level="project" />
+ <orderEntry type="library" name="ant" level="project" />
+ <orderEntry type="library" name="locker" level="project" />
</component>
</module>
diff --git a/dbc.iml b/src/intellij/dbc.iml.SAMPLE
index 9c035853e2..d82bda72b4 100644
--- a/dbc.iml
+++ b/src/intellij/dbc.iml.SAMPLE
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/dbc">
- <sourceFolder url="file://$MODULE_DIR$/src/dbc" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../dbc">
+ <sourceFolder url="file://$MODULE_DIR$/../dbc" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/library.iml b/src/intellij/library.iml.SAMPLE
index 4d5668de90..a8719634aa 100644
--- a/library.iml
+++ b/src/intellij/library.iml.SAMPLE
@@ -7,11 +7,11 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/META-INF" />
- <content url="file://$MODULE_DIR$/bin" />
- <content url="file://$MODULE_DIR$/lib" />
- <content url="file://$MODULE_DIR$/src/library">
- <sourceFolder url="file://$MODULE_DIR$/src/library" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../../META-INF" />
+ <content url="file://$MODULE_DIR$/../../bin" />
+ <content url="file://$MODULE_DIR$/../../lib" />
+ <content url="file://$MODULE_DIR$/../library">
+ <sourceFolder url="file://$MODULE_DIR$/../library" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/manual.iml b/src/intellij/manual.iml.SAMPLE
index 0ee975326b..bbdcfbebc2 100644
--- a/manual.iml
+++ b/src/intellij/manual.iml.SAMPLE
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/manual">
- <sourceFolder url="file://$MODULE_DIR$/src/manual" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../manual">
+ <sourceFolder url="file://$MODULE_DIR$/../manual" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/partest.iml b/src/intellij/partest.iml.SAMPLE
index addac88198..3df457abdb 100644
--- a/partest.iml
+++ b/src/intellij/partest.iml.SAMPLE
@@ -7,10 +7,10 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/partest">
- <sourceFolder url="file://$MODULE_DIR$/src/partest" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../partest">
+ <sourceFolder url="file://$MODULE_DIR$/../partest" isTestSource="false" />
</content>
- <content url="file://$MODULE_DIR$/test" />
+ <content url="file://$MODULE_DIR$/../../test" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="actors" />
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
new file mode 100644
index 0000000000..2506c74f6a
--- /dev/null
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -0,0 +1,1446 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+ <component name="AntConfiguration">
+ <defaultAnt bundledAnt="true" />
+ <buildFile url="file://$PROJECT_DIR$/../../build.xml">
+ <additionalClassPath />
+ <antReference projectDefault="true" />
+ <customJdkName value="" />
+ <maximumHeapSize value="128" />
+ <maximumStackSize value="32" />
+ <properties />
+ </buildFile>
+ </component>
+ <component name="BuildJarProjectSettings">
+ <option name="BUILD_JARS_ON_MAKE" value="false" />
+ </component>
+ <component name="CodeStyleSettingsManager">
+ <option name="PER_PROJECT_SETTINGS">
+ <value>
+ <ADDITIONAL_INDENT_OPTIONS fileType="java">
+ <option name="INDENT_SIZE" value="4" />
+ <option name="CONTINUATION_INDENT_SIZE" value="8" />
+ <option name="TAB_SIZE" value="4" />
+ <option name="USE_TAB_CHARACTER" value="false" />
+ <option name="SMART_TABS" value="false" />
+ <option name="LABEL_INDENT_SIZE" value="0" />
+ <option name="LABEL_INDENT_ABSOLUTE" value="false" />
+ </ADDITIONAL_INDENT_OPTIONS>
+ <ADDITIONAL_INDENT_OPTIONS fileType="jsp">
+ <option name="INDENT_SIZE" value="4" />
+ <option name="CONTINUATION_INDENT_SIZE" value="8" />
+ <option name="TAB_SIZE" value="4" />
+ <option name="USE_TAB_CHARACTER" value="false" />
+ <option name="SMART_TABS" value="false" />
+ <option name="LABEL_INDENT_SIZE" value="0" />
+ <option name="LABEL_INDENT_ABSOLUTE" value="false" />
+ </ADDITIONAL_INDENT_OPTIONS>
+ <ADDITIONAL_INDENT_OPTIONS fileType="xml">
+ <option name="INDENT_SIZE" value="4" />
+ <option name="CONTINUATION_INDENT_SIZE" value="8" />
+ <option name="TAB_SIZE" value="4" />
+ <option name="USE_TAB_CHARACTER" value="false" />
+ <option name="SMART_TABS" value="false" />
+ <option name="LABEL_INDENT_SIZE" value="0" />
+ <option name="LABEL_INDENT_ABSOLUTE" value="false" />
+ </ADDITIONAL_INDENT_OPTIONS>
+ </value>
+ </option>
+ </component>
+ <component name="CompilerAPISettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="DEPRECATION" value="true" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="CompilerConfiguration">
+ <option name="DEFAULT_COMPILER" value="Javac" />
+ <resourceExtensions>
+ <entry name=".+\.(properties|xml|html|dtd|tld)" />
+ <entry name=".+\.(gif|png|jpeg|jpg)" />
+ </resourceExtensions>
+ <wildcardResourcePatterns>
+ <entry name="?*.properties" />
+ <entry name="?*.xml" />
+ <entry name="?*.gif" />
+ <entry name="?*.png" />
+ <entry name="?*.jpeg" />
+ <entry name="?*.jpg" />
+ <entry name="?*.html" />
+ <entry name="?*.dtd" />
+ <entry name="?*.tld" />
+ <entry name="?*.ftl" />
+ </wildcardResourcePatterns>
+ <annotationProcessing enabled="false" useClasspath="true" />
+ </component>
+ <component name="CopyrightManager" default="">
+ <module2copyright />
+ </component>
+ <component name="DependencyValidationManager">
+ <option name="SKIP_IMPORT_STATEMENTS" value="false" />
+ </component>
+ <component name="EclipseCompilerSettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="true" />
+ <option name="DEPRECATION" value="false" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="EclipseEmbeddedCompilerSettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="true" />
+ <option name="DEPRECATION" value="false" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
+ <component name="FacetAutodetectingManager">
+ <autodetection-disabled>
+ <facet-type id="Scala">
+ <modules>
+ <module name="files">
+ <files>
+ <file url="file://$PROJECT_DIR$/../../test/files/android/HelloAndroid.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/ant/fsc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/ant/scalac.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/ant/scaladoc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cldc/randoms.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test1/Main.check.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test1/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test2/Main.check.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test2/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test3/Main.check.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test3/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/JavaInteraction.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/bigints.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/bug560bis.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/inner.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/manifests.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/methvsfield.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/natives.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/nest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/protectedacc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/serialization.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t0632.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t1116.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t1143-2/t1143-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t1143.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/typerep.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/unittest_io.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/unittest_xml.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml01.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml02.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml03syntax.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml04embed.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlattr.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlmore.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlpull.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlstuff.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/annotations.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/bug676.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/bug680.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/console.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/genericNest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/interpreter.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/outerEnum.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/stringbuilder.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/sync-var.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/t0014.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/t1461.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/t1464/MyTrait.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/throws-annot.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/typerep.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/abstract.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/accesses.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/annot-nonconst.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/badtok-1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/badtok-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/badtok-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1010.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1011.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1041.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1106.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1112.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug112706A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1181.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1183.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1224.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1241.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1275.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1392.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1523.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1623.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1838.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug200.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug276.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug278.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug284.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug343.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug391.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug409.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug412.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug414.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug418.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug421.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug452.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug473.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug500.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug501.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug510.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug512.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug515.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug520.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug521.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug545.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug550.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug555.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug556.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug558.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug562.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug563.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug565.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug576.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug585.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug588.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug591.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug593.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug608.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug630.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug631.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug633.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug639.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug649.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug650.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug663.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug664.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug667.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug668.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug677.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug691.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug692.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug693.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug696.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug700.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug708.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug712.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug715.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug729.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug752.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug765.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug766.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug779.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug783.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug798.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug800.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug835.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug836.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug845.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug846.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug856.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug875.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug876.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug877.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug882.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug900.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug908.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug909.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug910.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug935.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug944.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug960.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug961.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug987.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug997.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/checksensible.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/constrs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/cyclics.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/divergent-implicit.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/faculty.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/forward.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/gadts1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/higherkind_novalue.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/imp2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/implicits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/lazy-override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/lazyvals.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/lubs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/mixins.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/multi-array.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/nopredefs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/null-unsoundness.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/overload.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/parstar.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/patmatexhaust.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/patternalts.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-after-terminal/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-after-terminal/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-before-parser/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-before-parser/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-cyclic-dependency/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-cyclic-dependency/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-multiple-rafter/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-multiple-rafter/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rafter-before-1/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rafter-before-1/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rightafter-terminal/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rightafter-terminal/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/sabin2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/saito.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/sensitive.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/structural.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/switch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0003.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0015.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0117.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0152.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0204.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0207.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0209.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0214.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0218.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0226.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0259.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0345.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0351.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0503.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0528neg.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0590.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0606.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0673/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0699/A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0699/B.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0764.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0842.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0899.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0903.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1009.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1033.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1163.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1168.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1215.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1371.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1659.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tailrec.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_bounds.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_typealias.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_variance.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_variance_enforce.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/typeerror.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/unreachablechar.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/variances.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/viewtest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/volatile-intersection.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/volatile.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_app.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_app2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_bounds.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_wrongarity.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_wrongarity2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmlcorner.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated6.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/List1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/MailBox.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/NoCyclicReference.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S8.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/Transactions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/X.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/Z.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/abstract.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/aliases.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/annot-inner.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/annotations.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/arrays2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/attributes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bounds.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0002.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0020.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0029.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0030.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0031.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0032.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0036.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0039.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0053.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0054.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0061.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0064.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0066.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0068.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0069.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0076.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0081.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0082.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0085.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0091.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0093.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0123.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0204.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0304.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0325.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0422.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0599.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0646.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1000.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1001.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1006.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1014.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1034.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1050.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1056.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1070.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1075.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1085.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1087.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1090.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1107.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1119.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1123.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug112606A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1136.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug115.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug116.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1168.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1185.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug119.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1203.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug121.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1210.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1210a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug122.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1237.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug124.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1241.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1279a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1292.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1385.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug151.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1565.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug159.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug160.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug175.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug177.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug183.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1858.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug201.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug210.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug211.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug229.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug245.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug247.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug262.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug267.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug284.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug287.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug289.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug295.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug296.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug304.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug318.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug319.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug342.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug344.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug348plus.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug359.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug360.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug361.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug372.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug374.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug389.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug397.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug402.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug404.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug415.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug419.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug422.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug430-feb09.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug430.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug432.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug439.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug443.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug460.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug514.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug516.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug522.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug530.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug531.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug532.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug533.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug566.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug577.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug592.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug595.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug596.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug599.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug602.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug604.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug607.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug611.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug613.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug615.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug616.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug628.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug640.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug651.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug661.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug675.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug684.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug690.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug694.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug697.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug698.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug703.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug704.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug711.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug720.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug756.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug757.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug757a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug758.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug759.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug762.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug767.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug780.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug788.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug789.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug796.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug802.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug803.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug805.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug807.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug812.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug839.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug851.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug873.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug880.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug892.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug911.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug927.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug946.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/builders.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/caseaccs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cfcrash.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/channels.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cls.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cls1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/clsrefine.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/code.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/collections.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/comp-rec-test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/compile.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/compile1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/compound.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/constfold.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/contrib467.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/contrib701.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cyclics.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/escapes2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/eta.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/exceptions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/functions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gadt-gilles.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gadts2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gosh.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gui.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/homonym.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/imp2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/imports.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/infer.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/infer2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/inferbroadtype.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/init.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/itay.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/jesper.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/kinzer.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/ksbug1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/lambda.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/lambdalift.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/lambdalift1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/largecasetest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/listpattern.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/localmodules.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/looping-jsig.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/manifest1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matchStarlift.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/maxim1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel6.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/mixins.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/modules.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/modules1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/moduletrans.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/needstypeearly.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nested.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nested2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/null.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nullary.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nullary_poly.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/partialfun.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/pat_gilles.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/pat_iuli.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns1213.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/pmbug.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/proj-rec-test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/propagate.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/protected-t1010.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/rebind.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/return_thistype.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/scoping1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/scoping2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/scoping3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/seqtest2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/simplelists.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/stable.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/strings.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/sudoku.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0055.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0154.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0165.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0227.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0231.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0273.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0288/Foo.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0301.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0438.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0453.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0504.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0586.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0591.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0651.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0654.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0674.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0710.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0770.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0774/deathname.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0774/unrelated.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0786.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0851.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0872.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0904.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0905.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0999.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1001.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1027.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1059.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1087.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1107/O.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1107/T.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1131.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1146.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1147.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1159.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1164.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1280.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1380/hallo.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1391.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1409/ConcreteImpl.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1438.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1439.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1480.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1648.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1675.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1761.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1789.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1840/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_boundedmonad.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_bounds1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_checkkinds_mix.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_gm.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_higherorder_bound_method.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_late_method_params.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_method.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_overloaded.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_param_scoping.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_poly.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_return_overriding.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_seq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_seq_typealias.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_subst.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_typeapp.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_typesub.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_variance.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_wildcards.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/ted.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test4a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test4refine.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test5refine.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/testcast.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/thistype.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/thistypes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/ticket0137.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tinondefcons.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/traits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tryexpr.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typealias_dubious.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typealiases.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typerep-stephane.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typerep.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typesafecons.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapply.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyComplex.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyContexts2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyGeneric.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyNeedsMemberType.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplySeq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyVal.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unicode-decode.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/valdefs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/variances.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/viewtest1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/viewtest2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/chang/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/ilya/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/ilya2/A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/signatures/sig.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t0695/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1101/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1102/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1150/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1152/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1176/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1196/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1197/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1203/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1230/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1231/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1232/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1263/test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1711/Seq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug597/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug597/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug687/QueryA.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug687/QueryB.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug722/IfElse.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug722/Parser.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug722/ScanBased.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug735/ScalaExpressions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug735/ScalaTyper.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug743/BracesXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug743/ParserXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug785/ScalaNewTyper.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug785/ScalaTrees.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug831/NewScalaParserXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug831/NewScalaTestXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-01.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-02.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-03.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-04.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-05.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-06.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-07.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-08.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-09.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-10.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-13.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/NestedClasses.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/absoverride.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/amp.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/arrays.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/arybufgrow.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bitsets.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/boolexprs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/boolord.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bridges.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug0325.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug1074.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug1192.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug1220.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug216.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug405.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug428.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug429.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug594.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug601.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug603.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug627.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug629.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug657.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug744.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug889.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug920.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug949.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug978.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bugs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/byname.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/caseclasses.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/checked.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/classof.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/collection-stacks.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/collections.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/colltest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/complicatedmatch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/constrained-types.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/constructors.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/contrib674.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/ctor-order.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/deeps.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/docgenerator.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/enums.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/exceptions-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/exceptions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/existentials.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/exoticnames.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/fors.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/forvaleq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/gadts.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/impconvtimes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/implicits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/imports.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/infiniteloop.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/infix.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/intmap.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/iq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/issue192.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/iterables.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/iterators.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/json.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/jtptest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-exprs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-locals.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-traits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lisp.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lists.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/literals.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/map_test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matcharraytail.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchbytes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchemptyarray.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchintasany.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchonstream.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/misc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/missingparams.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/mixins.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/multi-array.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/overloads.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/patmatnew.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/promotion.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/randomAccessSeq-apply.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/range.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/retclosure.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/retsynch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/richs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/runtime-richChar.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/runtime.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/slices.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/sort.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/streams.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/structural.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/syncchannel.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0005.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0042.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0091.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0412.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0421.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0485.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0486.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0508.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0528.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0607.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0631.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0663.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0668.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0677.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0700.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0807.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0883.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0911.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0936.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1323.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1368.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1423.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1500.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1501.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1505.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1524.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1535.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1618.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1620.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1718.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1747.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1829.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tailcalls.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tcpoly_monads.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tcpoly_overriding.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tcpoly_parseridioms.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/try-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/try.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tuples.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/typealias_overriding.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unapply.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unapplyArray.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unboxingBug.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unittest_collection.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unittest_io.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/verify-ctor.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/withIndex.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/scalacheck/list.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/fact.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/second.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/t1015.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/t1017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/utf8.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/ackermann.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/ary.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/binarytrees.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/binarytrees.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/binarytrees.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/chameneos.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/except.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/fannkuch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/fannkuch.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/fibo.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/harmonic.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/hash.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/hash2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/hello.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/knucleotide.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/knucleotide.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/lists.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/message.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nbody.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nestedloop.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsieve.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsieve.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsievebits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsievebits.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsievebits.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/objinst.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/partialsums.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/partialsums.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/recursive.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/regexdna.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/spectralnorm.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/strcat.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/sumcol.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/sumcol.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/takfp.scala" />
+ </files>
+ </module>
+ <module name="library">
+ <files>
+ <file url="file://$PROJECT_DIR$/../android-library/scala/ScalaObject.scala" />
+ <file url="file://$PROJECT_DIR$/../android-library/scala/reflect/ScalaBeanInfo.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Application.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/BigDecimal.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/BigInt.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Console.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Math.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Numeric.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Ordering.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Predef.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Range.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Symbol.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/JavaConversions.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/immutable/List.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/immutable/PagedSeq.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/mutable/OpenHashMap.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/mutable/StringBuilder.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/mutable/WeakHashMap.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/compat/Platform.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/DelayedLazyVal.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/jolib.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/ops.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/pilib.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/BufferedSource.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/BytePickle.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/Codec.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/File.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/Position.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/Source.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/UTF8Codec.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/mobile/Code.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/mobile/Location.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/net/Utility.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/PhantomReference.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/ReferenceQueue.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/ReferenceWrapper.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/SoftReference.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/WeakReference.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/reflect/Invocation.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/reflect/Manifest.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/reflect/ScalaBeanInfo.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/MethodCache.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/NonLocalReturnControl.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichChar.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichClass.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichDouble.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichFloat.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichInt.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichLong.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichString.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/StringAdd.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/testing/Benchmark.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/testing/SUnit.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/testing/Show.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/text/Document.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/ClassLoader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/DynamicVariable.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/Marshal.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/NameTransformer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/Properties.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/Random.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/BaseBerrySethi.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/DetWordAutom.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/Inclusion.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/NondetWordAutom.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/SubsetConstruction.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/WordBerrySethi.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/control/Exception.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/grammar/HedgeRHS.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/grammar/TreeRHS.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/logging/ConsoleLogger.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/logging/Logged.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/matching/Regex.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/ast/AbstractSyntax.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/ast/Binders.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/ImplicitConversions.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/JavaTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/PackratParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/Parsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/RegexParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/lexical/Lexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/lexical/Scanners.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/lexical/StdLexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/syntactical/TokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/testing/RegexTest.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/testing/Tester.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/$tilde.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/ImplicitConversions.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/Parsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/lexical/Lexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/lexical/Scanners.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/lexical/StdLexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/syntactical/BindingParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/syntactical/StdTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/syntactical/TokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/testing/Tester.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/CharArrayPosition.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/CharArrayReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/CharSequenceReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/NoPosition.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/OffsetPosition.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/PagedSeqReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/Position.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/Positional.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/Reader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/StreamReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/json/JSON.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/json/Lexer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/json/Parser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/syntax/StdTokens.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/syntax/Tokens.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/Base.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/PointedHedgeExp.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/SyntaxError.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/WordExp.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Atom.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Attribute.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Comment.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Document.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Elem.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/EntityRef.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Group.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/HasKeyValue.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/MalformedAttributeException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/MetaData.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NamespaceBinding.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Node.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NodeBuffer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NodeSeq.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NodeTraverser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Null.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/PCData.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Parsing.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/PrefixedAttribute.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/PrettyPrinter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/ProcInstr.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/QNode.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/SpecialNode.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Text.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/TextBuffer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/TopScope.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/TypeSymbol.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Unparsed.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/UnprefixedAttribute.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Utility.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/XML.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Xhtml.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ContentModel.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ContentModelParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/DTD.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/Decl.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/DocType.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/DtdTypeSymbol.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ElementValidator.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ExternalID.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/Scanner.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/Tokens.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ValidationException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/Binder.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/LoggedNodeFactory.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/NodeFactory.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/XMLLoader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/CircularIncludeException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/UnavailableResourceException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/XIncludeException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/EncodingHeuristics.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/XIncludeFilter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/XIncluder.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ConstructingHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ConstructingParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/DefaultMarkupHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ExternalSources.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/FactoryAdapter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/FatalError.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/MarkupHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/MarkupParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/NoBindingFactoryAdapter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/TokenTests.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ValidatingMarkupHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/XhtmlEntities.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/XhtmlParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/path/Expression.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/CachedFileStorage.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/Index.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/IndexedStorage.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/SetStorage.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/pull/XMLEvent.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/pull/XMLEventReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/transform/BasicTransformer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/transform/RewriteRule.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/transform/RuleTransformer.scala" />
+ <file url="file://$PROJECT_DIR$/../scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala" />
+ </files>
+ </module>
+ </modules>
+ </facet-type>
+ </autodetection-disabled>
+ </component>
+ <component name="IdProvider" IDEtalkID="522B472C2EA573563CC2BA93160270BE" />
+ <component name="InspectionProjectProfileManager">
+ <list size="5">
+ <item index="0" class="java.lang.String" itemvalue="TYPO" />
+ <item index="1" class="java.lang.String" itemvalue="SERVER PROBLEM" />
+ <item index="2" class="java.lang.String" itemvalue="INFO" />
+ <item index="3" class="java.lang.String" itemvalue="WARNING" />
+ <item index="4" class="java.lang.String" itemvalue="ERROR" />
+ </list>
+ </component>
+ <component name="JavacSettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="DEPRECATION" value="true" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="JavadocGenerationManager">
+ <option name="OUTPUT_DIRECTORY" />
+ <option name="OPTION_SCOPE" value="protected" />
+ <option name="OPTION_HIERARCHY" value="true" />
+ <option name="OPTION_NAVIGATOR" value="true" />
+ <option name="OPTION_INDEX" value="true" />
+ <option name="OPTION_SEPARATE_INDEX" value="true" />
+ <option name="OPTION_DOCUMENT_TAG_USE" value="false" />
+ <option name="OPTION_DOCUMENT_TAG_AUTHOR" value="false" />
+ <option name="OPTION_DOCUMENT_TAG_VERSION" value="false" />
+ <option name="OPTION_DOCUMENT_TAG_DEPRECATED" value="true" />
+ <option name="OPTION_DEPRECATED_LIST" value="true" />
+ <option name="OTHER_OPTIONS" value="" />
+ <option name="HEAP_SIZE" />
+ <option name="LOCALE" />
+ <option name="OPEN_IN_BROWSER" value="true" />
+ </component>
+ <component name="JikesSettings">
+ <option name="JIKES_PATH" value="" />
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="DEPRECATION" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="IS_EMACS_ERRORS_MODE" value="true" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ </component>
+ <component name="Palette2">
+ <group name="Swing">
+ <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
+ </item>
+ <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
+ </item>
+ <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
+ <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
+ <initial-values>
+ <property name="text" value="Button" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="RadioButton" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="CheckBox" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="Label" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+ <preferred-size width="200" height="200" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+ <preferred-size width="200" height="200" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
+ </item>
+ <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
+ <preferred-size width="-1" height="20" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
+ </item>
+ <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
+ </item>
+ </group>
+ </component>
+ <component name="ProjectDetails">
+ <option name="projectName" value="scala-lang" />
+ </component>
+ <component name="ProjectDictionaryState">
+ <dictionary name="dragos" />
+ <dictionary name="odersky" />
+ </component>
+ <component name="ProjectKey">
+ <option name="state" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk/scala-lang.ipr" />
+ </component>
+ <component name="ProjectModuleManager">
+ <modules>
+ <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
+ <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
+ <module fileurl="file://$PROJECT_DIR$/dbc.iml" filepath="$PROJECT_DIR$/dbc.iml" />
+ <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
+ <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
+ <module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
+ <module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
+ </modules>
+ </component>
+ <component name="ProjectRootManager" version="2" languageLevel="JDK_1_5" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
+ <output url="file://$PROJECT_DIR$/../../out" />
+ </component>
+ <component name="ResourceManagerContainer">
+ <option name="myResourceBundles">
+ <value>
+ <list size="0" />
+ </value>
+ </option>
+ </component>
+ <component name="RmicSettings">
+ <option name="IS_EANABLED" value="false" />
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="GENERATE_IIOP_STUBS" value="false" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ </component>
+ <component name="ScalaSettings">
+ <option name="MAXIMUM_HEAP_SIZE" value="1024" />
+ <option name="DEPRECATION" value="false" />
+ <option name="UNCHECKED" value="false" />
+ </component>
+ <component name="SvnBranchConfigurationManager">
+ <option name="myConfigurationMap">
+ <map>
+ <entry key="$PROJECT_DIR$/../..">
+ <value>
+ <SvnBranchConfiguration>
+ <option name="branchMap">
+ <map>
+ <entry key="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches">
+ <value>
+ <list />
+ </value>
+ </entry>
+ <entry key="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags">
+ <value>
+ <list />
+ </value>
+ </entry>
+ </map>
+ </option>
+ <option name="branchUrls">
+ <list>
+ <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
+ <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
+ </list>
+ </option>
+ <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
+ </SvnBranchConfiguration>
+ </value>
+ </entry>
+ </map>
+ </option>
+ <option name="myVersion" value="124" />
+ <option name="mySupportsUserInfoFilter" value="true" />
+ </component>
+ <component name="VcsDirectoryMappings">
+ <mapping directory="" vcs="svn" />
+ </component>
+ <component name="WebServicesPlugin" addRequiredLibraries="true" />
+ <component name="libraryTable">
+ <library name="Project ant library">
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/ant-contrib.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/vizant.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/maven-ant-tasks-2.0.9.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/ant-dotnet-1.0.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="Project Scala SDK">
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../lib/jline.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/fjbg.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ScalaCheck.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/msil.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="ant">
+ <CLASSES>
+ <root url="jar:///../share/ant/lib/ant.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="locker">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
+ <root url="file://$PROJECT_DIR$/../../build/locker/classes/compiler" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ </component>
+</project>
+
diff --git a/scalap.iml b/src/intellij/scalap.iml.SAMPLE
index be028bff8f..48f98a0b1e 100644
--- a/scalap.iml
+++ b/src/intellij/scalap.iml.SAMPLE
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/scalap">
- <sourceFolder url="file://$MODULE_DIR$/src/scalap" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../scalap">
+ <sourceFolder url="file://$MODULE_DIR$/../scalap" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/swing.iml b/src/intellij/swing.iml.SAMPLE
index 8ef3d159e7..c623fe0e72 100644
--- a/swing.iml
+++ b/src/intellij/swing.iml.SAMPLE
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/swing">
- <sourceFolder url="file://$MODULE_DIR$/src/swing" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../swing">
+ <sourceFolder url="file://$MODULE_DIR$/../swing" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/src/library/scala/Annotation.scala b/src/library/scala/Annotation.scala
index 6902256432..8c75e15a79 100644
--- a/src/library/scala/Annotation.scala
+++ b/src/library/scala/Annotation.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala
index 871e61913a..21e6b68091 100644
--- a/src/library/scala/Application.scala
+++ b/src/library/scala/Application.scala
@@ -1,17 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
-import java.lang.System.getProperty
import scala.compat.Platform.currentTime
/** <p>
@@ -75,8 +73,8 @@ import scala.compat.Platform.currentTime
trait Application {
- /** The time when execution of this program started.
- */
+ /** The time when the execution of this program started, in milliseconds since 1
+ * January 1970 UTC. */
val executionStart: Long = currentTime
/** The default main method.
@@ -84,7 +82,7 @@ trait Application {
* @param args the arguments passed to the main method
*/
def main(args: Array[String]) {
- if (getProperty("scala.time") ne null) {
+ if (util.Properties.propIsSet("scala.time")) {
val total = currentTime - executionStart
Console.println("[total " + total + "ms]")
}
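
As a sketch of what the rewritten main above now does, assume a hypothetical standalone object TimingDemo (not part of this patch); the timing line is printed only when the JVM is started with -Dscala.time:

import scala.compat.Platform.currentTime

object TimingDemo {
  val executionStart: Long = currentTime

  def main(args: Array[String]) {
    println("doing some work")
    // same check the patched Application.main performs
    if (util.Properties.propIsSet("scala.time")) {
      val total = currentTime - executionStart
      Console.println("[total " + total + "ms]")
    }
  }
}
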
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index a323dccffc..e7ee280cef 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -1,57 +1,55 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
import scala.collection.generic._
-import scala.collection.mutable.{ArrayBuilder, GenericArray}
+import scala.collection.mutable.{ArrayBuilder, ArraySeq}
import compat.Platform.arraycopy
import scala.reflect.ClassManifest
import scala.runtime.ScalaRunTime.{array_apply, array_update}
-/** A class containing a fall back builder for arrays where the element type
+/** Contains a fallback builder for arrays when the element type
* does not have a class manifest. In that case a generic array is built.
*/
class FallbackArrayBuilding {
/** A builder factory that generates a generic array.
* Called instead of Array.newBuilder if the element type of an array
- * does not have a class manifest. Note that fallbackBuilder fcatory
+ * does not have a class manifest. Note that fallbackBuilder factory
* needs an implicit parameter (otherwise it would not be dominated in implicit search
* by Array.canBuildFrom). We make sure that that implicit search is always
- * succesfull.
+ * successful.
*/
- implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, GenericArray[T]] =
- new CanBuildFrom[Array[_], T, GenericArray[T]] {
- def apply(from: Array[_]) = GenericArray.newBuilder[T]
- def apply() = GenericArray.newBuilder[T]
+ implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, ArraySeq[T]] =
+ new CanBuildFrom[Array[_], T, ArraySeq[T]] {
+ def apply(from: Array[_]) = ArraySeq.newBuilder[T]
+ def apply() = ArraySeq.newBuilder[T]
}
}
-/** This object contains utility methods operating on arrays.
+/** Utility methods for operating on arrays.
*
* @author Martin Odersky
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
-
- import runtime.BoxedArray;
- import scala.runtime.ScalaRunTime.boxArray;
-
implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
def apply(from: Array[_]) = ArrayBuilder.make[T]()(m)
def apply() = ArrayBuilder.make[T]()(m)
}
+ /**
+ * Returns a new [[scala.collection.mutable.ArrayBuilder]].
+ */
def newBuilder[T](implicit m: ClassManifest[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(m)
private def slowcopy(src : AnyRef,
@@ -59,23 +57,30 @@ object Array extends FallbackArrayBuilding {
dest : AnyRef,
destPos : Int,
length : Int) {
- var i = 0
- while (i < length) {
- array_update(dest, i, array_apply(src, i))
+ var i = srcPos
+ var j = destPos
+ val srcUntil = srcPos + length
+ while (i < srcUntil) {
+ array_update(dest, j, array_apply(src, i))
i += 1
+ j += 1
}
}
/** Copy one array to another.
- * Equivalent to
- * <code>System.arraycopy(src, srcPos, dest, destPos, length)</code>,
- * except that this works also for polymorphic and boxed arrays.
+ * Equivalent to Java's
+ * `System.arraycopy(src, srcPos, dest, destPos, length)`,
+ * except that this also works for polymorphic and boxed arrays.
+ *
+ * Note that the passed-in `dest` array will be modified by this call.
*
- * @param src ...
- * @param srcPos ...
- * @param dest ...
- * @param destPos ...
- * @param length ...
+ * @param src the source array.
+ * @param srcPos starting position in the source array.
+ * @param dest destination array.
+ * @param destPos starting position in the destination array.
+ * @param length the number of array elements to be copied.
+ *
+ * @see `java.lang.System#arraycopy`
*/
def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) {
val srcClass = src.getClass
@@ -85,13 +90,13 @@ object Array extends FallbackArrayBuilding {
slowcopy(src, srcPos, dest, destPos, length)
}
- /** Returns array of length 0 */
+ /** Returns an array of length 0 */
def empty[T: ClassManifest]: Array[T] = new Array[T](0)
- /** Create an array with given elements.
+ /** Creates an array with given elements.
*
* @param xs the elements to put in the array
- * @return the array containing elements xs.
+ * @return an array containing all elements from xs.
*/
def apply[T: ClassManifest](xs: T*): Array[T] = {
val array = new Array[T](xs.length)
@@ -100,6 +105,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Boolean` objects */
def apply(x: Boolean, xs: Boolean*): Array[Boolean] = {
val array = new Array[Boolean](xs.length + 1)
array(0) = x
@@ -108,6 +114,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Byte` objects */
def apply(x: Byte, xs: Byte*): Array[Byte] = {
val array = new Array[Byte](xs.length + 1)
array(0) = x
@@ -116,6 +123,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Short` objects */
def apply(x: Short, xs: Short*): Array[Short] = {
val array = new Array[Short](xs.length + 1)
array(0) = x
@@ -124,6 +132,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Char` objects */
def apply(x: Char, xs: Char*): Array[Char] = {
val array = new Array[Char](xs.length + 1)
array(0) = x
@@ -132,6 +141,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Int` objects */
def apply(x: Int, xs: Int*): Array[Int] = {
val array = new Array[Int](xs.length + 1)
array(0) = x
@@ -140,6 +150,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Long` objects */
def apply(x: Long, xs: Long*): Array[Long] = {
val array = new Array[Long](xs.length + 1)
array(0) = x
@@ -148,6 +159,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Float` objects */
def apply(x: Float, xs: Float*): Array[Float] = {
val array = new Array[Float](xs.length + 1)
array(0) = x
@@ -156,6 +168,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Double` objects */
def apply(x: Double, xs: Double*): Array[Double] = {
val array = new Array[Double](xs.length + 1)
array(0) = x
@@ -164,6 +177,7 @@ object Array extends FallbackArrayBuilding {
array
}
+ /** Creates an array of `Unit` objects */
def apply(x: Unit, xs: Unit*): Array[Unit] = {
val array = new Array[Unit](xs.length + 1)
array(0) = x
@@ -172,26 +186,30 @@ object Array extends FallbackArrayBuilding {
array
}
- /** Create array with given dimensions */
+ /** Creates array with given dimensions */
def ofDim[T: ClassManifest](n1: Int): Array[T] =
new Array[T](n1)
+ /** Creates a 2-dimensional array */
def ofDim[T: ClassManifest](n1: Int, n2: Int): Array[Array[T]] = {
val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]])
for (i <- 0 until n1) arr(i) = new Array[T](n2)
arr
// tabulate(n1)(_ => ofDim[T](n2))
}
+ /** Creates a 3-dimensional array */
def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
tabulate(n1)(_ => ofDim[T](n2, n3))
+ /** Creates a 4-dimensional array */
def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4))
+ /** Creates a 5-dimensional array */
def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5))
- /** Concatenate all argument sequences into a single array.
+ /** Concatenates all arrays into a single array.
*
- * @param xs the given argument sequences
- * @return the array created from the concatenated arguments
+ * @param xss the given arrays
+ * @return the array created from concatenating `xss`
*/
def concat[T: ClassManifest](xss: Array[T]*): Array[T] = {
val b = newBuilder[T]
@@ -200,14 +218,23 @@ object Array extends FallbackArrayBuilding {
b.result
}
- /** An array that contains the results of some element computation a number
+ /** Returns an array that contains the results of some element computation a number
* of times.
*
- * @param n the number of elements returned
+ * Note that this means that `elem` is computed a total of n times:
+ * {{{
+ * scala> Array.fill(3){ java.lang.Math.random }
+ * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306)
+ * }}}
+ *
+ * @param n the number of elements desired
* @param elem the element computation
+ * @return an Array of size n, where each element contains the result of computing
+ * `elem`.
*/
def fill[T: ClassManifest](n: Int)(elem: => T): Array[T] = {
val b = newBuilder[T]
+ b.sizeHint(n)
var i = 0
while (i < n) {
b += elem
@@ -216,7 +243,7 @@ object Array extends FallbackArrayBuilding {
b.result
}
- /** A two-dimensional array that contains the results of some element
+ /** Returns a two-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
@@ -226,7 +253,7 @@ object Array extends FallbackArrayBuilding {
def fill[T: ClassManifest](n1: Int, n2: Int)(elem: => T): Array[Array[T]] =
tabulate(n1)(_ => fill(n2)(elem))
- /** A three-dimensional array that contains the results of some element
+ /** Returns a three-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
@@ -237,7 +264,7 @@ object Array extends FallbackArrayBuilding {
def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] =
tabulate(n1)(_ => fill(n2, n3)(elem))
- /** A four-dimensional array that contains the results of some element
+ /** Returns a four-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
@@ -249,7 +276,7 @@ object Array extends FallbackArrayBuilding {
def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => fill(n2, n3, n4)(elem))
- /** A five-dimensional array that contains the results of some element
+ /** Returns a five-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
@@ -262,15 +289,16 @@ object Array extends FallbackArrayBuilding {
def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem))
- /** An array containing values of a given function over a range of integer
+ /** Returns an array containing values of a given function over a range of integer
* values starting from 0.
*
- * @param n The number of elements in the traversable
+ * @param n The number of elements in the array
* @param f The function computing element values
- * @return A traversable consisting of elements `f(0), ..., f(n -1)`
+ * @return An array consisting of elements `f(0), f(1), ..., f(n - 1)`
*/
def tabulate[T: ClassManifest](n: Int)(f: Int => T): Array[T] = {
val b = newBuilder[T]
+ b.sizeHint(n)
var i = 0
while (i < n) {
b += f(i)
@@ -279,7 +307,7 @@ object Array extends FallbackArrayBuilding {
b.result
}
- /** A two-dimensional array containing values of a given function over
+ /** Returns a two-dimensional array containing values of a given function over
* ranges of integer values starting from 0.
*
* @param n1 the number of elements in the 1st dimension
@@ -289,7 +317,7 @@ object Array extends FallbackArrayBuilding {
def tabulate[T: ClassManifest](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] =
tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
- /** A three-dimensional array containing values of a given function over
+ /** Returns a three-dimensional array containing values of a given function over
* ranges of integer values starting from 0.
*
* @param n1 the number of elements in the 1st dimension
@@ -300,7 +328,7 @@ object Array extends FallbackArrayBuilding {
def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] =
tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
- /** A four-dimensional array containing values of a given function over
+ /** Returns a four-dimensional array containing values of a given function over
* ranges of integer values starting from 0.
*
* @param n1 the number of elements in the 1st dimension
@@ -312,7 +340,7 @@ object Array extends FallbackArrayBuilding {
def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
- /** A five-dimensional array containing values of a given function over
+ /** Returns a five-dimensional array containing values of a given function over
* ranges of integer values starting from 0.
*
* @param n1 the number of elements in the 1st dimension
@@ -325,25 +353,27 @@ object Array extends FallbackArrayBuilding {
def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
- /** An array containing a sequence of increasing integers in a range.
+ /** Returns an array containing a sequence of increasing integers in a range.
*
* @param from the start value of the array
- * @param end the end value of the array (the first value NOT returned)
+ * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
* @return the array with values in range `start, start + 1, ..., end - 1`
- * up to, but exclusding, `end`.
+ * up to, but excluding, `end`.
*/
def range(start: Int, end: Int): Array[Int] = range(start, end, 1)
- /** An array containing equally spaced values in some integer interval.
+ /** Returns an array containing equally spaced values in some integer interval.
*
* @param start the start value of the array
- * @param end the end value of the array (the first value NOT returned)
- * @param step the increment value of the array (must be positive or negative)
+ * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
+ * @param step the increment value of the array (may not be zero)
* @return the array with values in `start, start + step, ...` up to, but excluding `end`
*/
def range(start: Int, end: Int, step: Int): Array[Int] = {
if (step == 0) throw new IllegalArgumentException("zero step")
val b = newBuilder[Int]
+ b.sizeHint(Range.count(start, end, step, false))
+
var i = start
while (if (step < 0) end < i else i < end) {
b += i
@@ -352,36 +382,42 @@ object Array extends FallbackArrayBuilding {
b.result
}
- /** An array containing repeated applications of a function to a start value.
+ /** Returns an array containing repeated applications of a function to a start value.
*
* @param start the start value of the array
* @param len the number of elements returned by the array
- * @param f the function that's repeatedly applied
+ * @param f the function that is repeatedly applied
* @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...`
*/
def iterate[T: ClassManifest](start: T, len: Int)(f: T => T): Array[T] = {
val b = newBuilder[T]
- var acc = start
- var i = 0
- while (i < len) {
+
+ if (len > 0) {
+ b.sizeHint(len)
+ var acc = start
+ var i = 1
b += acc
- acc = f(acc)
- i += 1
+
+ while (i < len) {
+ acc = f(acc)
+ i += 1
+ b += acc
+ }
}
b.result
}
- /** This method is called in a pattern match { case Seq(...) => }.
+ /** Called in a pattern match like `{ case Array(x,y,z) => println("3 elements") }`.
*
* @param x the selector value
- * @return sequence wrapped in an option, if this is a Seq, otherwise none
+ * @return sequence wrapped in a [[scala.Some]], if x is a Seq, otherwise `None`
*/
def unapplySeq[T](x: Array[T]): Option[IndexedSeq[T]] =
if (x == null) None else Some(x.toIndexedSeq)
// !!! the null check should to be necessary, but without it 2241 fails. Seems to be a bug
// in pattern matcher.
- /** Create an array containing several copies of an element.
+ /** Creates an array containing several copies of an element.
*
* @param n the length of the resulting array
* @param elem the element composing the resulting array
@@ -398,8 +434,8 @@ object Array extends FallbackArrayBuilding {
a
}
- /** Create an array containing the values of a given function <code>f</code>
- * over given range <code>[0..n)</code>
+ /** Creates an array containing the values of a given function `f`
+ * over given range `[0..n)`
*/
@deprecated("use `Array.tabulate' instead")
def fromFunction[T: ClassManifest](f: Int => T)(n: Int): Array[T] = {
@@ -412,37 +448,37 @@ object Array extends FallbackArrayBuilding {
a
}
- /** Create an array containing the values of a given function <code>f</code>
- * over given range <code>[0..n1, 0..n2)</code>
+ /** Creates an array containing the values of a given function `f`
+ * over given range `[0..n1, 0..n2)`
*/
@deprecated("use `Array.tabulate' instead")
def fromFunction[T: ClassManifest](f: (Int, Int) => T)(n1: Int, n2: Int): Array[Array[T]] =
fromFunction(i => fromFunction(f(i, _))(n2))(n1)
- /** Create an array containing the values of a given function <code>f</code>
- * over given range <code>[0..n1, 0..n2, 0..n3)</code>
+ /** Creates an array containing the values of a given function `f`
+ * over given range `[0..n1, 0..n2, 0..n3)`
*/
@deprecated("use `Array.tabulate' instead")
def fromFunction[T: ClassManifest](f: (Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
fromFunction(i => fromFunction(f(i, _, _))(n2, n3))(n1)
- /** Create an array containing the values of a given function <code>f</code>
- * over given range <code>[0..n1, 0..n2, 0..n3, 0..n4)</code>
+ /** Creates an array containing the values of a given function `f`
+ * over given range `[0..n1, 0..n2, 0..n3, 0..n4)`
*/
@deprecated("use `Array.tabulate' instead")
def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
fromFunction(i => fromFunction(f(i, _, _, _))(n2, n3, n4))(n1)
- /** Create an array containing the values of a given function <code>f</code>
- * over given range <code>[0..n1, 0..n2, 0..n3, 0..n4, 0..n5)</code>
+ /** Creates an array containing the values of a given function `f`
+ * over given range `[0..n1, 0..n2, 0..n3, 0..n4, 0..n5)`
*/
@deprecated("use `Array.tabulate' instead")
def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
fromFunction(i => fromFunction(f(i, _, _, _, _))(n2, n3, n4, n5))(n1)
}
-/** This class represents polymorphic arrays. <code>Array[T]</code> is Scala's representation
- * for Java's <code>T[]</code>.
+/** Represents polymorphic arrays. `Array[T]` is Scala's representation
+ * for Java's `T[]`.
*
* @author Martin Odersky
* @version 1.0
@@ -510,17 +546,17 @@ final class Array[T](_length: Int) {
/** The element at given index.
* <p>
- * Indices start a <code>0</code>; <code>xs.apply(0)</code> is the first
- * element of array <code>xs</code>.
+ * Indices start a `0`; `xs.apply(0)` is the first
+ * element of array `xs`.
* </p>
* <p>
- * Note the indexing syntax <code>xs(i)</code> is a shorthand for
- * <code>xs.apply(i)</code>.
+ * Note the indexing syntax `xs(i)` is a shorthand for
+ * `xs.apply(i)`.
* </p>
*
* @param i the index
- * @throws ArrayIndexOutOfBoundsException if <code>i < 0</code> or
- * <code>length <= i</code>
+ * @throws ArrayIndexOutOfBoundsException if `i < 0` or
+ * `length <= i`
*/
def apply(i: Int): T = throw new Error()
@@ -528,18 +564,26 @@ final class Array[T](_length: Int) {
* Update the element at given index.
* </p>
* <p>
- * Indices start a <code>0</code>; <code>xs.apply(0)</code> is the first
- * element of array <code>xs</code>.
+ * Indices start a `0`; `xs.apply(0)` is the first
+ * element of array `xs`.
* </p>
* <p>
- * Note the indexing syntax <code>xs(i) = x</code> is a shorthand
- * for <code>xs.update(i, x)</code>.
+ * Note the indexing syntax `xs(i) = x` is a shorthand
+ * for `xs.update(i, x)`.
* </p>
*
* @param i the index
- * @param x the value to be written at index <code>i</code>
- * @throws ArrayIndexOutOfBoundsException if <code>i < 0</code> or
- * <code>length <= i</code>
+ * @param x the value to be written at index `i`
+ * @throws ArrayIndexOutOfBoundsException if `i < 0` or
+ * `length <= i`
*/
def update(i: Int, x: T) { throw new Error() }
+
+ /** <p>
+ * Clone the Array.
+ * </p>
+ *
+ * @return A clone of the Array.
+ */
+ override def clone: Array[T] = throw new Error()
}
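
The reworked scaladoc above pins down the semantics of the Array factory methods; a short illustrative sketch, using a hypothetical ArrayFactoriesDemo object that is not part of the patch:

object ArrayFactoriesDemo {
  def main(args: Array[String]) {
    // fill: the by-name `elem` is evaluated once per slot, so three different numbers
    val randoms = Array.fill(3)(java.lang.Math.random)

    // tabulate: f(0), f(1), ..., f(n - 1)
    val squares = Array.tabulate(5)(i => i * i)        // Array(0, 1, 4, 9, 16)

    // range: values from start up to, but excluding, end, in the given step
    val evens = Array.range(0, 10, 2)                  // Array(0, 2, 4, 6, 8)

    // iterate: len values start, f(start), f(f(start)), ...
    val powers = Array.iterate(1, 5)(_ * 2)            // Array(1, 2, 4, 8, 16)

    // copy: like System.arraycopy, but also for polymorphic and boxed arrays;
    // dest is modified in place, honouring srcPos and destPos
    val dest = new Array[Int](5)
    Array.copy(squares, 1, dest, 2, 3)                 // dest = Array(0, 0, 1, 4, 9)

    // unapplySeq: arrays can be taken apart in a pattern match
    Array(1, 2, 3) match {
      case Array(x, y, z) => println("3 elements: " + x + ", " + y + ", " + z)
      case _              => println("some other shape")
    }

    println(randoms.mkString(", "))
    println(squares.mkString(", "))
    println(evens.mkString(", "))
    println(powers.mkString(", "))
    println(dest.mkString(", "))
  }
}
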
diff --git a/src/library/scala/Cell.scala b/src/library/scala/Cell.scala
index 9730b12f25..0e054d5a4f 100644
--- a/src/library/scala/Cell.scala
+++ b/src/library/scala/Cell.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/ClassfileAnnotation.scala b/src/library/scala/ClassfileAnnotation.scala
index ecc488e74f..b762d71a97 100644
--- a/src/library/scala/ClassfileAnnotation.scala
+++ b/src/library/scala/ClassfileAnnotation.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index 8be1e0f8dc..0f2a76cbc5 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -83,7 +82,7 @@ object Console {
/** Set the default output stream.
*
- * @param@ out the new output stream.
+ * @param out the new output stream.
*/
def setOut(out: OutputStream): Unit =
setOut(new PrintStream(out))
@@ -213,17 +212,6 @@ object Console {
*/
def printf(text: String, args: Any*) { out.print(text format (args : _*)) }
- /**
- * @see <a href="#printf(java.lang.String,scala.Any*)"
- * target="contentFrame">Console.printf</a>.
- */
- @deprecated("For console output, use <code>Console.printf</code>. For <code>String</code>\n"+
- "formatting, <code>StringOps</code>'s <code>format</code> method.")
- def format(text: String, args: Any*) {
- if (text eq null) out.printf("null")
- else out.print(text format (args : _*))
- }
-
/** Read a full line from the terminal. Returns <code>null</code> if the end of the
* input stream has been reached.
*
@@ -435,26 +423,4 @@ object Console {
}
res
}
-
- private def textParams(s: Seq[Any]): Array[AnyRef] = {
- val res = new Array[AnyRef](s.length)
- var i: Int = 0
- val iter = s.iterator
- while (iter.hasNext) {
- res(i) = iter.next match {
- case x: Boolean => java.lang.Boolean.valueOf(x)
- case x: Byte => java.lang.Byte.valueOf(x)
- case x: Short => java.lang.Short.valueOf(x)
- case x: Char => java.lang.Character.valueOf(x)
- case x: Int => java.lang.Integer.valueOf(x)
- case x: Long => java.lang.Long.valueOf(x)
- case x: Float => java.lang.Float.valueOf(x)
- case x: Double => java.lang.Double.valueOf(x)
- case x: Unit => "()"
- case x: AnyRef => x
- }
- i += 1
- }
- res
- }
}
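
The removed Console.format pointed callers to Console.printf for console output and to the format method on strings; a brief sketch of both replacements, using a hypothetical ConsoleDemo object not part of the patch:

object ConsoleDemo {
  def main(args: Array[String]) {
    // console output: Console.printf remains available
    Console.printf("%d + %d = %d\n", 2, 3, 2 + 3)

    // string formatting: the format method that StringOps adds to every String
    val msg = "pi is roughly %.2f" format 3.14159
    println(msg)
  }
}
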
diff --git a/src/library/scala/CountedIterator.scala b/src/library/scala/CountedIterator.scala
index ed3ac99f0e..3aff6f2f9b 100644
--- a/src/library/scala/CountedIterator.scala
+++ b/src/library/scala/CountedIterator.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/Either.scala b/src/library/scala/Either.scala
index cc84685287..cbfbee1f35 100644
--- a/src/library/scala/Either.scala
+++ b/src/library/scala/Either.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -328,38 +327,6 @@ object Either {
case Right(t) => t
}
- /**
- * Returns the <code>Left</code> values in the given <code>Iterable</code> of <code>Either</code>s.
- */
- @deprecated("use `for (Left(a) <- es) yield a'")
- def lefts[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[A]](Nil)((e, as) => e match {
- case Left(a) => a :: as
- case Right(_) => as
- })
-
- /**
- * Returns the <code>Right</code> values in the given<code>Iterable</code> of <code>Either</code>s.
- */
- @deprecated("use `for (Right(a) <- es) yield a'")
- def rights[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[B]](Nil)((e, bs) => e match {
- case Left(_) => bs
- case Right(b) => b :: bs
- })
-
- /** Transforms an Iterable of Eithers into a pair of lists.
- *
- * @param xs the iterable of Eithers to separate
- * @return a pair of lists.
- */
- @deprecated("use `for ((Left(l), Right(r)) <- es partition isLeft) yield (l, r)'")
- def separate[A,B](es: Iterable[Either[A,B]]): (List[A], List[B]) =
- es.foldRight[(List[A], List[B])]((Nil, Nil)) {
- case (Left(a), (lefts, rights)) => (a :: lefts, rights)
- case (Right(b), (lefts, rights)) => (lefts, b :: rights)
- }
-
/** If the condition satisfies, return the given A in <code>Left</code>,
* otherwise, return the given B in <code>Right</code>.
*/
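
The deprecation messages on the removed lefts and rights spell out their for-comprehension replacements; a brief sketch, using a hypothetical EitherDemo object not part of the patch:

object EitherDemo {
  def main(args: Array[String]) {
    val es: List[Either[String, Int]] = List(Left("boom"), Right(1), Right(2))

    // the replacements named in the deprecation messages
    val as = for (Left(a) <- es) yield a     // List(boom)
    val bs = for (Right(b) <- es) yield b    // List(1, 2)

    println(as)
    println(bs)
  }
}
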
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 01f99d550a..c1fe45c8c1 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -1,20 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
-
package scala
import scala.collection.SetLike
-import scala.collection.mutable.{Builder, AddingBuilder, Map, HashMap}
-import scala.collection.immutable.{Set, BitSet}
-import scala.collection.generic.CanBuildFrom
+import scala.collection.{ mutable, immutable, generic }
+import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
/** <p>
* Defines a finite set of values specific to the enumeration. Typically
@@ -44,7 +40,7 @@ import scala.collection.generic.CanBuildFrom
*
* <b>def</b> isWorkingDay(d: WeekDay) = ! (d == Sat || d == Sun)
*
- * WeekDay.iterator filter isWorkingDay foreach println
+ * WeekDay.values filter isWorkingDay foreach println
* }</pre>
*
* @param initial The initial value from which to count the integers that
@@ -52,31 +48,26 @@ import scala.collection.generic.CanBuildFrom
* @param names The sequence of names to give to this enumeration's values.
*
* @author Matthias Zenger
- * @version 1.0, 10/02/2004
*/
@serializable
@SerialVersionUID(8476000850333817230L)
abstract class Enumeration(initial: Int, names: String*) {
+ thisenum =>
- def this() = this(0, null)
+ def this() = this(0)
def this(names: String*) = this(0, names: _*)
+ /* Note that `readResolve` cannot be private, since otherwise
+ the JVM does not invoke it when deserializing subclasses. */
+ protected def readResolve(): AnyRef = thisenum.getClass.getField("MODULE$").get()
+
/** The name of this enumeration.
*/
- override def toString = {
- val name = this.getClass.getName
- var string =
- if (name endsWith "$") name.substring(0, name.length - 1) else name
- val idx1 = string.lastIndexOf('.' : Int)
- if (idx1 != -1) string = string.substring(idx1 + 1)
- val idx2 = string.indexOf('$')
- if (idx2 != -1) string = string.substring(idx2 + 1)
- string
- }
+ override def toString = (getClass.getName stripSuffix "$" split '.' last) split '$' last
/** The mapping from the integer used to identify values to the actual
* values. */
- private val vmap: Map[Int, Value] = new HashMap
+ private val vmap: mutable.Map[Int, Value] = new mutable.HashMap
/** The cache listing all values of this enumeration. */
@transient private var vset: ValueSet = null
@@ -84,13 +75,13 @@ abstract class Enumeration(initial: Int, names: String*) {
/** The mapping from the integer used to identify values to their
* names. */
- private val nmap: Map[Int, String] = new HashMap
+ private val nmap: mutable.Map[Int, String] = new mutable.HashMap
/** The values of this enumeration as a set.
*/
def values: ValueSet = {
if (!vsetDefined) {
- vset = new ValueSet(BitSet.empty ++ (vmap.valuesIterator map (_.id)))
+ vset = new ValueSet(immutable.SortedSet.empty[Int] ++ (vmap.values map (_.id)))
vsetDefined = true
}
vset
@@ -101,6 +92,8 @@ abstract class Enumeration(initial: Int, names: String*) {
/** The string to use to name the next created value. */
protected var nextName = names.iterator
+ private def nextNameOrNull =
+ if (nextName.hasNext) nextName.next else null
/** The highest integer amongst those used to identify values in this
* enumeration. */
@@ -140,14 +133,14 @@ abstract class Enumeration(initial: Int, names: String*) {
*
* @param i An integer that identifies this value at run-time. It must be
* unique amongst all values of the enumeration.
- * @return ..
+ * @return Fresh value identified by <code>i</code>.
*/
- protected final def Value(i: Int): Value =
- Value(i, if (nextName.hasNext) nextName.next else null)
+ protected final def Value(i: Int): Value = Value(i, nextNameOrNull)
/** Creates a fresh value, part of this enumeration, called <code>name</code>.
*
* @param name A human-readable name for that value.
+ * @return Fresh value called <code>name</code>.
*/
protected final def Value(name: String): Value = Value(nextId, name)
@@ -157,44 +150,47 @@ abstract class Enumeration(initial: Int, names: String*) {
* @param i An integer that identifies this value at run-time. It must be
* unique amongst all values of the enumeration.
* @param name A human-readable name for that value.
- * @return ..
+ * @return Fresh value with the provided identifier <code>i</code> and name <code>name</code>.
*/
protected final def Value(i: Int, name: String): Value = new Val(i, name)
+ private def populateNameMap() {
+ // The list of possible Value methods: 0-args which return a conforming type
+ val methods = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
+ classOf[Value].isAssignableFrom(m.getReturnType) &&
+ m.getDeclaringClass != classOf[Enumeration])
+ methods foreach { m =>
+ val name = m.getName
+ // invoke method to obtain actual `Value` instance
+ val value = m.invoke(this).asInstanceOf[Value]
+ // verify that outer points to the correct Enumeration: ticket #3616.
+ if (value.outerEnum eq thisenum) {
+ val id = Int.unbox(classOf[Val] getMethod "id" invoke value)
+ nmap += ((id, name))
+ }
+ }
+ }
+
/* Obtains the name for the value with id `i`. If no name is cached
* in `nmap`, it populates `nmap` using reflection.
*/
- private def nameOf(i: Int): String = nmap.get(i) match {
- case Some(name) => name
- case None =>
- val methods = getClass.getMethods
- for (m <- methods
- if classOf[Value].isAssignableFrom(m.getReturnType) &&
- !java.lang.reflect.Modifier.isFinal(m.getModifiers)) {
- val name = m.getName
- // invoke method to obtain actual `Value` instance
- val value = m.invoke(this)
- // invoke `id` method
- val idMeth = classOf[Val].getMethod("id")
- val id: Int = idMeth.invoke(value).asInstanceOf[Integer].intValue()
- nmap += (id -> name)
- }
- nmap(i)
- }
+ private def nameOf(i: Int): String = synchronized { nmap.getOrElse(i, { populateNameMap() ; nmap(i) }) }
/** The type of the enumerated values. */
@serializable
@SerialVersionUID(7091335633555234129L)
- abstract class Value extends Ordered[Enumeration#Value] {
+ abstract class Value extends Ordered[Value] {
/** the id and bit location of this enumeration value */
def id: Int
- override def compare(that: Enumeration#Value): Int = this.id - that.id
- override def equals(other: Any): Boolean =
- other match {
- case that: Enumeration#Value => compare(that) == 0
- case _ => false
- }
- override def hashCode: Int = id.hashCode
+ /** a marker so we can tell whose values belong to whom come reflective-naming time */
+ private[Enumeration] val outerEnum = thisenum
+
+ override def compare(that: Value): Int = this.id - that.id
+ override def equals(other: Any) = other match {
+ case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id)
+ case _ => false
+ }
+ override def hashCode: Int = id.##
/** this enumeration value as an <code>Int</code> bit mask.
* @throws IllegalArgumentException if <code>id</code> is greater than 31
@@ -204,7 +200,7 @@ abstract class Enumeration(initial: Int, names: String*) {
if (id >= 32) throw new IllegalArgumentException
1 << id
}
- /** this enumeration value as an <code>Long</code> bit mask.
+ /** this enumeration value as a <code>Long</code> bit mask.
* @throws IllegalArgumentException if <code>id</code> is greater than 63
*/
@deprecated("mask64 will be removed")
@@ -216,50 +212,56 @@ abstract class Enumeration(initial: Int, names: String*) {
/** A class implementing the <a href="Enumeration.Value.html"
* target="contentFrame"><code>Value</code></a> type. This class can be
- * overriden to change the enumeration's naming and integer identification
+ * overridden to change the enumeration's naming and integer identification
* behaviour.
*/
@serializable
@SerialVersionUID(0 - 3501153230598116017L)
protected class Val(i: Int, name: String) extends Value {
- def this(i: Int) =
- this(i, if (nextName.hasNext) nextName.next else i.toString())
- def this(name: String) = this(nextId, name)
- def this() =
- this(nextId, if (nextName.hasNext) nextName.next else nextId.toString())
- assert(!vmap.isDefinedAt(i))
+ def this(i: Int) = this(i, nextNameOrNull)
+ def this(name: String) = this(nextId, name)
+ def this() = this(nextId)
+
+ assert(!vmap.isDefinedAt(i), "Duplicate id: " + i)
vmap(i) = this
vsetDefined = false
nextId = i + 1
if (nextId > topId) topId = nextId
def id = i
override def toString() =
- if (name eq null) Enumeration.this.nameOf(i)
- else name
- private def readResolve(): AnyRef =
- if (vmap ne null) vmap(i)
- else this
+ if (name != null) name
+ else try thisenum.nameOf(i)
+ catch { case _: NoSuchElementException => "<Invalid enum: no field for #" + i + ">" }
+
+ protected def readResolve(): AnyRef = {
+ val enum = thisenum.readResolve().asInstanceOf[Enumeration]
+ if (enum.vmap == null) this
+ else enum.vmap(i)
+ }
}
/** A class for sets of values
* Iterating through this set will yield values in increasing order of their ids.
- * @param ids The set of ids of values, organized as a BitSet.
+ * @param ids The set of ids of values, organized as a SortedSet.
*/
- class ValueSet private[Enumeration] (val ids: BitSet) extends Set[Value] with SetLike[Value, ValueSet] {
+ class ValueSet private[Enumeration] (val ids: immutable.SortedSet[Int]) extends Set[Value] with SetLike[Value, ValueSet] {
override def empty = ValueSet.empty
def contains(v: Value) = ids contains (v.id)
def + (value: Value) = new ValueSet(ids + value.id)
def - (value: Value) = new ValueSet(ids - value.id)
- def iterator = ids.iterator map Enumeration.this.apply
- override def stringPrefix = Enumeration.this + ".ValueSet"
+ def iterator = ids.iterator map thisenum.apply
+ override def stringPrefix = thisenum + ".ValueSet"
}
/** A factory object for value sets */
object ValueSet {
+ import mutable.{ Builder, AddingBuilder }
+ import generic.CanBuildFrom
+
/** The empty value set */
- val empty = new ValueSet(BitSet.empty)
+ val empty = new ValueSet(immutable.SortedSet.empty)
/** A value set consisting of given elements */
- def apply(elems: Value*): ValueSet = elems.foldLeft(empty)(_ + _)
+ def apply(elems: Value*): ValueSet = empty ++ elems
/** A builder object for value sets */
def newBuilder: Builder[Value, ValueSet] = new AddingBuilder(empty)
/** The implicit builder for value sets */
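The Enumeration rework above does three related things: ValueSet is now backed by an immutable.SortedSet[Int] of ids instead of a BitSet, the reflective name lookup is factored into populateNameMap() and restricted to zero-argument methods whose outerEnum matches the defining object (ticket #3616), and Value equality is scoped to that defining object rather than comparing ids alone. An illustrative sketch of the observable effect, using two hypothetical enumerations that are not part of the patch:

    object WeekDay extends Enumeration {
      val Mon, Tue, Wed = Value            // names recovered later by populateNameMap()
    }
    object Planet extends Enumeration {
      val Mercury = Value                  // also gets id 0
    }

    println(WeekDay.Mon)                   // "Mon", resolved reflectively on first use
    println(WeekDay.Mon == Planet.Mercury) // false: same id, but different outerEnum
    println(WeekDay.values)                // iterates Mon, Tue, Wed in id order (SortedSet)
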
diff --git a/src/library/scala/Equals.scala b/src/library/scala/Equals.scala
index e4dfb19982..2bb2c56834 100644
--- a/src/library/scala/Equals.scala
+++ b/src/library/scala/Equals.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index e333959550..d2f79bb270 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -36,6 +35,7 @@ object Function
* @param f ...
* @return ...
*/
+ @deprecated("Use `f.curried` instead")
def curried[a1, a2, b](f: (a1, a2) => b): a1 => a2 => b = {
x1 => x2 => f(x1, x2)
}
@@ -45,18 +45,21 @@ object Function
* @param f ...
* @return ...
*/
+ @deprecated("Use `f.curried` instead")
def curried[a1, a2, a3, b](f: (a1, a2, a3) => b): a1 => a2 => a3 => b = {
x1 => x2 => x3 => f(x1, x2, x3)
}
/** Currying for functions of arity 4.
*/
+ @deprecated("Use `f.curried` instead")
def curried[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): a1 => a2 => a3 => a4 => b = {
x1 => x2 => x3 => x4 => f(x1, x2, x3, x4)
}
/** Currying for functions of arity 5.
*/
+ @deprecated("Use `f.curried` instead")
def curried[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): a1 => a2 => a3 => a4 => a5 => b = {
x1 => x2 => x3 => x4 => x5 => f(x1, x2, x3, x4, x5)
}
@@ -89,9 +92,13 @@ object Function
/** Tupling for functions of arity 2. This transforms a function
* of arity 2 into a unary function that takes a pair of arguments.
*
+ * @note These functions are slotted for deprecation, but it is on
+ * hold pending superior type inference for tupling anonymous functions.
+ *
* @param f ...
* @return ...
*/
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = {
case Tuple2(x1, x2) => f(x1, x2)
}
@@ -99,6 +106,7 @@ object Function
/** Tupling for functions of arity 3. This transforms a function
* of arity 3 into a unary function that takes a triple of arguments.
*/
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, a3, b](f: (a1, a2, a3) => b): Tuple3[a1, a2, a3] => b = {
case Tuple3(x1, x2, x3) => f(x1, x2, x3)
}
@@ -106,6 +114,7 @@ object Function
/** Tupling for functions of arity 4. This transforms a function
* of arity 4 into a unary function that takes a 4-tuple of arguments.
*/
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): Tuple4[a1, a2, a3, a4] => b = {
case Tuple4(x1, x2, x3, x4) => f(x1, x2, x3, x4)
}
@@ -113,6 +122,7 @@ object Function
/** Tupling for functions of arity 5. This transforms a function
* of arity 5 into a unary function that takes a 5-tuple of arguments.
*/
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): Tuple5[a1, a2, a3, a4, a5] => b = {
case Tuple5(x1, x2, x3, x4, x5) => f(x1, x2, x3, x4, x5)
}
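In the Function companion, the free-standing curried overloads above are deprecated in favour of the curried method that every FunctionN trait gains below, while the tupled overloads are only slotted for deprecation: passing an anonymous function to Function.tupled still infers its parameter types better than writing the literal and then calling .tupled on it. An illustrative sketch of the two spellings (add is hypothetical, not part of the patch):

    val add = (x: Int, y: Int) => x + y

    val curriedAdd = add.curried        // preferred; Function.curried(add) now warns
    val tupledAdd  = add.tupled         // Function.tupled(add) remains undeprecated for now
    println(curriedAdd(1)(2))           // 3
    println(tupledAdd((1, 2)))          // 3
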
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index fd02993a8c..c6f81064b6 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with fancy comment)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with fancy comment)
package scala
+
/** <p>
* Function with 0 parameters.
* </p>
@@ -35,7 +35,7 @@ package scala
* println(anonfun0())
* }</pre>
*/
-trait Function0[+R] extends AnyRef { self =>
+trait Function0[@specialized +R] extends AnyRef { self =>
def apply(): R
override def toString() = "<function0>"
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 7cfd32304e..a6a6fa2828 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with fancy comment) (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with fancy comment) (with extra methods)
package scala
+
/** <p>
* Function with 1 parameter.
* </p>
@@ -35,7 +35,7 @@ package scala
* println(anonfun1(0))
* }</pre>
*/
-trait Function1[-T1, +R] extends AnyRef { self =>
+trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
def apply(v1:T1): R
override def toString() = "<function1>"
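Function0 and Function1 (and Function2 further below) additionally pick up @specialized annotations, so the compiler can emit primitive variants of apply for the listed type arguments. An illustrative sketch, not part of the patch: a function value whose static type is Int => Int can be invoked through the specialized Int/Int apply variant, avoiding boxing of the argument and result.

    val square: Int => Int = x => x * x   // Function1[Int, Int], specialized on both sides
    println(square(21))                   // 441
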
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index 4c3437dbb3..c009fb5c48 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 10 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10): R
override def toString() = "<function10>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) == (f.tupled)(Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10)).curry
+ def tupled: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] => R = {
+ case Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
}
}
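The change above is generated by genprod and repeats verbatim for every arity: curry is renamed to curried (the old name stays as a deprecated forwarder) and a tupled conversion to a function over the corresponding TupleN is added. Only this first occurrence is annotated; the hunks for Function11 through Function22, and for Function2 through Function9 further below, follow the same pattern. An illustrative sketch on a hypothetical ternary function (not part of the patch):

    val volume = (l: Double, w: Double, h: Double) => l * w * h

    println(volume.curried(2.0)(3.0)(4.0))   // 24.0, same as volume(2.0, 3.0, 4.0)
    println(volume.tupled((2.0, 3.0, 4.0)))  // 24.0
    println(volume.curry(2.0)(3.0)(4.0))     // still compiles, but warns: use curried
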
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 0d1f62b017..3c3c79db3a 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 11 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11): R
override def toString() = "<function11>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) == (f.tupled)(Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11)).curry
+ def tupled: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] => R = {
+ case Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
}
}
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index e4975b03f0..3cb2d4bb6f 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 12 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12): R
override def toString() = "<function12>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) == (f.tupled)(Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12)).curry
+ def tupled: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] => R = {
+ case Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
}
}
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index 7a05f655d6..5ec61d86fc 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 13 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13): R
override def toString() = "<function13>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) == (f.tupled)(Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13)).curry
+ def tupled: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] => R = {
+ case Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
}
}
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index 75cbbe8e3c..8624464f48 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 14 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14): R
override def toString() = "<function14>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) == (f.tupled)(Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14)).curry
+ def tupled: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] => R = {
+ case Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
}
}
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index 6b25c18474..beeaa0b55e 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 15 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15): R
override def toString() = "<function15>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) == (f.tupled)(Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15)).curry
+ def tupled: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] => R = {
+ case Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
}
}
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index a54ed01be3..43ffcee0f3 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 16 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15,v16:T16): R
override def toString() = "<function16>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) == (f.tupled)(Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16)).curry
+ def tupled: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] => R = {
+ case Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)
}
}
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index a20212919b..1bd5532f7d 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 17 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15,v16:T16,v17:T17): R
override def toString() = "<function17>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) == (f.tupled)(Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17)).curry
+ def tupled: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] => R = {
+ case Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)
}
}
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index 16b3c8ae03..31641d9496 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 18 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15,v16:T16,v17:T17,v18:T18): R
override def toString() = "<function18>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) == (f.tupled)(Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18)).curry
+ def tupled: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] => R = {
+ case Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)
}
}
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 85669db978..92d855666e 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 19 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15,v16:T16,v17:T17,v18:T18,v19:T19): R
override def toString() = "<function19>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) == (f.tupled)(Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19)).curry
+ def tupled: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] => R = {
+ case Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)
}
}
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 8a792b4f0b..334cc530bc 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with fancy comment) (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with fancy comment) (with extra methods)
package scala
+
/** <p>
* Function with 2 parameters.
* </p>
@@ -35,14 +35,22 @@ package scala
* println(anonfun2(0, 1))
* }</pre>
*/
-trait Function2[-T1, -T2, +R] extends AnyRef { self =>
+trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
def apply(v1:T1,v2:T2): R
override def toString() = "<function2>"
- /** f(x1,x2) == (f.curry)(x1)(x2)
+ /** f(x1, x2) == (f.curried)(x1)(x2)
+ */
+ def curried: T1 => T2 => R = {
+ (x1: T1) => (x2: T2) => apply(x1, x2)
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2) == (f.tupled)(Tuple2(x1, x2))
*/
- def curry: T1 => T2 => R = {
- (x1: T1) => (x2: T2) => apply(x1,x2)
+ def tupled: Tuple2[T1, T2] => R = {
+ case Tuple2(x1, x2) => apply(x1, x2)
}
}
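Function2 also gets the specialized type parameters noted earlier, plus the same curried/tupled pair in their direct (non-chained) form. An illustrative use of tupled, not part of the patch, is mapping over a collection of pairs such as the result of zip:

    val add  = (x: Int, y: Int) => x + y
    val sums = (List(1, 2, 3) zip List(10, 20, 30)).map(add.tupled)
    println(sums)   // List(11, 22, 33)
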
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index dbae793f1e..f3ddc31474 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 20 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15,v16:T16,v17:T17,v18:T18,v19:T19,v20:T20): R
override def toString() = "<function20>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) == (f.tupled)(Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20)).curry
+ def tupled: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] => R = {
+ case Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)
}
}
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index 5759319bf2..dcb500732f 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 21 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15,v16:T16,v17:T17,v18:T18,v19:T19,v20:T20,v21:T21): R
override def toString() = "<function21>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) == (f.tupled)(Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21)).curry
+ def tupled: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] => R = {
+ case Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
}
}
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index 6233ec07df..9e3f3c9f8f 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 22 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9,v10:T10,v11:T11,v12:T12,v13:T13,v14:T14,v15:T15,v16:T16,v17:T17,v18:T18,v19:T19,v20:T20,v21:T21,v22:T22): R
override def toString() = "<function22>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) == (f.tupled)(Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22)).curry
+ def tupled: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] => R = {
+ case Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)
}
}
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index 76c48215a5..2afc6acd4c 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 3 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self =>
def apply(v1:T1,v2:T2,v3:T3): R
override def toString() = "<function3>"
- /** f(x1,x2,x3) == (f.curry)(x1)(x2)(x3)
+ /** f(x1, x2, x3) == (f.curried)(x1)(x2)(x3)
+ */
+ def curried: T1 => T2 => T3 => R = {
+ (x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3)
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3) == (f.tupled)(Tuple3(x1, x2, x3))
*/
- def curry: T1 => T2 => T3 => R = {
- (x1: T1) => (x2: T2) => (x3: T3) => apply(x1,x2,x3)
+ def tupled: Tuple3[T1, T2, T3] => R = {
+ case Tuple3(x1, x2, x3) => apply(x1, x2, x3)
}
}
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index 21112a3345..c0708d8c06 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 4 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self =>
def apply(v1:T1,v2:T2,v3:T3,v4:T4): R
override def toString() = "<function4>"
- /** f(x1,x2,x3,x4) == (f.curry)(x1)(x2)(x3)(x4)
+ /** f(x1, x2, x3, x4) == (f.curried)(x1)(x2)(x3)(x4)
+ */
+ def curried: T1 => T2 => T3 => T4 => R = {
+ (x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4)
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4) == (f.tupled)(Tuple4(x1, x2, x3, x4))
*/
- def curry: T1 => T2 => T3 => T4 => R = {
- (x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1,x2,x3,x4)
+ def tupled: Tuple4[T1, T2, T3, T4] => R = {
+ case Tuple4(x1, x2, x3, x4) => apply(x1, x2, x3, x4)
}
}
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index 1a310cca83..0c318f6317 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 5 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self =>
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5): R
override def toString() = "<function5>"
- /** f(x1,x2,x3,x4,x5) == (f.curry)(x1)(x2)(x3)(x4)(x5)
+ /** f(x1, x2, x3, x4, x5) == (f.curried)(x1)(x2)(x3)(x4)(x5)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5) == (f.tupled)(Tuple5(x1, x2, x3, x4, x5))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1,x2,x3,x4,x5)).curry
+ def tupled: Tuple5[T1, T2, T3, T4, T5] => R = {
+ case Tuple5(x1, x2, x3, x4, x5) => apply(x1, x2, x3, x4, x5)
}
}
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 9be31515b4..0e77fbd2ce 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 6 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self =>
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6): R
override def toString() = "<function6>"
- /** f(x1,x2,x3,x4,x5,x6) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)
+ /** f(x1, x2, x3, x4, x5, x6) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6) == (f.tupled)(Tuple6(x1, x2, x3, x4, x5, x6))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1,x2,x3,x4,x5,x6)).curry
+ def tupled: Tuple6[T1, T2, T3, T4, T5, T6] => R = {
+ case Tuple6(x1, x2, x3, x4, x5, x6) => apply(x1, x2, x3, x4, x5, x6)
}
}
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index ce4e99f749..4cfdf1962a 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 7 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self =>
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7): R
override def toString() = "<function7>"
- /** f(x1,x2,x3,x4,x5,x6,x7) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)
+ /** f(x1, x2, x3, x4, x5, x6, x7) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7) == (f.tupled)(Tuple7(x1, x2, x3, x4, x5, x6, x7))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1,x2,x3,x4,x5,x6,x7)).curry
+ def tupled: Tuple7[T1, T2, T3, T4, T5, T6, T7] => R = {
+ case Tuple7(x1, x2, x3, x4, x5, x6, x7) => apply(x1, x2, x3, x4, x5, x6, x7)
}
}
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 995af11640..92d7f217fd 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 8 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8): R
override def toString() = "<function8>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8) == (f.tupled)(Tuple8(x1, x2, x3, x4, x5, x6, x7, x8))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8)).curry
+ def tupled: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] => R = {
+ case Tuple8(x1, x2, x3, x4, x5, x6, x7, x8) => apply(x1, x2, x3, x4, x5, x6, x7, x8)
}
}
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index ebf8b17954..0223ac57f7 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
+
/** <p>
* Function with 9 parameters.
* </p>
@@ -23,10 +23,18 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef
def apply(v1:T1,v2:T2,v3:T3,v4:T4,v5:T5,v6:T6,v7:T7,v8:T8,v9:T9): R
override def toString() = "<function9>"
- /** f(x1,x2,x3,x4,x5,x6,x7,x8,x9) == (f.curry)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)
+ /** f(x1, x2, x3, x4, x5, x6, x7, x8, x9) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)
+ */
+ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
+ (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried
+ }
+ @deprecated("Use 'curried' instead")
+ def curry = curried
+
+ /* f(x1, x2, x3, x4, x5, x6, x7, x8, x9) == (f.tupled)(Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9))
*/
- def curry: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
- (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1,x2,x3,x4,x5,x6,x7,x8,x9)).curry
+ def tupled: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9] => R = {
+ case Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)
}
}
diff --git a/src/library/scala/Immutable.scala b/src/library/scala/Immutable.scala
index 441e304b21..56511df29a 100644
--- a/src/library/scala/Immutable.scala
+++ b/src/library/scala/Immutable.scala
@@ -1,17 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
-/** A marker trait for all immutable datastructures such as imutable
+/** A marker trait for all immutable datastructures such as immutable
* collections.
*
* @since 2.8
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
index 64247023fd..32bdf7e30d 100644
--- a/src/library/scala/LowPriorityImplicits.scala
+++ b/src/library/scala/LowPriorityImplicits.scala
@@ -1,14 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
-
package scala
import collection.mutable._
@@ -26,21 +23,21 @@ import collection.generic.CanBuildFrom
class LowPriorityImplicits {
implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
- WrappedArray.make(xs)
-
- implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](xs)
- implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(xs)
- implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(xs)
- implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(xs)
- implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(xs)
- implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(xs)
- implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(xs)
- implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(xs)
- implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(xs)
- implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(xs)
-
- implicit def wrapString(s: String): WrappedString = new WrappedString(s)
- implicit def unwrapString(ws: WrappedString): String = ws.self
+ if (xs ne null) WrappedArray.make(xs) else null
+
+ implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = if (xs ne null) new WrappedArray.ofRef[T](xs) else null
+ implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
+ implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
+ implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
+ implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
+ implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
+ implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
+ implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
+ implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
+ implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
+
+ implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
+ implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, collection.immutable.IndexedSeq[T]] =
new CanBuildFrom[String, T, collection.immutable.IndexedSeq[T]] {
@@ -49,6 +46,7 @@ class LowPriorityImplicits {
}
/** Can go away after next newstarr */
+ /** Caution - not yet. pos/t1459, pos/t2569, jvm/t1342 all fail without the next line. */
def wrapArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](xs)
def wrapArray(xs: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(xs)
def wrapArray(xs: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(xs)
@@ -59,6 +57,4 @@ class LowPriorityImplicits {
def wrapArray(xs: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(xs)
def wrapArray(xs: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(xs)
def wrapArray(xs: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(xs)
-
-
}
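
For illustration only (a sketch, not from the patch): the array and string wrapper implicits above now propagate null instead of wrapping it.

val nullInts: Array[Int] = null
val wrappedInts: collection.mutable.WrappedArray[Int] = nullInts  // wrapIntArray now yields null
val nullStr: String = null
val wrappedStr: collection.immutable.WrappedString = nullStr      // wrapString likewise yields null
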
diff --git a/src/library/scala/MatchError.scala b/src/library/scala/MatchError.scala
index cc8aef4635..31783da4d3 100644
--- a/src/library/scala/MatchError.scala
+++ b/src/library/scala/MatchError.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/Math.scala b/src/library/scala/Math.scala
index c3e619594b..69a3985871 100644
--- a/src/library/scala/Math.scala
+++ b/src/library/scala/Math.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala
@@ -15,188 +13,83 @@ package scala
* operations such as the elementary exponential, logarithm, square root, and
* trigonometric functions.
*/
-object Math {
+@deprecated("use scala.math package instead")
+object Math extends MathCommon {
+ @deprecated("Use scala.Byte.MinValue instead")
+ val MIN_BYTE = java.lang.Byte.MIN_VALUE
+
+ @deprecated("Use scala.Byte.MaxValue instead")
+ val MAX_BYTE = java.lang.Byte.MAX_VALUE
+
+ @deprecated("Use scala.Short.MinValue instead")
+ val MIN_SHORT = java.lang.Short.MIN_VALUE
+
+ @deprecated("Use scala.Short.MaxValue instead")
+ val MAX_SHORT = java.lang.Short.MAX_VALUE
- /** The smallest possible value for <a href="Byte.html" target="_self">scala.Byte</a>. */
- val MIN_BYTE = java.lang.Byte.MIN_VALUE
- /** The greatest possible value for <a href="Byte.html" target="_self">scala.Byte</a>. */
- val MAX_BYTE = java.lang.Byte.MAX_VALUE
+ @deprecated("Use scala.Char.MinValue instead")
+ val MIN_CHAR = java.lang.Character.MIN_VALUE
- /** The smallest possible value for <a href="Short.html" target="_self">scala.Short</a>. */
- val MIN_SHORT = java.lang.Short.MIN_VALUE
- /** The greatest possible value for <a href="Short.html" target="_self">scala.Short</a>. */
- val MAX_SHORT = java.lang.Short.MAX_VALUE
+ @deprecated("Use scala.Char.MaxValue instead")
+ val MAX_CHAR = java.lang.Character.MAX_VALUE
- /** The smallest possible value for <a href="Char.html" target="_self">scala.Char</a>. */
- val MIN_CHAR = java.lang.Character.MIN_VALUE
- /** The greatest possible value for <a href="Char.html" target="_self">scala.Char</a>. */
- val MAX_CHAR = java.lang.Character.MAX_VALUE
+ @deprecated("Use scala.Int.MinValue instead")
+ val MIN_INT = java.lang.Integer.MIN_VALUE
- /** The smallest possible value for <a href="Int.html" target="_self">scala.Int</a>. */
- val MIN_INT = java.lang.Integer.MIN_VALUE
- /** The greatest possible value for <a href="Int.html" target="_self">scala.Int</a>. */
- val MAX_INT = java.lang.Integer.MAX_VALUE
+ @deprecated("Use scala.Int.MaxValue instead")
+ val MAX_INT = java.lang.Integer.MAX_VALUE
- /** The smallest possible value for <a href="Long.html" target="_self">scala.Long</a>. */
- val MIN_LONG = java.lang.Long.MIN_VALUE
- /** The greatest possible value for <a href="Long.html" target="_self">scala.Long</a>. */
- val MAX_LONG = java.lang.Long.MAX_VALUE
+ @deprecated("Use scala.Long.MinValue instead")
+ val MIN_LONG = java.lang.Long.MIN_VALUE
+
+ @deprecated("Use scala.Long.MaxValue instead")
+ val MAX_LONG = java.lang.Long.MAX_VALUE
/** The smallest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.MinValue instead")
val MIN_FLOAT = -java.lang.Float.MAX_VALUE
+
/** The smallest difference between two values of <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.Epsilon instead")
val EPS_FLOAT = java.lang.Float.MIN_VALUE
+
/** The greatest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.MaxValue instead")
val MAX_FLOAT = java.lang.Float.MAX_VALUE
+
/** A value of type <a href="Float.html" target="_self">scala.Float</a> that represents no number. */
+ @deprecated("Use scala.Float.NaN instead")
val NaN_FLOAT = java.lang.Float.NaN
+
/** Negative infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.NegativeInfinity instead")
val NEG_INF_FLOAT = java.lang.Float.NEGATIVE_INFINITY
+
/** Positive infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.PositiveInfinity instead")
val POS_INF_FLOAT = java.lang.Float.POSITIVE_INFINITY
/** The smallest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.MinValue instead")
val MIN_DOUBLE = -java.lang.Double.MAX_VALUE
+
/** The smallest difference between two values of <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.Epsilon instead")
val EPS_DOUBLE = java.lang.Double.MIN_VALUE
+
/** The greatest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.MaxValue instead")
val MAX_DOUBLE = java.lang.Double.MAX_VALUE
+
/** A value of type <a href="Double.html" target="_self">scala.Double</a> that represents no number. */
+ @deprecated("Use scala.Double.NaN instead")
val NaN_DOUBLE = java.lang.Double.NaN
+
/** Negative infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.NegativeInfinity instead")
val NEG_INF_DOUBLE = java.lang.Double.NEGATIVE_INFINITY
+
/** Positive infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.PositiveInfinity instead")
val POS_INF_DOUBLE = java.lang.Double.POSITIVE_INFINITY
-
- /** The <code>double</code> value that is closer than any other to
- * <code>e</code>, the base of the natural logarithms.
- */
- val E = java.lang.Math.E
-
- /** The <code>double</code> value that is closer than any other to
- * <code>pi</code>, the ratio of the circumference of a circle to its
- * diameter.
- */
- val Pi = java.lang.Math.PI
-
- /** Returns a <code>double</code> value with a positive sign, greater than
- * or equal to <code>0.0</code> and less than <code>1.0</code>.
- */
- def random: Double = java.lang.Math.random()
-
- def sin(x: Double): Double = java.lang.Math.sin(x)
- def cos(x: Double): Double = java.lang.Math.cos(x)
- def tan(x: Double): Double = java.lang.Math.tan(x)
- def asin(x: Double): Double = java.lang.Math.asin(x)
- def acos(x: Double): Double = java.lang.Math.acos(x)
- def atan(x: Double): Double = java.lang.Math.atan(x)
-
- /** Converts an angle measured in degrees to an approximately equivalent
- * angle measured in radians.
- *
- * @param x an angle, in degrees
- * @return the measurement of the angle <code>x</code> in radians.
- */
- def toRadians(x: Double): Double = java.lang.Math.toRadians(x)
-
- /** Converts an angle measured in radians to an approximately equivalent
- * angle measured in degrees.
- *
- * @param x angle, in radians
- * @return the measurement of the angle <code>x</code> in degrees.
- */
- def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x)
-
- /** Returns Euler's number <code>e</code> raised to the power of a
- * <code>double</code> value.
- *
- * @param x the exponent to raise <code>e</code> to.
- * @return the value <code>e<sup>a</sup></code>, where <code>e</code>
- * is the base of the natural logarithms.
- */
- def exp(x: Double): Double = java.lang.Math.exp(x)
- def log(x: Double): Double = java.lang.Math.log(x)
- def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
- def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
-
- def ceil(x: Double): Double = java.lang.Math.ceil(x)
- def floor(x: Double): Double = java.lang.Math.floor(x)
-
- /** Returns the <code>double</code> value that is closest in value to the
- * argument and is equal to a mathematical integer.
- *
- * @param x a <code>double</code> value
- * @return the closest floating-point value to a that is equal to a
- * mathematical integer.
- */
- def rint(x: Double): Double = java.lang.Math.rint(x)
-
- /** Converts rectangular coordinates <code>(x, y)</code> to polar
- * <code>(r, theta)</code>.
- *
- * @param x the ordinate coordinate
- * @param y the abscissa coordinate
- * @return the <em>theta</em> component of the point <code>(r, theta)</code>
- * in polar coordinates that corresponds to the point
- * <code>(x, y)</code> in Cartesian coordinates.
- */
- def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x)
-
- /** Returns the value of the first argument raised to the power of the
- * second argument.
- *
- * @param x the base.
- * @param y the exponent.
- * @return the value <code>x<sup>y</sup></code>.
- */
- def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
-
- /** Returns the closest <code>long</code> to the argument.
- *
- * @param x a floating-point value to be rounded to a <code>long</code>.
- * @return the value of the argument rounded to the nearest
- * <code>long</code> value.
- */
- def round(x: Float): Int = java.lang.Math.round(x)
- def round(x: Double): Long = java.lang.Math.round(x)
- def abs(x: Int): Int = java.lang.Math.abs(x)
- def abs(x: Long): Long = java.lang.Math.abs(x)
- def abs(x: Float): Float = java.lang.Math.abs(x)
- def abs(x: Double): Double = java.lang.Math.abs(x)
-
- def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
- def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
- def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
- def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
-
- def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
- def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
- def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
- def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
-
- def signum(x: Double): Double = x match { case 0 => 0
- case y if y < 0 => -1.0
- case y if y > 0 => 1.0 }
- def signum(x: Float): Float = x match { case 0f => 0f
- case y if y < 0f => -1.0f
- case y if y > 0f => 1.0f }
- def signum(x: Long): Long = x match { case 0l => 0l
- case y if y < 0l => -1l
- case y if y > 0l => 1l }
- def signum(x: Int): Int = x match { case 0 => 0
- case y if y < 0 => -1
- case y if y > 0 => 1}
-
- // from Java 1.5
- // def log10(x: Double): Double = java.lang.Math.log10(x)
- // def cbrt(x: Double): Double = java.lang.Math.cbrt(x)
- //
- // def ulp(x: Double): Double = java.lang.Math.ulp(x)
- // def ulp(x: Float): Float = java.lang.Math.ulp(x)
- // def sinh(x: Double): Double = java.lang.Math.sinh(x)
- // def cosh(x: Double): Double = java.lang.Math.cosh(x)
- // def tanh(x: Double):Double = java.lang.Math.tanh(x)
- // def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y)
- // def expm1(x: Double): Double = java.lang.Math.expm1(x)
- // def log1p(x: Double): Double = java.lang.Math.log1p(x)
-}
+}
\ No newline at end of file
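
For illustration only (not part of the patch): the deprecation messages above point at scala.math and the numeric companions as replacements; a hypothetical migration looks like this.

val oldMax = Math.max(1, 2)   // deprecated: scala.Math object
val oldMin = Math.MIN_BYTE    // deprecated constant
val newMax = math.max(1, 2)   // scala.math package object
val newMin = Byte.MinValue    // scala.Byte.MinValue
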
diff --git a/src/library/scala/MathCommon.scala b/src/library/scala/MathCommon.scala
new file mode 100644
index 0000000000..a72fc0e4ee
--- /dev/null
+++ b/src/library/scala/MathCommon.scala
@@ -0,0 +1,143 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala
+
+/** Common code between the deprecated scala.Math object and
+ * the scala.math package object.
+ */
+private[scala] class MathCommon {
+ /** The <code>double</code> value that is closer than any other to
+ * <code>e</code>, the base of the natural logarithms.
+ */
+ val E = java.lang.Math.E
+
+ /** The <code>double</code> value that is closer than any other to
+ * <code>pi</code>, the ratio of the circumference of a circle to its
+ * diameter.
+ */
+ val Pi = java.lang.Math.PI
+
+ /** Returns a <code>double</code> value with a positive sign, greater than
+ * or equal to <code>0.0</code> and less than <code>1.0</code>.
+ */
+ def random: Double = java.lang.Math.random()
+
+ def sin(x: Double): Double = java.lang.Math.sin(x)
+ def cos(x: Double): Double = java.lang.Math.cos(x)
+ def tan(x: Double): Double = java.lang.Math.tan(x)
+ def asin(x: Double): Double = java.lang.Math.asin(x)
+ def acos(x: Double): Double = java.lang.Math.acos(x)
+ def atan(x: Double): Double = java.lang.Math.atan(x)
+
+ /** Converts an angle measured in degrees to an approximately equivalent
+ * angle measured in radians.
+ *
+ * @param x an angle, in degrees
+ * @return the measurement of the angle <code>x</code> in radians.
+ */
+ def toRadians(x: Double): Double = java.lang.Math.toRadians(x)
+
+ /** Converts an angle measured in radians to an approximately equivalent
+ * angle measured in degrees.
+ *
+ * @param x angle, in radians
+ * @return the measurement of the angle <code>x</code> in degrees.
+ */
+ def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x)
+
+ /** Returns Euler's number <code>e</code> raised to the power of a
+ * <code>double</code> value.
+ *
+ * @param x the exponent to raise <code>e</code> to.
+ * @return the value <code>e<sup>a</sup></code>, where <code>e</code>
+ * is the base of the natural logarithms.
+ */
+ def exp(x: Double): Double = java.lang.Math.exp(x)
+ def log(x: Double): Double = java.lang.Math.log(x)
+ def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
+ def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
+
+ def ceil(x: Double): Double = java.lang.Math.ceil(x)
+ def floor(x: Double): Double = java.lang.Math.floor(x)
+
+ /** Returns the <code>double</code> value that is closest in value to the
+ * argument and is equal to a mathematical integer.
+ *
+ * @param x a <code>double</code> value
+ * @return the closest floating-point value to a that is equal to a
+ * mathematical integer.
+ */
+ def rint(x: Double): Double = java.lang.Math.rint(x)
+
+ /** Converts rectangular coordinates <code>(x, y)</code> to polar
+ * <code>(r, theta)</code>.
+ *
+ * @param x the ordinate coordinate
+ * @param y the abscissa coordinate
+ * @return the <em>theta</em> component of the point <code>(r, theta)</code>
+ * in polar coordinates that corresponds to the point
+ * <code>(x, y)</code> in Cartesian coordinates.
+ */
+ def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x)
+
+ /** Returns the value of the first argument raised to the power of the
+ * second argument.
+ *
+ * @param x the base.
+ * @param y the exponent.
+ * @return the value <code>x<sup>y</sup></code>.
+ */
+ def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
+
+ /** Returns the closest <code>long</code> to the argument.
+ *
+ * @param x a floating-point value to be rounded to a <code>long</code>.
+ * @return the value of the argument rounded to the nearest
+ * <code>long</code> value.
+ */
+ def round(x: Float): Int = java.lang.Math.round(x)
+ def round(x: Double): Long = java.lang.Math.round(x)
+ def abs(x: Int): Int = java.lang.Math.abs(x)
+ def abs(x: Long): Long = java.lang.Math.abs(x)
+ def abs(x: Float): Float = java.lang.Math.abs(x)
+ def abs(x: Double): Double = java.lang.Math.abs(x)
+
+ def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
+ def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
+ def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
+ def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
+
+ def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
+ def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
+ def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
+ def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
+
+ def signum(x: Double): Double =
+ if (x == 0d) 0d
+ else if (x < 0) -1.0
+ else if (x > 0) 1.0
+ else x // NaN
+
+ def signum(x: Float): Float =
+ if (x == 0f) 0f
+ else if (x < 0) -1.0f
+ else if (x > 0) 1.0f
+ else x // NaN
+
+ def signum(x: Long): Long =
+ if (x == 0l) 0l
+ else if (x < 0) -1l
+ else 1l
+
+ def signum(x: Int): Int =
+ if (x == 0) 0
+ else if (x < 0) -1
+ else 1
+}
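
For illustration only (a sketch, not from the patch): the old pattern-matching signum had no case for NaN and would throw a MatchError, while the if/else version above lets NaN fall through to the final `else x`.

math.signum(-3.5)        // -1.0
math.signum(0.0)         // 0.0
math.signum(Double.NaN)  // NaN (falls through to `else x`)
math.signum(-7L)         // -1L
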
diff --git a/src/library/scala/Mutable.scala b/src/library/scala/Mutable.scala
index 26fa3dafca..92db95261f 100644
--- a/src/library/scala/Mutable.scala
+++ b/src/library/scala/Mutable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/NotDefinedError.scala b/src/library/scala/NotDefinedError.scala
index 75a835b83e..3694f28c02 100644
--- a/src/library/scala/NotDefinedError.scala
+++ b/src/library/scala/NotDefinedError.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -14,4 +13,5 @@ package scala
/**
* @since 2.0
*/
+@deprecated("Use a custom Error class instead")
final class NotDefinedError(msg: String) extends Error("not defined: " + msg)
diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala
index 189189e331..8e4a29f06e 100644
--- a/src/library/scala/NotNull.scala
+++ b/src/library/scala/NotNull.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 6f9ba306ad..bd15c31609 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -1,160 +1,247 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
-
package scala
-object Option
-{
+object Option {
/** An implicit conversion that converts an option to an iterable value
*/
implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = xo.toList
- /** An Option factory which creates Some(value) if the argument is not null,
+ /** An Option factory which creates Some(x) if the argument is not null,
* and None if it is null.
*
* @param x the value
* @return Some(value) if value != null, None if value == null
*/
def apply[A](x: A): Option[A] = if (x == null) None else Some(x)
+
+ /** An Option factory which returns $none in a manner consistent with
+ * the collections hierarchy.
+ */
+ def empty[A] : Option[A] = None
}
-/** This class represents optional values. Instances of <code>Option</code>
- * are either instances of case class <code>Some</code> or it is case
- * object <code>None</code>.
+/** Represents optional values. Instances of `Option`
+ * are either an instance of $some or the object $none.
+ *
+ * The most idiomatic way to use an $option instance
+ * is to treat it as a collection or monad and
+ * use `map`,`flatMap`, `filter`,
+ * or `foreach`:
+ *
+ * {{{
+ * val name:Option[String] = request.getParameter("name")
+ * val upper = name map { _.trim } filter { _.length != 0 } map { _.toUpperCase }
+ * println(upper.getOrElse(""))
+ * }}}
+ *
+ * Note that this is equivalent to {{{
+ * val upper = for {
+ * name <- request.getParameter("name")
+ * trimmed <- Some(name.trim)
+ * upper <- Some(trimmed.toUpperCase) if trimmed.length != 0
+ * } yield upper
+ * println(upper.getOrElse(""))
+ * }}}
+ *
+ * Because of how for comprehensions work, if $none is returned
+ * from `request.getParameter`, the entire expression results in
+ * $none.
+ *
+ * This allows for sophisticated chaining of $option values without
+ * having to check for the existence of a value.
+ *
+ * A less-idiomatic way to use $option values is via pattern matching: {{{
+ * val nameMaybe = request.getParameter("name")
+ * nameMaybe match {
+ * case Some(name) => {
+ * println(name.trim.toUpperCase)
+ * }
+ * case None => {
+ * println("No name value")
+ * }
+ * }
+ * }}}
*
* @author Martin Odersky
* @author Matthias Zenger
* @version 1.1, 16/01/2007
+ * @define none `None`
+ * @define some [[scala.Some]]
+ * @define option [[scala.Option]]
+ * @define p `p`
+ * @define f `f`
*/
sealed abstract class Option[+A] extends Product {
+ self =>
- /** True if the option is the <code>None</code> value, false otherwise.
+ /** Returns true if the option is $none, false otherwise.
*/
def isEmpty: Boolean
- /** True if the option is a <code>Some</code>(...) false otherwise.
+ /** Returns true if the option is an instance of $some, false otherwise.
*/
def isDefined: Boolean = !isEmpty
- /** get the value of this option.
- * @requires that the option is nonEmpty.
+ /** Returns the option's value.
+ * @note The option must be nonEmpty.
* @throws Predef.NoSuchElementException if the option is empty.
*/
def get: A
- @deprecated("use <code>getOrElse</code> instead")
- def get[B >: A](default: B): B = this match {
- case None => default
- case Some(x) => x
- }
-
- /** If the option is nonempty return its value,
- * otherwise return the result of evaluating a default expression.
+ /** Returns the option's value if the option is nonempty, otherwise
+ * return the result of evaluating `default`.
*
* @param default the default expression.
*/
def getOrElse[B >: A](default: => B): B =
if (isEmpty) default else this.get
- /** The option's value if it is nonempty, or <code>null</code> if it is empty.
- * The use of null of course is discouraged, but code written to use Options
- * often must interface with code which expects and returns nulls.
+ /** Returns the option's value if it is nonempty,
+ * or `null` if it is empty.
+ * Although the use of null is discouraged, code written to use
+ * $option must often interface with code that expects and returns nulls.
+ * @example {{{
+ * val initialText: Option[String] = getInitialText
+ * val textField = new JComponent(initialText.orNull, 20)
+ * }}}
*/
def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse null
- /** If the option is nonempty, return a function applied to its value,
- * wrapped in a Some i.e. <code>Some(f(this.get))</code>.
- * Otherwise return <code>None</code>.
+ /** Returns a $some containing the result of applying $f to this $option's
+ * value if this $option is nonempty.
+ * Otherwise return $none.
+ *
+ * @note This is similar to `flatMap` except here,
+ * $f does not need to wrap its result in an $option.
*
* @param f the function to apply
+ * @see flatMap
+ * @see foreach
*/
def map[B](f: A => B): Option[B] =
if (isEmpty) None else Some(f(this.get))
- /** If the option is nonempty, return a function applied to its value.
- * Otherwise return None.
+ /** Returns the result of applying $f to this $option's value if
+ * this $option is nonempty.
+ * Returns $none if this $option is empty.
+ * Slightly different from `map` in that $f is expected to
+ * return an $option (which could be $none).
+ *
* @param f the function to apply
+ * @see map
+ * @see foreach
*/
def flatMap[B](f: A => Option[B]): Option[B] =
if (isEmpty) None else f(this.get)
- /** If the option is nonempty and the given predicate <code>p</code>
- * yields <code>false</code> on its value, return <code>None</code>.
- * Otherwise return the option value itself.
+ /** Returns this $option if it is nonempty '''and''' applying the predicate $p to
+ * this $option's value returns true. Otherwise, return $none.
*
* @param p the predicate used for testing.
*/
def filter(p: A => Boolean): Option[A] =
if (isEmpty || p(this.get)) this else None
- /** If the option is nonempty, p(value), otherwise false.
+ /** Necessary to keep $option from being implicitly converted to
+ * [[scala.collection.Iterable]] in `for` comprehensions.
+ */
+ def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
+
+ /** We need a whole WithFilter class to honor the "doesn't create a new
+ * collection" contract even though it seems unlikely to matter much in a
+ * collection with max size 1.
+ */
+ class WithFilter(p: A => Boolean) {
+ def map[B](f: A => B): Option[B] = self filter p map f
+ def flatMap[B](f: A => Option[B]): Option[B] = self filter p flatMap f
+ def foreach[U](f: A => U): Unit = self filter p foreach f
+ def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x))
+ }
+
+ /** Returns true if this option is nonempty '''and''' the predicate
+ * $p returns true when applied to this $option's value.
+ * Otherwise, returns false.
*
* @param p the predicate to test
*/
def exists(p: A => Boolean): Boolean =
!isEmpty && p(this.get)
- /** Apply the given procedure <code>f</code> to the option's value,
- * if it is nonempty. Do nothing if it is empty.
+ /** Apply the given procedure $f to the option's value,
+ * if it is nonempty. Otherwise, do nothing.
*
* @param f the procedure to apply.
+ * @see map
+ * @see flatMap
*/
def foreach[U](f: A => U) {
if (!isEmpty) f(this.get)
}
- /** If the given partial function <code>pf</code> is defined for the
- * option's value, apply it to the value. Otherwise, None.
+ /** Returns a $some containing the result of
+ * applying `pf` to this $option's contained
+ * value, '''if''' this option is
+ * nonempty '''and''' `pf` is defined for that value.
+ * Returns $none otherwise.
*
* @param pf the partial function.
+ * @return the result of applying `pf` to this $option's
+ * value (if possible), or $none.
*/
- def partialMap[B](pf: PartialFunction[Any, B]): Option[B] =
+ def collect[B](pf: PartialFunction[A, B]): Option[B] =
if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
- /** If the option is nonempty return it,
- * otherwise return the result of evaluating an alternative expression.
- * @param alternative the alternative expression.
+ /** Returns this $option if it is nonempty,
+ * otherwise return the result of evaluating `alternative`.
+ * @param alternative the alternative expression.
*/
def orElse[B >: A](alternative: => Option[B]): Option[B] =
if (isEmpty) alternative else this
- /** An singleton iterator returning the option's value if it is nonempty
- * or the empty iterator if the option is empty.
+ /** Returns a singleton iterator returning the $option's value
+ * if it is nonempty, or an empty iterator if the option is empty.
*/
def iterator: Iterator[A] =
if (isEmpty) Iterator.empty else Iterator.single(this.get)
- /** A singleton list containing the option's value if it is nonempty
- * or the empty list if the option is empty.
+ /** Returns a singleton list containing the $option's value
+ * if it is nonempty, or the empty list if the $option is empty.
*/
def toList: List[A] =
if (isEmpty) List() else List(this.get)
- /** An <code>Either</code> that is a <code>Left</code> with the given argument
- * <code>left</code> if this is empty, or a <code>Right</code> if this is nonempty with the
- * option's value.
+ /** Returns a [[scala.Left]] containing the given
+ * argument `left` if this $option is empty, or
+ * a [[scala.Right]] containing this $option's value if
+ * this is nonempty.
+ *
+ * @param left the expression to evaluate and return if this is empty
+ * @see toLeft
*/
def toRight[X](left: => X) =
if (isEmpty) Left(left) else Right(this.get)
- /** An <code>Either</code> that is a <code>Right</code> with the given argument
- * <code>right</code> if this is empty, or a <code>Left</code> if this is nonempty with the
- * option's value.
+ /** Returns a [[scala.Right]] containing the given
+ * argument `right` if this is empty, or
+ * a [[scala.Left]] containing this $option's value
+ * if this $option is nonempty.
+ *
+ * @param right the expression to evaluate and return if this is empty
+ * @see toRight
*/
def toLeft[X](right: => X) =
if (isEmpty) Right(right) else Left(this.get)
}
-/** Class <code>Some[A]</code> represents existing values of type
- * <code>A</code>.
+/** Class `Some[A]` represents existing values of type
+ * `A`.
*
* @author Martin Odersky
* @version 1.0, 16/07/2003
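
For illustration only (not part of the patch): a small sketch exercising the Option API as changed above, with collect in place of partialMap and withFilter backing `for` guards; the values are made up.

val opt: Option[Any] = Some("scala")
val len = opt collect { case s: String => s.length }                 // Some(5): applied only where defined
val shout = for (s <- Some("hi") if s.nonEmpty) yield s.toUpperCase  // Some("HI"), via withFilter
val none: Option[Int] = Option.empty                                 // the new factory for None
val orNothing: String = (None: Option[String]).orNull                // null, for Java-facing code
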
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 0ba7527976..41c2ac70e7 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -1,33 +1,42 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
-/** A partial function of type <code>PartialFunction[A, B]</code> is a
- * unary function where the domain does not include all values of type
- * <code>A</code>. The function <code>isDefinedAt</code> allows to
- * test dynamically, if a value is in the domain of the function.
+/** A partial function of type `PartialFunction[A, B]` is a
+ * unary function where the domain does not necessarily include all values of type
+ * `A`. The function `isDefinedAt` allows one to
+ * test dynamically if a value is in the domain of the function.
*
* @author Martin Odersky
* @version 1.0, 16/07/2003
*/
-trait PartialFunction[-A, +B] extends AnyRef with (A => B) {
+trait PartialFunction[-A, +B] extends (A => B) {
- /** Checks if a value is contained in the functions domain.
+ /** Checks if a value is contained in the function's domain.
*
* @param x the value to test
- * @return true, iff <code>x</code> is in the domain of this function.
+ * @return `true`, iff `x` is in the domain of this function, `false` otherwise.
*/
def isDefinedAt(x: A): Boolean
+ /** Composes this partial function with a fallback partial function which gets applied where this partial function
+ * is not defined.
+ *
+ * @param that the fallback function
+ * @tparam A1 the argument type of the fallback function
+ * @tparam B1 the result type of the fallback function
+ * @return a partial function which has as domain the union of the domains
+ * of this partial function and `that`. The resulting partial function
+ * takes `x` to `this(x)` where `this` is defined, and to `that(x)` where it is not.
+ */
def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] =
new PartialFunction[A1, B1] {
def isDefinedAt(x: A1): Boolean =
@@ -37,10 +46,23 @@ trait PartialFunction[-A, +B] extends AnyRef with (A => B) {
else that.apply(x)
}
+ /** Composes this partial function with a transformation function that gets applied
+ * to results of this partial function.
+ * @param k the transformation function
+ * @tparam C the result type of the transformation function.
+ * @return a partial function with the same domain as this partial function, which maps
+ * arguments `x` to `k(this(x))`.
+ */
override def andThen[C](k: B => C) : PartialFunction[A, C] = new PartialFunction[A, C] {
def isDefinedAt(x: A): Boolean = PartialFunction.this.isDefinedAt(x)
def apply(x: A): C = k(PartialFunction.this.apply(x))
}
+
+ /** Turns this partial function into a plain function returning an `Option` result.
+ * @return a function that takes an argument `x` to `Some(this(x))` if `this`
+ * is defined for `x`, and to `None` otherwise.
+ */
+ def lift: A => Option[B] = { x => if (isDefinedAt(x)) Some(this(x)) else None }
}
/** A few handy operations which leverage the extra bit of information
@@ -67,19 +89,19 @@ object PartialFunction
*
* @param x the value to test
* @param pf the partial function
- * @return true, iff <code>x</code> is in the domain of pf && pf(x) == true
+ * @return true, iff `x` is in the domain of `pf` and `pf(x) == true`.
*/
def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean =
(pf isDefinedAt x) && pf(x)
- /** Transforms a PartialFunction[T,U] `pf' into Function1[T, Option[U]] `f'
+ /** Transforms a PartialFunction[T, U] `pf' into Function1[T, Option[U]] `f'
* whose result is Some(x) if the argument is in pf's domain and None otherwise,
* and applies it to the value `x'. In effect, it is a 'match' statement
* which wraps all case results in Some(_) and adds 'case _ => None' to the end.
*
* @param x the value to test
- * @param pf the PartialFunction[T,U]
- * @return Some(pf(x)) iff (pf isDefinedAt x) and None otherwise
+ * @param pf the PartialFunction[T, U]
+ * @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise.
*/
def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] =
if (pf isDefinedAt x) Some(pf(x)) else None
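
For illustration only (not part of the patch): a minimal sketch of the combinators documented above — orElse, andThen, the new lift, and condOpt; the partial functions are hypothetical.

val half: PartialFunction[Int, Int]   = { case n if n % 2 == 0 => n / 2 }
val negate: PartialFunction[Int, Int] = { case n if n < 0 => -n }
(half orElse negate)(-3)          // 3: half is undefined at -3, so negate applies
(half andThen (_ + 1))(4)         // 3: 4 / 2, then + 1
half.lift(3)                      // None: lift makes a total, Option-returning function
half.lift(4)                      // Some(2)
PartialFunction.condOpt(6)(half)  // Some(3)
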
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index c44ec72299..1e926ea07a 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -14,68 +13,30 @@ package scala
import collection.immutable.StringOps
import collection.mutable.ArrayOps
import collection.generic.CanBuildFrom
+import annotation.elidable
+import annotation.elidable.ASSERTION
/** The <code>Predef</code> object provides definitions that are
* accessible in all Scala compilation units without explicit
* qualification.
*/
object Predef extends LowPriorityImplicits {
-
- // classOf dummy ------------------------------------------------------
-
- /** Return the runtime representation of a class type. */
+ /** Return the runtime representation of a class type. This is a stub method.
+ * The actual implementation is filled in by the compiler.
+ */
def classOf[T]: Class[T] = null
- // aliases ------------------------------------------------------------
-
- @deprecated("lower-case type aliases will be removed") type byte = scala.Byte
- @deprecated("lower-case type aliases will be removed") type short = scala.Short
- @deprecated("lower-case type aliases will be removed") type char = scala.Char
- @deprecated("lower-case type aliases will be removed") type int = scala.Int
- @deprecated("lower-case type aliases will be removed") type long = scala.Long
- @deprecated("lower-case type aliases will be removed") type float = scala.Float
- @deprecated("lower-case type aliases will be removed") type double = scala.Double
- @deprecated("lower-case type aliases will be removed") type boolean = scala.Boolean
- @deprecated("lower-case type aliases will be removed") type unit = scala.Unit
-
- @deprecated("use <code>java.lang.Integer</code> instead")
- type Integer = java.lang.Integer
- @deprecated("use <code>java.lang.Character</code> instead")
- type Character = java.lang.Character
-
type String = java.lang.String
type Class[T] = java.lang.Class[T]
- type Runnable = java.lang.Runnable
-
- type Throwable = java.lang.Throwable
- type Exception = java.lang.Exception
- type Error = java.lang.Error
-
- type RuntimeException = java.lang.RuntimeException
- type NullPointerException = java.lang.NullPointerException
- type ClassCastException = java.lang.ClassCastException
- type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException
- type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException
- type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException
- type UnsupportedOperationException = java.lang.UnsupportedOperationException
- type IllegalArgumentException = java.lang.IllegalArgumentException
- type NoSuchElementException = java.util.NoSuchElementException
- type NumberFormatException = java.lang.NumberFormatException
- type AbstractMethodError = java.lang.AbstractMethodError
// miscelleaneous -----------------------------------------------------
-
- private val P = scala.`package` // to force scala package object to be seen.
- private val L = scala.collection.immutable.List // to force Nil, :: to be seen.
- private val S = scala.collection.mutable.StringBuilder // to force StringBuilder to be seen.
-
- val $scope = scala.xml.TopScope
+ scala.`package` // to force scala package object to be seen.
+ scala.collection.immutable.List // to force Nil, :: to be seen.
type Function[-A, +B] = Function1[A, B]
type Map[A, +B] = collection.immutable.Map[A, B]
type Set[A] = collection.immutable.Set[A]
-
val Map = collection.immutable.Map
val Set = collection.immutable.Set
@@ -85,7 +46,6 @@ object Predef extends LowPriorityImplicits {
def manifest[T](implicit m: Manifest[T]) = m
def classManifest[T](implicit m: ClassManifest[T]) = m
- // will soon stop being a view: subsumed by `conforms` (which is less likely to give rise to ambiguities)
// @see `conforms` for the implicit version
def identity[A](x: A): A = x
@@ -93,22 +53,6 @@ object Predef extends LowPriorityImplicits {
@inline def locally[T](x: T): T = x
- // hashcode -----------------------------------------------------------
-
- @inline def hash(x: Any): Int =
- if (x.isInstanceOf[Number]) runtime.BoxesRunTime.hashFromNumber(x.asInstanceOf[Number])
- else x.hashCode
-
- @inline def hash(x: Number): Int =
- runtime.BoxesRunTime.hashFromNumber(x)
-
- @inline def hash(x: java.lang.Long): Int = {
- val iv = x.intValue
- if (iv == x.longValue) iv else x.hashCode
- }
-
- @inline def hash(x: Int): Int = x
-
// errors and asserts -------------------------------------------------
def error(message: String): Nothing = throw new RuntimeException(message)
@@ -120,43 +64,95 @@ object Predef extends LowPriorityImplicits {
throw new Throwable()
}
- import annotation.elidable
- import annotation.elidable.ASSERTION
-
+ /** Tests an expression, throwing an AssertionError if false.
+ * Calls to this method will not be generated if -Xelide-below
+ * is at least ASSERTION.
+ *
+ * @see elidable
+ * @param assertion the expression to test
+ */
@elidable(ASSERTION)
def assert(assertion: Boolean) {
if (!assertion)
throw new java.lang.AssertionError("assertion failed")
}
+ /** Tests an expression, throwing an AssertionError if false.
+ * Calls to this method will not be generated if -Xelide-below
+ * is at least ASSERTION.
+ *
+ * @see elidable
+ * @param assertion the expression to test
+ * @param message a String to include in the failure message
+ */
@elidable(ASSERTION)
def assert(assertion: Boolean, message: => Any) {
if (!assertion)
throw new java.lang.AssertionError("assertion failed: "+ message)
}
+ /** Tests an expression, throwing an AssertionError if false.
+ * This method differs from assert only in the intent expressed:
+ * assert contains a predicate which needs to be proven, while
+ * assume contains an axiom for a static checker. Calls to this method
+ * will not be generated if -Xelide-below is at least ASSERTION.
+ *
+ * @see elidable
+ * @param assumption the expression to test
+ */
@elidable(ASSERTION)
def assume(assumption: Boolean) {
if (!assumption)
throw new java.lang.AssertionError("assumption failed")
}
+ /** Tests an expression, throwing an AssertionError if false.
+ * This method differs from assert only in the intent expressed:
+ * assert contains a predicate which needs to be proven, while
+ * assume contains an axiom for a static checker. Calls to this method
+ * will not be generated if -Xelide-below is at least ASSERTION.
+ *
+ * @see elidable
+ * @param assumption the expression to test
+ * @param message a String to include in the failure message
+ */
@elidable(ASSERTION)
def assume(assumption: Boolean, message: => Any) {
if (!assumption)
throw new java.lang.AssertionError("assumption failed: "+ message)
}
+ /** Tests an expression, throwing an IllegalArgumentException if false.
+ * This method is similar to assert, but blames the caller of the method
+ * for violating the condition.
+ *
+ * @param requirement the expression to test
+ */
def require(requirement: Boolean) {
if (!requirement)
throw new IllegalArgumentException("requirement failed")
}
+ /** Tests an expression, throwing an IllegalArgumentException if false.
+ * This method is similar to assert, but blames the caller of the method
+ * for violating the condition.
+ *
+ * @param requirement the expression to test
+ * @param message a String to include in the failure message
+ */
def require(requirement: Boolean, message: => Any) {
if (!requirement)
throw new IllegalArgumentException("requirement failed: "+ message)
}
+ final class Ensuring[A](val x: A) {
+ def ensuring(cond: Boolean): A = { assert(cond); x }
+ def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x }
+ def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
+ def ensuring(cond: A => Boolean, msg: Any): A = { assert(cond(x), msg); x }
+ }
+ implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
+
// tupling ------------------------------------------------------------
type Pair[+A, +B] = Tuple2[A, B]
@@ -171,36 +167,18 @@ object Predef extends LowPriorityImplicits {
def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
}
- class Ensuring[A](x: A) {
- def ensuring(cond: Boolean): A = { assert(cond); x }
- def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x }
- def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
- def ensuring(cond: A => Boolean, msg: Any): A = { assert(cond(x), msg); x }
- }
- implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
-
- class ArrowAssoc[A](x: A) {
- def -> [B](y: B): Tuple2[A, B] = Tuple2(x, y)
+ final class ArrowAssoc[A](val x: A) {
+ @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(x, y)
def →[B](y: B): Tuple2[A, B] = ->(y)
}
implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
- def Tuple[A1](x1: A1) = Tuple1(x1)
- def Tuple[A1, A2](x1: A1, x2: A2) = Tuple2(x1, x2)
- def Tuple[A1, A2, A3](x1: A1, x2: A2, x3: A3) = Tuple3(x1, x2, x3)
- def Tuple[A1, A2, A3, A4](x1: A1, x2: A2, x3: A3, x4: A4) = Tuple4(x1, x2, x3, x4)
- def Tuple[A1, A2, A3, A4, A5](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5) = Tuple5(x1, x2, x3, x4, x5)
- def Tuple[A1, A2, A3, A4, A5, A6](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6) = Tuple6(x1, x2, x3, x4, x5, x6)
- def Tuple[A1, A2, A3, A4, A5, A6, A7](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7) = Tuple7(x1, x2, x3, x4, x5, x6, x7)
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8) = Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9) = Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)
-
// printing and reading -----------------------------------------------
def print(x: Any) = Console.print(x)
def println() = Console.println()
def println(x: Any) = Console.println(x)
- def printf(text: String, xs: Any*) = Console.printf(text, xs: _*)
+ def printf(text: String, xs: Any*) = Console.print(format(text, xs: _*))
def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
def readLine(): String = Console.readLine()
@@ -227,19 +205,9 @@ object Predef extends LowPriorityImplicits {
implicit def longWrapper(x: Long) = new runtime.RichLong(x)
implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
-
implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
- implicit def augmentString(x: String): StringOps = new StringOps(x)
- implicit def unaugmentString(x: StringOps): String = x.repr
-
- implicit def stringCanBuildFrom: CanBuildFrom[String, Char, String] =
- new CanBuildFrom[String, Char, String] {
- def apply(from: String) = new scala.collection.mutable.StringBuilder
- def apply() = new scala.collection.mutable.StringBuilder
- }
-
- implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
+ implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs: AnyRef) match { // !!! drop the AnyRef and get unreachable code errors!
case x: Array[AnyRef] => refArrayOps[AnyRef](x).asInstanceOf[ArrayOps[T]]
@@ -266,7 +234,7 @@ object Predef extends LowPriorityImplicits {
implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps.ofBoolean(xs)
implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
- implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
+ // Primitive Widenings --------------------------------------------------------------
implicit def byte2short(x: Byte): Short = x.toShort
implicit def byte2int(x: Byte): Int = x.toInt
@@ -293,6 +261,8 @@ object Predef extends LowPriorityImplicits {
implicit def float2double(x: Float): Double = x.toDouble
+ // "Autoboxing" --------------------------------------------------------------
+
implicit def byte2Byte(x: Byte) = java.lang.Byte.valueOf(x)
implicit def short2Short(x: Short) = java.lang.Short.valueOf(x)
implicit def char2Character(x: Char) = java.lang.Character.valueOf(x)
@@ -302,10 +272,17 @@ object Predef extends LowPriorityImplicits {
implicit def double2Double(x: Double) = java.lang.Double.valueOf(x)
implicit def boolean2Boolean(x: Boolean) = java.lang.Boolean.valueOf(x)
- /** any array projection can be automatically converted into an array */
- //implicit def forceArrayProjection[A](x: Array.Projection[A]): Array[A] = x.force !!! re-enable?
+ // Strings and CharSequences --------------------------------------------------------------
+
+ implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
+ implicit def augmentString(x: String): StringOps = new StringOps(x)
+ implicit def unaugmentString(x: StringOps): String = x.repr
- //implicit def lazyStreamToConsable[A](xs: => Stream[A]) = new runtime.StreamCons(xs)
+ implicit def stringCanBuildFrom: CanBuildFrom[String, Char, String] =
+ new CanBuildFrom[String, Char, String] {
+ def apply(from: String) = new scala.collection.mutable.StringBuilder
+ def apply() = new scala.collection.mutable.StringBuilder
+ }
implicit def seqToCharSequence(xs: collection.IndexedSeq[Char]): CharSequence = new CharSequence {
def length: Int = xs.length
@@ -321,15 +298,28 @@ object Predef extends LowPriorityImplicits {
override def toString: String = xs.mkString("")
}
+ // Type Constraints --------------------------------------------------------------
+
// used, for example, in the encoding of generalized constraints
// we need a new type constructor `<:<` and evidence `conforms`, as
// reusing `Function2` and `identity` leads to ambiguities (any2stringadd is inferred)
// to constrain any abstract type T that's in scope in a method's argument list (not just the method's own type parameters)
// simply add an implicit argument of type `T <:< U`, where U is the required upper bound (for lower-bounds, use: `U <: T`)
+ // in part contributed by Jason Zaugg
sealed abstract class <:<[-From, +To] extends (From => To)
- implicit def conforms[A]: A <:< A = new (A <:< A) {def apply(x: A) = x}
+ implicit def conforms[A]: A <:< A = new (A <:< A) {def apply(x: A) = x} // not in the <:< companion object because it is also intended to subsume identity (which is no longer implicit)
+
+ sealed abstract class =:=[From, To] extends (From => To)
+ object =:= {
+ implicit def tpEquals[A]: A =:= A = new (A =:= A) {def apply(x: A) = x}
+ }
+
+ sealed abstract class <%<[-From, +To] extends (From => To)
+ object <%< {
+ implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
+ }
- /** A type for which there is aways an implicit value.
+ /** A type for which there is always an implicit value.
* @see fallbackCanBuildFrom in Array.scala
*/
class DummyImplicit
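
For illustration only (not part of the patch): a sketch of the contract helpers and generalized type constraints documented above; divide and firstOfPair are hypothetical examples.

// require blames the caller; ensuring (via the Ensuring wrapper) checks the result.
def divide(a: Int, b: Int): Int = {
  require(b != 0, "divisor must be nonzero")
  (a / b) ensuring (q => q * b + a % b == a)
}

// =:= demands evidence that a type parameter is exactly Int at the call site.
def firstOfPair[A, B](p: (A, B))(implicit ev: A =:= Int): Int = ev(p._1)
firstOfPair((1, "one"))       // compiles: A is Int
// firstOfPair(("1", "one")) would not compile: no String =:= Int evidence
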
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index ce8f733771..b7dd6cf728 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -20,7 +19,7 @@ package scala
*/
trait Product extends Equals {
- /** for a product <code>A(x_1,...,x_k)</code>, returns <code>x_(n+1)</code>
+ /** For a product <code>A(x_1,...,x_k)</code>, returns <code>x_(n+1)</code>
* for <code>0 &lt;= n &lt; k</code>
*
* @param n the index of the element to return
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index 72f16accdd..f90d720c33 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product1 {
def unapply[T1](x: Product1[T1]): Option[Product1[T1]] =
Some(x)
@@ -22,7 +22,7 @@ object Product1 {
*
* @since 2.3
*/
-trait Product1[+T1] extends Product {
+trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
/**
* The arity of this product.
* @return 1
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index bd786e0a07..3cdcaf190c 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product10 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](x: Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]): Option[Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] =
Some(x)
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index ff56827537..20004258dc 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product11 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](x: Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]): Option[Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] =
Some(x)
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index 0434ffc205..0f069a2973 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product12 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](x: Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]): Option[Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] =
Some(x)
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index 992992cb10..5abae53eae 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product13 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](x: Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]): Option[Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] =
Some(x)
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 1966400eb0..818d80ae04 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product14 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](x: Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]): Option[Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]] =
Some(x)
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index 2683dbe549..e36985614d 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product15 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](x: Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]): Option[Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]] =
Some(x)
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index 119cfe759b..6881b33227 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product16 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](x: Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]): Option[Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]] =
Some(x)
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index 46f02abaa2..727c90c0fc 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product17 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](x: Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]): Option[Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]] =
Some(x)
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 481824c61f..4f05b54a08 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product18 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](x: Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]): Option[Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]] =
Some(x)
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index 28db9c74ee..ada6f9e950 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product19 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](x: Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]): Option[Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] =
Some(x)
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index 81bde9495c..9811a7b110 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product2 {
def unapply[T1, T2](x: Product2[T1, T2]): Option[Product2[T1, T2]] =
Some(x)
@@ -22,7 +22,7 @@ object Product2 {
*
* @since 2.3
*/
-trait Product2[+T1, +T2] extends Product {
+trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Product {
/**
* The arity of this product.
* @return 2
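The hunk above adds `@specialized(Int, Long, Double)` to Product2's type parameters, so the compiler also emits primitive variants of `_1`/`_2` and avoids boxing for those element types. A minimal sketch of the companion's unapply in use, runnable in a 2.8 REPL; the values are illustrative:

    // Tuple2 extends Product2, so any pair matches the extractor defined above.
    val p: Product2[Int, String] = (1, "one")
    Product2.unapply(p) match {
      case Some(q) => println(q._1 + " / " + q._2)   // prints: 1 / one
      case None    => ()
    }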
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index aa79b8d429..9b6bb09892 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product20 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](x: Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]): Option[Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] =
Some(x)
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index fa4d2bb9e4..bf834c2571 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product21 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](x: Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]): Option[Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]] =
Some(x)
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 57ee0ac4a2..1b32785ec1 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product22 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](x: Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]): Option[Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]] =
Some(x)
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index d26932773a..f366782268 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product3 {
def unapply[T1, T2, T3](x: Product3[T1, T2, T3]): Option[Product3[T1, T2, T3]] =
Some(x)
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 074c6865d6..dbd96da077 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product4 {
def unapply[T1, T2, T3, T4](x: Product4[T1, T2, T3, T4]): Option[Product4[T1, T2, T3, T4]] =
Some(x)
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index 96396bdc78..9a34d38722 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product5 {
def unapply[T1, T2, T3, T4, T5](x: Product5[T1, T2, T3, T4, T5]): Option[Product5[T1, T2, T3, T4, T5]] =
Some(x)
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 678a22254f..50eb0a4532 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product6 {
def unapply[T1, T2, T3, T4, T5, T6](x: Product6[T1, T2, T3, T4, T5, T6]): Option[Product6[T1, T2, T3, T4, T5, T6]] =
Some(x)
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index 8885dccf77..c7b7919687 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product7 {
def unapply[T1, T2, T3, T4, T5, T6, T7](x: Product7[T1, T2, T3, T4, T5, T6, T7]): Option[Product7[T1, T2, T3, T4, T5, T6, T7]] =
Some(x)
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 9150bf64b7..e1afb0d007 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product8 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8](x: Product8[T1, T2, T3, T4, T5, T6, T7, T8]): Option[Product8[T1, T2, T3, T4, T5, T6, T7, T8]] =
Some(x)
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index 2cdc08d8e4..0f904c6e3d 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Nov 04 18:46:21 CET 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
object Product9 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9](x: Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]): Option[Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] =
Some(x)
diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala
index b8b48c7d81..feac5904a5 100644
--- a/src/library/scala/Proxy.scala
+++ b/src/library/scala/Proxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -22,7 +21,7 @@ package scala
*/
trait Proxy {
def self: Any
- override def hashCode: Int = self.hashCode
+ override def hashCode: Int = self.##
override def equals(that: Any): Boolean =
if(that == null) false
else that equals self
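Switching Proxy from `self.hashCode` to `self.##` makes the proxy's hash null-safe and consistent with boxed-primitive equality. A small illustration of the difference, with made-up values:

    val s: String = null
    println(s.##)             // 0 -- whereas s.hashCode would throw a NullPointerException
    println(1.## == 1L.##)    // true: Int 1 and Long 1 hash consistently under ##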
diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala
index f628b08b9e..f86f883894 100644
--- a/src/library/scala/Responder.scala
+++ b/src/library/scala/Responder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -67,6 +66,7 @@ object Responder {
* @version 1.0
* @since 2.1
*/
+@serializable
abstract class Responder[+A] {
def respond(k: A => Unit): Unit
@@ -90,5 +90,7 @@ abstract class Responder[+A] {
Responder.this.respond(x => if (p(x)) k(x) else ())
}
}
+
+ override def toString = "Responder"
}
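Responder is a small continuation-passing abstraction: `respond` feeds the produced value to a continuation, and `map`/`flatMap`/`filter` let it take part in for-comprehensions. A hedged sketch of typical use, with an illustrative responder rather than anything from the library:

    val answer = new Responder[Int] {
      def respond(k: Int => Unit): Unit = k(42)
    }
    answer.map(_ + 1).respond(println)   // prints 43
    println(answer)                      // prints "Responder", via the toString added above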
diff --git a/src/library/scala/ScalaObject.scala b/src/library/scala/ScalaObject.scala
index a7af5426eb..fe6778792f 100644
--- a/src/library/scala/ScalaObject.scala
+++ b/src/library/scala/ScalaObject.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala
index dd0eabf2e9..0a2e29c934 100644
--- a/src/library/scala/SerialVersionUID.scala
+++ b/src/library/scala/SerialVersionUID.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
*/
-// $Id$
package scala
diff --git a/src/library/scala/StaticAnnotation.scala b/src/library/scala/StaticAnnotation.scala
index 936c36c25f..a1c3828173 100644
--- a/src/library/scala/StaticAnnotation.scala
+++ b/src/library/scala/StaticAnnotation.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala
index 610cc839c3..9463a186b6 100644
--- a/src/library/scala/Symbol.scala
+++ b/src/library/scala/Symbol.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -48,11 +47,10 @@ object Symbol extends UniquenessCache[String, Symbol]
* abstracted to offer some hope of reusability. */
private[scala] abstract class UniquenessCache[K, V >: Null]
{
- import java.lang.ref.{ ReferenceQueue, WeakReference }
+ import java.lang.ref.WeakReference
import java.util.WeakHashMap
import java.util.concurrent.locks.ReentrantReadWriteLock
- private val queue = new ReferenceQueue[V]
private val rwl = new ReentrantReadWriteLock()
private val rlock = rwl.readLock
private val wlock = rwl.writeLock
@@ -78,7 +76,7 @@ private[scala] abstract class UniquenessCache[K, V >: Null]
if (res != null) res
else {
val sym = valueFromKey(name)
- map.put(name, new WeakReference(sym, queue))
+ map.put(name, new WeakReference(sym))
sym
}
}
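The Symbol change drops the unused ReferenceQueue and stores plain WeakReferences in the WeakHashMap. A simplified sketch of the same interning idea; the Interner name is invented, and the real UniquenessCache uses a ReentrantReadWriteLock rather than `synchronized`:

    import java.lang.ref.WeakReference
    import java.util.WeakHashMap

    class Interner[K, V >: Null](make: K => V) {
      private val map = new WeakHashMap[K, WeakReference[V]]

      def apply(key: K): V = synchronized {
        val ref    = map.get(key)
        val cached = if (ref == null) null else ref.get
        if (cached != null) cached
        else {
          val fresh = make(key)
          map.put(key, new WeakReference(fresh))  // no ReferenceQueue needed, as in the patch
          fresh
        }
      }
    }

    val symbols = new Interner[String, Symbol](Symbol.apply)
    println(symbols("a") eq symbols("a"))   // true: same interned instance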
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 050668708e..74de5bf688 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -1,23 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple1 is the canonical representation of a @see Product1
*
*/
-case class Tuple1[+T1](_1:T1)
+case class Tuple1[@specialized(Int, Long, Double) +T1](_1:T1)
extends Product1[T1]
{
override def toString() = "(" + _1 + ")"
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index 7cd4672255..5fc0e9fec3 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple10 is the canonical representation of a @see Product10
*
*/
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 84e2d5b07e..c27cddfcc4 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple11 is the canonical representation of a @see Product11
*
*/
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index d768fca65e..0d78fd7e46 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple12 is the canonical representation of a @see Product12
*
*/
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 2a7b17632e..955a8e914a 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple13 is the canonical representation of a @see Product13
*
*/
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index f88b212a00..524166896f 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple14 is the canonical representation of a @see Product14
*
*/
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index 664b110d4b..2b88e353bd 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple15 is the canonical representation of a @see Product15
*
*/
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index d3b646fa6a..ace0d731c0 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple16 is the canonical representation of a @see Product16
*
*/
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index f27ed64e6a..a2e77aa9cb 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple17 is the canonical representation of a @see Product17
*
*/
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index a66ccabfb8..58835c861f 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple18 is the canonical representation of a @see Product18
*
*/
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index e66c4e65c1..abb4f248ec 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple19 is the canonical representation of a @see Product19
*
*/
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index 8a08552149..39d8f6bb1d 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -1,38 +1,35 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009 (with extra methods)
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
-import scala.collection.{TraversableLike, Traversable, IterableLike}
+import scala.collection.{TraversableLike, IterableLike, IndexedSeqLike}
import scala.collection.generic.CanBuildFrom
+
+
/** Tuple2 is the canonical representation of a @see Product2
*
*/
-case class Tuple2[+T1, +T2](_1:T1, _2:T2) extends Product2[T1, T2] {
- override def toString() = {
- val sb = new StringBuilder
- sb.append('(').append(_1).append(',').append(_2).append(')')
- sb.toString
- }
+case class Tuple2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2](_1:T1,_2:T2)
+ extends Product2[T1, T2]
+{
+ override def toString() = "(" + _1 + "," + _2 + ")"
/** Swap the elements of the tuple */
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
-// TODO: probably loosen zip and zipped from <:< to <%<
-
- def zip[Repr1, El1, El2, To](implicit w1: T1 <:< TraversableLike[El1, Repr1],
- w2: T2 <:< Iterable[El2],
+ def zip[Repr1, El1, El2, To](implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => Iterable[El2],
cbf1: CanBuildFrom[Repr1, (El1, El2), To]): To = {
val coll1: TraversableLike[El1, Repr1] = _1
val coll2: Iterable[El2] = _2
@@ -46,19 +43,24 @@ case class Tuple2[+T1, +T2](_1:T1, _2:T2) extends Product2[T1, T2] {
b1.result
}
- def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 <:< TraversableLike[El1, Repr1], w2: T2 <:< IterableLike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
- = new Zipped[Repr1, El1, Repr2, El2](_1, _2)
+ /** Wraps a tuple in a `Zipped`, which supports 2-ary generalisations of map, flatMap, filter,...
+ *
+ * @see Zipped
+ * $willNotTerminateInf
+ */
+ def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TraversableLike[El1, Repr1], w2: T2 => IterableLike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
+ = new Zipped[Repr1, El1, Repr2, El2](_1, _2)
class Zipped[+Repr1, +El1, +Repr2, +El2](coll1: TraversableLike[El1, Repr1], coll2: IterableLike[El2, Repr2]) { // coll2: IterableLike for filter
def map[B, To](f: (El1, El2) => B)(implicit cbf: CanBuildFrom[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
-
- for(el1 <- coll1)
+ val b = cbf(coll1.repr)
+ b.sizeHint(coll1)
+ val elems2 = coll2.iterator
+ for(el1 <- coll1)
if(elems2.hasNext)
b += f(el1, elems2.next)
- b.result
+ b.result
}
def flatMap[B, To](f: (El1, El2) => Traversable[B])(implicit cbf: CanBuildFrom[Repr1, B, To]): To = {
@@ -120,4 +122,5 @@ case class Tuple2[+T1, +T2](_1:T1, _2:T2) extends Product2[T1, T2] {
f(el1, elems2.next)
}
}
+
}
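Two things change in Tuple2 besides the regeneration: the type parameters are specialized, and `zip`/`zipped` now take implicit views (`T1 => TraversableLike[...]`) instead of `<:<` evidence, which admits types that are merely convertible to collections. A short sketch of `zipped` with illustrative data:

    val xs = List(1, 2, 3)
    val ys = List(10, 20, 30)

    // zipped pairs the two collections element-wise without first building a list of tuples
    println((xs, ys).zipped.map(_ + _))   // List(11, 22, 33)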
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index fb2e8d6402..683c04de69 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple20 is the canonical representation of a @see Product20
*
*/
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index a0a2935997..6f20fca435 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple21 is the canonical representation of a @see Product21
*
*/
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index a1322744e0..b8a8a624d1 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple22 is the canonical representation of a @see Product22
*
*/
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 42418ecdd5..1621d94968 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -1,21 +1,22 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010 (with extra methods)
package scala
-import scala.collection.{TraversableLike, Traversable, IterableLike}
+import scala.collection.{TraversableLike, IterableLike}
import scala.collection.generic.CanBuildFrom
+
+
/** Tuple3 is the canonical representation of a @see Product3
*
*/
@@ -24,11 +25,9 @@ case class Tuple3[+T1, +T2, +T3](_1:T1,_2:T2,_3:T3)
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
-// TODO: probably loosen zip and zipped from <:< to <%<
-
- def zip[Repr1, El1, El2, El3, To](implicit w1: T1 <:< TraversableLike[El1, Repr1],
- w2: T2 <:< Iterable[El2],
- w3: T3 <:< Iterable[El3],
+ def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => Iterable[El2],
+ w3: T3 => Iterable[El3],
cbf1: CanBuildFrom[Repr1, (El1, El2, El3), To]): To = {
val coll1: TraversableLike[El1, Repr1] = _1
val coll2: Iterable[El2] = _2
@@ -44,10 +43,15 @@ case class Tuple3[+T1, +T2, +T3](_1:T1,_2:T2,_3:T3)
b1.result
}
- def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 <:< TraversableLike[El1, Repr1],
- w2: T2 <:< IterableLike[El2, Repr2],
- w3: T3 <:< IterableLike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
- = new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
+ /** Wraps a tuple in a `Zipped`, which supports 3-ary generalisations of map, flatMap, filter,...
+ *
+ * @see Zipped
+ * $willNotTerminateInf
+ */
+ def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => IterableLike[El2, Repr2],
+ w3: T3 => IterableLike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
+ = new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
class Zipped[+Repr1, +El1, +Repr2, +El2, +Repr3, +El3](coll1: TraversableLike[El1, Repr1],
coll2: IterableLike[El2, Repr2],
@@ -134,4 +138,5 @@ case class Tuple3[+T1, +T2, +T3](_1:T1,_2:T2,_3:T3)
f(el1, elems2.next, elems3.next)
}
}
+
}
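Tuple3 gets the same treatment, with a 3-ary `zipped`. The analogous sketch:

    val as = List(1, 2, 3)
    val bs = List(4, 5, 6)
    val cs = List(7, 8, 9)
    println((as, bs, cs).zipped.map(_ + _ + _))   // List(12, 15, 18)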
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index 4c67ee70d3..79fc125502 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple4 is the canonical representation of a @see Product4
*
*/
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index b7b8d9402f..b441fd8961 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple5 is the canonical representation of a @see Product5
*
*/
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 5263180f14..0e2d0e277e 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple6 is the canonical representation of a @see Product6
*
*/
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index 94e6d12737..9f62c1900a 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple7 is the canonical representation of a @see Product7
*
*/
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index aed114f431..9891797aa4 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple8 is the canonical representation of a @see Product8
*
*/
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 44a680baca..28a39be444 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-// generated by genprod on Wed Jun 17 14:10:05 PDT 2009
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
+
/** Tuple9 is the canonical representation of a @see Product9
*
*/
diff --git a/src/library/scala/TypeConstraint.scala b/src/library/scala/TypeConstraint.scala
index 45abf9beb3..1cb7e55823 100644
--- a/src/library/scala/TypeConstraint.scala
+++ b/src/library/scala/TypeConstraint.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/UninitializedError.scala b/src/library/scala/UninitializedError.scala
index 00d4dbe4ba..f130c7c0c4 100644
--- a/src/library/scala/UninitializedError.scala
+++ b/src/library/scala/UninitializedError.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala
index 730b6d32d5..1d6799ec6f 100644
--- a/src/library/scala/UninitializedFieldError.scala
+++ b/src/library/scala/UninitializedFieldError.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index a58b696844..c75299e9fd 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,18 +13,18 @@ import java.util.logging.Level
/** An annotation for methods for which invocations might
* be removed in the generated code.
*
- * Behavior is influenced by passing -Xelide-level <arg>
+ * Behavior is influenced by passing -Xelide-below <arg>
* to scalac. Methods marked elidable will be omitted from
* generated code if the priority given the annotation is lower
* than to the command line argument. Examples:
- *
+ * {{{
* import annotation.elidable._
*
* @elidable(WARNING) def foo = log("foo")
* @elidable(FINE) def bar = log("bar")
*
- * scalac -Xelide-methods-below=1000
- *
+ * scalac -Xelide-below=1000
+ * }}}
* @since 2.8
*/
final class elidable(final val level: Int) extends StaticAnnotation {}
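Following the corrected documentation above, a minimal sketch of elidable logging; `Logging` and the messages are invented, and elision only happens when the scalac flag is passed:

    import scala.annotation.elidable
    import scala.annotation.elidable._   // the WARNING, FINE, ... level constants

    object Logging {
      @elidable(FINE)    def debug(msg: String) = println("debug: " + msg)
      @elidable(WARNING) def warn(msg: String)  = println("warn:  " + msg)
    }

    // scalac -Xelide-below=1000 ...     // as in the doc: calls to both methods are dropped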
diff --git a/src/library/scala/annotation/experimental.scala b/src/library/scala/annotation/implicitNotFound.scala
index 43ce631e2d..5d9b29c5f8 100644
--- a/src/library/scala/annotation/experimental.scala
+++ b/src/library/scala/annotation/implicitNotFound.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+
package scala.annotation
-/** <p>
- * An annotation for experimental features.
- * </p>
+/**
+ * An annotation that specifies the error message that is emitted when the compiler
+ * cannot find an implicit value of the annotated type.
*
- * @since 2.8
+ * @author Adriaan Moors
+ * @since 2.8.1
*/
-@experimental // and an experiment which may soon be ending
-final class experimental(message: String) extends StaticAnnotation {
- def this() = this("")
-}
+final class implicitNotFound(msg: String) extends StaticAnnotation {}
\ No newline at end of file
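A hedged sketch of the new annotation in user code; `JsonWriter` is a made-up type class, and the `${T}` placeholder is replaced with the inferred type argument when the error is reported:

    import scala.annotation.implicitNotFound

    @implicitNotFound(msg = "No JsonWriter available for ${T}.")
    trait JsonWriter[T] { def write(value: T): String }

    def toJson[T](value: T)(implicit w: JsonWriter[T]): String = w.write(value)

    // toJson(42)   // without a JsonWriter[Int] in scope the compiler now reports:
    //              // "No JsonWriter available for Int."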
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
new file mode 100644
index 0000000000..b0915cde34
--- /dev/null
+++ b/src/library/scala/annotation/migration.scala
@@ -0,0 +1,28 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.annotation
+
+/**
+ * An annotation that marks a member as having changed semantics
+ * between versions. This is intended for methods which for one
+ * reason or another retain the same name and type signature,
+ * but some aspect of their behavior is different. An illustrative
+ * examples is Stack.iterator, which reversed from LIFO to FIFO
+ * order between scala 2.7 and 2.8.
+ *
+ * The version numbers are to mark the scala major/minor release
+ * version where the change took place.
+ *
+ * @since 2.8
+ */
+private[scala] final class migration(
+ majorVersion: Int,
+ minorVersion: Int,
+ message: String)
+extends StaticAnnotation {}
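Since the annotation is `private[scala]`, only code compiled inside the scala package can apply it; a hypothetical sketch of such a use, with the class and message invented for illustration:

    package scala.example {
      import scala.annotation.migration

      class FifoStack[A](elems: List[A]) {
        @migration(2, 8, "iterator now returns elements in FIFO rather than LIFO order")
        def iterator: Iterator[A] = elems.iterator
      }
    }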
diff --git a/src/library/scala/annotation/switch.scala b/src/library/scala/annotation/switch.scala
index 3bb2a9d211..a34436f503 100644
--- a/src/library/scala/annotation/switch.scala
+++ b/src/library/scala/annotation/switch.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/annotation/tailrec.scala b/src/library/scala/annotation/tailrec.scala
index 6773b206c2..5e59c9fbf8 100644
--- a/src/library/scala/annotation/tailrec.scala
+++ b/src/library/scala/annotation/tailrec.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/annotation/target/beanGetter.scala b/src/library/scala/annotation/target/beanGetter.scala
index cc39a32874..e815e697ad 100644
--- a/src/library/scala/annotation/target/beanGetter.scala
+++ b/src/library/scala/annotation/target/beanGetter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class beanGetter extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/beanSetter.scala b/src/library/scala/annotation/target/beanSetter.scala
index 5f1513fd51..d7199694e5 100644
--- a/src/library/scala/annotation/target/beanSetter.scala
+++ b/src/library/scala/annotation/target/beanSetter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class beanSetter extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/field.scala b/src/library/scala/annotation/target/field.scala
index 19533c6908..94e82bcbe2 100644
--- a/src/library/scala/annotation/target/field.scala
+++ b/src/library/scala/annotation/target/field.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class field extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/getter.scala b/src/library/scala/annotation/target/getter.scala
index 45e2a8ac4b..2d5e856173 100644
--- a/src/library/scala/annotation/target/getter.scala
+++ b/src/library/scala/annotation/target/getter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class getter extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/param.scala b/src/library/scala/annotation/target/param.scala
new file mode 100644
index 0000000000..317080f2cb
--- /dev/null
+++ b/src/library/scala/annotation/target/param.scala
@@ -0,0 +1,51 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.target
+
+/**
+ * For every field of a class, the Scala compiler generates up to four
+ * synthetic accessors: getter, setter, bean getter and bean setter.
+ * The meta-annotations in package {{{scala.annotation.target}}} are
+ * used to control to which of the above members the annotations on
+ * the field are copied. By default, field annotations are only added
+ * to the actual field, but not to any of the accessors. By annotating
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
+ *
+ * In the following example, the annotation {{{@Id}}} will be added
+ * only to the bean getter {{{getX}}}. In order to annotate the field
+ * as well, the meta-annotation {{{@field}}} would need to be added.
+ *
+ * {{{
+ * import javax.persistence.Id
+ * class A {
+ * @(Id @beanGetter) @BeanProperty val x = 0
+ * }
+ * }}}
+ *
+ * The syntax can be improved using a type alias:
+ *
+ * {{{
+ * object ScalaJPA {
+ * type Id = javax.persistence.Id @beanGetter
+ * }
+ * import ScalaJPA.Id
+ * class A {
+ * @Id @BeanProperty val x = 0
+ * }
+ * }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
+ */
+final class param extends StaticAnnotation
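The `@param` target is the new one in this set; presumably it can also serve as the default target described above, directing a Scala-defined annotation at the underlying constructor parameter rather than the generated field or accessors. A hedged sketch with invented annotation and class names:

    import scala.annotation.target.param

    @param
    class fromConfig extends StaticAnnotation   // default target: the constructor parameter

    class Server(@fromConfig val port: Int)     // @fromConfig lands on the parameter, not the field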
diff --git a/src/library/scala/annotation/target/setter.scala b/src/library/scala/annotation/target/setter.scala
index 6f270a6116..4e0758b6eb 100644
--- a/src/library/scala/annotation/target/setter.scala
+++ b/src/library/scala/annotation/target/setter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class setter extends StaticAnnotation
diff --git a/src/library/scala/annotation/unchecked/uncheckedStable.scala b/src/library/scala/annotation/unchecked/uncheckedStable.scala
index da4f219dfd..a778f45147 100644
--- a/src/library/scala/annotation/unchecked/uncheckedStable.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedStable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/annotation/unchecked/uncheckedVariance.scala b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
index 878dd31766..52f4fb5cc0 100644
--- a/src/library/scala/annotation/unchecked/uncheckedVariance.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/cloneable.scala b/src/library/scala/cloneable.scala
index 37684f1e66..9becd3f231 100644
--- a/src/library/scala/cloneable.scala
+++ b/src/library/scala/cloneable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala
index 1f7e2d4e86..9a56b47e93 100644
--- a/src/library/scala/collection/BitSet.scala
+++ b/src/library/scala/collection/BitSet.scala
@@ -1,32 +1,34 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
-/** common base class for mutable and immutable bit sets
- *
- * @since 1
+/** A common base class for mutable and immutable bitsets.
+ * $bitsetinfo
*/
trait BitSet extends Set[Int]
with BitSetLike[BitSet] {
override def empty: BitSet = BitSet.empty
}
-/** A factory object for bitsets
- *
- * @since 2.8
+/** $factoryInfo
+ * @define coll bitset
+ * @define Coll BitSet
*/
object BitSet extends BitSetFactory[BitSet] {
val empty: BitSet = immutable.BitSet.empty
+ def newBuilder = immutable.BitSet.newBuilder
+
+ /** $canBuildFromInfo */
+ implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
}
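With `newBuilder` and the implicit `CanBuildFrom` added to the companion, transformations on bitsets stay bitsets. A short sketch with illustrative values:

    import scala.collection.BitSet

    val small = BitSet(1, 3, 5)
    println(small map (_ * 2))        // BitSet(2, 6, 10) -- kept a BitSet by canBuildFrom

    val b = BitSet.newBuilder
    b += 7
    b += 2
    println(b.result)                 // BitSet(2, 7)

    println(small | BitSet(2, 3))     // union:        BitSet(1, 2, 3, 5)
    println(small & BitSet(2, 3))     // intersection: BitSet(3)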
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index afcd2da938..0202c81513 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,9 +14,23 @@ import BitSetLike._
import generic._
import mutable.StringBuilder
-/** common base class for mutable and immutable bit sets
+/** A template trait for bitsets.
+ * $bitsetinfo
*
+ * This trait provides most of the operations of a `BitSet` independently of its representation.
+ * It is inherited by all concrete implementations of bitsets.
+ *
+ * @tparam This the type of the bitset itself.
+ *
+ * @define bitsetinfo
+ * Bitsets are sets of non-negative integers which are represented as
+ * variable-size arrays of bits packed into 64-bit words. The memory footprint of a bitset is
+ * determined by the largest number stored in it.
+ * @author Martin Odersky
+ * @version 2.8
* @since 2.8
+ * @define coll bitset
+ * @define Coll BitSet
*/
trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, This] { self =>
@@ -27,16 +40,14 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
protected def nwords: Int
/** The word at index `idx', or 0L if outside the range of the set
- * @pre idx >= 0
+ * '''Note:''' requires `idx >= 0`
*/
protected def word(idx: Int): Long
- /** Create a new set of this kind from an array of longs
+ /** Creates a new set of this kind from an array of longs
*/
protected def fromArray(elems: Array[Long]): This
- /** The number of elements in the bitset.
- */
override def size: Int = {
var s = 0
var i = nwords
@@ -68,7 +79,12 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
}
}
- /** A new bitset which is the logical or of this set and the given argument set.
+ /** Computes the union between this bitset and another bitset by performing
+ * a bitwise "or".
+ *
+ * @param other the bitset to form the union with.
+ * @return a new bitset consisting of all bits that are in this
+ * bitset or in the given bitset `other`.
*/
def | (other: BitSet): This = {
val len = this.nwords max other.nwords
@@ -78,7 +94,11 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
fromArray(words)
}
- /** A new bitset which is the logical and of this set and the given argument set.
+ /** Computes the intersection between this bitset and another bitset by performing
+ * a bitwise "and".
+ * @param other the bitset to intersect with.
+ * @return a new bitset consisting of all elements that are both in this
+ * bitset and in the given bitset `other`.
*/
def & (other: BitSet): This = {
val len = this.nwords min other.nwords
@@ -88,7 +108,12 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
fromArray(words)
}
- /** A new bitset which is the logical and-not of this set and the given argument set.
+ /** Computes the difference of this bitset and another bitset by performing
+ * a bitwise "and-not".
+ *
+ * @param other the set of bits to exclude.
+ * @return a bitset containing those bits of this
+ * bitset that are not also contained in the given bitset `other`.
*/
def &~ (other: BitSet): This = {
val len = this.nwords
@@ -98,7 +123,12 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
fromArray(words)
}
- /** A new bitset which is the logical exclusive or of this set and the given argument set.
+ /** Computes the symmetric difference of this bitset and another bitset by performing
+ * a bitwise "exclusive-or".
+ *
+ * @param other the other bitset to take part in the symmetric difference.
+ * @return a bitset containing those bits of this
+ * bitset or the other bitset that are not contained in both bitsets.
*/
def ^ (other: BitSet): This = {
val len = this.nwords max other.nwords
@@ -108,18 +138,18 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
fromArray(words)
}
- /** Does the set contain the given element?
- */
def contains(elem: Int): Boolean =
0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L
- /** Is the set a subset of the given bitset
+ /** Tests whether this bitset is a subset of another bitset.
+ *
+ * @param other the bitset to test.
+ * @return `true` if this bitset is a subset of `other`, i.e. if
+ * every element of this bitset is also an element of `other`.
*/
- def subSet(other: BitSet): Boolean =
+ def subsetOf(other: BitSet): Boolean =
(0 until nwords) forall (idx => (this.word(idx) & ~ other.word(idx)) == 0L)
- /** Add bitset elements as numbers to string buffer
- */
override def addString(sb: StringBuilder, start: String, sep: String, end: String) = {
sb append start
var pre = ""
@@ -134,6 +164,7 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
override def stringPrefix = "BitSet"
}
+/** Companion object for BitSets. Contains private data only */
object BitSetLike {
private[collection] val LogWL = 6
private val WordLength = 64
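Since the reworked Scaladoc above describes `|`, `&`, `&~`, `^`, and the renamed `subsetOf` in terms of word-wise bit operations, here is a small sketch of the resulting set semantics (the values in the comments are the expected results):

    import scala.collection.immutable.BitSet

    object BitSetOpsDemo {
      def main(args: Array[String]): Unit = {
        val a = BitSet(1, 2, 3, 100)
        val b = BitSet(2, 3, 4)
        println(a | b)    // union, bitwise "or":            BitSet(1, 2, 3, 4, 100)
        println(a & b)    // intersection, bitwise "and":    BitSet(2, 3)
        println(a &~ b)   // difference, bitwise "and-not":  BitSet(1, 100)
        println(a ^ b)    // symmetric difference, "xor":    BitSet(1, 4, 100)
        println(BitSet(2, 3) subsetOf a)   // true
      }
    }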
diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala
index 76e426d9b4..ab56d9377f 100644
--- a/src/library/scala/collection/BufferedIterator.scala
+++ b/src/library/scala/collection/BufferedIterator.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index 87ae659a3b..046e96d9b6 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,7 +13,7 @@ package scala.collection
import generic._
/** <p>
- * A default map which implements the <code>updated</code> and <code>-</code>
+ * A default map which implements the <code>+</code> and <code>-</code>
* methods of maps.<br/>
* Instances that inherit from <code>DefaultMap[A, B]</code> still have to
* define:
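A hedged sketch of the implementation pattern this documentation describes (class name hypothetical): under the 2.8-era API, a `DefaultMap` subclass supplies only `get` and `iterator` and inherits generic `+` and `-`:

    import scala.collection.DefaultMap

    // A read-only map view of the squares 1*1 .. n*n, keyed by 1..n.
    class Squares(n: Int) extends DefaultMap[Int, Int] {
      def get(key: Int): Option[Int] =
        if (key >= 1 && key <= n) Some(key * key) else None
      def iterator: Iterator[(Int, Int)] =
        Iterator.range(1, n + 1) map (i => (i, i * i))
    }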
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index bb86761f71..8c2eef65cd 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id: IndexedSeq.scala 19035 2009-10-10 22:54:28Z rompf $
package scala.collection
@@ -14,18 +13,8 @@ package scala.collection
import generic._
import mutable.Builder
-/** <p>
- * Sequences that support O(1) element access and O(1) length computation.
- * </p>
- * <p>
- * This class does not add any methods to <code>Sequence</code> but
- * overrides several methods with optimized implementations.
- * </p>
- *
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A base trait for indexed sequences.
+ * $indexedSeqInfo
*/
trait IndexedSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, IndexedSeq]
@@ -33,6 +22,11 @@ trait IndexedSeq[+A] extends Seq[A]
override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
}
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `Vector`.
+ * @define coll indexed sequence
+ * @define Coll IndexedSeq
+ */
object IndexedSeq extends SeqFactory[IndexedSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 74d872665d..1ad1da715d 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,23 +14,39 @@ import generic._
import mutable.ArrayBuffer
import scala.annotation.tailrec
-/** Sequences that support O(1) element access and O(1) length computation.
- * This class does not add any methods to Seq but overrides several
- * methods with optimized implementations.
+/** A template trait for indexed sequences of type `IndexedSeq[A]`.
*
- * @author Sean McDirmid
+ * $indexedSeqInfo
+ *
+ * This trait just implements `iterator` in terms of `apply` and `length`.
+ * However, see `IndexedSeqOptimized` for an implementation trait that overrides operations
+ * to make them run faster under the assumption of fast random access with `apply`.
+ *
+ * @define Coll IndexedSeq
+ * @define indexedSeqInfo
+ * Indexed sequences support constant-time or near constant-time element
+ * access and length computation. They are defined in terms of abstract methods
+ * `apply` for indexing and `length`.
+ *
+ * Indexed sequences do not add any new methods with respect to `Seq`, but promise
+ * efficient implementations of random access patterns.
+ *
+ * @tparam A the element type of the $coll
+ * @tparam Repr the type of the actual $coll containing the elements.
* @author Martin Odersky
* @version 2.8
* @since 2.8
+ * @define willNotTerminateInf
+ * @define mayNotTerminateInf
*/
trait IndexedSeqLike[+A, +Repr] extends SeqLike[A, Repr] { self =>
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
- // Overridden methods from IterableLike
-
- /** The iterator returned by the iterator method
+ /** The class of the iterator returned by the `iterator` method.
+ * Multiple `take`, `drop`, and `slice` operations on this iterator are bunched
+ * together for better efficiency.
*/
@serializable @SerialVersionUID(1756321872811029277L)
protected class Elements(start: Int, end: Int) extends BufferedIterator[A] {
@@ -49,225 +64,34 @@ trait IndexedSeqLike[+A, +Repr] extends SeqLike[A, Repr] { self =>
def head =
if (i < end) self(i) else Iterator.empty.next
- /** drop is overridden to enable fast searching in the middle of random access sequences */
+ /** $super
+ * '''Note:''' `drop` is overridden to enable fast searching in the middle of indexed sequences.
+ */
override def drop(n: Int): Iterator[A] =
- if (n > 0) new Elements(start + n, end) else this
+ if (n > 0) new Elements(i + n, end) else this
- /** take is overridden to be symmetric to drop */
+ /** $super
+ * '''Note:''' `take` is overridden to be symmetric to `drop`.
+ */
override def take(n: Int): Iterator[A] =
if (n <= 0) Iterator.empty.buffered
- else if (start + n < end) new Elements(start, start + n)
+ else if (i + n < end) new Elements(i, i + n)
else this
}
- override def iterator: Iterator[A] = new Elements(0, length)
-
- override def isEmpty: Boolean = { length == 0 }
-
- override def foreach[U](f: A => U): Unit = {
- var i = 0
- val len = length
- while (i < len) { f(this(i)); i += 1 }
- }
-
- override def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length
- override def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length
-
- override def find(p: A => Boolean): Option[A] = {
- val i = prefixLength(!p(_))
- if (i < length) Some(this(i)) else None
- }
-
- @tailrec
- private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B =
- if (start == end) z
- else foldl(start + 1, end, op(z, this(start)), op)
-
- @tailrec
- private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B =
- if (start == end) z
- else foldr(start, end - 1, op(this(end - 1), z), op)
-
- override def foldLeft[B](z: B)(op: (B, A) => B): B =
- foldl(0, length, z, op)
- override def foldRight[B](z: B)(op: (A, B) => B): B =
- foldr(0, length, z, op)
- override def reduceLeft[B >: A](op: (B, A) => B): B =
- if (length > 0) foldl(1, length, this(0), op) else super.reduceLeft(op)
- override def reduceRight[B >: A](op: (A, B) => B): B =
- if (length > 0) foldr(0, length - 1, this(length - 1), op) else super.reduceRight(op)
-
- override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
- case that: IndexedSeq[_] =>
- val b = bf(repr)
- var i = 0
- val len = this.length min that.length
- b.sizeHint(len)
- while (i < len) {
- b += ((this(i), that(i).asInstanceOf[B]))
- i += 1
- }
- b.result
- case _ =>
- super.zip[A1, B, That](that)(bf)
- }
-
- override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
- val b = bf(repr)
- val len = length
- b.sizeHint(len)
- var i = 0
- while (i < len) {
- b += ((this(i), i))
- i += 1
- }
- b.result
- }
-
- override def slice(from: Int, until: Int): Repr = {
- var i = from max 0
- val end = until min length
- val b = newBuilder
- b.sizeHint(end - i)
- while (i < end) {
- b += this(i)
- i += 1
- }
- b.result
- }
-
- override def head: A = if (isEmpty) super.head else this(0)
- override def tail: Repr = if (isEmpty) super.tail else slice(1, length)
- override def last: A = if (length > 0) this(length - 1) else super.last
- override def init: Repr = if (length > 0) slice(0, length - 1) else super.init
- override def take(n: Int): Repr = slice(0, n)
- override def drop(n: Int): Repr = slice(n, length)
- override def takeRight(n: Int): Repr = slice(length - n, length)
- override def dropRight(n: Int): Repr = slice(0, length - n)
- override def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
- override def takeWhile(p: A => Boolean): Repr = take(prefixLength(p))
- override def dropWhile(p: A => Boolean): Repr = drop(prefixLength(p))
- override def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p))
-
- override def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
- case that: IndexedSeq[_] =>
- val len = length
- len == that.length && {
- var i = 0
- while (i < len && this(i) == that(i)) i += 1
- i == len
- }
- case _ =>
- super.sameElements(that)
- }
-
- override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
- var i = 0
- var j = start
- val end = length min len min (xs.length - start)
- while (i < end) {
- xs(j) = this(i)
- i += 1
- j += 1
- }
- }
-
-
- // Overridden methods from Seq
-
- override def lengthCompare(len: Int): Int = length - len
-
- override def segmentLength(p: A => Boolean, from: Int): Int = {
- val start = from
- val len = length
- var i = start
- while (i < len && p(this(i))) i += 1
- i - start
- }
-
- private def negLength(n: Int) = if (n == length) -1 else n
-
- override def indexWhere(p: A => Boolean, from: Int): Int = {
- val start = from max 0
- negLength(start + segmentLength(!p(_), start))
- }
-
- override def lastIndexWhere(p: A => Boolean, end: Int): Int = {
- var i = end
- while (i >= 0 && !p(this(i))) i -= 1
- i
- }
-
- override def reverse: Repr = {
- val b = newBuilder
- b.sizeHint(length)
- var i = length
- while (0 < i) {
- i -= 1
- b += this(i)
- }
- b.result
- }
-
- override def reverseIterator: Iterator[A] = new Iterator[A] {
- private var i = self.length
- def hasNext: Boolean = 0 < i
- def next: A =
- if (0 < i) {
- i -= 1
- self(i)
- } else Iterator.empty.next
- }
-
- override def startsWith[B](that: Seq[B], offset: Int): Boolean = that match {
- case that: IndexedSeq[_] =>
- var i = offset
- var j = 0
- val thisLen = length
- val thatLen = that.length
- while (i < thisLen && j < thatLen && this(i) == that(j)) {
- i += 1
- j += 1
- }
- j == thatLen
- case _ =>
- var i = offset
- val thisLen = length
- val thatElems = that.iterator
- while (i < thisLen && thatElems.hasNext) {
- if (this(i) != thatElems.next())
- return false
-
- i += 1
- }
- !thatElems.hasNext
- }
-
- override def endsWith[B](that: Seq[B]): Boolean = that match {
- case that: IndexedSeq[_] =>
- var i = length - 1
- var j = that.length - 1
-
- (j <= i) && {
- while (j >= 0) {
- if (this(i) != that(j))
- return false
- i -= 1
- j -= 1
- }
- true
- }
- case _ =>
- super.endsWith(that)
- }
-
- override def view = new IndexedSeqView[A, Repr] {
+ override /*IterableLike*/
+ def iterator: Iterator[A] = new Elements(0, length)
+/*
+ override /*SeqLike*/
+ def view = new IndexedSeqView[A, Repr] {
protected lazy val underlying = self.repr
override def iterator = self.iterator
override def length = self.length
override def apply(idx: Int) = self.apply(idx)
}
- override def view(from: Int, until: Int) = view.slice(from, until)
+ override /*SeqLike*/
+ def view(from: Int, until: Int) = view.slice(from, until)
+*/
}
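After this change `IndexedSeqLike` only implements `iterator` in terms of `apply` and `length`, with the index-based `Elements` iterator above handling `take`/`drop`/`slice` cheaply. A minimal sketch of what a concrete indexed sequence still has to provide (class name hypothetical):

    object EvensDemo {
      // An indexed view of the even numbers 0, 2, 4, ...; only length and apply are
      // written here, iterator comes for free from IndexedSeqLike's Elements class.
      class Evens(val length: Int) extends scala.collection.IndexedSeq[Int] {
        def apply(i: Int): Int = 2 * i
      }
      def main(args: Array[String]): Unit = {
        val e = new Evens(10)
        // drop on the Elements iterator just advances the start index, no copying:
        println(e.iterator.drop(7).toList)   // List(14, 16, 18)
      }
    }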
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
new file mode 100755
index 0000000000..6360de33f1
--- /dev/null
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -0,0 +1,287 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+
+import generic._
+import mutable.ArrayBuffer
+import scala.annotation.tailrec
+
+/** A template trait for indexed sequences of type `IndexedSeq[A]` which optimizes
+ * the implementation of several methods under the assumption of fast random access.
+ *
+ * $indexedSeqInfo
+ *
+ * @define willNotTerminateInf
+ * @define mayNotTerminateInf
+ */
+trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
+
+ override /*IterableLike*/
+ def isEmpty: Boolean = { length == 0 }
+
+ override /*IterableLike*/
+ def foreach[U](f: A => U): Unit = {
+ var i = 0
+ val len = length
+ while (i < len) { f(this(i)); i += 1 }
+ }
+
+ override /*IterableLike*/
+ def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length
+
+ override /*IterableLike*/
+ def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length
+
+ override /*IterableLike*/
+ def find(p: A => Boolean): Option[A] = {
+ val i = prefixLength(!p(_))
+ if (i < length) Some(this(i)) else None
+ }
+/*
+ override /*IterableLike*/
+ def mapFind[B](f: A => Option[B]): Option[B] = {
+ var i = 0
+ var res: Option[B] = None
+ val len = length
+ while (res.isEmpty && i < len) {
+ res = f(this(i))
+ i += 1
+ }
+ res
+ }
+*/
+ @tailrec
+ private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B =
+ if (start == end) z
+ else foldl(start + 1, end, op(z, this(start)), op)
+
+ @tailrec
+ private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B =
+ if (start == end) z
+ else foldr(start, end - 1, op(this(end - 1), z), op)
+
+ override /*TraversableLike*/
+ def foldLeft[B](z: B)(op: (B, A) => B): B =
+ foldl(0, length, z, op)
+
+ override /*IterableLike*/
+ def foldRight[B](z: B)(op: (A, B) => B): B =
+ foldr(0, length, z, op)
+
+ override /*TraversableLike*/
+ def reduceLeft[B >: A](op: (B, A) => B): B =
+ if (length > 0) foldl(1, length, this(0), op) else super.reduceLeft(op)
+
+ override /*IterableLike*/
+ def reduceRight[B >: A](op: (A, B) => B): B =
+ if (length > 0) foldr(0, length - 1, this(length - 1), op) else super.reduceRight(op)
+
+ override /*IterableLike*/
+ def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
+ case that: IndexedSeq[_] =>
+ val b = bf(repr)
+ var i = 0
+ val len = this.length min that.length
+ b.sizeHint(len)
+ while (i < len) {
+ b += ((this(i), that(i).asInstanceOf[B]))
+ i += 1
+ }
+ b.result
+ case _ =>
+ super.zip[A1, B, That](that)(bf)
+ }
+
+ override /*IterableLike*/
+ def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
+ val b = bf(repr)
+ val len = length
+ b.sizeHint(len)
+ var i = 0
+ while (i < len) {
+ b += ((this(i), i))
+ i += 1
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def slice(from: Int, until: Int): Repr = {
+ var i = from max 0
+ val end = until min length
+ val b = newBuilder
+ b.sizeHint(end - i)
+ while (i < end) {
+ b += this(i)
+ i += 1
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def head: A = if (isEmpty) super.head else this(0)
+
+ override /*TraversableLike*/
+ def tail: Repr = if (isEmpty) super.tail else slice(1, length)
+
+ override /*TraversableLike*/
+ def last: A = if (length > 0) this(length - 1) else super.last
+
+ override /*IterableLike*/
+ def init: Repr = if (length > 0) slice(0, length - 1) else super.init
+
+ override /*TraversableLike*/
+ def take(n: Int): Repr = slice(0, n)
+
+ override /*TraversableLike*/
+ def drop(n: Int): Repr = slice(n, length)
+
+ override /*IterableLike*/
+ def takeRight(n: Int): Repr = slice(length - n, length)
+
+ override /*IterableLike*/
+ def dropRight(n: Int): Repr = slice(0, length - n)
+
+ override /*TraversableLike*/
+ def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
+
+ override /*IterableLike*/
+ def takeWhile(p: A => Boolean): Repr = take(prefixLength(p))
+
+ override /*TraversableLike*/
+ def dropWhile(p: A => Boolean): Repr = drop(prefixLength(p))
+
+ override /*TraversableLike*/
+ def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p))
+
+ override /*IterableLike*/
+ def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ case that: IndexedSeq[_] =>
+ val len = length
+ len == that.length && {
+ var i = 0
+ while (i < len && this(i) == that(i)) i += 1
+ i == len
+ }
+ case _ =>
+ super.sameElements(that)
+ }
+
+ override /*IterableLike*/
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
+ var i = 0
+ var j = start
+ val end = length min len min (xs.length - start)
+ while (i < end) {
+ xs(j) = this(i)
+ i += 1
+ j += 1
+ }
+ }
+
+
+ // Overridden methods from Seq
+
+ override /*SeqLike*/
+ def lengthCompare(len: Int): Int = length - len
+
+ override /*SeqLike*/
+ def segmentLength(p: A => Boolean, from: Int): Int = {
+ val start = from
+ val len = length
+ var i = start
+ while (i < len && p(this(i))) i += 1
+ i - start
+ }
+
+ private def negLength(n: Int) = if (n == length) -1 else n
+
+ override /*SeqLike*/
+ def indexWhere(p: A => Boolean, from: Int): Int = {
+ val start = from max 0
+ negLength(start + segmentLength(!p(_), start))
+ }
+
+ override /*SeqLike*/
+ def lastIndexWhere(p: A => Boolean, end: Int): Int = {
+ var i = end
+ while (i >= 0 && !p(this(i))) i -= 1
+ i
+ }
+
+ override /*SeqLike*/
+ def reverse: Repr = {
+ val b = newBuilder
+ b.sizeHint(length)
+ var i = length
+ while (0 < i) {
+ i -= 1
+ b += this(i)
+ }
+ b.result
+ }
+
+ override /*SeqLike*/
+ def reverseIterator: Iterator[A] = new Iterator[A] {
+ private var i = self.length
+ def hasNext: Boolean = 0 < i
+ def next: A =
+ if (0 < i) {
+ i -= 1
+ self(i)
+ } else Iterator.empty.next
+ }
+
+ override /*SeqLike*/
+ def startsWith[B](that: Seq[B], offset: Int): Boolean = that match {
+ case that: IndexedSeq[_] =>
+ var i = offset
+ var j = 0
+ val thisLen = length
+ val thatLen = that.length
+ while (i < thisLen && j < thatLen && this(i) == that(j)) {
+ i += 1
+ j += 1
+ }
+ j == thatLen
+ case _ =>
+ var i = offset
+ val thisLen = length
+ val thatElems = that.iterator
+ while (i < thisLen && thatElems.hasNext) {
+ if (this(i) != thatElems.next())
+ return false
+
+ i += 1
+ }
+ !thatElems.hasNext
+ }
+
+ override /*SeqLike*/
+ def endsWith[B](that: Seq[B]): Boolean = that match {
+ case that: IndexedSeq[_] =>
+ var i = length - 1
+ var j = that.length - 1
+
+ (j <= i) && {
+ while (j >= 0) {
+ if (this(i) != that(j))
+ return false
+ i -= 1
+ j -= 1
+ }
+ true
+ }
+ case _ =>
+ super.endsWith(that)
+ }
+}
+
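The new `IndexedSeqOptimized` trait collects the index-based overrides that were moved out of `IndexedSeqLike`. A hedged sketch of opting into it (class name hypothetical; `Repr` is fixed to plain `IndexedSeq[Int]` for simplicity):

    object OptimizedDemo {
      import scala.collection.{ IndexedSeq, IndexedSeqOptimized }

      // A wrapper exposing the digits of a string as an indexed sequence of Ints.
      class Digits(s: String) extends IndexedSeq[Int]
          with IndexedSeqOptimized[Int, IndexedSeq[Int]] {
        def length = s.length
        def apply(i: Int) = s.charAt(i) - '0'
      }

      def main(args: Array[String]): Unit = {
        val d = new Digits("31415")
        println(d.foldLeft(0)(_ + _))   // 14, computed by the tail-recursive foldl above
        println(d.reverse)              // index-based reverse with a sizeHint-ed builder
      }
    }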
diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala
deleted file mode 100644
index e693ff36e5..0000000000
--- a/src/library/scala/collection/IndexedSeqView.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.collection
-
-import TraversableView.NoBuilder
-import generic._
-
-/** A non-strict projection of an iterable.
- *
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- */
-trait IndexedSeqView[+A, +Coll] extends IndexedSeqViewLike[A, Coll, IndexedSeqView[A, Coll]]
-
-object IndexedSeqView {
- type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]}
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeqView[A, IndexedSeq[_]]] =
- new CanBuildFrom[Coll, A, IndexedSeqView[A, IndexedSeq[_]]] {
- def apply(from: Coll) = new NoBuilder
- def apply() = new NoBuilder
- }
- implicit def arrCanBuildFrom[A]: CanBuildFrom[TraversableView[_, Array[_]], A, IndexedSeqView[A, Array[A]]] =
- new CanBuildFrom[TraversableView[_, Array[_]], A, IndexedSeqView[A, Array[A]]] {
- def apply(from: TraversableView[_, Array[_]]) = new NoBuilder
- def apply() = new NoBuilder
- }
-}
diff --git a/src/library/scala/collection/IndexedSeqViewLike.scala b/src/library/scala/collection/IndexedSeqViewLike.scala
deleted file mode 100644
index ca4e99de07..0000000000
--- a/src/library/scala/collection/IndexedSeqViewLike.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Seq.scala 16092 2008-09-12 10:37:06Z nielsen $
-
-
-package scala.collection
-
-import generic._
-import TraversableView.NoBuilder
-
-/** A template trait for a non-strict view of a IndexedSeq.
- *
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- */
-trait IndexedSeqViewLike[+A,
- +Coll,
- +This <: IndexedSeqView[A, Coll] with IndexedSeqViewLike[A, Coll, This]]
- extends IndexedSeq[A] with IndexedSeqLike[A, This] with SeqView[A, Coll] with SeqViewLike[A, Coll, This]
-{ self =>
-
- trait Transformed[+B] extends IndexedSeqView[B, Coll] with super.Transformed[B]
-
- trait Sliced extends Transformed[A] with super.Sliced {
- /** Override to use IndexedSeq's foreach; todo: see whether this is really faster */
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Mapped[B] extends Transformed[B] with super.Mapped[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Appended[B >: A] extends Transformed[B] with super.Appended[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Filtered extends Transformed[A] with super.Filtered {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait TakenWhile extends Transformed[A] with super.TakenWhile {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait DroppedWhile extends Transformed[A] with super.DroppedWhile {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Reversed extends Transformed[A] with super.Reversed {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Patched[B >: A] extends Transformed[B] with super.Patched[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Zipped[B] extends Transformed[(A, B)] {
- protected[this] val other: Iterable[B]
- def length = self.length min other.size
- def apply(idx: Int): (A, B) = (self.apply(idx), other.iterator drop idx next)
- override def stringPrefix = self.stringPrefix+"Z"
- }
-
- trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
- protected[this] val other: Iterable[B]
- val thisElem: A1
- val thatElem: B
- override def iterator: Iterator[(A1, B)] =
- self.iterator.zipAll(other.iterator, thisElem, thatElem)
-
- def length = self.length max other.size
- def apply(idx: Int): (A1, B) = {
- val z1 = if (idx < self.length) self.apply(idx) else thisElem
- val z2 = if (idx < other.size) other drop idx head else thatElem
- (z1, z2)
- }
- override def stringPrefix = self.stringPrefix+"Z"
- }
-
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
- protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
- protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
- protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
- protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
- protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
- protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new Patched[B] {
- val from = _from; val patch = _patch; val replaced = _replaced
- }
- override def stringPrefix = "IndexedSeqView"
-}
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 1e80fafbfc..a25dd3df0a 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,32 +14,8 @@ import generic._
import scala.util.control.Breaks._
import mutable.Builder
-/** <p>
- * A template trait for iterable collections.
- * </p>
- * <p>
- * Collection classes mixing in this trait provide a method
- * <code>iterator</code> which returns an iterator over all the
- * elements contained in the collection. They also provide a method
- * <code>newBuilder</code> which creates a builder for collections
- * of the same kind.
- * </p>
- * <p>
- * This trait implements <code>Traversable</code>'s <code>foreach</code>
- * method by stepping through all elements. Subclasses of <code>Iterable</code>
- * should re-implement <code>foreach</code> with something more efficient,
- * if possible.
- * </p>
- * <p>
- * This trait adds methods <code>iterator</code>, <code>zip</code>,
- * <code>zipAll</code>, <code>zipWithIndex</code>, <code>sameElements</code>,
- * <code>takeRight</code>, <code>dropRight</code> to the methods inherited
- * from trait <code>Traversable</code>.
- * </p>
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A base trait for iterable collections.
+ * $iterableInfo
*/
trait Iterable[+A] extends Traversable[A]
with GenericTraversableTemplate[A, Iterable]
@@ -59,22 +34,24 @@ trait Iterable[+A] extends Traversable[A]
}
-/** Factory methods and utilities for instances of type <code>Iterable</code>.
- *
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `Vector`.
+ * @define coll iterable collection
+ * @define Coll Iterable
*/
object Iterable extends TraversableFactory[Iterable] {
+ /** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = new GenericCanBuildFrom[A]
+
def newBuilder[A]: Builder[A, Iterable[A]] = immutable.Iterable.newBuilder[A]
/** The minimum element of a non-empty sequence of ordered elements */
- @deprecated("use seq.min instead")
+ @deprecated("use <seq>.min instead, where <seq> is the sequence for which you want to compute the minimum")
def min[A](seq: Iterable[A])(implicit ord: Ordering[A]): A = seq.min
/** The maximum element of a non-empty sequence of ordered elements */
- @deprecated("use seq.max instead")
+ @deprecated("use <seq>.max instead, where <seq> is the sequence for which you want to compute the maximum")
def max[A](seq: Iterable[A])(implicit ord: Ordering[A]): A = seq.max
@deprecated("use View instead")
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 2946426748..538fd09c0e 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -1,46 +1,59 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
import immutable.{List, Stream}
import annotation.unchecked.uncheckedVariance
-/** <p>
- * A template trait for iterable collections.
- * </p>
- * <p>
- * Collection classes mixing in this trait provide a method
- * <code>iterator</code> which returns an iterator over all the
- * elements contained in the collection. They also provide a method
- * <code>newBuilder</code> which creates a builder for collections of the
- * same kind.
- * </p>
- * <p>
- * This trait implements <code>Iterable</code>'s <code>foreach</code>
- * method by stepping through all elements. Subclasses of <code>Iterable</code>
- * should re-implement <code>foreach</code> with something more efficient,
- * if possible.
- * </p>
- * <p>
- * This trait adds methods <code>iterator</code>, <code>sameElements</code>,
- * <code>takeRight</code>, <code>dropRight</code> to the methods inherited
- * from trait <a href="../Iterable.html" target="ContentFrame">
- * <code>Iterable</code></a>.
- * </p>
+/** A template trait for iterable collections of type `Iterable[A]`.
+ * $iterableInfo
+ * @define iterableInfo
+ * This is a base trait for all $mutability Scala collections that define an `iterator`
+ * method to step through one-by-one the collection's elements.
+ * Implementations of this trait need to provide a concrete method with
+ * signature:
+ * {{{
+ * def iterator: Iterator[A]
+ * }}}
+ * They also need to provide a method `newBuilder`
+ * which creates a builder for collections of the same kind.
*
- * @note This trait replaces every method that uses breaks in the original by an iterator version.
+ * This trait implements `Iterable`'s `foreach`
+ * method by stepping through all elements using `iterator`.
+ * Subclasses should re-implement `foreach` with something more efficient,
+ * if possible.
+ *
+ * This trait adds methods `iterator`, `sameElements`,
+ * `takeRight`, `dropRight` to the methods inherited
+ * from trait <a href="../Traversable.html" target="ContentFrame">
+ * `Traversable`</a>.
+ *
+ * Note: This trait replaces every method that uses `break` in
+ * `TraversableLike` by an iterator version.
*
* @author Martin Odersky
* @version 2.8
* @since 2.8
+ * @tparam A the element type of the collection
+ * @tparam Repr the type of the actual collection containing the elements.
+ *
+ * @define Coll Iterable
+ * @define coll iterable collection
+ * @define zipthatinfo the class of the returned collection. Where possible, `That` is
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `(A1, B)` being admissible for that class,
+ * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, B), That]`
+ * is found.
+ * @define zipbfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `(A1, B)`.
*/
trait IterableLike[+A, +Repr] extends Equals with TraversableLike[A, Repr] {
self =>
@@ -55,106 +68,42 @@ self =>
*/
def iterator: Iterator[A]
- @deprecated("use `iterator' instead")
- def elements = iterator
-
- /** Apply a function <code>f</code> to all elements of this
- * iterable object.
- *
- * @param f A function that is applied for its side-effect to every element.
- * The result (of arbitrary type U) of function `f` is discarded.
- *
- * @note This method underlies the implementation of most other bulk operations.
- * Implementing `foreach` with `iterator` is often suboptimal.
- * So `foreach` should be overridden in concrete collection classes if a more
- * efficient implementation is available.
- */
- def foreach[U](f: A => U): Unit = iterator.foreach(f)
-
-
- /** Return true iff the given predicate `p` yields true for all elements
- * of this iterable.
- *
- * @note May not terminate for infinite-sized collections.
- * @param p the predicate
- */
- override def forall(p: A => Boolean): Boolean = iterator.forall(p)
-
- /** Return true iff there is an element in this iterable for which the
- * given predicate `p` yields true.
+ /** Applies a function `f` to all elements of this $coll.
*
- * @note May not terminate for infinite-sized collections.
- * @param p the predicate
- */
- override def exists(p: A => Boolean): Boolean = iterator.exists(p)
-
- /** Find and return the first element of the iterable object satisfying a
- * predicate, if any.
+ * Note: this method underlies the implementation of most other bulk operations.
+ * Subclasses should re-implement this method if a more efficient implementation exists.
*
- * @note may not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this iterable is ordered.
- * @param p the predicate
- * @return an option containing the first element in the iterable object
- * satisfying <code>p</code>, or <code>None</code> if none exists.
+ * @usecase def foreach(f: A => Unit): Unit
*/
- override def find(p: A => Boolean): Option[A] = iterator.find(p)
-
- /** Does this iterable contain no elements?
- */
- override def isEmpty: Boolean = !this.iterator.hasNext
-
- /** Combines the elements of this iterable together using the binary
- * function <code>f</code>, from right to left, and starting with
- * the value <code>z</code>.
- *
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this iterable is ordered, or
- * the operator is associative and commutative.
- * @return <code>f(a<sub>0</sub>, f(a<sub>1</sub>, f(..., f(a<sub>n</sub>, z)...)))</code>
- * if the iterable is <code>[a<sub>0</sub>, a1, ..., a<sub>n</sub>]</code>.
- */
- override def foldRight[B](z: B)(op: (A, B) => B): B =
- this.iterator.foldRight(z)(op)
-
- /** Combines the elements of this iterable object together using the binary
- * operator <code>op</code>, from right to left
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this iterable is ordered, or
- * the operator is associative and commutative.
- * @param op The operator to apply
- *
- * @return <code>a<sub>0</sub> op (... op (a<sub>n-1</sub> op a<sub>n</sub>)...)</code>
- * if the iterable object has elements <code>a<sub>0</sub>, a<sub>1</sub>, ...,
- * a<sub>n</sub></code>.
- *
- * @throws Predef.UnsupportedOperationException if the iterator is empty.
- */
- override def reduceRight[B >: A](op: (A, B) => B): B =
- this.iterator.reduceRight(op)
-
- /** The iterable itself */
- override def toIterable: Iterable[A] = thisCollection
-
- /** The first element of this iterable.
- *
- * @note Might return different results for different runs, unless this iterable is ordered
- * @throws Predef.NoSuchElementException if the iterable is empty.
- */
- override def head: A =
- if (isEmpty)
- throw new NoSuchElementException
- else
- this.iterator.next
-
- /** Return an iterable consisting only of the first <code>n</code>
- * elements of this iterable, or else the whole iterable, if it has less
- * than <code>n</code> elements.
- *
- * @param n the number of elements to take
- * @note Might return different results for different runs, unless this iterable is ordered
- */
- override def take(n: Int): Repr = {
+ def foreach[U](f: A => U): Unit =
+ iterator.foreach(f)
+
+ override /*TraversableLike*/ def forall(p: A => Boolean): Boolean =
+ iterator.forall(p)
+ override /*TraversableLike*/ def exists(p: A => Boolean): Boolean =
+ iterator.exists(p)
+ override /*TraversableLike*/ def find(p: A => Boolean): Option[A] =
+ iterator.find(p)
+/*
+ override /*TraversableLike*/ def mapFind[B](f: A => Option[B]): Option[B] =
+ iterator.mapFind(f)
+*/
+ override /*TraversableLike*/ def isEmpty: Boolean =
+ !iterator.hasNext
+ override /*TraversableLike*/ def foldRight[B](z: B)(op: (A, B) => B): B =
+ iterator.foldRight(z)(op)
+ override /*TraversableLike*/ def reduceRight[B >: A](op: (A, B) => B): B =
+ iterator.reduceRight(op)
+ override /*TraversableLike*/ def toIterable: Iterable[A] =
+ thisCollection
+
+ override /*TraversableLike*/ def head: A =
+ if (isEmpty) throw new NoSuchElementException
+ else iterator.next
+
+ override /*TraversableLike*/ def take(n: Int): Repr = {
val b = newBuilder
+ b.sizeHintBounded(n, this)
var i = 0
val it = iterator
while (i < n && it.hasNext) {
@@ -164,18 +113,9 @@ self =>
b.result
}
- /** A sub-iterable starting at index `from`
- * and extending up to (but not including) index `until`.
- *
- * @note c.slice(from, to) is equivalent to (but possibly more efficient than)
- * c.drop(from).take(to - from)
- *
- * @param from The index of the first element of the returned subsequence
- * @param until The index of the element following the returned subsequence
- * @note Might return different results for different runs, unless this iterable is ordered
- */
- override def slice(from: Int, until: Int): Repr = {
+ override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = {
val b = newBuilder
+ b.sizeHintBounded(until - from, this)
var i = from
val it = iterator drop from
while (i < until && it.hasNext) {
@@ -185,13 +125,7 @@ self =>
b.result
}
- /** Returns the longest prefix of this iterable whose elements satisfy
- * the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @note Might return different results for different runs, unless this iterable is ordered
- */
- override def takeWhile(p: A => Boolean): Repr = {
+ override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = {
val b = newBuilder
val it = iterator
while (it.hasNext) {
@@ -202,13 +136,49 @@ self =>
b.result
}
- /** Returns the rightmost <code>n</code> elements from this iterable.
+ /** Partitions elements in fixed size ${coll}s.
+ * @see Iterator#grouped
+ *
+ * @param size the number of elements per group
+ * @return An iterator producing ${coll}s of size `size`, except the
+ * last will be truncated if the elements don't divide evenly.
+ */
+ def grouped(size: Int): Iterator[Repr] =
+ for (xs <- iterator grouped size) yield {
+ val b = newBuilder
+ b ++= xs
+ b.result
+ }
+
+ /** Groups elements in fixed size blocks by passing a "sliding window"
+ * over them (as opposed to partitioning them, as is done in grouped.)
+ * @see Iterator#sliding
+ *
+ * @param size the number of elements per group
+ * @param step the distance between the first elements of successive
+ * groups (defaults to 1)
+ * @return An iterator producing ${coll}s of size `size`, except that the
+ * last group (which is then also the only group) will be
+ * truncated if there are fewer elements than `size`.
+ */
+ def sliding[B >: A](size: Int): Iterator[Repr] = sliding(size, 1)
+ def sliding[B >: A](size: Int, step: Int): Iterator[Repr] =
+ for (xs <- iterator.sliding(size, step)) yield {
+ val b = newBuilder
+ b ++= xs
+ b.result
+ }
+
+ /** Selects the last ''n'' elements.
+ * $orderDependent
*
* @param n the number of elements to take
- * @note Might return different results for different runs, unless this iterable is ordered
+ * @return a $coll consisting only of the last `n` elements of this $coll, or else the
+ * whole $coll, if it has less than `n` elements.
*/
def takeRight(n: Int): Repr = {
val b = newBuilder
+ b.sizeHintBounded(n, this)
val lead = this.iterator drop n
var go = false
for (x <- this) {
@@ -219,13 +189,16 @@ self =>
b.result
}
- /** Returns the iterable wihtout its rightmost <code>n</code> elements.
+ /** Selects all elements except the last ''n'' ones.
+ * $orderDependent
*
- * @param n the number of elements to take
- * @note Might return different results for different runs, unless this iterable is ordered
+ * @param n the number of elements to drop.
+ * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the
+ * empty $coll, if this $coll has less than `n` elements.
*/
def dropRight(n: Int): Repr = {
val b = newBuilder
+ if (n >= 0) b.sizeHint(this, -n)
val lead = iterator drop n
val it = iterator
while (lead.hasNext) {
@@ -235,17 +208,7 @@ self =>
b.result
}
- /** Fills the given array <code>xs</code> with at most `len` elements of
- * this iterable starting at position `start`.
- * Copying will stop once either the end of the current iterable is reached or
- * `len` elements have been copied or the end of the array is reached.
- *
- * @note Will not terminate for infinite-sized collections.
- * @param xs the array to fill.
- * @param start starting index.
- * @param len number of elements to copy
- */
- override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
+ override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
var i = start
val end = (start + len) min xs.length
val it = iterator
@@ -255,10 +218,29 @@ self =>
}
}
- /** Returns an iterable formed from this iterable and another iterable
+ /** Returns a $coll formed from this $coll and another iterable collection
* by combining corresponding elements in pairs.
- * If one of the two iterables is longer than the other, its remaining elements are ignored.
+ * If one of the two collections is longer than the other, its remaining elements are ignored.
+ *
+ * $orderDependent
+ *
* @param that The iterable providing the second half of each result pair
+ * @tparam A1 the type of the first half of the returned pairs (this is always a supertype
+ * of the collection's element type `A`).
+ * @tparam B the type of the second half of the returned pairs
+ * @tparam That $zipthatinfo
+ * @param bf $zipbfinfo
+ * @return a new collection of type `That` containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the minimum of the lengths of this $coll and `that`.
+ *
+ * @usecase def zip[B](that: Iterable[B]): $Coll[(A, B)]
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the minimum of the lengths of this $coll and `that`.
*/
def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
@@ -269,25 +251,33 @@ self =>
b.result
}
- /** Returns an iterable formed from this iterable and the specified iterable
- * <code>that</code> by associating each element of the former with
- * the element at the same position in the latter.
+ /** Returns a $coll formed from this $coll and another iterable collection
+ * by combining corresponding elements in pairs.
+ * If one of the two collections is shorter than the other,
+ * placeholder elements are used to extend the shorter collection to the length of the longer.
+ *
+ * $orderDependent
+ *
+ * @param that the iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
+ * @return a new collection of type `That` containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the maximum of the lengths of this $coll and `that`.
+ * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*
- * @param that iterable <code>that</code> may have a different length
- * as the self iterable.
- * @param thisElem element <code>thisElem</code> is used to fill up the
- * resulting iterable if the self iterable is shorter than
- * <code>that</code>
- * @param thatElem element <code>thatElem</code> is used to fill up the
- * resulting iterable if <code>that</code> is shorter than
- * the self iterable
- * @return <code>Sequence((a<sub>0</sub>,b<sub>0</sub>), ...,
- * (a<sub>n</sub>,b<sub>n</sub>), (elem,b<sub>n+1</sub>),
- * ..., {elem,b<sub>m</sub>})</code>
- * when <code>[a<sub>0</sub>, ..., a<sub>n</sub>] zip
- * [b<sub>0</sub>, ..., b<sub>m</sub>]</code> is
- * invoked where <code>m &gt; n</code>.
+ * @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)]
*
+ * @param that The iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the maximum of the lengths of this $coll and `that`.
+ * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*/
def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
@@ -302,7 +292,30 @@ self =>
b.result
}
- /** Zips this iterable with its indices (startiong from 0).
+ /** Zips this $coll with its indices.
+ *
+ * $orderDependent
+ *
+ * @tparam A1 the type of the first half of the returned pairs (this is always a supertype
+ * of the collection's element type `A`).
+ * @tparam That the class of the returned collection. Where possible, `That` is
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `(A1, Int)` being admissible for that class,
+ * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`
+ * is found.
+ * @param bf an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `(A1, Int)`.
+ * @return A new collection of type `That` containing pairs consisting of all elements of this
+ * $coll paired with their index. Indices start at `0`.
+ *
+ * @usecase def zipWithIndex: $Coll[(A, Int)]
+ *
+ * @return A new $coll containing pairs consisting of all elements of this
+ * $coll paired with their index. Indices start at `0`.
+ * @example
+ * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
+ *
*/
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
val b = bf(repr)
@@ -314,12 +327,19 @@ self =>
b.result
}
- /** Checks if the other iterable object contains the same elements as this one.
+ /** Checks if the other iterable collection contains the same elements in the same order as this $coll.
+ *
+ * $orderDependent
+ * $willNotTerminateInf
+ *
+ * @param that the collection to compare with.
+ * @tparam B the type of the elements of collection `that`.
+ * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*
- * @note will not terminate for infinite-sized iterables.
- * @param that the other iterable
- * @return true, iff both iterables contain the same elements in the same order.
- * @note Might return different results for different runs, unless this iterable is ordered
+ * @usecase def sameElements(that: Iterable[A]): Boolean
+ *
+ * @param that the collection to compare with.
+ * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*/
def sameElements[B >: A](that: Iterable[B]): Boolean = {
val these = this.iterator
@@ -331,43 +351,42 @@ self =>
!these.hasNext && !those.hasNext
}
- /** Returns a stream with all elements in this iterable object.
+ override /*TraversableLike*/ def toStream: Stream[A] = iterator.toStream
+
+ /** Converts this $coll to a sequence.
+ *
+ * $willNotTerminateInf
+ * @return a sequence containing all the elements of this $coll.
*/
- override def toStream: Stream[A] = iterator.toStream
+ override /*TraversableOnce*/ def toSeq: Seq[A] = toList
/** Method called from equality methods, so that user-defined subclasses can
* refuse to be equal to other collections of the same kind.
+ * @param that The object with which this $coll should be compared
+ * @return `true`, if this $coll can possibly equal `that`, `false` otherwise. The test
+ * takes into consideration only the run-time types of objects but ignores their elements.
*/
- override def canEqual(that: Any) = true
+ override /*TraversableLike*/ def canEqual(that: Any) = true
- /** Creates a view of this iterable @see IterableView
- */
- override def view = new IterableView[A, Repr] {
+ override /*TraversableLike*/ def view = new IterableView[A, Repr] {
protected lazy val underlying = self.repr
override def iterator = self.iterator
}
- /** A sub-iterable view starting at index `from`
- * and extending up to (but not including) index `until`.
- *
- * @param from The index of the first element of the slice
- * @param until The index of the element following the slice
- * @note The difference between `view` and `slice` is that `view` produces
- * a view of the current iterable, whereas `slice` produces a new iterable.
- *
- * @note Might return different results for different runs, unless this iterable is ordered
- * @note view(from, to) is equivalent to view.slice(from, to)
- */
- override def view(from: Int, until: Int) = view.slice(from, until)
+ override /*TraversableLike*/ def view(from: Int, until: Int) = view.slice(from, until)
+
+ @deprecated("use `iterator' instead")
+ def elements = iterator
@deprecated("use `head' instead") def first: A = head
- /** <code>None</code> if iterable is empty. */
+ /** `None` if iterable is empty.
+ */
@deprecated("use `headOption' instead") def firstOption: Option[A] = headOption
/**
- * returns a projection that can be used to call non-strict <code>filter</code>,
- * <code>map</code>, and <code>flatMap</code> methods that build projections
+ * returns a projection that can be used to call non-strict `filter`,
+ * `map`, and `flatMap` methods that build projections
* of the collection.
*/
@deprecated("use `view' instead")
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index dce5d8dd2d..0827590856 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 6c4a9cddcb..6193d30cba 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,32 +15,26 @@ import mutable.Buffer
// Methods could be printed by cat IterableLike.scala | egrep '^ (override )?def'
-
-/** This trait implements a proxy for iterable objects. It forwards
- * all calls to a different iterable object
+/** This trait implements a proxy for Iterable objects. It forwards
+ * all calls to a different Iterable object.
*
* @author Martin Odersky
* @version 2.8
* @since 2.8
*/
-trait IterableProxyLike[+A, +This <: IterableLike[A, This] with Iterable[A]]
- extends IterableLike[A, This]
- with TraversableProxyLike[A, This]
-{
+trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]]
+ extends IterableLike[A, Repr]
+ with TraversableProxyLike[A, Repr] {
override def iterator: Iterator[A] = self.iterator
- override def foreach[U](f: A => U): Unit = self.foreach(f)
- override def isEmpty: Boolean = self.isEmpty
- override def foldRight[B](z: B)(op: (A, B) => B): B = self.foldRight(z)(op)
- override def reduceRight[B >: A](op: (A, B) => B): B = self.reduceRight(op)
- override def toIterable: Iterable[A] = self.toIterable
- override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
- override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
- override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That = self.zipWithIndex(bf)
- override def head: A = self.head
- override def takeRight(n: Int): This = self.takeRight(n)
- override def dropRight(n: Int): This = self.dropRight(n)
+ override def grouped(size: Int): Iterator[Repr] = self.grouped(size)
+ override def sliding[B >: A](size: Int): Iterator[Repr] = self.sliding(size)
+ override def sliding[B >: A](size: Int, step: Int): Iterator[Repr] = self.sliding(size, step)
+ override def takeRight(n: Int): Repr = self.takeRight(n)
+ override def dropRight(n: Int): Repr = self.dropRight(n)
+ override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
+ override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
+ override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = self.zipWithIndex(bf)
override def sameElements[B >: A](that: Iterable[B]): Boolean = self.sameElements(that)
- override def toStream: Stream[A] = self.toStream
override def view = self.view
override def view(from: Int, until: Int) = self.view(from, until)
}
diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala
index 0f6cf880aa..faeb56c886 100644
--- a/src/library/scala/collection/IterableView.scala
+++ b/src/library/scala/collection/IterableView.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,14 +13,14 @@ package scala.collection
import generic._
import TraversableView.NoBuilder
-/** A base class for views of Iterables.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A base trait for non-strict views of `Iterable`s.
+ * $iterableViewInfo
*/
trait IterableView[+A, +Coll] extends IterableViewLike[A, Coll, IterableView[A, Coll]]
+/** An object containing the necessary implicit definitions to make
+ * `IterableView`s work. Its definitions are generally not accessed directly by clients.
+ */
object IterableView {
type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]}
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IterableView[A, Iterable[_]]] =
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 3500280508..77d3ac770b 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,11 +14,19 @@ import generic._
import collection.immutable.Stream
import TraversableView.NoBuilder
-/** A template trait for a non-strict view of an iterable.
+/** A template trait for non-strict views of iterable collections.
+ * $iterableViewInfo
+ *
+ * @define iterableViewInfo
+ * $viewInfo
+ * All views for iterable collections are defined by re-interpreting the `iterator` method.
*
* @author Martin Odersky
* @version 2.8
* @since 2.8
+ * @tparam A the element type of the view
+ * @tparam Coll the type of the underlying collection containing the elements.
+ * @tparam This the type of the view itself
*/
trait IterableViewLike[+A,
+Coll,
@@ -29,6 +36,10 @@ extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] wit
trait Transformed[+B] extends IterableView[B, Coll] with super.Transformed[B]
+ trait Forced[B] extends Transformed[B] with super.Forced[B] {
+ override def iterator = forced.iterator
+ }
+
trait Sliced extends Transformed[A] with super.Sliced {
override def iterator = self.iterator slice (from, until)
}
@@ -96,6 +107,7 @@ extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] wit
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
+ protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
@@ -104,5 +116,11 @@ extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] wit
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
+ override def grouped(size: Int): Iterator[This] =
+ self.iterator.grouped(size).map(xs => newForced(xs).asInstanceOf[This])
+
+ override def sliding[B >: A](size: Int, step: Int): Iterator[This] =
+ self.iterator.sliding(size, step).map(xs => newForced(xs).asInstanceOf[This])
+
override def stringPrefix = "IterableView"
}
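As an illustration of the new `grouped`/`sliding` support on views (a sketch, not part of the patch; it assumes the 2.8 view API shown above):

    // grouped on a view walks the underlying iterator and forces each
    // chunk into a new (forced) view of the same kind
    val v = (1 to 5).view
    val groups = v.grouped(2).map(_.toList).toList
    // groups == List(List(1, 2), List(3, 4), List(5))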
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 22b8328b1e..b8dd03110d 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -1,22 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
-import mutable.{Buffer, ArrayBuffer, ListBuffer, StringBuilder}
-import immutable.{List, Stream}
-import annotation.{ tailrec }
-// import immutable.{List, Nil, ::, Stream}
+import mutable.ArrayBuffer
+import annotation.{ tailrec, migration }
-/** The <code>Iterator</code> object provides various functions for
+/** The `Iterator` object provides various functions for
* creating specialized iterators.
*
* @author Martin Odersky
@@ -26,14 +23,17 @@ import annotation.{ tailrec }
*/
object Iterator {
+ /** The iterator which produces no values */
val empty = new Iterator[Nothing] {
def hasNext: Boolean = false
def next(): Nothing = throw new NoSuchElementException("next on empty iterator")
}
- /** An iterator with a single element.
+ /** Creates an iterator which produces a single element.
+ * '''Note:''' Equivalent, but more efficient than Iterator(elem)
* @param elem the element
- * @note Equivalent, but more efficient than Iterator(elem)
+ * @return An iterator which produces `elem` on the first call to `next`,
+ * and which has no further elements.
*/
def single[A](elem: A) = new Iterator[A] {
private var hasnext = true
@@ -45,20 +45,16 @@ object Iterator {
/** Creates an iterator with given elements
* @param elems The elements returned one-by-one from the iterator
+ * @return An iterator which produces the given elements on the
+ * first calls to `next`, and which has no further elements.
*/
def apply[A](elems: A*): Iterator[A] = elems.iterator
- /** Concatenates the given argument iterators into a single iterator.
- *
- * @param its the argument iterators that are to be concatenated
- * @return the concatenation of all the argument iterators
- */
- @deprecated("use <code>++</code>")
- def concat[A](xss: Iterator[A]*): Iterator[A] = xss.iterator.flatten
-
- /** An iterator that returns the results of some element computation a number of times.
- * @param len The number of elements returned
- * @param elem The element computation determinining each result
+ /** Creates an iterator that produces the results of some element computation
+ * a number of times.
+ * @param len the number of elements returned by the iterator.
+ * @param elem the element computation
+ * @return An iterator that produces the results of `len` evaluations of `elem`.
*/
def fill[A](len: Int)(elem: => A) = new Iterator[A] {
private var i = 0
@@ -68,12 +64,10 @@ object Iterator {
else empty.next()
}
- /** An iterator that returns values of a given function over a range of
- * integer values starting from 0.
- *
- * @param end The argument up to which values are tabulated.
- * @param f The function computing the results
- * @return An iterator with values `f(0) ... f(end-1)`
+ /** Creates an iterator producing the values of a given function over a range of integer values starting from 0.
+ * @param end The number of elements returned by the iterator
+ * @param f The function computing element values
+ * @return An iterator that produces the values `f(0), ..., f(end - 1)`.
*/
def tabulate[A](end: Int)(f: Int => A) = new Iterator[A] {
private var i = 0
@@ -83,20 +77,20 @@ object Iterator {
else empty.next()
}
- /** An iterator returning successive values in some integer interval.
+ /** Creates an iterator returning successive values in some integer interval.
*
* @param start the start value of the iterator
* @param end the end value of the iterator (the first value NOT returned)
- * @return the iterator with values in range `start, start + 1, ..., end - 1`
+ * @return the iterator producing values `start, start + 1, ..., end - 1`
*/
def range(start: Int, end: Int): Iterator[Int] = range(start, end, 1)
- /** An iterator returning equally spaced values in some integer interval.
+ /** An iterator producing equally spaced values in some integer interval.
*
* @param start the start value of the iterator
* @param end the end value of the iterator (the first value NOT returned)
* @param step the increment value of the iterator (must be positive or negative)
- * @return the iterator with values in `start, start + step, ...` up to, but excluding `end`
+ * @return the iterator producing values `start, start + step, ...` up to, but excluding `end`
*/
def range(start: Int, end: Int, step: Int) = new Iterator[Int] {
if (step == 0) throw new IllegalArgumentException("zero step")
@@ -107,30 +101,36 @@ object Iterator {
else empty.next()
}
- /** An infinite iterator that repeatedly applies a given function to the previous result.
+ /** Creates an infinite iterator that repeatedly applies a given function to the previous result.
*
* @param start the start value of the iterator
* @param f the function that's repeatedly applied
- * @return the iterator returning the infinite sequence of values `start, f(start), f(f(start)), ...`
+ * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...`
*/
def iterate[T](start: T)(f: T => T): Iterator[T] = new Iterator[T] {
+ private[this] var first = true
private[this] var acc = start
def hasNext: Boolean = true
- def next(): T = { val res = acc ; acc = f(acc) ; res }
+ def next(): T = {
+ if (first) first = false
+ else acc = f(acc)
+
+ acc
+ }
}
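For illustration (a sketch, not part of the patch), the reworked `iterate` only applies `f` once a further element is requested, so the first value produced is `start` itself:

    val powers = Iterator.iterate(1)(_ * 2)   // 1, 2, 4, 8, ...
    powers.take(5).toList                     // List(1, 2, 4, 8, 16)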
- /** An infinite-length iterator which returns successive values from some start value.
+ /** Creates an infinite-length iterator which returns successive values from some start value.
* @param start the start value of the iterator
- * @return the iterator returning the infinite sequence of values `start, start + 1, start + 2, ...`
+ * @return the iterator producing the infinite sequence of values `start, start + 1, start + 2, ...`
*/
def from(start: Int): Iterator[Int] = from(start, 1)
- /** An infinite-length iterator returning values equally spaced apart.
+ /** Creates an infinite-length iterator returning values equally spaced apart.
*
* @param start the start value of the iterator
* @param step the increment between successive values
- * @return the iterator returning the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...`
+ * @return the iterator producing the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...`
*/
def from(start: Int, step: Int): Iterator[Int] = new Iterator[Int] {
private var i = start
@@ -138,19 +138,40 @@ object Iterator {
def next(): Int = { val result = i; i += step; result }
}
- /** Create an infinite iterator based on the given expression
- * (which is recomputed for every element)
+ /** Creates an infinite-length iterator returning the results of evaluating
+ * an expression. The expression is recomputed for every element.
*
- * @param elem the element composing the resulting iterator
- * @return the iterator containing an infinite number of elem
+ * @param elem the element computation.
+ * @return the iterator containing an infinite number of results of evaluating `elem`.
*/
def continually[A](elem: => A): Iterator[A] = new Iterator[A] {
def hasNext = true
def next = elem
}
- /** A wrapper class for the <code>flatten</code> method that is added to
- * class <code>Iterator</code> with implicit conversion
+ /** With the advent of TraversableOnce, it can be useful to have a builder
+ * for Iterators so they can be treated uniformly along with the collections.
+ * See scala.util.Random.shuffle for an example.
+ */
+ class IteratorCanBuildFrom[A] extends generic.CanBuildFrom[Iterator[A], A, Iterator[A]] {
+ def newIterator = new ArrayBuffer[A] mapResult (_.iterator)
+
+ /** Creates a new builder on request of a collection.
+ * @param from the collection requesting the builder to be created.
+ * @return a new builder producing an `Iterator[A]`; the argument `from` is not used.
+ */
+ def apply(from: Iterator[A]) = newIterator
+
+ /** Creates a new builder from scratch
+ * @return a new builder producing an `Iterator[A]`.
+ */
+ def apply() = newIterator
+ }
+
+ implicit def iteratorCanBuildFrom[T]: IteratorCanBuildFrom[T] = new IteratorCanBuildFrom[T]
+
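A sketch of what the new builder factory enables (not part of the patch; it assumes the 2.8 signature of `scala.util.Random.shuffle`, which asks for a `CanBuildFrom` matching the argument's collection type):

    import scala.util.Random
    // iteratorCanBuildFrom supplies the CanBuildFrom needed to rebuild
    // an Iterator as the result type of the generic shuffle
    val shuffled: Iterator[Int] = Random.shuffle(Iterator(1, 2, 3, 4))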
+ /** A wrapper class for the `flatten` method that is added to
+ * class `Iterator` with implicit conversion
* @see iteratorIteratorWrapper.
*/
class IteratorIteratorOps[A](its: Iterator[Iterator[A]]) {
@@ -171,8 +192,7 @@ object Iterator {
@deprecated("use `xs.iterator' or `Iterator(xs)' instead")
def fromValues[a](xs: a*) = xs.iterator
- /**
- * @param xs the array of elements
+ /** @param xs the array of elements
* @see also: IndexedSeq.iterator and slice
*/
@deprecated("use `xs.iterator' instead")
@@ -190,15 +210,8 @@ object Iterator {
xs.slice(start, start + length).iterator
/**
- * @param str the given string
- * @return the iterator on <code>str</code>
- */
- @deprecated("replaced by <code>str.iterator</code>")
- def fromString(str: String): Iterator[Char] = str.iterator
-
- /**
* @param n the product arity
- * @return the iterator on <code>Product&lt;n&gt;</code>.
+ * @return the iterator on `Product<n>`.
*/
@deprecated("use product.productIterator instead")
def fromProduct(n: Product): Iterator[Any] = new Iterator[Any] {
@@ -209,15 +222,15 @@ object Iterator {
}
/** Create an iterator with elements
- * <code>e<sub>n+1</sub> = step(e<sub>n</sub>)</code>
- * where <code>e<sub>0</sub> = start</code>
- * and elements are in the range between <code>start</code> (inclusive)
- * and <code>end</code> (exclusive)
+ * `e(n+1) = step(e(n))`
+ * where `e(0) = start`
+ * and elements are in the range between `start` (inclusive)
+ * and `end` (exclusive)
*
* @param start the start value of the iterator
* @param end the end value of the iterator
* @param step the increment function of the iterator, must be monotonically increasing or decreasing
- * @return the iterator with values in range <code>[start;end)</code>.
+ * @return the iterator with values in range `[start;end)`.
*/
@deprecated("use Iterator.iterate(start, end - start)(step) instead")
def range(start: Int, end: Int, step: Int => Int) = new Iterator[Int] {
@@ -231,12 +244,12 @@ object Iterator {
}
/** Create an iterator with elements
- * <code>e<sub>n+1</sub> = step(e<sub>n</sub>)</code>
- * where <code>e<sub>0</sub> = start</code>.
+ * `e(n+1) = step(e(n))`
+ * where `e(0) = start`.
*
* @param start the start value of the iterator
* @param step the increment function of the iterator
- * @return the iterator starting at value <code>start</code>.
+ * @return the iterator starting at value `start`.
*/
@deprecated("use iterate(start)(step) instead")
def from(start: Int, step: Int => Int): Iterator[Int] = new Iterator[Int] {
@@ -245,7 +258,7 @@ object Iterator {
def next(): Int = { val j = i; i = step(i); j }
}
- /** Create an iterator that is the concantenation of all iterators
+ /** Create an iterator that is the concatenation of all iterators
* returned by a given iterator of iterators.
* @param its The iterator which returns on each call to next
* a new iterator whose elements are to be concatenated to the result.
@@ -265,43 +278,68 @@ object Iterator {
import Iterator.empty
/** Iterators are data structures that allow to iterate over a sequence
- * of elements. They have a <code>hasNext</code> method for checking
- * if there is a next element available, and a <code>next</code> method
+ * of elements. They have a `hasNext` method for checking
+ * if there is a next element available, and a `next` method
* which returns the next element and discards it from the iterator.
*
* @author Martin Odersky, Matthias Zenger
* @version 2.8
- * @since 2.8
+ * @since 1
+ * @define willNotTerminateInf
+ * Note: will not terminate for infinite iterators.
+ * @define mayNotTerminateInf
+ * Note: may not terminate for infinite iterators.
*/
-trait Iterator[+A] { self =>
+trait Iterator[+A] extends TraversableOnce[A] {
+ self =>
- /** Does this iterator provide another element?
+ /** Tests whether this iterator can provide another element.
+ * @return `true` if a subsequent call to `next` will yield an element,
+ * `false` otherwise.
*/
def hasNext: Boolean
- /** Returns the next element of this iterator.
+ /** Produces the next element of this iterator.
+ * @return the next element of this iterator, if `hasNext` is `true`,
+ * undefined behavior otherwise.
*/
def next(): A
- /** Returns a new iterator that iterates only over the first <code>n</code>
- * elements of this iterator, or the length of the iterator, whichever is smaller.
+ /** Tests whether this iterator is empty.
+ * @return `true` if `hasNext` is false, `false` otherwise.
+ */
+ def isEmpty: Boolean = !hasNext
+
+ /** Tests whether this Iterator can be repeatedly traversed.
+ * @return `false`
+ */
+ def isTraversableAgain = false
+
+ /** Tests whether this Iterator has a known size.
*
- * @param n the number of elements to take
- * @return the new iterator
+ * @return `true` for empty Iterators, `false` otherwise.
+ */
+ def hasDefiniteSize = isEmpty
+
+ /** Selects first ''n'' values of this iterator.
+ * @param n the number of values to take
+ * @return an iterator producing only the first `n` values of this iterator, or else the
+ * whole iterator, if it produces fewer than `n` values.
*/
def take(n: Int): Iterator[A] = new Iterator[A] {
private var remaining = n
def hasNext = remaining > 0 && self.hasNext
def next(): A =
- if (hasNext) { remaining -= 1; self.next }
+ if (hasNext) { remaining -= 1; self.next() }
else empty.next()
}
- /** Advances this iterator past the first <code>n</code> elements,
+ /** Advances this iterator past the first ''n'' elements,
* or the length of the iterator, whichever is smaller.
*
* @param n the number of elements to drop
- * @return the new iterator
+ * @return an iterator which produces all values of the current iterator, except
+ * it omits the first `n` values.
*/
def drop(n: Int): Iterator[A] = {
@tailrec
@@ -312,39 +350,54 @@ trait Iterator[+A] { self =>
loop(n)
}
- /** Advances this iterator past the first `from` elements using `drop`,
+ /** Creates an iterator returning an interval of the values produced by this iterator.
+ * @param from the index of the first element in this iterator which forms part of the slice.
+ * @param until the index of the first element following the slice.
+ * @return an iterator which advances this iterator past the first `from` elements using `drop`,
* and then takes `until - from` elements, using `take`.
- *
- * @param from The index of the first element of the slice
- * @param until The index of the element following the slice
*/
def slice(from: Int, until: Int): Iterator[A] = drop(from).take(until - from)
- /** Returns a new iterator that maps all elements of this iterator
- * to new elements using function <code>f</code>.
+ /** Creates a new iterator that maps all produced values of this iterator
+ * to new values using a transformation function.
+ * @param f the transformation function
+ * @return a new iterator which transforms every value produced by this
+ * iterator by applying the function `f` to it.
*/
def map[B](f: A => B): Iterator[B] = new Iterator[B] {
def hasNext = self.hasNext
def next() = f(self.next())
}
- /** Returns a new iterator that first yields the elements of this
- * iterator followed by the elements provided by iterator <code>that</code>.
+ /** Concatenates this iterator with another.
+ * @param that the other iterator
+ * @return a new iterator that first yields the values produced by this
+ * iterator followed by the values produced by iterator `that`.
+ * @usecase def ++(that: => Iterator[A]): Iterator[A]
*/
- def ++[B >: A](that: => Iterator[B]) = new Iterator[B] {
+ def ++[B >: A](that: => Iterator[B]): Iterator[B] = new Iterator[B] {
// optimize a little bit to prevent n log n behavior.
- var cur : Iterator[B] = self
- def hasNext = cur.hasNext || (cur eq self) && { cur = that; hasNext }
+ private var cur : Iterator[B] = self
+ // since that is by-name, make sure it's only referenced once -
+ // if "val it = that" is inside the block, then hasNext on an empty
+ // iterator will continually reevaluate it. (ticket #3269)
+ lazy val it = that
+ // the eq check is to avoid an infinite loop on "x ++ x"
+ def hasNext = cur.hasNext || ((cur eq self) && {
+ it.hasNext && {
+ cur = it
+ true
+ }
+ })
def next() = { hasNext; cur.next() }
}
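A minimal sketch of the concatenation behaviour (not part of the patch):

    val a = Iterator(1, 2)
    val b = Iterator(3, 4)
    // `that` is by-name, so b is not consumed until a is exhausted
    (a ++ b).toList   // List(1, 2, 3, 4)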
- /** Applies the given function <code>f</code> to each element of
- * this iterator, then concatenates the results.
+ /** Creates a new iterator by applying a function to all values produced by this iterator
+ * and concatenating the results.
*
* @param f the function to apply on each element.
- * @return an iterator over <code>f(a<sub>0</sub>), ... ,
- * f(a<sub>n</sub>)</code> if this iterator yields the
- * elements <code>a<sub>0</sub>, ..., a<sub>n</sub></code>.
+ * @return the iterator resulting from applying the given iterator-valued function
+ * `f` to each value produced by this iterator and concatenating the results.
*/
def flatMap[B](f: A => Iterator[B]): Iterator[B] = new Iterator[B] {
private var cur: Iterator[B] = empty
@@ -354,43 +407,60 @@ trait Iterator[+A] { self =>
}
/** Returns an iterator over all the elements of this iterator that
- * satisfy the predicate <code>p</code>. The order of the elements
+ * satisfy the predicate `p`. The order of the elements
* is preserved.
*
- * @param p the predicate used to filter the iterator.
- * @return the elements of this iterator satisfying <code>p</code>.
+ * @param p the predicate used to test values.
+ * @return an iterator which produces those values of this iterator which satisfy the predicate `p`.
*/
- def filter(p: A => Boolean): Iterator[A] = {
- val self = buffered
- new Iterator[A] {
- private def skip() = while (self.hasNext && !p(self.head)) self.next()
- def hasNext = { skip(); self.hasNext }
- def next() = { skip(); self.next() }
+ def filter(p: A => Boolean): Iterator[A] = new Iterator[A] {
+ private var hd: A = _
+ private var hdDefined: Boolean = false
+
+ def hasNext: Boolean = hdDefined || {
+ do {
+ if (!self.hasNext) return false
+ hd = self.next()
+ } while (!p(hd))
+ hdDefined = true
+ true
}
+
+ def next() = if (hasNext) { hdDefined = false; hd } else empty.next()
}
- /** !!! Temporary, awaiting more general implementation.
- * ... better wait longer, this fails once flatMap gets in the mix.
+ /** Creates an iterator over all the elements of this iterator that
+ * satisfy the predicate `p`. The order of the elements
+ * is preserved.
+ *
+ * '''Note:''' `withFilter` is the same as `filter` on iterators. It exists so that
+ * for-expressions with filters work over iterators.
+ *
+ * @param p the predicate used to test values.
+ * @return an iterator which produces those values of this iterator which satisfy the predicate `p`.
*/
- // def withFilter(p: A => Boolean) = this.toStream withFilter p
+ def withFilter(p: A => Boolean): Iterator[A] = filter(p)
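For example (a sketch, not part of the patch), `withFilter` is what lets a filtered for-expression run over an iterator without forcing it:

    val it = Iterator(1, 2, 3, 4, 5)
    // desugars to it.withFilter(_ % 2 == 1).map(_ * 10)
    val odds = for (x <- it if x % 2 == 1) yield x * 10
    odds.toList   // List(10, 30, 50)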
- /** Returns an iterator over all the elements of this iterator which
- * do not satisfy the predicate <code>p</code>.
+ /** Creates an iterator over all the elements of this iterator which
+ * do not satisfy a predicate `p`.
*
- * @param p the predicate used to filter.
- * @return the elements of this iterator not satisfying <code>p</code>.
+ * @param p the predicate used to test values.
+ * @return an iterator which produces those values of this iterator which do not satisfy the predicate `p`.
*/
-
def filterNot(p: A => Boolean): Iterator[A] = filter(!p(_))
- /** Returns a new iterator based on the partial function <code>pf</code>,
- * containing <code>pf(x)</code> for all the elements which are defined on pf.
- * The order of the elements is preserved.
- *
+ /** Creates an iterator by transforming values
+ * produced by this iterator with a partial function, dropping those
+ * values for which the partial function is not defined.
* @param pf the partial function which filters and maps the iterator.
- * @return the new iterator.
+ * @return a new iterator which, for every value `x` produced by this iterator
+ * on which `pf` is defined, yields the image `pf(x)`.
*/
- def partialMap[B](pf: PartialFunction[Any, B]): Iterator[B] = {
+ @migration(2, 8,
+ "This collect implementation bears no relationship to the one before 2.8.\n"+
+ "The previous behavior can be reproduced with toSeq."
+ )
+ def collect[B](pf: PartialFunction[A, B]): Iterator[B] = {
val self = buffered
new Iterator[B] {
private def skip() = while (self.hasNext && !pf.isDefinedAt(self.head)) self.next()
@@ -399,26 +469,31 @@ trait Iterator[+A] { self =>
}
}
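A short sketch of the new `collect` (not part of the patch):

    val mixed = Iterator[Any](1, "two", 3.0, "four")
    // keeps only the values on which the partial function is defined
    val strings = mixed collect { case s: String => s.toUpperCase }
    strings.toList   // List("TWO", "FOUR")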
- /** Returns an iterator over the longest prefix of this iterator such that
- * all elements of the result satisfy the predicate <code>p</code>.
- * The order of the elements is preserved.
- *
- * @param p the predicate used to filter the iterator.
- * @return the longest prefix of this iterator satisfying <code>p</code>.
+ /** Takes longest prefix of values produced by this iterator that satisfy a predicate.
+ * @param p The predicate used to test elements.
+ * @return An iterator returning the values produced by this iterator, until
+ * this iterator produces a value that does not satisfy
+ * the predicate `p`.
*/
- def takeWhile(p: A => Boolean): Iterator[A] = {
- val self = buffered
- new Iterator[A] {
- def hasNext = { self.hasNext && p(self.head) }
- def next() = (if (hasNext) self else empty).next()
+ def takeWhile(p: A => Boolean): Iterator[A] = new Iterator[A] {
+ private var hd: A = _
+ private var hdDefined: Boolean = false
+ private var tail: Iterator[A] = self
+
+ def hasNext = hdDefined || tail.hasNext && {
+ hd = tail.next()
+ if (p(hd)) hdDefined = true
+ else tail = Iterator.empty
+ hdDefined
}
+ def next() = if (hasNext) { hdDefined = false; hd } else empty.next()
}
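For example (a sketch, not part of the patch):

    // stops at the first value that fails the predicate
    Iterator(1, 2, 3, 10, 4).takeWhile(_ < 5).toList   // List(1, 2, 3)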
/** Partitions this iterator in two iterators according to a predicate.
*
* @param p the predicate on which to partition
* @return a pair of iterators: the iterator that satisfies the predicate
- * <code>p</code> and the iterator that does not.
+ * `p` and the iterator that does not.
* The relative order of the elements in the resulting iterators
* is the same as in the original iterator.
*/
@@ -443,7 +518,7 @@ trait Iterator[+A] { self =>
}
/** Skips longest sequence of elements of this iterator which satisfy given
- * predicate <code>p</code>, and returns an iterator of the remaining elements.
+ * predicate `p`, and returns an iterator of the remaining elements.
*
* @param p the predicate used to skip elements.
* @return an iterator consisting of the remaining elements
@@ -462,20 +537,29 @@ trait Iterator[+A] { self =>
}
}
- /** Return an iterator formed from this iterator and the specified iterator
- * <code>that</code> by associating each element of the former with
- * the element at the same position in the latter.
+ /** Creates an iterator formed from this iterator and another iterator
+ * by combining corresponding values in pairs.
* If one of the two iterators is longer than the other, its remaining
* elements are ignored.
+ * @param that The iterator providing the second half of each result pair
+ * @return a new iterator containing pairs consisting of
+ * corresponding elements of this iterator and `that`. The number
+ * of elements returned by the new iterator is the
+ * minimum of the number of elements returned by this
+ * iterator and `that`.
*/
def zip[B](that: Iterator[B]) = new Iterator[(A, B)] {
def hasNext = self.hasNext && that.hasNext
def next = (self.next, that.next)
}
- /** Return a new iterator with a length equal or longer to <code>len</code>.
- * If the current iterator returns fewer than `len` elements
- * return `elem` until the required length `len` is reached.
+ /** Appends an element value to this iterator until a given target length is reached.
+ * @param len the target length
+ * @param elem the padding value
+ * @return a new iterator producing all values of this iterator,
+ * followed by the minimal number of occurrences of `elem` so
+ * that the number of produced values is at least `len`.
+ * @usecase def padTo(len: Int, elem: A): Iterator[A]
*/
def padTo[A1 >: A](len: Int, elem: A1) = new Iterator[A1] {
private var count = 0
@@ -488,9 +572,8 @@ trait Iterator[+A] { self =>
}
}
- /** Return an iterator that pairs each element of this iterator
+ /** Creates an iterator that pairs each element produced by this iterator
* with its index, counting from 0.
- *
*/
def zipWithIndex = new Iterator[(A, Int)] {
var idx = 0
@@ -502,24 +585,25 @@ trait Iterator[+A] { self =>
}
}
- /** Returns an iterator formed from this iterator and the specified iterator
- * <code>that</code> by associating each element of the former with
- * the element at the same position in the latter.
+ /** Creates an iterator formed from this iterator and another iterator
+ * by combining corresponding elements in pairs.
+ * If one of the two iterators is shorter than the other,
+ * placeholder elements are used to extend the shorter iterator to the length of the longer.
*
- * @param that iterator <code>that</code> may have a different length
+ * @param that iterator `that` may have a different length
* from the self iterator.
- * @param thisElem element <code>thisElem</code> is used to fill up the
+ * @param thisElem element `thisElem` is used to fill up the
* resulting iterator if the self iterator is shorter than
- * <code>that</code>
- * @param thatElem element <code>thatElem</code> is used to fill up the
- * resulting iterator if <code>that</code> is shorter than
+ * `that`
+ * @param thatElem element `thatElem` is used to fill up the
+ * resulting iterator if `that` is shorter than
* the self iterator
- * @return <code>Iterator((a<sub>0</sub>,b<sub>0</sub>), ...,
- * (a<sub>n</sub>,b<sub>n</sub>), (elem,b<sub>n+1</sub>),
- * ..., {elem,b<sub>m</sub>})</code>
- * when <code>[a<sub>0</sub>, ..., a<sub>n</sub>] zip
- * [b<sub>0</sub>, ..., b<sub>m</sub>]</code> is
- * invoked where <code>m &gt; n</code>.
+ * @return a new iterator containing pairs consisting of
+ * corresponding values of this iterator and `that`. The length
+ * of the returned iterator is the maximum of the lengths of this iterator and `that`.
+ * If this iterator is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this iterator, `thatElem` values are used to pad the result.
+ * @usecase def zipAll[B](that: Iterator[B], thisElem: A, thatElem: B): Iterator[(A, B)]
*/
def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1) = new Iterator[(A1, B1)] {
def hasNext = self.hasNext || that.hasNext
@@ -533,20 +617,25 @@ trait Iterator[+A] { self =>
}
}
- /** Execute a function <code>f</code> for all elements of this
- * iterator.
+ /** Applies a function `f` to all values produced by this iterator.
+ *
+ * @param f the function that is applied for its side-effect to every element.
+ * The result of function `f` is discarded.
*
- * @param f a function that is applied to every element.
+ * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`,
+ * but this is not necessary.
+ *
+ * @usecase def foreach(f: A => Unit): Unit
*/
def foreach[U](f: A => U) { while (hasNext) f(next()) }
- /** Apply a predicate <code>p</code> to all elements of this
- * iterable object and return <code>true</code> iff the predicate yields
- * <code>true</code> for all elements.
+ /** Tests whether a predicate holds for all values produced by this iterator.
+ * $mayNotTerminateInf
*
- * @param p the predicate
- * @return <code>true</code> iff the predicate yields <code>true</code>
- * for all elements.
+ * @param p the predicate used to test elements.
+ * @return `true` if the given predicate `p` holds for all values
+ * produced by this iterator, otherwise `false`.
*/
def forall(p: A => Boolean): Boolean = {
var res = true
@@ -554,13 +643,12 @@ trait Iterator[+A] { self =>
res
}
- /** Apply a predicate <code>p</code> to all elements of this
- * iterable object and return true iff there is at least one
- * element for which <code>p</code> yields <code>true</code>.
+ /** Tests whether a predicate holds for some of the values produced by this iterator.
+ * $mayNotTerminateInf
*
- * @param p the predicate
- * @return <code>true</code> iff the predicate yields <code>true</code>
- * for at least one element.
+ * @param p the predicate used to test elements.
+ * @return `true` if the given predicate `p` holds for some of the values
+ * produced by this iterator, otherwise `false`.
*/
def exists(p: A => Boolean): Boolean = {
var res = false
@@ -568,18 +656,22 @@ trait Iterator[+A] { self =>
res
}
- /** Tests if the given value <code>elem</code> is a member of this iterator.
+ /** Tests whether this iterator contains a given value as an element.
+ * $mayNotTerminateInf
*
- * @param elem element whose membership has to be tested.
+ * @param elem the element to test.
+ * @return `true` if this iterator produces some value that is
+ * equal (wrt `==`) to `elem`, `false` otherwise.
*/
def contains(elem: Any): Boolean = exists(_ == elem)
- /** Find and return the first value returned by the iterator satisfying a
+ /** Finds the first value produced by the iterator satisfying a
* predicate, if any.
+ * $mayNotTerminateInf
*
- * @param p the predicate
- * @return the first element in the iterable object satisfying
- * <code>p</code>, or <code>None</code> if none exists.
+ * @param p the predicate used to test values.
+ * @return an option value containing the first value produced by the iterator that satisfies
+ * predicate `p`, or `None` if none exists.
*/
def find(p: A => Boolean): Option[A] = {
var res: Option[A] = None
@@ -590,12 +682,26 @@ trait Iterator[+A] { self =>
res
}
- /** Returns index of the first element satisfying a predicate, or -1.
+ /** Applies an option-valued function to successive elements of this iterator
+ * until a defined value is found.
*
- * @note may not terminate for infinite-sized collections.
- * @param p the predicate
- * @return the index of the first element satisfying <code>p</code>,
- * or -1 if such an element does not exist
+ * @param f the function to be applied to successive elements.
+ * @return an option value containing the first defined result of
+ * `f`, or `None` if `f` returns `None` for all elements.
+ def mapFind[B](f: A => Option[B]): Option[B] = {
+ var res: Option[B] = None
+ while (res.isEmpty && hasNext) {
+ res = f(next())
+ }
+ res
+ }
+ */
+
+ /** Returns the index of the first produced value satisfying a predicate, or -1.
+ * $mayNotTerminateInf
+ * @param p the predicate to test values
+ * @return the index of the first produced value satisfying `p`,
+ * or -1 if such an element does not exist until the end of the iterator is reached.
*/
def indexWhere(p: A => Boolean): Int = {
var i = 0
@@ -610,14 +716,13 @@ trait Iterator[+A] { self =>
if (found) i else -1
}
- /** Returns the index of the first occurence of the specified
+ /** Returns the index of the first occurrence of the specified
* object in this iterable object.
+ * $mayNotTerminateInf
*
- * @note may not terminate for infinite-sized collections.
* @param elem element to search for.
- * @return the index in this sequence of the first occurence of the
- * specified element, or -1 if the sequence does not contain
- * this element.
+ * @return the index of the first occurrence of `elem` in the values produced by this iterator,
+ * or -1 if such an element does not exist until the end of the iterator is reached.
*/
def indexOf[B >: A](elem: B): Int = {
var i = 0
@@ -632,103 +737,9 @@ trait Iterator[+A] { self =>
if (found) i else -1
}
- /** Combines the elements of this iterator together using the binary
- * operator <code>op</code>, from left to right, and starting with
- * the value <code>z</code>.
- *
- * @return <code>op(... (op(op(z,a<sub>0</sub>),a<sub>1</sub>) ...),
- * a<sub>n</sub>)</code> if the iterator yields elements
- * <code>a<sub>0</sub>, a<sub>1</sub>, ..., a<sub>n</sub></code>.
- */
- def foldLeft[B](z: B)(op: (B, A) => B): B = {
- var acc = z
- while (hasNext) acc = op(acc, next())
- acc
- }
-
- /** Combines the elements of this iterator together using the binary
- * operator <code>op</code>, from right to left, and starting with
- * the value <code>z</code>.
- *
- * @return <code>a<sub>0</sub> op (... op (a<sub>n</sub> op z)...)</code>
- * if the iterator yields elements <code>a<sub>0</sub>, a<sub>1</sub>, ...,
- * a<sub>n</sub></code>.
- */
- def foldRight[B](z: B)(op: (A, B) => B): B =
- if (hasNext) op(next(), foldRight(z)(op)) else z
-
- /** Similar to <code>foldLeft</code> but can be used as
- * an operator with the order of iterator and zero arguments reversed.
- * That is, <code>z /: xs</code> is the same as <code>xs foldLeft z</code>.
- *
- * @param z the left argument of the first application of <code>op</code>
- * (evaluation occurs from left to right).
- * @param op the applied operator.
- * @return the result value
- * @see <code><a href="#foldLeft">foldLeft</a></code>.
- */
- def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
-
- /** An alias for <code>foldRight</code>.
- * That is, <code>xs :\ z</code> is the same as <code>xs foldRight z</code>.
- *
- * @param z the right argument of the first application of <code>op</code>
- * (evaluation occurs from right to left).
- * @param op the applied operator.
- * @return the result value.
- * @see <code><a href="#foldRight">foldRight</a></code>.
- */
- def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
-
- /** Combines the elements of this iterator together using the binary
- * operator <code>op</code>, from left to right.
- *
- * @param op The operator to apply
- * @return <code>op(... op(a<sub>0</sub>,a<sub>1</sub>), ..., a<sub>n</sub>)</code>
- * if the iterator yields elements
- * <code>a<sub>0</sub>, a<sub>1</sub>, ..., a<sub>n</sub></code>.
- * @throws Predef.UnsupportedOperationException if the iterator is empty.
- */
- def reduceLeft[B >: A](op: (B, A) => B): B = {
- if (hasNext) foldLeft[B](next())(op)
- else throw new UnsupportedOperationException("empty.reduceLeft")
- }
-
- /** Combines the elements of this iterator together using the binary
- * operator <code>op</code>, from right to left
- * @param op The operator to apply
- *
- * @return <code>a<sub>0</sub> op (... op (a<sub>n-1</sub> op a<sub>n</sub>)...)</code>
- * if the iterator yields elements <code>a<sub>0</sub>, a<sub>1</sub>, ...,
- * a<sub>n</sub></code>.
-
- * @throws Predef.UnsupportedOperationException if the iterator is empty.
- */
- def reduceRight[B >: A](op: (A, B) => B): B = {
- if (hasNext) foldRight[B](next())(op)
- else throw new UnsupportedOperationException("empty.reduceRight")
- }
-
- /** Combines the elements of this iterator together using the binary
- * operator <code>op</code>, from left to right
- * @param op The operator to apply
- * @return If the iterable is non-empty, the result of the operations as an Option, otherwise None.
- */
- def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = {
- if (!hasNext) None else Some(reduceLeft(op))
- }
-
- /** Combines the elements of this iterable object together using the binary
- * operator <code>op</code>, from right to left.
- *
- * @param op The operator to apply
- * @return If the iterable is non-empty, the result of the operations as an Option, otherwise None.
- */
- def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = {
- if (!hasNext) None else Some(reduceRight(op))
- }
-
- /** Returns a buffered iterator from this iterator.
+ /** Creates a buffered iterator from this iterator.
+ * @see BufferedIterator
+ * @return a buffered iterator producing the same values as this iterator.
*/
def buffered = new BufferedIterator[A] {
private var hd: A = _
@@ -752,22 +763,7 @@ trait Iterator[+A] { self =>
} else self.next
}
- /** Since I cannot reliably get take(n) to influence the original
- * iterator (it seems to depend on some ordering issue I don't
- * understand) this method takes the way one might expect, leaving
- * the original iterator with 'size' fewer elements.
- */
- private def takeDestructively(size: Int): Seq[A] = {
- val buf = new ArrayBuffer[A]
- var i = 0
- while (self.hasNext && i < size) {
- buf += self.next
- i += 1
- }
- buf
- }
-
- /** A flexible iterator for transforming an <code>Iterator[A]</code> into an
+ /** A flexible iterator for transforming an `Iterator[A]` into an
* Iterator[Seq[A]], with configurable sequence size, step, and
* strategy for dealing with elements which don't fit evenly.
*
@@ -794,6 +790,20 @@ trait Iterator[+A] { self =>
this
}
+ /** For reasons which remain to be determined, calling
+ * self.take(n).toSeq causes an infinite loop, so we have
+ * a slight variation on take for local usage.
+ */
+ private def takeDestructively(size: Int): Seq[A] = {
+ val buf = new ArrayBuffer[A]
+ var i = 0
+ while (self.hasNext && i < size) {
+ buf += self.next
+ i += 1
+ }
+ buf
+ }
+
private def padding(x: Int) = List.fill(x)(pad.get())
private def gap = (step - size) max 0
@@ -803,7 +813,7 @@ trait Iterator[+A] { self =>
// If there is padding defined we insert it immediately
// so the rest of the code can be oblivious
val xs: Seq[B] = {
- val res = self takeDestructively count
+ val res = takeDestructively(count)
// extra checks so we don't calculate length unless there's reason
if (pad.isDefined && !self.hasNext) {
val shortBy = count - res.length
@@ -817,7 +827,7 @@ trait Iterator[+A] { self =>
// if 0 elements are requested, or if the number of newly obtained
// elements is less than the gap between sequences, we are done.
def deliver(howMany: Int) = {
- (howMany > 0 && len > gap) && {
+ (howMany > 0 && (isFirst || len > gap)) && {
if (!isFirst)
buffer trimStart (step min prevSize)
@@ -851,6 +861,8 @@ trait Iterator[+A] { self =>
if (!filled)
fill()
+ if (!filled)
+ throw new NoSuchElementException("next on empty iterator")
filled = false
buffer.toList
}
@@ -895,18 +907,15 @@ trait Iterator[+A] { self =>
new GroupedIterator[B](self, size, step)
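A sketch of the `grouped`/`sliding` API on iterators (not part of the patch; it assumes the default behaviour of `GroupedIterator`, where a trailing partial group is kept unless padding is supplied via the `withPadding` method shown above):

    (1 to 7).iterator.grouped(3).toList
    // List(List(1, 2, 3), List(4, 5, 6), List(7))

    // padding fills the last, incomplete group
    (1 to 7).iterator.grouped(3).withPadding(0).toList
    // List(List(1, 2, 3), List(4, 5, 6), List(7, 0, 0))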
/** Returns the number of elements in this iterator.
- * @note The iterator is at its end after this method returns.
+ * $willNotTerminateInf
+ *
+ * Note: The iterator is at its end after this method returns.
*/
- def length: Int = {
- var i = 0
- while (hasNext) {
- next(); i += 1
- }
- i
- }
+ def length: Int = this.size
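For instance (a sketch, not part of the patch):

    val it = Iterator(1, 2, 3)
    it.length    // 3
    it.hasNext   // false: computing the length consumed the iterator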
/** Creates two new iterators that both iterate over the same elements
- * as this iterator (in the same order).
+ * as this iterator (in the same order). The duplicate iterators are
+ * considered equal if they are positioned at the same element.
*
* @return a pair of iterators
*/
@@ -925,6 +934,14 @@ trait Iterator[+A] { self =>
e
} else gap.dequeue
}
+ // to verify partnerhood we use reference equality on gap because
+ // type testing does not discriminate based on origin.
+ private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue
+ override def hashCode = gap.hashCode
+ override def equals(other: Any) = other match {
+ case x: Partner => x.compareGap(gap) && gap.isEmpty
+ case _ => super.equals(other)
+ }
}
(new Partner, new Partner)
}
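A brief sketch of `duplicate` (not part of the patch):

    val (a, b) = Iterator(1, 2, 3).duplicate
    a.next()     // 1
    b.toList     // List(1, 2, 3): b still produces every element
    a.toList     // List(2, 3)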
@@ -950,13 +967,21 @@ trait Iterator[+A] { self =>
}
}
- /** Fills the given array `xs` with at most `len` elements of
- * this iterator starting at position `start` until either `len` elements have been copied,
- * or the end of the iterator is reached, or the end of the array `xs` is reached.
+ /** Copies selected values produced by this iterator to an array.
+ * Fills the given array `xs` with at most `len` values produced by this
+ * iterator, after skipping `start` values.
+ * Copying will stop once either the end of the current iterator is reached,
+ * or the end of the array is reached, or `len` elements have been copied.
+ *
+ * $willNotTerminateInf
*
- * @param xs the array to fill.
- * @param start starting index.
- * @param len number of elements to copy
+ * @param xs the array to fill.
+ * @param start the starting index.
+ * @param len the maximal number of elements to copy.
+ * @tparam B the type of the elements of the array.
+ *
+ *
+ * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
var i = start
@@ -967,143 +992,40 @@ trait Iterator[+A] { self =>
}
}
- /** Fills the given array <code>xs</code> with the elements of
- * this iterator starting at position <code>start</code>
- * until either the end of the current iterator or the end of array `xs` is reached.
- *
- * @param xs the array to fill.
- * @param start starting index.
- */
- def copyToArray[B >: A](xs: Array[B], start: Int): Unit =
- copyToArray(xs, start, xs.length - start)
-
- /** Fills the given array <code>xs</code> with the elements of
- * this iterator starting at position <code>0</code>
- * until either the end of the current iterator or the end of array `xs` is reached.
- *
- * @param xs the array to fill.
+ /** Tests if another iterator produces the same values as this one.
+ * $willNotTerminateInf
+ * @param that the other iterator
+ * @return `true`, if both iterators produce the same elements in the same order, `false` otherwise.
*/
- def copyToArray[B >: A](xs: Array[B]): Unit = copyToArray(xs, 0, xs.length)
+ def sameElements(that: Iterator[_]): Boolean = {
+ while (hasNext && that.hasNext)
+ if (next != that.next)
+ return false
- /** Copy all elements to a buffer
- * @param dest The buffer to which elements are copied
- */
- def copyToBuffer[B >: A](dest: Buffer[B]) {
- while (hasNext) dest += next
+ !hasNext && !that.hasNext
}
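For example (a sketch, not part of the patch):

    Iterator(1, 2, 3) sameElements Iterator(1, 2, 3)   // true
    Iterator(1, 2, 3) sameElements Iterator(1, 2)      // false
    // note: the comparison consumes (part of) both iterators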
- /** Traverse this iterator and return all elements in a list.
- *
- * @return A list which enumerates all elements of this iterator.
- */
- def toList: List[A] = {
- val res = new ListBuffer[A]
- while (hasNext) res += next
- res.toList
- }
-
- /** Traverse this iterator and return all elements in a stream.
- *
- * @return A stream which enumerates all elements of this iterator.
- */
+ def toTraversable: Traversable[A] = toStream
+ def toIterator: Iterator[A] = self
def toStream: Stream[A] =
- if (hasNext) Stream.cons(next, toStream) else Stream.empty
+ if (self.hasNext) Stream.cons(self.next, self.toStream)
+ else Stream.empty[A]
- /** Traverse this iterator and return all elements in a sequence.
- *
- * @return A sequence which enumerates all elements of this iterator.
- */
- def toSeq: Seq[A] = {
- val buffer = new ArrayBuffer[A]
- this copyToBuffer buffer
- buffer
- }
-
- /** Returns a string representation of the elements in this iterator. The resulting string
- * begins with the string <code>start</code> and is finished by the string
- * <code>end</code>. Inside, the string representations of elements (w.r.t.
- * the method <code>toString</code>) are separated by the string
- * <code>sep</code>.
- * <p/>
- * Ex: <br/>
- * <code>List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"</code>
- *
- * @param start starting string.
- * @param sep separator string.
- * @param end ending string.
- * @return a string representation of this iterable object.
- */
- def mkString(start: String, sep: String, end: String): String = {
- val buf = new StringBuilder
- addString(buf, start, sep, end).toString
- }
-
- /** Returns a string representation of this iterable object. The string
- * representations of elements (w.r.t. the method <code>toString()</code>)
- * are separated by the string <code>sep</code>.
- *
- * @param sep separator string.
- * @return a string representation of this iterable object.
+ /** Converts this iterator to a string.
+ * @return `"empty iterator"` or `"non-empty iterator"`, depending on whether or not the iterator is empty.
*/
- def mkString(sep: String): String = mkString("", sep, "")
-
- /** Returns a string representation of this iterable object. The string
- * representations of elements (w.r.t. the method <code>toString()</code>)
- * are concatenated without any separator string.
- *
- * @return a string representation of this iterable object.
- */
- def mkString: String = mkString("")
-
- /** Write all elements of this iterator into given string builder.
- * The written text begins with the string <code>start</code> and is finished by the string
- * <code>end</code>. Inside, the string representations of elements (w.r.t.
- * the method <code>toString()</code>) are separated by the string
- * <code>sep</code>.
- */
- def addString(buf: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
- buf.append(start)
- val elems = this
- if (elems.hasNext) buf.append(elems.next)
- while (elems.hasNext) {
- buf.append(sep); buf.append(elems.next)
- }
- buf.append(end)
- }
-
- /** Write all elements of this iterator into given string builder.
- * The string representations of elements (w.r.t. the method <code>toString()</code>)
- * are separated by the string <code>sep</code>.
- */
- def addString(buf: StringBuilder, sep: String): StringBuilder = addString(buf, "", sep, "")
-
- /** Write all elements of this string into given string builder without using
- * any separator between consecutive elements.
- */
- def addString(buf: StringBuilder): StringBuilder = addString(buf, "", "", "")
-
override def toString = (if (hasNext) "non-empty" else "empty")+" iterator"
/** Returns a new iterator that first yields the elements of this
- * iterator followed by the elements provided by iterator <code>that</code>.
+ * iterator followed by the elements provided by iterator `that`.
*/
- @deprecated("use <code>++</code>")
- def append[B >: A](that: Iterator[B]) = new Iterator[B] {
- def hasNext = self.hasNext || that.hasNext
- def next() = (if (self.hasNext) self else that).next()
- }
+ @deprecated("use `++`")
+ def append[B >: A](that: Iterator[B]) = self ++ that
/** Returns index of the first element satisfying a predicate, or -1. */
@deprecated("use `indexWhere` instead")
def findIndexOf(p: A => Boolean): Int = indexWhere(p)
- /** Collect elements into a seq.
- *
- * @return a sequence which enumerates all elements of this iterator.
- */
- @deprecated("use toSeq instead")
- def collect: Seq[A] = toSeq
-
/** Returns a counted iterator from this iterator.
*/
@deprecated("use zipWithIndex in Iterator")
@@ -1114,14 +1036,14 @@ trait Iterator[+A] { self =>
def next(): A = { cnt += 1; self.next }
}
- /** Fills the given array <code>xs</code> with the elements of
- * this sequence starting at position <code>start</code>. Like <code>copyToArray</code>,
+ /** Fills the given array `xs` with the elements of
+ * this sequence starting at position `start`. Like `copyToArray`,
* but designed to accommodate IO stream operations.
*
+ * '''Note:''' the array must be large enough to hold `sz` elements.
* @param xs the array to fill.
* @param start the starting index.
* @param sz the maximum number of elements to be read.
- * @pre the array must be large enough to hold <code>sz</code> elements.
*/
@deprecated("use copyToArray instead")
def readInto[B >: A](xs: Array[B], start: Int, sz: Int) {
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index b19abf6271..bbd3976490 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -24,7 +23,8 @@ package scala.collection
* <li><code>scala.collection.Iterator</code> <=> <code>java.util.{ Iterator, Enumeration }</code></li>
* <li><code>scala.collection.mutable.Buffer</code> <=> <code>java.util.List</code></li>
* <li><code>scala.collection.mutable.Set</code> <=> <code>java.util.Set</code></li>
- * <li><code>scala.collection.mutable.Map</code> <=> <code>java.util.Map</code></li>
+ * <li><code>scala.collection.mutable.Map</code> <=> <code>java.util.{ Map, Dictionary }</code></li>
+ * <li><code>scala.collection.mutable.ConcurrentMap</code> <=> <code>java.util.concurrent.ConcurrentMap</code></li>
* </ul>
* <p>
* In all cases, converting from a source type to a target type and back
@@ -38,23 +38,22 @@ package scala.collection
* <b>val</b> sl2 : scala.collection.mutable.Buffer[Int] = jl
* assert(sl eq sl2)</pre>
* <p>
- * Note that no conversion is provided from <code>scala.collection.immutable.List</code>
- * to <code>java.util.List</code>. Instead it is convertible to an immutable
- * <code>java.util.Collection</code> which provides size and interation
- * capabilities, but not access by index as would be provided by
- * <code>java.util.List</code>.<br/>
- * This is intentional: in combination the implementation of
- * <code>scala.collection.immutable.List</code> and the typical usage
- * patterns of <code>java.util.List</code> would perform extremely poorly.
+ * In addition, the following one-way conversions are provided:
* </p>
+ * <ul>
+ * <li><code>scala.collection.Seq</code> => <code>java.util.List</code></li>
+ * <li><code>scala.collection.mutable.Seq</code> => <code>java.util.List</code></li>
+ * <li><code>scala.collection.Set</code> => <code>java.util.Set</code></li>
+ * <li><code>scala.collection.Map</code> => <code>java.util.Map</code></li>
+ * </ul>
*
* @author Miles Sabin
+ * @author Martin Odersky
* @since 2.8
*/
object JavaConversions {
import java.{ lang => jl, util => ju }
- import scala.collection.{ generic, immutable, mutable, Traversable }
- import scala.reflect.ClassManifest
+ import java.util.{ concurrent => juc }
// Scala => Java
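As a quick illustration of the round-trip guarantee described in the class comment above (a minimal sketch, assuming the 2.8-era API added in this commit), converting to Java and back yields the same instance, while `Seq` only converts one way:

{{{
import scala.collection.JavaConversions._
import scala.collection.mutable

// Two-way conversion: Buffer <=> java.util.List; the round trip unwraps
// to the original instance instead of copying.
val sl = mutable.ListBuffer(1, 2, 3)
val jl: java.util.List[Int] = sl            // via asJavaList
val sl2: mutable.Buffer[Int] = jl           // via asScalaBuffer
assert(sl eq sl2)

// One-way conversion: a Seq can be viewed as a java.util.List, but there is
// no implicit conversion back to Seq (only to mutable.Buffer).
val js: java.util.List[Int] = Seq(1, 2, 3)  // via asJavaList(Seq)
}}}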
@@ -71,11 +70,14 @@ object JavaConversions {
* @param i The <code>Iterator</code> to be converted.
* @return A Java <code>Iterator</code> view of the argument.
*/
- implicit def asIterator[A](i : Iterator[A]) = i match {
+ implicit def asJavaIterator[A](i : Iterator[A]): ju.Iterator[A] = i match {
case JIteratorWrapper(wrapped) => wrapped
case _ => IteratorWrapper(i)
}
+ @deprecated("use asJavaIterator instead")
+ def asIterator[A](i : Iterator[A]): ju.Iterator[A] = asJavaIterator[A](i)
+
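The pattern above repeats throughout this file: each conversion is renamed (here `asIterator` becomes `asJavaIterator`) and the old name is kept as a deprecated, non-implicit forwarder, so existing call sites still compile while only the new name takes part in implicit resolution. A minimal sketch of how that looks from user code:

{{{
import scala.collection.JavaConversions.{ asJavaIterator, asIterator }

val i1: java.util.Iterator[Int] = asJavaIterator(Iterator(1, 2, 3)) // new name (also implicit)
val i2: java.util.Iterator[Int] = asIterator(Iterator(4, 5, 6))     // deprecated alias, explicit only
}}}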
/**
* Implicitly converts a Scala <code>Iterator</code> to a Java <code>Enumeration</code>.
* The returned Java <code>Enumeration</code> is backed by the provided Scala
@@ -89,11 +91,14 @@ object JavaConversions {
* @param i The <code>Iterator</code> to be converted.
* @return A Java <code>Enumeration</code> view of the argument.
*/
- implicit def asEnumeration[A](i : Iterator[A]) = i match {
+ implicit def asJavaEnumeration[A](i : Iterator[A]): ju.Enumeration[A] = i match {
case JEnumerationWrapper(wrapped) => wrapped
case _ => IteratorWrapper(i)
}
+ @deprecated("use asJavaEnmeration instead")
+ def asEnumeration[A](i : Iterator[A]): ju.Enumeration[A] = asJavaEnumeration[A](i)
+
/**
* Implicitly converts a Scala <code>Iterable</code> to a Java <code>Iterable</code>.
* The returned Java <code>Iterable</code> is backed by the provided Scala
@@ -107,11 +112,14 @@ object JavaConversions {
* @param i The <code>Iterable</code> to be converted.
* @return A Java <code>Iterable</code> view of the argument.
*/
- implicit def asIterable[A](i : Iterable[A]) = i match {
+ implicit def asJavaIterable[A](i : Iterable[A]): jl.Iterable[A] = i match {
case JIterableWrapper(wrapped) => wrapped
case _ => IterableWrapper(i)
}
+ @deprecated("use asJavaIterable instead")
+ def asIterable[A](i : Iterable[A]): jl.Iterable[A] = asJavaIterable[A](i)
+
/**
* Implicitly converts a Scala <code>Iterable</code> to an immutable Java
* <code>Collection</code>.
@@ -123,11 +131,14 @@ object JavaConversions {
* @param i The <code>SizedIterable</code> to be converted.
* @return A Java <code>Collection</code> view of the argument.
*/
- implicit def asCollection[A](i : Iterable[A]) = i match {
+ implicit def asJavaCollection[A](i : Iterable[A]): ju.Collection[A] = i match {
case JCollectionWrapper(wrapped) => wrapped
case _ => new IterableWrapper(i)
}
+ @deprecated("use asJavaCollection instead")
+ def asCollection[A](i : Iterable[A]): ju.Collection[A] = asJavaCollection[A](i)
+
/**
* Implicitly converts a Scala mutable <code>Buffer</code> to a Java <code>List</code>.
* The returned Java <code>List</code> is backed by the provided Scala
@@ -141,11 +152,56 @@ object JavaConversions {
* @param b The <code>Buffer</code> to be converted.
* @return A Java <code>List</code> view of the argument.
*/
- implicit def asList[A](b : mutable.Buffer[A]) : ju.List[A] = b match {
+ implicit def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = b match {
case JListWrapper(wrapped) => wrapped
case _ => new MutableBufferWrapper(b)
}
+ @deprecated("use asJavaList instead")
+ def asList[A](b : mutable.Buffer[A]): ju.List[A] = asJavaList[A](b)
+
+ /**
+ * Implicitly converts a Scala mutable <code>Seq</code> to a Java <code>List</code>.
+ * The returned Java <code>List</code> is backed by the provided Scala
+ * <code>Seq</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Seq</code> was previously obtained from an implicit or
+ * explicit call of <code>asSeq(java.util.List)</code> then the original
+ * Java <code>List</code> will be returned.
+ *
+ * @param b The <code>Seq</code> to be converted.
+ * @return A Java <code>List</code> view of the argument.
+ */
+ implicit def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = b match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(b)
+ }
+
+ @deprecated("use asJavaList instead")
+ def asList[A](b : mutable.Seq[A]): ju.List[A] = asJavaList[A](b)
+
+ /**
+ * Implicitly converts a Scala <code>Seq</code> to a Java <code>List</code>.
+ * The returned Java <code>List</code> is backed by the provided Scala
+ * <code>Seq</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Seq</code> was previously obtained from an implicit or
+ * explicit call of <code>asSeq(java.util.List)</code> then the original
+ * Java <code>List</code> will be returned.
+ *
+ * @param b The <code>Seq</code> to be converted.
+ * @return A Java <code>List</code> view of the argument.
+ */
+ implicit def asJavaList[A](b : Seq[A]): ju.List[A] = b match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new SeqWrapper(b)
+ }
+
+ @deprecated("use asJavaList instead")
+ def asList[A](b : Seq[A]): ju.List[A] = asJavaList[A](b)
+
/**
* Implicitly converts a Scala mutable <code>Set</code> to a Java <code>Set</code>.
* The returned Java <code>Set</code> is backed by the provided Scala
@@ -159,11 +215,35 @@ object JavaConversions {
* @param s The <code>Set</code> to be converted.
* @return A Java <code>Set</code> view of the argument.
*/
- implicit def asSet[A](s : mutable.Set[A])(implicit m : ClassManifest[A]) : ju.Set[A] = s match {
+ implicit def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = s match {
case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)(m)
+ case _ => new MutableSetWrapper(s)
}
+ @deprecated("use asJavaSet instead")
+ def asSet[A](s : mutable.Set[A]): ju.Set[A] = asJavaSet[A](s)
+
+ /**
+ * Implicitly converts a Scala <code>Set</code> to a Java <code>Set</code>.
+ * The returned Java <code>Set</code> is backed by the provided Scala
+ * <code>Set</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Set</code> was previously obtained from an implicit or
+ * explicit call of <code>asSet(java.util.Set)</code> then the original
+ * Java <code>Set</code> will be returned.
+ *
+ * @param s The <code>Set</code> to be converted.
+ * @return A Java <code>Set</code> view of the argument.
+ */
+ implicit def asJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new SetWrapper(s)
+ }
+
+ @deprecated("use asJavaSet instead")
+ def asSet[A](s : Set[A]): ju.Set[A] = asJavaSet[A](s)
+
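A collection.Set that is not mutable still converts, but the resulting Java view is effectively read-only: `SetWrapper` (defined later in this file) inherits `add` from `java.util.AbstractSet`, which throws `UnsupportedOperationException`. A minimal sketch:

{{{
import scala.collection.JavaConversions._

val js: java.util.Set[Int] = Set(1, 2, 3)   // via asJavaSet(Set), wraps in SetWrapper
assert(js.contains(2) && js.size == 3)
// js.add(4)   // would throw UnsupportedOperationException: the view is read-only
}}}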
/**
* Implicitly converts a Scala mutable <code>Map</code> to a Java <code>Map</code>.
* The returned Java <code>Map</code> is backed by the provided Scala
@@ -177,11 +257,80 @@ object JavaConversions {
* @param m The <code>Map</code> to be converted.
* @return A Java <code>Map</code> view of the argument.
*/
- implicit def asMap[A, B](m : mutable.Map[A, B])(implicit ma : ClassManifest[A]) : ju.Map[A, B] = m match {
+ implicit def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)(ma)
+ case _ => new MutableMapWrapper(m)
+ }
+
+ @deprecated("use asJavaMap instead")
+ def asMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = asJavaMap[A, B](m)
+
+ /**
+ * Implicitly converts a Scala mutable <code>Map</code> to a Java <code>Dictionary</code>.
+ * The returned Java <code>Dictionary</code> is backed by the provided Scala
+ * <code>Map</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Map</code> was previously obtained from an implicit or
+ * explicit call of <code>asMap(java.util.Dictionary)</code> then the original
+ * Java <code>Dictionary</code> will be returned.
+ *
+ * @param m The <code>Map</code> to be converted.
+ * @return A Java <code>Dictionary</code> view of the argument.
+ */
+ implicit def asJavaDictionary[A, B](m : mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
}
+ @deprecated("use asJavaDictionary instead")
+ def asDictionary[A, B](m : mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary[A, B](m)
+
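A short sketch of the `Dictionary` view (the map name `conf` and its keys are made up for illustration): writes through the Java `Dictionary` are visible in the underlying Scala `Map`, since the wrapper delegates to it.

{{{
import scala.collection.JavaConversions._
import scala.collection.mutable

val conf = mutable.Map("retries" -> "3")
val dict: java.util.Dictionary[String, String] = conf   // via asJavaDictionary
dict.put("timeout", "30")
assert(conf("timeout") == "30")                          // write is visible through the Scala Map
}}}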
+ /**
+ * Implicitly converts a Scala <code>Map</code> to a Java <code>Map</code>.
+ * The returned Java <code>Map</code> is backed by the provided Scala
+ * <code>Map</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Map</code> was previously obtained from an implicit or
+ * explicit call of <code>asMap(java.util.Map)</code> then the original
+ * Java <code>Map</code> will be returned.
+ *
+ * @param m The <code>Map</code> to be converted.
+ * @return A Java <code>Map</code> view of the argument.
+ */
+ implicit def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped
+ case _ => new MapWrapper(m)
+ }
+
+ @deprecated("use asJavaMap instead")
+ def asMap[A, B](m : Map[A, B]): ju.Map[A, B] = asJavaMap[A, B](m)
+
+ /**
+ * Implicitly converts a Scala mutable `ConcurrentMap` to a Java `ConcurrentMap`.
+ * The returned Java `ConcurrentMap` is backed by the provided Scala `ConcurrentMap`
+ * and any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>ConcurrentMap</code> was previously obtained from an implicit or
+ * explicit call of <code>asConcurrentMap(java.util.concurrent.ConcurrentMap)</code> then the original
+ * Java <code>ConcurrentMap</code> will be returned.
+ *
+ * @param m The <code>ConcurrentMap</code> to be converted.
+ * @return A Java <code>ConcurrentMap</code> view of the argument.
+ */
+ implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case JConcurrentMapWrapper(wrapped) => wrapped
+ case _ => new ConcurrentMapWrapper(m)
+ }
+
+ @deprecated("use asJavaConcurrentMap instead")
+ def asConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = asJavaConcurrentMap[A, B](m)
+
// Java => Scala
/**
@@ -197,11 +346,14 @@ object JavaConversions {
* @param i The <code>Iterator</code> to be converted.
* @return A Scala <code>Iterator</code> view of the argument.
*/
- implicit def asIterator[A](i : ju.Iterator[A]) = i match {
+ implicit def asScalaIterator[A](i : ju.Iterator[A]): Iterator[A] = i match {
case IteratorWrapper(wrapped) => wrapped
case _ => JIteratorWrapper(i)
}
+ @deprecated("use asScalaIterator instead")
+ def asIterator[A](i : ju.Iterator[A]): Iterator[A] = asScalaIterator[A](i)
+
/**
* Implicitly converts a Java <code>Enumeration</code> to a Scala <code>Iterator</code>.
* The returned Scala <code>Iterator</code> is backed by the provided Java
@@ -215,11 +367,14 @@ object JavaConversions {
* @param i The <code>Enumeration</code> to be converted.
* @return A Scala <code>Iterator</code> view of the argument.
*/
- implicit def asIterator[A](i : ju.Enumeration[A]) = i match {
+ implicit def enumerationAsScalaIterator[A](i : ju.Enumeration[A]): Iterator[A] = i match {
case IteratorWrapper(wrapped) => wrapped
case _ => JEnumerationWrapper(i)
}
+ @deprecated("use enumerationAsScalaIterator instead")
+ def asIterator[A](i : ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator[A](i)
+
/**
* Implicitly converts a Java <code>Iterable</code> to a Scala <code>Iterable</code>.
* The returned Scala <code>Iterable</code> is backed by the provided Java
@@ -233,11 +388,14 @@ object JavaConversions {
* @param i The <code>Iterable</code> to be converted.
* @return A Scala <code>Iterable</code> view of the argument.
*/
- implicit def asIterable[A](i : jl.Iterable[A]) = i match {
+ implicit def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = i match {
case IterableWrapper(wrapped) => wrapped
case _ => JIterableWrapper(i)
}
+ @deprecated("use asScalaIterable instead")
+ def asIterable[A](i : jl.Iterable[A]): Iterable[A] = asScalaIterable[A](i)
+
/**
* Implicitly converts a Java <code>Collection</code> to an Scala <code>Iterable</code>.
* <p>
@@ -248,11 +406,14 @@ object JavaConversions {
* @param i The <code>Collection</code> to be converted.
* @return A Scala <code>SizedIterable</code> view of the argument.
*/
- implicit def asIterable[A](i : ju.Collection[A]) = i match {
+ implicit def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = i match {
case IterableWrapper(wrapped) => wrapped
case _ => JCollectionWrapper(i)
}
+ @deprecated("use asScalaIterable instead")
+ def asIterable[A](i : ju.Collection[A]): Iterable[A] = asScalaIterable[A](i)
+
/**
* Implicitly converts a Java <code>List</code> to a Scala mutable <code>Buffer</code>.
* The returned Scala <code>Buffer</code> is backed by the provided Java
@@ -266,11 +427,14 @@ object JavaConversions {
* @param l The <code>List</code> to be converted.
* @return A Scala mutable <code>Buffer</code> view of the argument.
*/
- implicit def asBuffer[A](l : ju.List[A]) = l match {
+ implicit def asScalaBuffer[A](l : ju.List[A]): mutable.Buffer[A] = l match {
case MutableBufferWrapper(wrapped) => wrapped
case _ =>new JListWrapper(l)
}
+ @deprecated("use asScalaBuffer instead")
+ def asBuffer[A](l : ju.List[A]): mutable.Buffer[A] = asScalaBuffer[A](l)
+
/**
* Implicitly converts a Java <code>Set</code> to a Scala mutable <code>Set</code>.
* The returned Scala <code>Set</code> is backed by the provided Java
@@ -279,16 +443,19 @@ object JavaConversions {
* <p>
* If the Java <code>Set</code> was previously obtained from an implicit or
* explicit call of <code>asSet(scala.collection.mutable.Set)</code> then the original
- * Scala <code>Set</code> will be returned.
+ * Scala <code>Set</code> will be returned.
*
* @param s The <code>Set</code> to be converted.
* @return A Scala mutable <code>Set</code> view of the argument.
*/
- implicit def asSet[A](s : ju.Set[A]) = s match {
+ implicit def asScalaSet[A](s : ju.Set[A]): mutable.Set[A] = s match {
case MutableSetWrapper(wrapped) => wrapped
case _ =>new JSetWrapper(s)
}
+ @deprecated("use asScalaSet instead")
+ def asSet[A](s : ju.Set[A]): mutable.Set[A] = asScalaSet[A](s)
+
/**
* Implicitly converts a Java <code>Map</code> to a Scala mutable <code>Map</code>.
* The returned Scala <code>Map</code> is backed by the provided Java
@@ -302,12 +469,70 @@ object JavaConversions {
* @param m The <code>Map</code> to be converted.
* @return A Scala mutable <code>Map</code> view of the argument.
*/
- implicit def asMap[A, B](m : ju.Map[A, B]) = m match {
+ implicit def asScalaMap[A, B](m : ju.Map[A, B]): mutable.Map[A, B] = m match {
+ //case ConcurrentMapWrapper(wrapped) => wrapped
case MutableMapWrapper(wrapped) => wrapped
- case _ =>new JMapWrapper(m)
+ case _ => new JMapWrapper(m)
+ }
+
+ @deprecated("use asScalaMap instead")
+ def asMap[A, B](m : ju.Map[A, B]): mutable.Map[A, B] = asScalaMap[A, B](m)
+
+ /**
+ * Implicitly converts a Java <code>ConcurrentMap</code> to a Scala mutable <code>ConcurrentMap</code>.
+ * The returned Scala <code>ConcurrentMap</code> is backed by the provided Java
+ * <code>ConcurrentMap</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>ConcurrentMap</code> was previously obtained from an implicit or
+ * explicit call of <code>asConcurrentMap(scala.collection.mutable.ConcurrentMap)</code> then the original
+ * Scala <code>ConcurrentMap</code> will be returned.
+ *
+ * @param m The <code>ConcurrentMap</code> to be converted.
+ * @return A Scala mutable <code>ConcurrentMap</code> view of the argument.
+ */
+ implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
+ case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
}
- // Private implementations ...
+ @deprecated("use asScalaConcurrentMap instead")
+ def asConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = asScalaConcurrentMap[A, B](m)
+
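A minimal sketch of the `ConcurrentMap` view: a `java.util.concurrent.ConcurrentHashMap` seen as a Scala `mutable.ConcurrentMap`, where the atomic operations return `Option` instead of `null`:

{{{
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConversions._
import scala.collection.mutable

val jcm = new ConcurrentHashMap[String, Int]
val scm: mutable.ConcurrentMap[String, Int] = jcm   // via asScalaConcurrentMap
assert(scm.putIfAbsent("a", 1) == None)             // key was absent
assert(scm.putIfAbsent("a", 2) == Some(1))          // existing value reported as Some
assert(jcm.get("a") == 1)                           // changes visible through the Java map
}}}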
+ /**
+ * Implicitly converts a Java <code>Dictionary</code> to a Scala mutable <code>Map[String, String]</code>.
+ * The returned Scala <code>Map[String, String]</code> is backed by the provided Java
+ * <code>Dictionary</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * @param p The <code>Dictionary</code> to be converted.
+ * @return A Scala mutable <code>Map[String, String]</code> view of the argument.
+ */
+ implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case DictionaryWrapper(wrapped) => wrapped
+ case _ => new JDictionaryWrapper(p)
+ }
+
+ @deprecated("use dictionaryAsScalaMap instead")
+ def asMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap[A, B](p)
+
+ /**
+ * Implicitly converts a Java <code>Properties</code> to a Scala mutable <code>Map[String, String]</code>.
+ * The returned Scala <code>Map[String, String]</code> is backed by the provided Java
+ * <code>Properties</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * @param p The <code>Properties</code> to be converted.
+ * @return A Scala mutable <code>Map[String, String]</code> view of the argument.
+ */
+ implicit def asScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
+ case _ => new JPropertiesWrapper(p)
+ }
+
+ @deprecated("use asScalaMap instead")
+ def asMap(p: ju.Properties): mutable.Map[String, String] = asScalaMap(p)
+
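A small usage sketch for the `Properties` conversion, using the JVM's system properties as a convenient `java.util.Properties` instance:

{{{
import scala.collection.JavaConversions._
import scala.collection.mutable

val props: mutable.Map[String, String] = System.getProperties  // via asScalaMap(ju.Properties)
val home = props.getOrElse("user.home", "<unknown>")
props("example.key") = "example.value"                         // writes go to the Properties object
}}}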
+ // Private implementations (shared by JavaConverters) ...
case class IteratorWrapper[A](underlying : Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
def hasNext = underlying.hasNext
@@ -317,6 +542,10 @@ object JavaConversions {
def remove = throw new UnsupportedOperationException
}
+ class ToIteratorWrapper[A](underlying : Iterator[A]) {
+ def asJava = new IteratorWrapper(underlying)
+ }
+
case class JIteratorWrapper[A](underlying : ju.Iterator[A]) extends Iterator[A] {
def hasNext = underlying.hasNext
def next = underlying.next
@@ -345,6 +574,17 @@ object JavaConversions {
def newBuilder[B] = new mutable.ArrayBuffer[B]
}
+ case class SeqWrapper[A](underlying : Seq[A]) extends ju.AbstractList[A] {
+ def size = underlying.length
+ def get(i : Int) = underlying(i)
+ }
+
+ case class MutableSeqWrapper[A](underlying : mutable.Seq[A]) extends ju.AbstractList[A] {
+ def size = underlying.length
+ def get(i : Int) = underlying(i)
+ override def set(i : Int, elem: A) = { val p = underlying(i) ; underlying(i) = elem ; p }
+ }
+
case class MutableBufferWrapper[A](underlying : mutable.Buffer[A]) extends ju.AbstractList[A] {
def size = underlying.length
def get(i : Int) = underlying(i)
@@ -367,29 +607,36 @@ object JavaConversions {
def result = this
}
- case class MutableSetWrapper[A](underlying : mutable.Set[A])(m : ClassManifest[A]) extends ju.AbstractSet[A] {
+ class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
self =>
def size = underlying.size
- override def add(elem: A) = { val sz = underlying.size ; underlying += elem ; sz < underlying.size }
- override def remove(elem : AnyRef) = {
- m.erasure.isInstance(elem) && {
- val sz = underlying.size
- underlying -= elem.asInstanceOf[A]
- sz > underlying.size
- }
- }
def iterator = new ju.Iterator[A] {
val ui = underlying.iterator
var prev : Option[A] = None
-
def hasNext = ui.hasNext
def next = { val e = ui.next ; prev = Some(e) ; e }
def remove = prev match {
- case Some(e) => self.remove(e.asInstanceOf[AnyRef]) ; prev = None
+ case Some(e) =>
+ underlying match {
+ case ms: mutable.Set[a] =>
+ ms.remove(e.asInstanceOf[a])
+ prev = None
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
case _ => throw new IllegalStateException("next must be called at least once before remove")
}
}
+ }
+ case class MutableSetWrapper[A](underlying : mutable.Set[A]) extends SetWrapper[A](underlying) {
+ override def add(elem: A) = { val sz = underlying.size ; underlying += elem ; sz < underlying.size }
+ override def remove(elem : AnyRef) = try {
+ underlying.remove(elem.asInstanceOf[A])
+ } catch {
+ case ex: ClassCastException => false
+ }
+ override def clear() = underlying.clear()
}
case class JSetWrapper[A](underlying : ju.Set[A]) extends mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] {
@@ -404,34 +651,24 @@ object JavaConversions {
override def add(elem: A): Boolean = underlying.add(elem)
override def remove(elem: A): Boolean = underlying.remove(elem)
-
- override def clear = underlying.clear
+ override def clear() = underlying.clear()
override def empty = JSetWrapper(new ju.HashSet[A])
}
- case class MutableMapWrapper[A, B](underlying : mutable.Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
- self =>
+ class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
override def size = underlying.size
- override def put(k : A, v : B) = underlying.put(k, v) match {
- case Some(v1) => v1
- case None => null.asInstanceOf[B]
- }
-
- override def remove(k : AnyRef) = {
- if (!m.erasure.isInstance(k))
- null.asInstanceOf[B]
- else {
- val k1 = k.asInstanceOf[A]
- underlying.get(k1) match {
- case Some(v) => underlying -= k1 ; v
- case None => null.asInstanceOf[B]
- }
+ override def get(key: AnyRef): B = try {
+ underlying get key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
}
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
}
- override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
+ override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
def size = self.size
def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
@@ -455,14 +692,45 @@ object JavaConversions {
}
def remove = prev match {
- case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
- case _ => throw new IllegalStateException("next must be called at least once before remove")
+ case Some(k) =>
+ underlying match {
+ case mm: mutable.Map[a, _] =>
+ val v = mm.remove(k.asInstanceOf[a])
+ prev = None
+ v
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
+ case _ =>
+ throw new IllegalStateException("next must be called at least once before remove")
}
}
}
}
- case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.Map[A, B] with mutable.MapLike[A, B, JMapWrapper[A, B]] {
+ case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B])
+ extends MapWrapper[A, B](underlying) {
+ override def put(k : A, v : B) = underlying.put(k, v) match {
+ case Some(v1) => v1
+ case None => null.asInstanceOf[B]
+ }
+
+ override def remove(k : AnyRef): B = try {
+ underlying.remove(k.asInstanceOf[A]) match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+
+ override def clear() = underlying.clear()
+ }
+
+ trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]]
+ extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
+ def underlying: ju.Map[A, B]
+
override def size = underlying.size
def get(k : A) = {
@@ -496,8 +764,164 @@ object JavaConversions {
def next = { val e = ui.next ; (e.getKey, e.getValue) }
}
- override def clear = underlying.clear
+ override def clear() = underlying.clear()
+
+ override def empty: Repr = null.asInstanceOf[Repr]
+ }
+ case class JMapWrapper[A, B](val underlying : ju.Map[A, B])
+ extends JMapWrapperLike[A, B, JMapWrapper[A, B]] {
override def empty = JMapWrapper(new ju.HashMap[A, B])
}
+
+ class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B])
+ extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
+
+ def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def remove(k: AnyRef, v: AnyRef) = try {
+ underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
+ } catch {
+ case ex: ClassCastException =>
+ false
+ }
+
+ def replace(k: A, v: B): B = underlying.replace(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
+ }
+
+ case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B])
+ extends JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
+ override def get(k: A) = {
+ val v = underlying.get(k)
+ if (v != null) Some(v)
+ else None
+ }
+
+ override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
+
+ def putIfAbsent(k: A, v: B): Option[B] = {
+ val r = underlying.putIfAbsent(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ def remove(k: A, v: B): Boolean = underlying.remove(k, v)
+
+ def replace(k: A, v: B): Option[B] = {
+ val prev = underlying.replace(k, v)
+ if (prev != null) Some(prev) else None
+ }
+
+ def replace(k: A, oldvalue: B, newvalue: B): Boolean = underlying.replace(k, oldvalue, newvalue)
+ }
+
+ case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B])
+ extends ju.Dictionary[A, B] {
+ def size: Int = underlying.size
+ def isEmpty: Boolean = underlying.isEmpty
+ def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
+ def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
+ def get(key: AnyRef) = try {
+ underlying.get(key.asInstanceOf[A]) match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ def put(key: A, value: B): B = underlying.put(key, value) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+ override def remove(key: AnyRef) = try {
+ underlying.remove(key.asInstanceOf[A]) match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ }
+
+ case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B])
+ extends mutable.Map[A, B] {
+
+ override def size: Int = underlying.size
+
+ def get(k : A) = {
+ val v = underlying.get(k)
+ if (v != null) Some(v) else None
+ }
+
+ def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: A): this.type = { underlying.remove(key); this }
+
+ override def put(k : A, v : B): Option[B] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ override def update(k : A, v : B) { underlying.put(k, v) }
+
+ override def remove(k : A): Option[B] = {
+ val r = underlying.remove(k)
+ if (r != null) Some(r) else None
+ }
+
+ def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
+
+ override def clear() = underlying.clear()
+ }
+
+ case class JPropertiesWrapper(underlying: ju.Properties)
+ extends mutable.Map[String, String] with mutable.MapLike[String, String, JPropertiesWrapper] {
+ override def size = underlying.size
+
+ def get(k : String) = {
+ val v = underlying.get(k)
+ if (v != null)
+ Some(v.asInstanceOf[String])
+ else
+ None
+ }
+
+ def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: String): this.type = { underlying.remove(key); this }
+
+ override def put(k : String, v : String): Option[String] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ override def update(k : String, v : String) { underlying.put(k, v) }
+
+ override def remove(k : String): Option[String] = {
+ val r = underlying.remove(k)
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ def iterator = new Iterator[(String, String)] {
+ val ui = underlying.entrySet.iterator
+ def hasNext = ui.hasNext
+ def next = { val e = ui.next ; (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) }
+ }
+
+ override def clear() = underlying.clear()
+
+ override def empty = JPropertiesWrapper(new ju.Properties)
+
+ def getProperty(key: String) = underlying.getProperty(key)
+
+ def getProperty(key: String, defaultValue: String) = underlying.getProperty(key, defaultValue)
+
+ def setProperty(key: String, value: String) = underlying.setProperty(key, value)
+ }
}
+
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
new file mode 100755
index 0000000000..cb4c56fd59
--- /dev/null
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -0,0 +1,456 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+
+/** <p>
+ * A collection of decorators that allow to convert between
+ * Scala and Java collections using `asScala` and `asJava` methods.
+ * </p>
+ * <p>
+ * The following conversions are supported via `asJava`, `asScala`
+ * </p>
+ * <ul>
+ * <li><code>scala.collection.Iterable</code> <=> <code>java.lang.Iterable</code></li>
+ * <li><code>scala.collection.Iterator</code> <=> <code>java.util.Iterator</code></li>
+ * <li><code>scala.collection.mutable.Buffer</code> <=> <code>java.util.List</code></li>
+ * <li><code>scala.collection.mutable.Set</code> <=> <code>java.util.Set</code></li>
+ * <li><code>scala.collection.mutable.Map</code> <=> <code>java.util.Map</code></li>
+ * <li><code>scala.collection.mutable.ConcurrentMap</code> <=> <code>java.util.concurrent.ConcurrentMap</code></li>
+ * </ul>
+ * <p>
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object, e.g.
+ * </p>
+ * <pre>
+ * <b>import</b> scala.collection.JavaConverters._
+ *
+ * <b>val</b> sl = <b>new</b> scala.collection.mutable.ListBuffer[Int]
+ * <b>val</b> jl : java.util.List[Int] = sl.asJava
+ * <b>val</b> sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
+ * assert(sl eq sl2)</pre>
+ * <p>
+ * The following conversions are also supported, but the
+ * Scala to Java direction is done by a more specifically named method:
+ * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
+ * </p>
+ * <ul>
+ * <li><code>scala.collection.Iterable</code> <=> <code>java.util.Collection</code></li>
+ * <li><code>scala.collection.Iterator</code> <=> <code>java.util.Enumeration</code></li>
+ * <li><code>scala.collection.mutable.Map</code> <=> <code>java.util.Dictionary</code></li>
+ * </ul>
+ * <p>
+ * In addition, the following one-way conversions are provided via `asJava`:
+ * </p>
+ * <ul>
+ * <li><code>scala.collection.Seq</code> => <code>java.util.List</code></li>
+ * <li><code>scala.collection.mutable.Seq</code> => <code>java.util.List</code></li>
+ * <li><code>scala.collection.Set</code> => <code>java.util.Set</code></li>
+ * <li><code>scala.collection.Map</code> => <code>java.util.Map</code></li>
+ * </ul>
+ *
+ * @author Martin Odersky
+ * @since 2.8.1
+ */
+object JavaConverters {
+ import java.{ lang => jl, util => ju }
+ import java.util.{ concurrent => juc }
+ import JavaConversions._
+
+ // Conversion decorator classes
+
+ /** Generic class containing the `asJava` converter method */
+ class AsJava[C](op: => C) {
+ /** Converts a Scala collection to the corresponding Java collection */
+ def asJava: C = op
+ }
+
+ /** Generic class containing the `asScala` converter method */
+ class AsScala[C](op: => C) {
+ /** Converts a Java collection to the corresponding Scala collection */
+ def asScala: C = op
+ }
+
+ /** Generic class containing the `asJavaCollection` converter method */
+ class AsJavaCollection[A](i: Iterable[A]) {
+ /** Converts a Scala `Iterable` to a Java `Collection` */
+ def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
+ }
+
+ /** Generic class containing the `asJavaEnumeration` converter method */
+ class AsJavaEnumeration[A](i: Iterator[A]) {
+ /** Converts a Scala `Iterator` to a Java `Enumeration` */
+ def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
+ }
+
+ /** Generic class containing the `asJavaDictionary` converter method */
+ class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
+ /** Converts a Scala `Map` to a Java `Dictionary` */
+ def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
+ }
+
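A minimal sketch of how the decorators are meant to be used (mirroring the example in the class comment above): the implicits only attach an `asJava`/`asScala` method, the actual conversion happens when that method is called, and a round trip still unwraps to the original instance.

{{{
import scala.collection.JavaConverters._
import scala.collection.mutable

val sm = mutable.Map("a" -> 1)
val jm: java.util.Map[String, Int] = sm.asJava    // decorator added by asJavaMapConverter
val sm2: mutable.Map[String, Int] = jm.asScala    // decorator added by asScalaMapConverter
assert(sm eq sm2)                                 // round trip returns the original map
}}}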
+ // Scala => Java
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala <code>Iterator</code> to a Java <code>Iterator</code>.
+ * The returned Java <code>Iterator</code> is backed by the provided Scala
+ * <code>Iterator</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Iterator</code> was previously obtained from an implicit or
+ * explicit call of <code>asIterator(java.util.Iterator)</code> then the original
+ * Java <code>Iterator</code> will be returned by the `asJava` method.
+ *
+ * @param i The <code>Iterator</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>Iterator</code> view of the argument.
+ */
+ implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
+ new AsJava(asJavaIterator(i))
+
+ /**
+ * Adds an `asJavaEnumeration` method that implicitly converts a Scala <code>Iterator</code> to a Java <code>Enumeration</code>.
+ * The returned Java <code>Enumeration</code> is backed by the provided Scala
+ * <code>Iterator</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Iterator</code> was previously obtained from an implicit or
+ * explicit call of <code>asIterator(java.util.Enumeration)</code> then the
+ * original Java <code>Enumeration</code> will be returned.
+ *
+ * @param i The <code>Iterator</code> to be converted.
+ * @return An object with an `asJavaEnumeration` method that returns a Java <code>Enumeration</code> view of the argument.
+ */
+ implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
+ new AsJavaEnumeration(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala <code>Iterable</code> to a Java <code>Iterable</code>.
+ * The returned Java <code>Iterable</code> is backed by the provided Scala
+ * <code>Iterable</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Iterable</code> was previously obtained from an implicit or
+ * explicit call of <code>asIterable(java.lang.Iterable)</code> then the original
+ * Java <code>Iterable</code> will be returned.
+ *
+ * @param i The <code>Iterable</code> to be converted.
+ * @return An object with an `asJavaCollection` method that returns a Java <code>Iterable</code> view of the argument.
+ */
+ implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
+ new AsJava(asJavaIterable(i))
+
+ /**
+ * Adds an `asJavaCollection` method that implicitly converts a Scala <code>Iterable</code> to an immutable Java
+ * <code>Collection</code>.
+ * <p>
+ * If the Scala <code>Iterable</code> was previously obtained from an implicit or
+ * explicit call of <code>asSizedIterable(java.util.Collection)</code> then the original
+ * Java <code>Collection</code> will be returned.
+ *
+ * @param i The <code>SizedIterable</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>Collection</code> view of the argument.
+ */
+ implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
+ new AsJavaCollection(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable <code>Buffer</code> to a Java <code>List</code>.
+ * The returned Java <code>List</code> is backed by the provided Scala
+ * <code>Buffer</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Buffer</code> was previously obtained from an implicit or
+ * explicit call of <code>asBuffer(java.util.List)</code> then the original
+ * Java <code>List</code> will be returned.
+ *
+ * @param b The <code>Buffer</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>List</code> view of the argument.
+ */
+ implicit def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
+ new AsJava(asJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable <code>Seq</code> to a Java <code>List</code>.
+ * The returned Java <code>List</code> is backed by the provided Scala
+ * <code>Seq</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Seq</code> was previously obtained from an implicit or
+ * explicit call of <code>asSeq(java.util.List)</code> then the original
+ * Java <code>List</code> will be returned.
+ *
+ * @param b The <code>Seq</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>List</code> view of the argument.
+ */
+ implicit def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(asJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala <code>Seq</code> to a Java <code>List</code>.
+ * The returned Java <code>List</code> is backed by the provided Scala
+ * <code>Seq</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Seq</code> was previously obtained from an implicit or
+ * explicit call of <code>asSeq(java.util.List)</code> then the original
+ * Java <code>List</code> will be returned.
+ *
+ * @param b The <code>Seq</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>List</code> view of the argument.
+ */
+ implicit def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(asJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable <code>Set</code> to a Java <code>Set</code>.
+ * The returned Java <code>Set</code> is backed by the provided Scala
+ * <code>Set</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Set</code> was previously obtained from an implicit or
+ * explicit call of <code>asSet(java.util.Set)</code> then the original
+ * Java <code>Set</code> will be returned.
+ *
+ * @param s The <code>Set</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>Set</code> view of the argument.
+ */
+ implicit def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(asJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala <code>Set</code> to a Java <code>Set</code>.
+ * The returned Java <code>Set</code> is backed by the provided Scala
+ * <code>Set</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Set</code> was previously obtained from an implicit or
+ * explicit call of <code>asSet(java.util.Set)</code> then the original
+ * Java <code>Set</code> will be returned.
+ *
+ * @param s The <code>Set</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>Set</code> view of the argument.
+ */
+ implicit def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(asJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable <code>Map</code> to a Java <code>Map</code>.
+ * The returned Java <code>Map</code> is backed by the provided Scala
+ * <code>Map</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Map</code> was previously obtained from an implicit or
+ * explicit call of <code>asMap(java.util.Map)</code> then the original
+ * Java <code>Map</code> will be returned.
+ *
+ * @param m The <code>Map</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>Map</code> view of the argument.
+ */
+ implicit def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(asJavaMap(m))
+
+ /**
+ * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable <code>Map</code> to a Java <code>Dictionary</code>.
+ * The returned Java <code>Dictionary</code> is backed by the provided Scala
+ * <code>Map</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Map</code> was previously obtained from an implicit or
+ * explicit call of <code>asMap(java.util.Dictionary)</code> then the original
+ * Java <code>Dictionary</code> will be returned.
+ *
+ * @param m The <code>Map</code> to be converted.
+ * @return An object with an `asJavaDictionary` method that returns a Java <code>Dictionary</code> view of the argument.
+ */
+ implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
+ new AsJavaDictionary(m)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala <code>Map</code> to a Java <code>Map</code>.
+ * The returned Java <code>Map</code> is backed by the provided Scala
+ * <code>Map</code> and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>Map</code> was previously obtained from an implicit or
+ * explicit call of <code>asMap(java.util.Map)</code> then the original
+ * Java <code>Map</code> will be returned.
+ *
+ * @param m The <code>Map</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>Map</code> view of the argument.
+ */
+ implicit def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(asJavaMap(m))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `ConcurrentMap` to a Java `ConcurrentMap`.
+ * The returned Java `ConcurrentMap` is backed by the provided Scala `ConcurrentMap`
+ * and any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ * <p>
+ * If the Scala <code>ConcurrentMap</code> was previously obtained from an implicit or
+ * explicit call of <code>asConcurrentMap(java.util.concurrent.ConcurrentMap)</code> then the original
+ * Java <code>ConcurrentMap</code> will be returned.
+ *
+ * @param m The <code>ConcurrentMap</code> to be converted.
+ * @return An object with an `asJava` method that returns a Java <code>ConcurrentMap</code> view of the argument.
+ */
+ implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
+ new AsJava(asJavaConcurrentMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Iterator</code> to a Scala <code>Iterator</code>.
+ * The returned Scala <code>Iterator</code> is backed by the provided Java
+ * <code>Iterator</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>Iterator</code> was previously obtained from an implicit or
+ * explicit call of <code>asIterator(scala.collection.Iterator)</code> then the original
+ * Scala <code>Iterator</code> will be returned.
+ *
+ * @param i The <code>Iterator</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala <code>Iterator</code> view of the argument.
+ */
+ implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
+ new AsScala(asScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Enumeration</code> to a Scala <code>Iterator</code>.
+ * The returned Scala <code>Iterator</code> is backed by the provided Java
+ * <code>Enumeration</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>Enumeration</code> was previously obtained from an implicit or
+ * explicit call of <code>asEnumeration(scala.collection.Iterator)</code> then the
+ * original Scala <code>Iterator</code> will be returned.
+ *
+ * @param i The <code>Enumeration</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala <code>Iterator</code> view of the argument.
+ */
+ implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
+ new AsScala(enumerationAsScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Iterable</code> to a Scala <code>Iterable</code>.
+ * The returned Scala <code>Iterable</code> is backed by the provided Java
+ * <code>Iterable</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>Iterable</code> was previously obtained from an implicit or
+ * explicit call of <code>asIterable(scala.collection.Iterable)</code> then the original
+ * Scala <code>Iterable</code> will be returned.
+ *
+ * @param i The <code>Iterable</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala <code>Iterable</code> view of the argument.
+ */
+ implicit def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
+ new AsScala(asScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Collection</code> to an Scala <code>Iterable</code>.
+ * <p>
+ * If the Java <code>Collection</code> was previously obtained from an implicit or
+ * explicit call of <code>asCollection(scala.collection.SizedIterable)</code> then
+ * the original Scala <code>SizedIterable</code> will be returned.
+ *
+ * @param i The <code>Collection</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala <code>SizedIterable</code> view of the argument.
+ */
+ implicit def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
+ new AsScala(asScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>List</code> to a Scala mutable <code>Buffer</code>.
+ * The returned Scala <code>Buffer</code> is backed by the provided Java
+ * <code>List</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>List</code> was previously obtained from an implicit or
+ * explicit call of <code>asList(scala.collection.mutable.Buffer)</code> then the original
+ * Scala <code>Buffer</code> will be returned.
+ *
+ * @param l The <code>List</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable <code>Buffer</code> view of the argument.
+ */
+ implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
+ new AsScala(asScalaBuffer(l))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Set</code> to a Scala mutable <code>Set</code>.
+ * The returned Scala <code>Set</code> is backed by the provided Java
+ * <code>Set</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>Set</code> was previously obtained from an implicit or
+ * explicit call of <code>asSet(scala.collection.mutable.Set)</code> then the original
+ * Scala <code>Set</code> will be returned.
+ *
+ * @param s The <code>Set</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable <code>Set</code> view of the argument.
+ */
+ implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
+ new AsScala(asScalaSet(s))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Map</code> to a Scala mutable <code>Map</code>.
+ * The returned Scala <code>Map</code> is backed by the provided Java
+ * <code>Map</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>Map</code> was previously obtained from an implicit or
+ * explicit call of <code>asMap(scala.collection.mutable.Map)</code> then the original
+ * Scala <code>Map</code> will be returned.
+ *
+ * @param m The <code>Map</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable <code>Map</code> view of the argument.
+ */
+ implicit def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(asScalaMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>ConcurrentMap</code> to a Scala mutable <code>ConcurrentMap</code>.
+ * The returned Scala <code>ConcurrentMap</code> is backed by the provided Java
+ * <code>ConcurrentMap</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ * <p>
+ * If the Java <code>ConcurrentMap</code> was previously obtained from an implicit or
+ * explicit call of <code>asConcurrentMap(scala.collection.mutable.ConcurrentMap)</code> then the original
+ * Scala <code>ConcurrentMap</code> will be returned.
+ *
+ * @param m The <code>ConcurrentMap</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable <code>ConcurrentMap</code> view of the argument.
+ */
+ implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
+ new AsScala(asScalaConcurrentMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Dictionary</code> to a Scala mutable <code>Map[String, String]</code>.
+ * The returned Scala <code>Map[String, String]</code> is backed by the provided Java
+ * <code>Dictionary</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * @param p The <code>Dictionary</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable <code>Map[String, String]</code> view of the argument.
+ */
+ implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(dictionaryAsScalaMap(p))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java <code>Properties</code> to a Scala mutable <code>Map[String, String]</code>.
+ * The returned Scala <code>Map[String, String]</code> is backed by the provided Java
+ * <code>Properties</code> and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * @param p The <code>Properties</code> to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable <code>Map[String, String]</code> view of the argument.
+ */
+ implicit def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
+ new AsScala(asScalaMap(p))
+
+}
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index e81f2dba28..1cd130eeb2 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,19 +13,8 @@ package scala.collection
import generic._
import mutable.Builder
-/** <p>
- * Class <code>Linear[A]</code> represents linear sequences of elements.
- * For such sequences <code>isEmpty</code>, <code>head</code> and
- * <code>tail</code> are guaranteed to be efficient constant time (or near so)
- * operations.<br/>
- * It does not add any methods to <code>Seq</code> but overrides several
- * methods with optimized implementations.
- * </p>
- *
- * @author Martin Odersky
- * @author Matthias Zenger
- * @version 1.0, 16/07/2003
- * @since 2.8
+/** A base trait for linear sequences.
+ * $linearSeqInfo
*/
trait LinearSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, LinearSeq]
@@ -34,8 +22,10 @@ trait LinearSeq[+A] extends Seq[A]
override def companion: GenericCompanion[LinearSeq] = LinearSeq
}
-/**
- * @since 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `List`.
+ * @define coll linear sequence
+ * @define Coll LinearSeq
*/
object LinearSeq extends SeqFactory[LinearSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = new GenericCanBuildFrom[A]
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index a9b33305fd..c2c4996f47 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,55 +15,41 @@ import mutable.ListBuffer
import immutable.List
import scala.util.control.Breaks._
-/** Class <code>Linear[A]</code> represents linear sequences of elements.
- * For such sequences `isEmpty`, `head` and `tail` are guaranteed to be
- * efficient constant time (or near so) operations.
- * It does not add any methods to <code>Seq</code> but overrides
- * several methods with optimized implementations.
+/** A template trait for linear sequences of type `LinearSeq[A]`.
*
+ * $linearSeqInfo
+ *
+ * This trait just implements `iterator`
+ * in terms of `isEmpty`, `head`, and `tail`.
+ * However, see `LinearSeqOptimized` for an implementation trait that overrides operations
+ * to make them run faster under the assumption of fast linear access with `head` and `tail`.
+ *
+ * @define linearSeqInfo
+ * Linear sequences are defined in terms of three abstract methods, which are assumed
+ * to have efficient implementations. These are:
+ * {{{
+ * def isEmpty: Boolean
+ * def head: A
+ * def tail: Repr
+ * }}}
+ * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
+ *
+ * Linear sequences do not add any new methods to `Seq`, but promise efficient implementations
+ * of linear access patterns.
* @author Martin Odersky
- * @author Matthias Zenger
- * @version 1.0, 16/07/2003
+ * @version 2.8
* @since 2.8
+ *
+ * @tparam A the element type of the $coll
+ * @tparam Repr the type of the actual $coll containing the elements.
*/
trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr] { self: Repr =>
override protected[this] def thisCollection: LinearSeq[A] = this.asInstanceOf[LinearSeq[A]]
override protected[this] def toCollection(repr: Repr): LinearSeq[A] = repr.asInstanceOf[LinearSeq[A]]
- /** Abstract method to be implemented in a subclass */
- def isEmpty: Boolean
-
- /** Abstract method to be implemented in a subclass */
- def head: A
-
- /** Abstract method to be implemented in a subclass */
- def tail: Repr
-
- /** Returns the number of elements in the linear sequence.
- */
- def length: Int = {
- var these = self
- var len = 0
- while (!these.isEmpty) {
- len += 1
- these = these.tail
- }
- len
- }
-
- /** Returns the <code>n</code>-th element of this linear sequence. The first element
- * (head of the linear sequence) is at position 0.
- *
- * @param n index of the element to return
- * @return the element at position <code>n</code> in this linear sequence.
- * @throws Predef.NoSuchElementException if the linear sequence is too short.
- */
- def apply(n: Int): A = drop(n).head
-
- /** Returns the elements in the sequence as an iterator
- */
- override def iterator: Iterator[A] = new Iterator[A] {
+ override /*IterableLike*/
+ def iterator: Iterator[A] = new Iterator[A] {
var these = self
def hasNext: Boolean = !these.isEmpty
def next: A =
@@ -73,334 +58,4 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
} else Iterator.empty.next
override def toList: List[A] = these.toList
}
-
- /** Apply the given function <code>f</code> to each element of this linear sequence
- * (while respecting the order of the elements).
- *
- * @param f the treatment to apply to each element.
- */
- override def foreach[B](f: A => B) {
- var these = this
- while (!these.isEmpty) {
- f(these.head)
- these = these.tail
- }
- }
-
- /** Tests if the predicate <code>p</code> is satisfied by all elements
- * in this list.
- *
- * @param p the test predicate.
- * @return <code>true</code> iff all elements of this list satisfy the
- * predicate <code>p</code>.
- */
- override def forall(p: A => Boolean): Boolean = {
- var these = this
- while (!these.isEmpty) {
- if (!p(these.head)) return false
- these = these.tail
- }
- true
- }
-
- /** Tests the existence in this list of an element that satisfies the
- * predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @return <code>true</code> iff there exists an element in this list that
- * satisfies the predicate <code>p</code>.
- */
- override def exists(p: A => Boolean): Boolean = {
- var these = this
- while (!these.isEmpty) {
- if (p(these.head)) return true
- these = these.tail
- }
- false
- }
-
- /** Count the number of elements in the iterable which satisfy a predicate.
- *
- * @param p the predicate for which to count
- * @return the number of elements satisfying the predicate <code>p</code>.
- */
- override def count(p: A => Boolean): Int = {
- var these = this
- var cnt = 0
- while (!these.isEmpty) {
- if (p(these.head)) cnt += 1
- these = these.tail
- }
- cnt
- }
-
- /** Find and return the first element of the list satisfying a
- * predicate, if any.
- *
- * @param p the predicate
- * @return the first element in the list satisfying <code>p</code>,
- * or <code>None</code> if none exists.
- */
- override def find(p: A => Boolean): Option[A] = {
- var these = this
- while (!these.isEmpty) {
- if (p(these.head)) return Some(these.head)
- these = these.tail
- }
- None
- }
-
- /** Combines the elements of this list together using the binary
- * function <code>f</code>, from left to right, and starting with
- * the value <code>z</code>.
- *
- * @return <code>f(... (f(f(z, a<sub>0</sub>), a<sub>1</sub>) ...),
- * a<sub>n</sub>)</code> if the list is
- * <code>[a<sub>0</sub>, a<sub>1</sub>, ..., a<sub>n</sub>]</code>.
- */
- override def foldLeft[B](z: B)(f: (B, A) => B): B = {
- var acc = z
- var these = this
- while (!these.isEmpty) {
- acc = f(acc, these.head)
- these = these.tail
- }
- acc
- }
-
- /** Combines the elements of this list together using the binary
- * function <code>f</code>, from right to left, and starting with
- * the value <code>z</code>.
- *
- * @return <code>f(a<sub>0</sub>, f(a<sub>1</sub>, f(..., f(a<sub>n</sub>, z)...)))</code>
- * if the list is <code>[a<sub>0</sub>, a1, ..., a<sub>n</sub>]</code>.
- */
- override def foldRight[B](z: B)(f: (A, B) => B): B =
- if (this.isEmpty) z
- else f(head, tail.foldRight(z)(f))
-
- /** Combines the elements of this list together using the binary
- * operator <code>op</code>, from left to right
- * @param op The operator to apply
- * @return <code>op(... op(a<sub>0</sub>,a<sub>1</sub>), ..., a<sub>n</sub>)</code>
- if the list has elements
- * <code>a<sub>0</sub>, a<sub>1</sub>, ..., a<sub>n</sub></code>.
- * @throws Predef.UnsupportedOperationException if the list is empty.
- */
- override def reduceLeft[B >: A](f: (B, A) => B): B =
- if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
- else tail.foldLeft[B](head)(f)
-
- /** Combines the elements of this iterable object together using the binary
- * operator <code>op</code>, from right to left
- * @note Will not terminate for infinite-sized collections.
- * @param op The operator to apply
- *
- * @return <code>a<sub>0</sub> op (... op (a<sub>n-1</sub> op a<sub>n</sub>)...)</code>
- * if the iterable object has elements <code>a<sub>0</sub>, a<sub>1</sub>, ...,
- * a<sub>n</sub></code>.
- *
- * @throws Predef.UnsupportedOperationException if the iterator is empty.
- */
- override def reduceRight[B >: A](op: (A, B) => B): B =
- if (isEmpty) throw new UnsupportedOperationException("Nil.reduceRight")
- else if (tail.isEmpty) head
- else op(head, tail.reduceRight(op))
-
- /** The last element of this linear sequence.
- *
- * @throws Predef.NoSuchElementException if the linear sequence is empty.
- */
- override def last: A = {
- if (isEmpty) throw new NoSuchElementException
- var these = this
- var nx = these.tail
- while (!nx.isEmpty) {
- these = nx
- nx = nx.tail
- }
- these.head
- }
-
- override def take(n: Int): Repr = {
- val b = newBuilder
- var i = 0
- var these = repr
- while (!these.isEmpty && i < n) {
- i += 1
- b += these.head
- these = these.tail
- }
- b.result
- }
-
- override def drop(n: Int): Repr = {
- var these: Repr = repr
- var count = n
- while (!these.isEmpty && count > 0) {
- these = these.tail
- count -= 1
- }
- these
- }
-
- /** Returns the rightmost <code>n</code> elements from this iterable.
- * @param n the number of elements to take
- */
- override def dropRight(n: Int): Repr = {
- val b = newBuilder
- var these = this
- var lead = this drop n
- while (!lead.isEmpty) {
- b += these.head
- these = these.tail
- lead = lead.tail
- }
- b.result
- }
-
- /** A sub-traversable starting at index `from`
- * and extending up to (but not including) index `until`.
- *
- * @note c.slice(from, to) is equivalent to (but possibly more efficient than)
- * c.drop(from).take(to - from)
- *
- * @param from The index of the first element of the returned subsequence
- * @param until The index of the element following the returned subsequence
- * @note Might return different results for different runs, unless this traversable is ordered
- */
- override def slice(from: Int, until: Int): Repr = {
- val b = newBuilder
- var i = from
- var these = this drop from
- while (i < until && !these.isEmpty) {
- b += these.head
- these = these.tail
- i += 1
- }
- b.result
- }
-
- /** Returns the longest prefix of this traversable whose elements satisfy
- * the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @note Might return different results for different runs, unless this traversable is ordered
- */
- override def takeWhile(p: A => Boolean): Repr = {
- val b = newBuilder
- var these = this
- while (!these.isEmpty && p(these.head)) {
- b += these.head
- these = these.tail
- }
- b.result
- }
-
- /** Returns a pair consisting of the longest prefix of the linear sequence whose
- * elements all satisfy the given predicate, and the rest of the linear sequence.
- *
- * @param p the test predicate
- */
- override def span(p: A => Boolean): (Repr, Repr) = {
- var these: Repr = repr
- val b = newBuilder
- while (!these.isEmpty && p(these.head)) {
- b += these.head
- these = these.tail
- }
- (b.result, these)
- }
-
- /** Returns <code>true</code> iff the other linear sequence contains the
- * same elements as this one.
- *
- * @note will not terminate for two infinite-sized linear sequences.
- * @param that the other linear sequence
- */
- override def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
- case that1: LinearSeq[_] =>
- var these = this
- var those = that1
- while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
- these = these.tail
- those = those.tail
- }
- these.isEmpty && those.isEmpty
- case _ =>
- super.sameElements(that)
- }
-
- // Overridden methods from Seq
-
- /** Result of comparing <code>length</code> with operand <code>len</code>.
- * returns <code>x</code> where
- * <code>x &lt; 0</code> iff <code>this.length &lt; len</code>
- * <code>x == 0</code> iff <code>this.length == len</code>
- * <code>x &gt; 0</code> iff <code>this.length &gt; len</code>.
- */
- override def lengthCompare(len: Int): Int = {
- var i = 0
- var these = self
- while (!these.isEmpty && i <= len) {
- i += 1
- these = these.tail
- }
- i - len
- }
-
- /** Is this partial function defined for the index <code>x</code>?
- */
- override def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0
-
- /** Returns length of longest segment starting from a start index `from`
- * such that every element of the segment satisfies predicate `p`.
- * @note may not terminate for infinite-sized collections.
- * @param p the predicate
- * @param from the start index
- */
- override def segmentLength(p: A => Boolean, from: Int): Int = {
- var i = 0
- var these = this drop from
- while (!these.isEmpty && p(these.head)) {
- i += 1
- these = these.tail
- }
- i
- }
-
- /** Returns index of the first element starting from a start index
- * satisying a predicate, or -1, if none exists.
- *
- * @note may not terminate for infinite-sized linear sequences.
- * @param p the predicate
- * @param from the start index
- */
- override def indexWhere(p: A => Boolean, from: Int): Int = {
- var i = from
- var these = this drop from
- while (!these.isEmpty && !p(these.head)) {
- i += 1
- these = these.tail
- }
- if (these.isEmpty) -1 else i
- }
-
- /** Returns index of the last element satisying a predicate, or -1, if none exists.
- *
- * @param p the predicate
- * @return the index of the last element satisfying <code>p</code>,
- * or -1 if such an element does not exist
- */
- override def lastIndexWhere(p: A => Boolean, end: Int): Int = {
- var i = 0
- var these = this
- var last = -1
- while (!these.isEmpty && i <= end) {
- if (p(these.head)) last = i
- these = these.tail
- i += 1
- }
- last
- }
}
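The default `iterator` defined above is just the `isEmpty`/`head`/`tail` walk made explicit; a small sketch of the same access pattern (the helper `printAll` and the object name `IterationSketch` are invented for illustration):

{{{
import scala.collection.LinearSeq

object IterationSketch {
  // Repeatedly test isEmpty, read head, then step to tail -- the loop that
  // LinearSeqLike's default iterator performs internally.
  def printAll[A](xs: LinearSeq[A]) {
    var these = xs
    while (!these.isEmpty) {
      println(these.head)
      these = these.tail
    }
  }

  def main(args: Array[String]) {
    printAll(LinearSeq("a", "b", "c"))
  }
}
}}}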
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
new file mode 100755
index 0000000000..abe8e2fa62
--- /dev/null
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -0,0 +1,294 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+import generic._
+
+import mutable.ListBuffer
+import immutable.List
+import scala.util.control.Breaks._
+
+/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
+ * the implementation of several methods under the assumption of fast linear access.
+ *
+ * $linearSeqInfo
+ */
+trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr =>
+
+ def isEmpty: Boolean
+
+ def head: A
+
+ def tail: Repr
+
+ /** The length of the $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: the execution of `length` may take time proportional to the length of the sequence.
+ */
+ def length: Int = {
+ var these = self
+ var len = 0
+ while (!these.isEmpty) {
+ len += 1
+ these = these.tail
+ }
+ len
+ }
+
+ /** Selects an element by its index in the $coll.
+ * Note: the execution of `apply` may take time proportional to the index value.
+ * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
+ */
+ def apply(n: Int): A = {
+ val rest = drop(n)
+ if (n < 0 || rest.isEmpty) throw new IndexOutOfBoundsException
+ rest.head
+ }
+
+ override /*IterableLike*/
+ def foreach[B](f: A => B) {
+ var these = this
+ while (!these.isEmpty) {
+ f(these.head)
+ these = these.tail
+ }
+ }
+
+
+ override /*IterableLike*/
+ def forall(p: A => Boolean): Boolean = {
+ var these = this
+ while (!these.isEmpty) {
+ if (!p(these.head)) return false
+ these = these.tail
+ }
+ true
+ }
+
+ override /*IterableLike*/
+ def exists(p: A => Boolean): Boolean = {
+ var these = this
+ while (!these.isEmpty) {
+ if (p(these.head)) return true
+ these = these.tail
+ }
+ false
+ }
+
+ override /*TraversableLike*/
+ def count(p: A => Boolean): Int = {
+ var these = this
+ var cnt = 0
+ while (!these.isEmpty) {
+ if (p(these.head)) cnt += 1
+ these = these.tail
+ }
+ cnt
+ }
+
+ override /*IterableLike*/
+ def find(p: A => Boolean): Option[A] = {
+ var these = this
+ while (!these.isEmpty) {
+ if (p(these.head)) return Some(these.head)
+ these = these.tail
+ }
+ None
+ }
+/*
+ override def mapFind[B](f: A => Option[B]): Option[B] = {
+ var res: Option[B] = None
+ var these = this
+ while (res.isEmpty && !these.isEmpty) {
+ res = f(these.head)
+ these = these.tail
+ }
+ res
+ }
+*/
+ override /*TraversableLike*/
+ def foldLeft[B](z: B)(f: (B, A) => B): B = {
+ var acc = z
+ var these = this
+ while (!these.isEmpty) {
+ acc = f(acc, these.head)
+ these = these.tail
+ }
+ acc
+ }
+
+ override /*IterableLike*/
+ def foldRight[B](z: B)(f: (A, B) => B): B =
+ if (this.isEmpty) z
+ else f(head, tail.foldRight(z)(f))
+
+ override /*TraversableLike*/
+ def reduceLeft[B >: A](f: (B, A) => B): B =
+ if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
+ else tail.foldLeft[B](head)(f)
+
+ override /*IterableLike*/
+ def reduceRight[B >: A](op: (A, B) => B): B =
+ if (isEmpty) throw new UnsupportedOperationException("Nil.reduceRight")
+ else if (tail.isEmpty) head
+ else op(head, tail.reduceRight(op))
+
+ override /*TraversableLike*/
+ def last: A = {
+ if (isEmpty) throw new NoSuchElementException
+ var these = this
+ var nx = these.tail
+ while (!nx.isEmpty) {
+ these = nx
+ nx = nx.tail
+ }
+ these.head
+ }
+
+ override /*IterableLike*/
+ def take(n: Int): Repr = {
+ val b = newBuilder
+ var i = 0
+ var these = repr
+ while (!these.isEmpty && i < n) {
+ i += 1
+ b += these.head
+ these = these.tail
+ }
+ b.result
+ }
+
+ override /*TraversableLike*/
+ def drop(n: Int): Repr = {
+ var these: Repr = repr
+ var count = n
+ while (!these.isEmpty && count > 0) {
+ these = these.tail
+ count -= 1
+ }
+ these
+ }
+
+ override /*IterableLike*/
+ def dropRight(n: Int): Repr = {
+ val b = newBuilder
+ var these = this
+ var lead = this drop n
+ while (!lead.isEmpty) {
+ b += these.head
+ these = these.tail
+ lead = lead.tail
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def slice(from: Int, until: Int): Repr = {
+ val b = newBuilder
+ var i = from
+ var these = this drop from
+ while (i < until && !these.isEmpty) {
+ b += these.head
+ these = these.tail
+ i += 1
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def takeWhile(p: A => Boolean): Repr = {
+ val b = newBuilder
+ var these = this
+ while (!these.isEmpty && p(these.head)) {
+ b += these.head
+ these = these.tail
+ }
+ b.result
+ }
+
+ override /*TraversableLike*/
+ def span(p: A => Boolean): (Repr, Repr) = {
+ var these: Repr = repr
+ val b = newBuilder
+ while (!these.isEmpty && p(these.head)) {
+ b += these.head
+ these = these.tail
+ }
+ (b.result, these)
+ }
+
+ override /*IterableLike*/
+ def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ case that1: LinearSeq[_] =>
+ var these = this
+ var those = that1
+ while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
+ these = these.tail
+ those = those.tail
+ }
+ these.isEmpty && those.isEmpty
+ case _ =>
+ super.sameElements(that)
+ }
+
+ override /*SeqLike*/
+ def lengthCompare(len: Int): Int = {
+ var i = 0
+ var these = self
+ while (!these.isEmpty && i <= len) {
+ i += 1
+ these = these.tail
+ }
+ i - len
+ }
+
+ override /*SeqLike*/
+ def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0
+
+ override /*SeqLike*/
+ def segmentLength(p: A => Boolean, from: Int): Int = {
+ var i = 0
+ var these = this drop from
+ while (!these.isEmpty && p(these.head)) {
+ i += 1
+ these = these.tail
+ }
+ i
+ }
+
+ override /*SeqLike*/
+ def indexWhere(p: A => Boolean, from: Int): Int = {
+ var i = from
+ var these = this drop from
+ while (these.nonEmpty) {
+ if (p(these.head))
+ return i
+
+ i += 1
+ these = these.tail
+ }
+ -1
+ }
+
+ override /*SeqLike*/
+ def lastIndexWhere(p: A => Boolean, end: Int): Int = {
+ var i = 0
+ var these = this
+ var last = -1
+ while (!these.isEmpty && i <= end) {
+ if (p(these.head)) last = i
+ these = these.tail
+ i += 1
+ }
+ last
+ }
+}
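Because every operation in this trait is written as a `head`/`tail` walk, `length`, `apply` and the folds are linear in the distance travelled. A rough sketch on an immutable `List`, itself a linear sequence (the object name `LinearCostSketch` is invented for illustration):

{{{
object LinearCostSketch {
  def main(args: Array[String]) {
    val xs = List(1, 2, 3, 4)
    println(xs.length)              // 4, counted by walking the whole list
    println(xs(2))                  // 3, reached by dropping two elements first
    println(xs.foldLeft(0)(_ + _))  // 10, the same head/tail loop with an accumulator
    // An out-of-range index such as xs(10) fails with an exception rather than returning a value.
  }
}
}}}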
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index cf140ce672..b48acd0c4e 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -1,55 +1,44 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
-/** <p>
- * A map from keys of type <code>A</code> to values of type <code>B</code>.
- * To implement a concrete map, you need to provide implementations of the
- * following methods (where <code>This</code> is the type of the map in question):
- * </p><pre>
- * <b>def</b> get(key: A): Option[B]
- * <b>def</b> iterator: Iterator[(A, B)]
- * <b>def</b> + [B1 >: B](kv: (A, B1)): This
- * <b>def</b> -(key: A): This</pre>
- * <p>
- * If you wish that methods like, take, drop, filter return the same kind
- * of map, you should also override:
- * </p><pre>
- * <b>def</b> empty: This</pre>
- * <p>
- * It might also be a good idea to override methods <code>foreach</code>
- * and <code>size</code> for efficiency.
- * </p>
+/**
+ * A map from keys of type `A` to values of type `B`.
*
- * @note If you do not have specific implementations for `add` and `-` in mind,
- * you might consider inheriting from <code>DefaultMap</code> instead.
+ * $mapnote
*
- * @note If your additions and mutations return the same kind of map as the map
- * you are defining, you should inherit from <code>MapLike</code> as well.
+ * '''Note:''' If you do not have specific implementations for `add` and `-` in mind,
+ * you might consider inheriting from `DefaultMap` instead.
*
- * @since 1
+ * '''Note:''' If your additions and mutations return the same kind of map as the map
+ * you are defining, you should inherit from `MapLike` as well.
+ *
+ * @tparam A the type of the keys in this map.
+ * @tparam B the type of the values associated with keys.
+ *
+ * @since 1
*/
trait Map[A, +B] extends Iterable[(A, B)] with MapLike[A, B, Map[A, B]] {
def empty: Map[A, B] = Map.empty
}
-/* Factory object for `Map` class
- *
- * @since 2.5
+/** $factoryInfo
+ * @define Coll Map
+ * @define coll map
*/
object Map extends MapFactory[Map] {
def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
+ /** $mapCanBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
}
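A short usage sketch of the `Map` interface described above (the object name `MapBasics` is invented for illustration); `Predef.Map` supplies an immutable implementation:

{{{
object MapBasics {
  def main(args: Array[String]) {
    val m = Map("a" -> 1, "b" -> 2)
    println(m + ("c" -> 3))       // a new map with the extra binding
    println(m - "a")              // a new map without the "a" binding
    println(m.getOrElse("z", 0))  // 0: the by-name default is used when the key is missing
  }
}
}}}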
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 3b188acab6..ebf95dde81 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -1,53 +1,70 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
import mutable.{Builder, StringBuilder, MapBuilder}
+import annotation.migration
import PartialFunction._
-/** <p>
- * A generic template for maps from keys of type <code>A</code> to values
- * of type <code>B</code>.<br/>
+/** A template trait for maps, which associate keys with values.
+ *
+ * $mapNote
+ * $mapTags
+ * @since 2.8
+ *
+ * @define mapNote
+ * '''Implementation note:'''
+ * This trait provides most of the operations of a `Map` independently of its representation.
+ * It is typically inherited by concrete implementations of maps.
+ *
* To implement a concrete map, you need to provide implementations of the
- * following methods (where <code>This</code> is the type of the map in
- * question):
- * </p>
- * <pre>
- * <b>def</b> get(key: A): Option[B]
- * <b>def</b> iterator: Iterator[(A, B)]
- * <b>def</b> + [B1 >: B](kv: (A, B1)): This
- * <b>def</b> -(key: A): This</pre>
- * <p>
- * If you wish that methods <code>like</code>, <code>take</code>, <code>drop</code>,
- * <code>filter</code> return the same kind of map, you should also override:
- * </p>
- * <pre>
- * <b>def</b> empty: This</pre>
- * <p>
- * It is also good idea to override methods <code>foreach</code> and
- * <code>size</code> for efficiency.
- * </p>
+ * following methods:
+ * {{{
+ * def get(key: A): Option[B]
+ * def iterator: Iterator[(A, B)]
+ * def + [B1 >: B](kv: (A, B1)): This
+ * def -(key: A): This
+ * }}}
+ * If you want methods like `take`, `drop`, and `filter` to return the same kind of map,
+ * you should also override:
+ * {{{
+ * def empty: This
+ * }}}
+ * It is also a good idea to override methods `foreach` and
+ * `size` for efficiency.
+ *
+ * @define mapTags
+ * @tparam A the type of the keys.
+ * @tparam B the type of associated values.
+ * @tparam This the type of the map itself.
*
* @author Martin Odersky
* @version 2.8
- * @since 2.8
+ *
+ * @define coll map
+ * @define Coll Map
+ * @define willNotTerminateInf
+ * @define mayNotTerminateInf
*/
trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends PartialFunction[A, B]
with IterableLike[(A, B), This]
with Subtractable[A, This] {
self =>
+ // note: can't inherit Addable because of variance problems: Map
+ // is covariant in its value type B, but Addable is nonvariant.
- /* The empty map of the dame type as this map */
+ /** The empty map of the same type as this map
+ * @return an empty map of type `This`.
+ */
def empty: This
/** A common implementation of `newBuilder` for all maps in terms of `empty`.
@@ -55,81 +72,98 @@ self =>
*/
override protected[this] def newBuilder: Builder[(A, B), This] = new MapBuilder[A, B, This](empty)
- /** Check if this map maps <code>key</code> to a value and return the
- * value as an option if it exists, None if not.
+ /** Optionally returns the value associated with a key.
*
- * @param key the key of the mapping of interest.
- * @return the value of the mapping as an option, if it exists, or None.
+ * @param key the key value
+ * @return an option value containing the value associated with `key` in this map,
+ * or `None` if none exists.
*/
def get(key: A): Option[B]
- /** An iterator yielding all key/value mappings of this map. */
+ /** Creates a new iterator over all key/value pairs of this map
+ *
+ * @return the new iterator
+ */
def iterator: Iterator[(A, B)]
- /** Add a key/value pair to this map, returning a new map.
+ /** Adds a key/value pair to this map, returning a new map.
* @param kv the key/value pair
- * @return A new map with the new binding added to this map
+ * @tparam B1 the type of the value in the key/value pair.
+ * @return a new map with the new binding added to this map
+ * @usecase def + (kv: (A, B)): Map[A, B]
*/
def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
- /** Removes a key from this map, returning a new map
+ /** Removes a key from this map, returning a new map.
* @param key the key to be removed
- * @return A new map without a binding for <code>key</code>
+ * @return a new map without a binding for `key`
+ * @usecase def - (key: A): Map[A, B]
*/
def - (key: A): This
- /** Is this an empty map?
+ /** Tests whether the map is empty.
*
- * @return <code>true</code> iff the map does not contain any key/value mapping.
+ * @return `true` if the map does not contain any key/value binding, `false` otherwise.
*/
override def isEmpty: Boolean = size == 0
- /** Check if this map maps <code>key</code> to a value.
- * Return that value if it exists, otherwise return <code>default</code>.
+ /** Returns the value associated with a key, or a default value if the key is not contained in the map.
* @param key the key.
- * @param default a computation that yields a default value in case no binding for the key is
+ * @param default a computation that yields a default value in case no binding for `key` is
* found in the map.
+ * @tparam B1 the result type of the default computation.
+ * @return the value associated with `key` if it exists,
+ * otherwise the result of the `default` computation.
+ * @usecase def getOrElse(key: A, default: => B): B
*/
def getOrElse[B1 >: B](key: A, default: => B1): B1 = get(key) match {
case Some(v) => v
case None => default
}
- /** Retrieve the value which is associated with the given key. This
- * method throws an exception if there is no mapping from the given
- * key to a value.
+ /** Retrieves the value which is associated with the given key. This
+ * method invokes the `default` method of the map if there is no mapping
+ * from the given key to a value. Unless overridden, the `default` method throws a
+ * `NoSuchElementException`.
*
* @param key the key
- * @return the value associated with the given key.
+ * @return the value associated with the given key, or the result of the
+ * map's `default` method, if none exists.
*/
def apply(key: A): B = get(key) match {
case None => default(key)
case Some(value) => value
}
- /** Is the given key mapped to a value by this map?
+ /** Tests whether this map contains a binding for a key.
*
* @param key the key
- * @return <code>true</code> iff there is a mapping for key in this map
+ * @return `true` if there is a binding for `key` in this map, `false` otherwise.
*/
def contains(key: A): Boolean = get(key) match {
case None => false
case Some(_) => true
}
- /** Does this map contain a mapping from the given key to a value?
+ /** Tests whether this map contains a binding for a key. This method,
+ * which implements an abstract method of trait `PartialFunction`,
+ * is equivalent to `contains`.
*
* @param key the key
- * @return <code>true</code> iff there is a mapping for key in this map
+ * @return `true` if there is a binding for `key` in this map, `false` otherwise.
*/
def isDefinedAt(key: A) = contains(key)
- /** @return the keys of this map as a set. */
+ /** Collects all keys of this map in a set.
+ * @return a set containing all keys of this map.
+ */
def keySet: Set[A] = new DefaultKeySet
+ /** The implementation class of the set returned by `keySet`.
+ */
protected class DefaultKeySet extends Set[A] {
def contains(key : A) = self.contains(key)
- def iterator = self.iterator.map(_._1)
+ def iterator = keysIterator
def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
def - (elem: A): Set[A] = (Set[A]() ++ this - elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
override def size = self.size
@@ -150,22 +184,26 @@ self =>
*
* @return an iterator over all keys.
*/
- @deprecated("use `keysIterator' instead")
- def keys: Iterator[A] = keysIterator
+ @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ def keys: Iterable[A] = keySet
- /** @return the values of this map as an iterable.
+ /** Collects all values of this map in an iterable collection.
+ * @return the values of this map as an iterable.
*/
- def valuesIterable: Iterable[B] = new DefaultValuesIterable
+ @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ def values: Iterable[B] = new DefaultValuesIterable
+ /** The implementation class of the iterable returned by `values`.
+ */
protected class DefaultValuesIterable extends Iterable[B] {
- def iterator = self.iterator.map(_._2)
+ def iterator = valuesIterator
override def size = self.size
override def foreach[C](f: B => C) = for ((k, v) <- self) f(v)
}
- /** Creates an iterator for a contained values.
+ /** Creates an iterator for all values in this map.
*
- * @return an iterator over all values.
+ * @return an iterator over all values that are associated with some key in this map.
*/
def valuesIterator: Iterator[B] = new Iterator[B] {
val iter = self.iterator
@@ -173,37 +211,35 @@ self =>
def next = iter.next._2
}
- /** Creates an iterator for a contained values.
- *
- * @return an iterator over all values.
- */
- @deprecated("use `valuesIterator' instead")
- def values: Iterator[B] = valuesIterator
-
- /** The default value for the map, returned when a key is not found
- * The method implemented here yields an error,
+ /** Defines the default value computation for the map,
+ * returned when a key is not found.
+ * The method implemented here throws an exception,
* but it might be overridden in subclasses.
*
- * @param key the given key value
- * @throws Predef.NoSuchElementException
+ * @param key the given key value for which a binding is missing.
+ * @throws `NoSuchElementException`
*/
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
- /** A map view consisting only of those key value pairs where the key satisfies a given
- * predicate `p`.
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def filterKeys(p: A => Boolean) = new DefaultMap[A, B] {
+ def filterKeys(p: A => Boolean): Map[A, B] = new DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
- /** A map view resulting from applying a given function `f` to each value
- * associated with a key in this map.
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
- def mapValues[C](f: B => C) = new DefaultMap[A, C] {
+ def mapValues[C](f: B => C): Map[A, C] = new DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
@@ -217,64 +253,91 @@ self =>
// generic, returning This[B]. We need better covariance support to express that though.
// So right now we do the brute force approach of code duplication.
- /** A new immutable map containing updating this map with a given key/value mapping.
+ /** Creates a new map obtained by updating this map with a given key/value pair.
* @param key the key
* @param value the value
- * @return A new map with the new key/value mapping
+ * @tparam B1 the type of the added value
+ * @return A new map with the new key/value mapping added to this map.
+ * @usecase def updated(key: A, value: B): Map[A, B]
*/
def updated [B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
- /** Adds two or more elements to this collection and returns
- * a new collection.
+ /** Adds key/value pairs to this map, returning a new map.
*
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
+ * This method takes two or more key/value pairs. Another overloaded
+ * variant of this method handles the case where a single key/value pair is
+ * added.
+ * @param kv1 the first key/value pair
+ * @param kv2 the second key/value pair
+ * @param kvs the remaining key/value pairs
+ * @tparam B1 the type of the added values
+ * @return a new map with the given bindings added to this map
+ * @usecase def + (kvs: (A, B)*): Map[A, B]
+ * @param kvs the key/value pairs
*/
- def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1] =
- this + elem1 + elem2 ++ elems
+ def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] =
+ this + kv1 + kv2 ++ kvs
- /** Adds a number of elements provided by a traversable object
- * and returns a new collection with the added elements.
+ /** Adds all key/value pairs in a traversable collection to this map, returning a new map.
*
- * @param elems the traversable object.
+ * @param kvs the collection containing the added key/value pairs
+ * @tparam B1 the type of the added values
+ * @return a new map with the given bindings added to this map
+ * @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B]
*/
- def ++[B1 >: B](elems: Traversable[(A, B1)]): Map[A, B1] =
- ((repr: Map[A, B1]) /: elems) (_ + _)
+ def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
+ ((repr: Map[A, B1]) /: xs) (_ + _)
- /** Adds a number of elements provided by an iterator
- * and returns a new collection with the added elements.
+ /** Returns a new map with all key/value pairs for which the predicate
+ * `p` returns `false`.
*
- * @param iter the iterator
+ * '''Note:''' This method works by successively removing elements for which the
+ * predicate is true from this map.
+ * If removal is slow, or you expect that most elements of the map
+ * will be removed, you might consider using `filter`
+ * with a negated predicate instead.
+ * @param p A predicate over key-value pairs
+ * @return A new map containing elements not satisfying the predicate.
*/
- def ++[B1 >: B] (iter: Iterator[(A, B1)]): Map[A, B1] =
- ((repr: Map[A, B1]) /: iter) (_ + _)
+ override def filterNot(p: ((A, B)) => Boolean): This = {
+ var res: This = repr
+ for (kv <- this)
+ if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! concrete overrides abstract problem
+ res
+ }
- /** Creates a string representation for this map.
+ /** Appends all bindings of this map to a string builder using start, end, and separator strings.
+ * The written text begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations of all bindings of this map
+ * in the form of `key -> value` are separated by the string `sep`.
*
- * @return a string showing all mappings
+ * @param b the builder to which strings are appended.
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return the string builder `b` to which elements were appended.
*/
override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder =
this.iterator.map { case (k, v) => k+" -> "+v }.addString(b, start, sep, end)
- /** Defines the prefix of this object's <code>toString</code> representation.
- * !!! todo: remove stringPrefix overrides where possible
+ /** Defines the prefix of this object's `toString` representation.
+ * @return a string representation which starts the result of `toString` applied to this $coll.
+ * Unless overridden in subclasses, the string prefix of every map is `"Map"`.
*/
override def stringPrefix: String = "Map"
- /** Need to override string, so that it's not the Function1's string that gets mixed in.
- */
- override def toString = super[IterableLike].toString
+ override /*PartialFunction*/
+ def toString = super[IterableLike].toString
- override def hashCode() = this map (_.hashCode) sum
+ override def hashCode() = this map (_.##) sum
/** Compares two maps structurally; i.e. checks if all mappings
* contained in this map are also contained in the other map,
* and vice versa.
*
* @param that the other map
- * @return <code>true</code> iff both maps contain exactly the
- * same mappings.
+ * @return `true` if both maps contain exactly the
+ * same mappings, `false` otherwise.
*/
override def equals(that: Any): Boolean = that match {
case that: Map[b, _] =>
@@ -290,7 +353,7 @@ self =>
}
} catch {
case ex: ClassCastException =>
- println("calss cast "); false
+ println("class cast "); false
}}
case _ =>
false
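A minimal concrete map following the recipe in the implementation note above: only `get` and `iterator` are written, and `DefaultMap` supplies generic `+` and `-` (the names `FixedMap` and `FixedMapDemo` are invented for illustration):

{{{
import scala.collection.DefaultMap

class FixedMap extends DefaultMap[String, Int] {
  private val underlying = Map("a" -> 1, "b" -> 2)
  def get(key: String): Option[Int] = underlying.get(key)
  def iterator: Iterator[(String, Int)] = underlying.iterator
}

object FixedMapDemo {
  def main(args: Array[String]) {
    val m = new FixedMap
    println(m("a"))                // 1
    println(m.contains("z"))       // false
    println(m mapValues (_ * 10))  // a view over the original map; values are transformed on access
  }
}
}}}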
diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala
index 336613ac4d..1cc7d81de4 100644
--- a/src/library/scala/collection/MapProxy.scala
+++ b/src/library/scala/collection/MapProxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 88c79c0865..7ef1eb68ef 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,8 @@ import generic._
// Methods could be printed by cat MapLike.scala | egrep '^ (override )?def'
-/** This trait implements a proxy for iterable objects. It forwards
- * all calls to a different iterable object
+/** This trait implements a proxy for Map objects. It forwards
+ * all calls to a different Map object.
*
* @author Martin Odersky
* @version 2.8
@@ -23,12 +23,10 @@ trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends MapLike[A, B, This]
with IterableProxyLike[(A, B), This]
{
- // def empty: This
- // def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
- // def - (key: A): This
-
override def get(key: A): Option[B] = self.get(key)
override def iterator: Iterator[(A, B)] = self.iterator
+ override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = self.+(kv)
+ override def - (key: A): This = self.-(key)
override def isEmpty: Boolean = self.isEmpty
override def getOrElse[B1 >: B](key: A, default: => B1): B1 = self.getOrElse(key, default)
override def apply(key: A): B = self.apply(key)
@@ -36,18 +34,16 @@ trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
override def isDefinedAt(key: A) = self.isDefinedAt(key)
override def keySet: Set[A] = self.keySet
override def keysIterator: Iterator[A] = self.keysIterator
- override def keys: Iterator[A] = self.keysIterator
- override def valuesIterable: Iterable[B] = self.valuesIterable
+ override def keys: Iterable[A] = self.keys
+ override def values: Iterable[B] = self.values
override def valuesIterator: Iterator[B] = self.valuesIterator
- override def values: Iterator[B] = self.valuesIterator
override def default(key: A): B = self.default(key)
override def filterKeys(p: A => Boolean) = self.filterKeys(p)
override def mapValues[C](f: B => C) = self.mapValues(f)
-
- // override def updated [B1 >: B](key: A, value: B1) = self + ((key, value))
- // override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = self.+(elem1, elem2, elems: _*)
- // override def ++[B1 >: B](elems: Traversable[(A, B1)]) = self.++(elems)
- // override def ++[B1 >: B](iter: Iterator[(A, B1)]) = self.++(iter)
+ override def updated [B1 >: B](key: A, value: B1): Map[A, B1] = self.updated(key, value)
+ override def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] = self.+(kv1, kv2, kvs: _*)
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] = self.++(xs)
+ override def filterNot(p: ((A, B)) => Boolean) = self filterNot p
override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder =
self.addString(b, start, sep, end)
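A sketch of the forwarding pattern this trait supports, assuming `MapProxy` wires `MapProxyLike` onto a plain `Map` in the same way as the other proxy traits (the class name `TracingMap` is invented for illustration):

{{{
import scala.collection.{Map, MapProxy}

// Forwards everything to the underlying map; only get is overridden to add a trace.
class TracingMap[A, B](val self: Map[A, B]) extends MapProxy[A, B] {
  override def get(key: A): Option[B] = {
    println("get(" + key + ")")
    self.get(key)
  }
}

object TracingMapDemo {
  def main(args: Array[String]) {
    val m = new TracingMap(Map(1 -> "one", 2 -> "two"))
    println(m.get(1))  // prints the trace line, then Some(one)
    println(m.size)    // 2, forwarded to the underlying map
  }
}
}}}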
diff --git a/src/library/scala/collection/RollbackIterator.scala.disabled b/src/library/scala/collection/RollbackIterator.scala.disabled
deleted file mode 100644
index 99573ab307..0000000000
--- a/src/library/scala/collection/RollbackIterator.scala.disabled
+++ /dev/null
@@ -1,88 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.collection
-
-import scala.collection.mutable.{ArrayBuffer}
-
-/** Rollback iterators are buffered iterators which allow for unbounded rollbacks
- *
- * @author Sean McDirmid
- * @since 2.5
- */
-class RollbackIterator[+A](underlying: Iterator[A]) extends BufferedIterator.Default[A] {
- private[this] var rollback: ArrayBuffer[A] = null
- protected def fill(sz: Int): Seq[A] =
- if (underlying.hasNext) underlying.next :: Nil else Nil
-
- override def next: A = {
- val ret = super.next
- if (rollback != null) rollback += ret
- ret
- }
-
- private def initRollback =
- if (rollback == null) {
- rollback = new ArrayBuffer[A]
- None
- }
- else Some(rollback.length)
-
- /** will rollback all elements iterated during
- * <code>f</code>'s execution if <code>f</code> return false
- */
- def tryRead[T](f: => Option[T]): Option[T] = {
- val oldLength = initRollback
- var g : Option[T] = None
- try {
- g = f
- } finally {
- if (g.isEmpty) {
- //putBack(rollback(0))
- val sz = oldLength.getOrElse(0)
- val i = rollback.drop(sz).reverse.iterator
- while (i.hasNext) putBack(i.next)
- if (oldLength.isEmpty) rollback = null
- else rollback.reduceToSize(sz)
- }
- }
- if (!g.isEmpty && oldLength.isEmpty)
- rollback = null
- g
-
- }
- /** remembers elements iterated over during <code>g</code>'s execution
- * and provides these elements to the result of <code>g</code>'s execution
- */
- def remember[T](g: => (Seq[A] => T)): T = {
- val oldLength = initRollback
- var in: Seq[A] = Nil
- val f = try {
- g
- } finally {
- in = rollback.drop(oldLength.getOrElse(0))
- if (oldLength.isEmpty) rollback = null
- }
- f(in)
- }
-
- /** returns true if any elements are iterated over during <code>f</code>'s execution
- */
- def read(f: => Unit): Boolean = remember[Boolean] {
- f; seq => !seq.isEmpty
- }
-
- /** if elements of <code>seq</code> will be iterated over next in this iterator,
- * returns true and iterates over these elements
- */
- override def readIfStartsWith(seq : Seq[Any]) : Boolean =
- !tryRead{if (seq.forall(a => hasNext && next == a)) Some(()) else None}.isEmpty
-
-}
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index 8ae857af32..2e8a1af99a 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,21 +13,8 @@ package scala.collection
import generic._
import mutable.Builder
-/** <p>
- * Class <code>Seq[A]</code> represents sequences of elements
- * of type <code>A</code>.<br/>
- * It adds the following methods to class <code>Iterable</code>:
- * <code>length</code>, <code>lengthCompare</code>, <code>apply</code>,
- * <code>isDefinedAt</code>, <code>segmentLength</code>,
- * <code>prefixLength</code>, <code>indexWhere</code>, <code>indexOf</code>,
- * <code>lastIndexWhere</code>, <code>lastIndexOf</code>, <code>reverse</code>,
- * <code>reverseIterator</code>, <code>startsWith</code>,
- * <code>endsWith</code>, <code>indexOfSeq</code>.
- * </p>
- *
- * @author Martin Odersky
- * @author Matthias Zenger
- * @version 1.0, 16/07/2003
+/** A base trait for sequences.
+ * $seqInfo
*/
trait Seq[+A] extends PartialFunction[Int, A]
with Iterable[A]
@@ -37,16 +23,18 @@ trait Seq[+A] extends PartialFunction[Int, A]
override def companion: GenericCompanion[Seq] = Seq
}
-/** Factory object for <code>Seq</code> trait.
- *
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `Vector`.
+ * @define coll sequence
+ * @define Coll Seq
*/
object Seq extends SeqFactory[Seq] {
private[collection] val hashSeed = "Seq".hashCode
+ /** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = new GenericCanBuildFrom[A]
+
def newBuilder[A]: Builder[A, Seq[A]] = immutable.Seq.newBuilder[A]
@deprecated("use View instead")
@@ -54,9 +42,5 @@ object Seq extends SeqFactory[Seq] {
@deprecated("use Seq(value) instead")
def singleton[A](value: A) = Seq(value)
-
- /** Builds a singleton sequence. */
- @deprecated("use <code>Seq(x)</code> instead.")
- def single[A](x: A) = singleton(x)
}
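A quick sketch of the behaviour of the trait above (the object name `SeqBasics` is invented for illustration); a sequence is also a `PartialFunction[Int, A]`:

{{{
object SeqBasics {
  def main(args: Array[String]) {
    val xs = Seq(10, 20, 30)
    println(xs(1))              // 20: apply indexes into the sequence
    println(xs.isDefinedAt(5))  // false: only indices 0 until length are defined
    println(xs map (_ + 1))     // 11, 21, 31 in the same kind of collection, via the implicit CanBuildFrom
  }
}
}}}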
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 749aeacd73..8770de9c36 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -1,29 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
-import generic._
-import mutable.{ListBuffer, HashMap, GenericArray}
-import immutable.{List, Range}
-// import immutable.{List, Nil, ::}
+import mutable.{ListBuffer, HashMap, ArraySeq}
+import immutable.{List, Range}
import generic._
-/** Contains a KMP implementation, based on the undoubtedly reliable wikipedia entry.
- *
- * @author paulp
- * @since 2.8
+/** The companion object for trait `SeqLike`.
*/
object SeqLike {
+ /** A KMP implementation, based on the undoubtedly reliable wikipedia entry.
+ *
+ * @author paulp
+ * @since 2.8
+ */
private def KMP[B](S: Seq[B], W: Seq[B]): Option[Int] = {
// trivial cases
if (W.isEmpty) return Some(0)
@@ -73,6 +72,24 @@ object SeqLike {
None
}
+ /** Finds a particular index at which one sequence occurs in another sequence.
+ * Both the source sequence and the target sequence are expressed in terms
+ * of other sequences S' and T' with offset and length parameters. This
+ * function is designed to wrap the KMP machinery in a sufficiently general
+ * way that all library sequence searches can use it. It is unlikely you
+ * have cause to call it directly: prefer functions such as StringBuilder#indexOf
+ * and Seq#lastIndexOf.
+ *
+ * @param source the sequence to search in
+ * @param sourceOffset the starting offset in source
+ * @param sourceCount the length beyond sourceOffset to search
+ * @param target the sequence being searched for
+ * @param targetOffset the starting offset in target
+ * @param targetCount the length beyond targetOffset which makes up the target string
+ * @param fromIndex the smallest index at which the target sequence may start
+ *
+ * @return the applicable index in source where target exists, or -1 if not found
+ */
def indexOf[B](
source: Seq[B], sourceOffset: Int, sourceCount: Int,
target: Seq[B], targetOffset: Int, targetCount: Int,
@@ -82,6 +99,11 @@ object SeqLike {
case Some(x) => x + fromIndex
}
+ /** Finds a particular index at which one sequence occurs in another sequence.
+ * Like indexOf, but finds the latest occurrence rather than earliest.
+ *
+ * @see SeqLike#indexOf
+ */
def lastIndexOf[B](
source: Seq[B], sourceOffset: Int, sourceCount: Int,
target: Seq[B], targetOffset: Int, targetCount: Int,
@@ -96,41 +118,91 @@ object SeqLike {
}
}
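The KMP machinery above is what the public slice searches end up calling; a usage sketch, assuming the `indexOfSlice` entry point mentioned in the class comment below is available (the object name `SliceSearchSketch` is invented for illustration):

{{{
object SliceSearchSketch {
  def main(args: Array[String]) {
    val source = Seq(1, 2, 3, 1, 2, 3)
    println(source.indexOfSlice(Seq(2, 3)))  // 1: first index at which the slice occurs
    println(source.indexOfSlice(Seq(4)))     // -1: no occurrence
  }
}
}}}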
-/** Class <code>Seq[A]</code> represents sequences of elements
- * of type <code>A</code>.
- * It adds the following methods to class Iterable:
- * `length`, `lengthCompare`, `apply`, `isDefinedAt`, `segmentLength`, `prefixLength`,
- * `indexWhere`, `indexOf`, `lastIndexWhere`, `lastIndexOf`, `reverse`, `reverseIterator`,
- * `startsWith`, `endsWith`, `indexOfSeq`, , `zip`, `zipAll`, `zipWithIndex`.
+/** A template trait for sequences of type `Seq[A]`
+ * $seqInfo
+ *
+ * @define seqInfo
+ * Sequences are special cases of iterable collections of class `Iterable`.
+ * Unlike iterables, sequences always have a defined order of elements.
+ * Sequences provide a method `apply` for indexing. Indices range from `0` up to the `length` of
+ * a sequence. Sequences support a number of methods to find occurrences of elements or subsequences, including
+ * `segmentLength`, `prefixLength`, `indexWhere`, `indexOf`, `lastIndexWhere`, `lastIndexOf`,
+ * `startsWith`, `endsWith`, `indexOfSlice`.
+ *
+ * Another way to see a sequence is as a `PartialFunction` from `Int` values
+ * to the element type of the sequence. The `isDefinedAt` method of a sequence
+ * returns `true` for the interval from `0` until `length`.
+ *
+ * Sequences can be accessed in reverse order of their elements, using methods
+ * `reverse` and `reverseIterator`.
+ *
+ * Sequences have two principal subtraits, `IndexedSeq` and `LinearSeq`, which give different guarantees for performance.
+ * An `IndexedSeq` provides fast random-access of elements and a fast `length` operation.
+ * A `LinearSeq` provides fast access only to the first element via `head`, but also
+ * has a fast `tail` operation.
*
+ * @tparam A the element type of the collection
+ * @tparam Repr the type of the actual collection containing the elements.
*
* @author Martin Odersky
* @author Matthias Zenger
* @version 1.0, 16/07/2003
* @since 2.8
+ *
+ * @define Coll Seq
+ * @define coll sequence
+ * @define thatinfo the class of the returned collection. Where possible, `That` is
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `B` being admissible for that class,
+ * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]`
+ * is found.
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ *
+ * Note: may not terminate for infinite-sized collections.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
*/
trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
override protected[this] def thisCollection: Seq[A] = this.asInstanceOf[Seq[A]]
override protected[this] def toCollection(repr: Repr): Seq[A] = repr.asInstanceOf[Seq[A]]
- /** Returns the length of the sequence.
+ /** The length of the $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: `xs.length` and `xs.size` yield the same result.
+ *
+ * @return the number of elements in this $coll.
*/
def length: Int
- /** Returns the elements at position `idx`
+ /** Selects an element by its index in the $coll.
+ *
+ * @param idx The index to select.
+ * @return the element of this $coll at index `idx`, where `0` indicates the first element.
+ * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
*/
def apply(idx: Int): A
- /** Result of comparing <code>length</code> with operand <code>len</code>.
- * returns <code>x</code> where
- * <code>x &lt; 0</code> iff <code>this.length &lt; len</code>
- * <code>x == 0</code> iff <code>this.length == len</code>
- * <code>x &gt; 0</code> iff <code>this.length &gt; len</code>.
+ /** Compares the length of this $coll to a test value.
*
- * The method as implemented here does not call length directly; its running time
- * is O(length min len) instead of O(length). The method should be overwritten
- * if computing length is cheap.
+ * @param len the test value that gets compared with the length.
+ * @return A value `x` where
+ * {{{
+ * x < 0 if this.length < len
+ * x == 0 if this.length == len
+ * x > 0 if this.length > len
+ * }}}
+ * The method as implemented here does not call `length` directly; its running time
+ * is `O(length min len)` instead of `O(length)`. The method should be overwritten
+ * if computing `length` is cheap.
*/
def lengthCompare(len: Int): Int = {
var i = 0
@@ -142,18 +214,30 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
i - len
}
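Only the sign of the result matters, and the comparison stops as soon as it is decided; a small sketch (the object name `LengthCompareSketch` is invented for illustration):

{{{
object LengthCompareSketch {
  def main(args: Array[String]) {
    val xs = Seq(1, 2, 3)
    println(xs.lengthCompare(5) < 0)   // true: fewer than 5 elements
    println(xs.lengthCompare(3) == 0)  // true: exactly 3 elements
    println(xs.lengthCompare(1) > 0)   // true: decided after traversing only two elements
  }
}
}}}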
- /** Should always be <code>length</code> */
+ /** The size of this $coll, equivalent to `length`.
+ *
+ * $willNotTerminateInf
+ */
override def size = length
- /** Is this partial function defined for the index <code>x</code>?
+ /** Tests whether this $coll contains given index.
+ *
+ * The implementations of methods `apply` and `isDefinedAt` turn a `Seq[A]` into
+ * a `PartialFunction[Int, A]`.
+ *
+ * @param idx the index to test
+ * @return `true` if this $coll contains an element at position `idx`, `false` otherwise.
*/
- def isDefinedAt(x: Int): Boolean = (x >= 0) && (x < length)
+ def isDefinedAt(idx: Int): Boolean = (idx >= 0) && (idx < length)
- /** Returns length of longest segment starting from a start index `from`
- * such that every element of the segment satisfies predicate `p`.
- * @note may not terminate for infinite-sized collections.
- * @param p the predicate
- * @param from the start index
+ /** Computes length of longest segment whose elements all satisfy some predicate.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @param from the index where the search starts.
+ * @return the length of the longest segment of this $coll starting from index `from`
+ * such that every element of the segment satisfies the predicate `p`.
*/
def segmentLength(p: A => Boolean, from: Int): Int = {
var i = 0
@@ -163,92 +247,117 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
i
}
- /** Returns length of longest prefix of this seqence
- * such that every element of the prefix satisfies predicate `p`.
- * @note may not terminate for infinite-sized collections.
- * @param p the predicate
+ /** Returns the length of the longest prefix whose elements all satisfy some predicate.
+ *
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @return the length of the longest prefix of this $coll
+ * such that every element of the segment satisfies the predicate `p`.
*/
def prefixLength(p: A => Boolean) = segmentLength(p, 0)
- /** Returns index of the first element satisfying a predicate, or -1, if none exists.
+ /** Finds index of first element satisfying some predicate.
+ *
+ * $mayNotTerminateInf
*
- * @note may not terminate for infinite-sized collections.
- * @param p the predicate
+ * @param p the predicate used to test elements.
+ * @return the index of the first element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
*/
def indexWhere(p: A => Boolean): Int = indexWhere(p, 0)
- /** Returns index of the first element starting from a start index
- * satisying a predicate, or -1, if none exists.
+ /** Finds index of the first element satisfying some predicate after or at some start index.
*
- * @note may not terminate for infinite-sized collections.
- * @param p the predicate
- * @param from the start index
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @param from the start index
+ * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
*/
def indexWhere(p: A => Boolean, from: Int): Int = {
var i = from
var it = iterator.drop(from)
- while (it.hasNext && !p(it.next()))
- i += 1
- if (it.hasNext) i else -1
+ while (it.hasNext) {
+ if (p(it.next())) return i
+ else i += 1
+ }
+
+ -1
}
- /** Returns index of the first element satisying a predicate, or -1. */
- @deprecated("Use `indexWhere' instead")
+ /** Returns index of the first element satisfying a predicate, or `-1`.
+ */
def findIndexOf(p: A => Boolean): Int = indexWhere(p)
- /** Returns the index of the first occurence of the specified
- * object in this sequence.
+ /** Finds index of first occurrence of some value in this $coll.
+ *
+ * $mayNotTerminateInf
*
- * @note may not terminate for infinite-sized collections.
- * @param elem element to search for.
- * @return the index in this sequence of the first occurence of the
- * specified element, or -1 if the sequence does not contain
- * this element.
+ * @param elem the element value to search for.
+ * @tparam B the type of the element `elem`.
+ * @return the index of the first element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def indexOf(elem: A): Int
*/
def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
- /** Returns the index of the first occurence of the specified
- * object in this sequence, starting from a start index, or
- * -1, if none exists.
+ /** Finds index of first occurrence of some value in this $coll after or at some start index.
+ *
+ * $mayNotTerminateInf
*
- * @note may not terminate for infinite-sized collections.
- * @param elem element to search for.
+ * @param elem the element value to search for.
+ * @tparam B the type of the element `elem`.
+ * @param from the start index
+ * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def indexOf(elem: A, from: Int): Int
*/
def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem ==, from)
- /** Returns the index of the last occurence of the specified element
- * in this sequence, or -1 if the sequence does not contain this element.
+ /** Finds index of last occurrence of some value in this $coll.
+ *
+ * $willNotTerminateInf
*
- * @param elem element to search for.
- * @return the index in this sequence of the last occurence of the
- * specified element, or -1 if the sequence does not contain
- * this element.
+ * @param elem the element value to search for.
+ * @tparam B the type of the element `elem`.
+ * @return the index of the last element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def lastIndexOf(elem: A): Int
*/
def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem ==)
- /** Returns the index of the last
- * occurence of the specified element in this sequence
- * before or at a given end index,
- * or -1 if the sequence does not contain this element.
- *
- * @param elem element to search for.
- * @param end the end index
- */
+ /** Finds index of last occurrence of some value in this $coll before or at a given end index.
+ *
+ * @param elem the element value to search for.
+ * @param end the end index.
+ * @tparam B the type of the element `elem`.
+ * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
+ * to `elem`, or `-1`, if none exists.
+ *
+ * @usecase def lastIndexOf(elem: A, end: Int): Int
+ */
def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem ==, end)
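
For illustration, the expected behaviour of the element searches on assumed example values:

{{{
val xs = List('a', 'b', 'a', 'c', 'a')
xs.indexOf('a')         // 0
xs.indexOf('a', 1)      // 2
xs.lastIndexOf('a')     // 4
xs.lastIndexOf('a', 3)  // 2
}}}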
- /** Returns index of the last element satisying a predicate, or -1, if none exists.
+ /** Finds index of last element satisfying some predicate.
+ *
+ * $willNotTerminateInf
*
- * @param p the predicate
- * @return the index of the last element satisfying <code>p</code>,
- * or -1 if such an element does not exist
+ * @param p the predicate used to test elements.
+ * @return the index of the last element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
*/
def lastIndexWhere(p: A => Boolean): Int = lastIndexWhere(p, length - 1)
- /** Returns index of the last element not exceeding a given end index
- * and satisying a predicate, or -1 if none exists.
+ /** Finds index of last element satisfying some predicate before or at given end index.
*
- * @param end the end index
- * @param p the predicate
+ * @param p the predicate used to test elements.
+ * @param end the end index.
+ * @return the index `<= end` of the last element of this $coll that satisfies the predicate `p`,
+ * or `-1`, if none exists.
*/
def lastIndexWhere(p: A => Boolean, end: Int): Int = {
var i = length - 1
@@ -257,31 +366,48 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
i
}
- /** A sequence of type <code>C</code> consisting of all elements of
- * this sequence in reverse order.
+ /** Returns a new $coll with elements in reversed order.
+ *
+ * $willNotTerminateInf
+ *
+ * @return A new $coll with all elements of this $coll in reversed order.
*/
def reverse: Repr = {
var xs: List[A] = List()
for (x <- this)
xs = x :: xs
val b = newBuilder
+ b.sizeHint(this)
for (x <- xs)
b += x
b.result
}
- /** Apply a function to all the elements of the sequence, and return the
- * reversed sequence of results. This is equivalent to a call to <code>reverse</code>
- * followed by a call to <code>map</code>, but more efficient.
+ /**
+ * Builds a new collection by applying a function to all elements of this $coll and
+ * collecting the results in reversed order.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
*
- * @param f the function to apply to each elements.
- * @return the reversed seq of results.
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results in reversed order.
+ *
+ * @usecase def reverseMap[B](f: A => B): $Coll[B]
+ *
+ * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results in reversed order.
*/
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
var xs: List[A] = List()
for (x <- this)
xs = x :: xs
-
val b = bf(repr)
for (x <- xs)
b += f(x)
@@ -289,25 +415,29 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b.result
}
- /** The elements of this sequence in reversed order
+ /** An iterator yielding elements in reversed order.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but might be more efficient.
+ *
+ * @return an iterator yielding the elements of this $coll in reversed order
*/
def reverseIterator: Iterator[A] = toCollection(reverse).iterator
@deprecated("use `reverseIterator' instead")
def reversedElements = reverseIterator
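
A small usage sketch of the reversal operations, on an assumed example list:

{{{
val xs = List(1, 2, 3)
xs.reverse                 // List(3, 2, 1)
xs.reverseMap(_ * 10)      // List(30, 20, 10)
xs.reverseIterator.toList  // List(3, 2, 1)
}}}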
- /**
- * Checks whether the argument sequence is contained at the
- * specified index within the receiver object.
+ /** Tests whether this $coll contains the given sequence at a given index.
*
 * If both the receiver object, <code>this</code>, and
 * the argument, <code>that</code>, are infinite sequences
* this method may not terminate.
*
- * @return true if <code>that</code> is contained in
- * <code>this</code>, at the specified index, otherwise false
- *
- * @see String.startsWith
+ * @param that the sequence to test
+ * @param offset the index where the sequence is searched.
+ * @return `true` if the sequence `that` is contained in this $coll at index `offset`,
+ * otherwise `false`.
*/
def startsWith[B](that: Seq[B], offset: Int): Boolean = {
val i = this.iterator drop offset
@@ -319,16 +449,17 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
!j.hasNext
}
- /**
- * Check whether the receiver object starts with the argument sequence.
+ /** Tests whether this $coll starts with the given sequence.
*
- * @return true if <code>that</code> is a prefix of <code>this</code>,
- * otherwise false
+ * @param that the sequence to test
+ * @return `true` if this collection has `that` as a prefix, `false` otherwise.
*/
def startsWith[B](that: Seq[B]): Boolean = startsWith(that, 0)
- /** @return true if this sequence end with that sequence
- * @see String.endsWith
+ /** Tests whether this $coll ends with the given sequence.
+ * $willNotTerminateInf
+ * @param that the sequence to test
+ * @return `true` if this $coll has `that` as a suffix, `false` otherwise.
*/
def endsWith[B](that: Seq[B]): Boolean = {
val i = this.iterator.drop(length - that.length)
@@ -340,16 +471,26 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
!j.hasNext
}
- /** @return -1 if <code>that</code> not contained in this, otherwise the
- * first index where <code>that</code> is contained.
+ /** Finds first index where this $coll contains a given sequence as a slice.
+ * $mayNotTerminateInf
+ * @param that the sequence to test
+ * @return the first index such that the elements of this $coll starting at this index
+ * match the elements of sequence `that`, or `-1` if no such subsequence exists.
*/
- def indexOfSeq[B >: A](that: Seq[B]): Int = indexOfSeq(that, 0)
+ def indexOfSlice[B >: A](that: Seq[B]): Int = indexOfSlice(that, 0)
- def indexOfSeq[B >: A](that: Seq[B], fromIndex: Int): Int =
+ /** Finds first index after or at a start index where this $coll contains a given sequence as a slice.
+ * $mayNotTerminateInf
+ * @param that the sequence to test
+ * @param from the start index
+ * @return the first index `>= from` such that the elements of this $coll starting at this index
+ * match the elements of sequence `that`, or `-1` if no such subsequence exists.
+ */
+ def indexOfSlice[B >: A](that: Seq[B], from: Int): Int =
if (this.hasDefiniteSize && that.hasDefiniteSize)
- SeqLike.indexOf(thisCollection, 0, length, that, 0, that.length, fromIndex)
+ SeqLike.indexOf(thisCollection, 0, length, that, 0, that.length, from)
else {
- var i = fromIndex
+ var i = from
var s: Seq[A] = thisCollection drop i
while (!s.isEmpty) {
if (s startsWith that)
@@ -361,63 +502,83 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
-1
}
- /** @return -1 if <code>that</code> not contained in this, otherwise the
- * last index where <code>that</code> is contained.
- * @note may not terminate for infinite-sized collections.
- */
- def lastIndexOfSeq[B >: A](that: Seq[B]): Int = lastIndexOfSeq(that, that.length)
+ /** Finds last index where this $coll contains a given sequence as a slice.
+ * $willNotTerminateInf
+ * @param that the sequence to test
+ * @return the last index such that the elements of this $coll starting at this index
+ * match the elements of sequence `that`, or `-1` if no such subsequence exists.
+ */
+ def lastIndexOfSlice[B >: A](that: Seq[B]): Int = lastIndexOfSlice(that, length)
+
+ /** Finds last index before or at a given end index where this $coll contains a given sequence as a slice.
+ * @param that the sequence to test
+ * @param end the end index
+ * @return the last index `<= end` such that the elements of this $coll starting at this index
+ * match the elements of sequence `that`, or `-1` if no such subsequence exists.
+ */
+ def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int =
+ SeqLike.lastIndexOf(thisCollection, 0, length, that, 0, that.length, end)
- // since there's no way to find the last index in an infinite sequence,
- // we just document it may not terminate and assume it will.
- def lastIndexOfSeq[B >: A](that: Seq[B], fromIndex: Int): Int =
- SeqLike.lastIndexOf(thisCollection, 0, length, that, 0, that.length, fromIndex)
+ /** Tests whether this $coll contains a given sequence as a slice.
+ * $mayNotTerminateInf
+ * @param that the sequence to test
+ * @return `true` if this $coll contains a slice with the same elements
+ * as `that`, otherwise `false`.
+ */
+ def containsSlice[B](that: Seq[B]): Boolean = indexOfSlice(that) != -1
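
The slice searches can be illustrated as follows (assumed example values, shown for clarity only):

{{{
val xs = List(1, 2, 3, 1, 2, 3)
xs.indexOfSlice(List(2, 3))      // 1
xs.indexOfSlice(List(2, 3), 2)   // 4
xs.lastIndexOfSlice(List(2, 3))  // 4
xs.containsSlice(List(3, 1))     // true
}}}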
- /** Tests if the given value <code>elem</code> is a member of this
- * sequence.
+ /** Tests whether this $coll contains a given value as an element.
+ * $mayNotTerminateInf
*
- * @param elem element whose membership has to be tested.
- * @return <code>true</code> iff there is an element of this sequence
- * which is equal (w.r.t. <code>==</code>) to <code>elem</code>.
+ * @param elem the element to test.
+ * @return `true` if this $coll has an element that
+ * is equal (wrt `==`) to `elem`, `false` otherwise.
*/
def contains(elem: Any): Boolean = exists (_ == elem)
- /** <p>
- * Computes the multiset union of this sequence and the given sequence
- * <code>that</code>. For example:
- * </p><pre>
- * <b>val</b> xs = List(1, 1, 2)
- * <b>val</b> ys = List(1, 2, 2, 3)
- * println(xs union ys) // prints "List(1, 1, 2, 1, 2, 2, 3)"
- * println(ys union xs) // prints "List(1, 2, 2, 3, 1, 1, 2)"
- * </pre>
+ /** Produces a new sequence which contains all elements of this $coll and also all elements of
+ * a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
+ *
+ * Another way to express this
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
+ * `union` is hence a counterpart of `diff` and `intersect`, which also work on multi-sets.
*
- * @param that the sequence of elements to add to the sequence.
- * @return a sequence containing the elements of this
- * sequence and those of the given sequence <code>that</code>.
+ * $willNotTerminateInf
+ *
+ * @param that the sequence to add.
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * followed by all elements of `that`.
+ * @usecase def union(that: Seq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
this ++ that
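
The multiset behaviour of `union`, `diff` and `intersect`, illustrated on small example lists:

{{{
val xs = List(1, 1, 2)
val ys = List(1, 2, 2, 3)
xs union ys      // List(1, 1, 2, 1, 2, 2, 3)
xs diff ys       // List(1)
xs intersect ys  // List(1, 2)
}}}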
- /** <p>
- * Computes the multiset difference between this sequence and the
- * given sequence <code>that</code>. If an element appears more
- * than once in both sequences, the difference contains <i>m</i> copies
- * of that element, where <i>m</i> is the difference between the
- * number of times the element appears in this sequence and the number
- * of times it appears in <code>that</code>. For example:
- * </p><pre>
- * <b>val</b> xs = List(1, 1, 2)
- * <b>val</b> ys = List(1, 2, 2, 3)
- * println(xs diff ys) // prints "List(1)"
- * println(xs -- ys) // prints "List()"
- * </pre>
- *
- * @param that the sequence of elements to remove from this sequence.
- * @return the sequence of elements contained only in this sequence plus
- * <i>m</i> copies of each element present in both sequences,
- * where <i>m</i> is defined as above.
- */
- def diff[B >: A, That](that: Seq[B]): Repr = {
+ /** Computes the multiset difference between this $coll and another sequence.
+ * $willNotTerminateInf
+ *
+ * @param that the sequence of elements to remove
+ * @tparam B the element type of `that`.
+ * @return a new $coll which contains all elements of this $coll
+ * except some occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
+ * @usecase def diff(that: Seq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * except some occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
+ */
+ def diff[B >: A](that: Seq[B]): Repr = {
val occ = occCounts(that)
val b = newBuilder
for (x <- this)
@@ -426,24 +587,26 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b.result
}
- /** <p>
- * Computes the multiset intersection between this sequence and the
- * given sequence <code>that</code>; the intersection contains <i>m</i>
- * copies of an element contained in both sequences, where <i>m</i> is
- * the smaller of the number of times the element appears in this
- * sequence or in <code>that</code>. For example:
- * </p><pre>
- * <b>val</b> xs = List(1, 1, 2)
- * <b>val</b> ys = List(3, 2, 2, 1)
- * println(xs intersect ys) // prints "List(1, 2)"
- * println(ys intersect xs) // prints "List(2, 1)"
- * </pre>
- *
- * @param that the sequence to intersect.
- * @return the sequence of elements contained both in this sequence and
- * in the given sequence <code>that</code>.
- */
- def intersect[B >: A, That](that: Seq[B]): Repr = {
+ /** Computes the multiset intersection between this $coll and another sequence.
+ * $mayNotTerminateInf
+ *
+ * @param that the sequence of elements to intersect with.
+ * @tparam B the element type of `that`.
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ * @usecase def intersect(that: Seq[A]): $Coll[A]
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ */
+ def intersect[B >: A](that: Seq[B]): Repr = {
val occ = occCounts(that)
val b = newBuilder
for (x <- this)
@@ -460,10 +623,12 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
occ
}
- /** Builds a new sequence from this sequence in which any duplicates (wrt to ==) removed.
- * Among duplicate elements, only the first one is retained in the result sequence
+ /** Builds a new $coll from this $coll without any duplicate elements.
+ * $willNotTerminateInf
+ *
+ * @return A new $coll which contains the first occurrence of every element of this $coll.
*/
- def removeDuplicates: Repr = {
+ def distinct: Repr = {
val b = newBuilder
var seen = Set[A]() //TR: should use mutable.HashSet?
for (x <- this) {
@@ -475,9 +640,21 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b.result
}
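
For example, on an assumed small list:

{{{
List(1, 2, 1, 3, 2).distinct  // List(1, 2, 3): only first occurrences are kept
}}}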
- /** A new sequence, consisting of all elements of current sequence
- * except that `replaced` elements starting from `from` are replaced
- * by `patch`.
+ /** Produces a new $coll where a slice of elements in this $coll is replaced by another sequence.
+ *
+ * @param from the index of the first replaced element
+ * @param patch the replacement sequence
+ * @param replaced the number of elements to drop in the original $coll
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new $coll consisting of all elements of this $coll
+ * except that `replaced` elements starting from `from` are replaced
+ * by `patch`.
+ * @usecase def patch(from: Int, that: Seq[A], replaced: Int): $Coll[A]
+ * @return a new $coll consisting of all elements of this $coll
+ * except that `replaced` elements starting from `from` are replaced
+ * by `patch`.
*/
def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -488,7 +665,15 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b.result
}
- /** Returns a copy of this sequence with the element at position `index` replaced by `elem`.
+ /** A copy of this $coll with one single replaced element.
+ * @param index the position of the replacement
+ * @param elem the replacing element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`.
+ * @usecase def updated(index: Int, elem: A): $Coll[A]
+ * @return a copy of this $coll with the element at position `index` replaced by `elem`.
*/
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -499,7 +684,16 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b.result
}
- /** Returns a new sequence consisting of `elem` followed by the elements of this sequence.
+ /** Prepends an element to this $coll.
+ * @param elem the prepended element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of `elem` followed
+ * by all elements of this $coll.
+ * @usecase def +:(elem: A): $Coll[A]
+ * @return a new $coll consisting of `elem` followed
+ * by all elements of this $coll.
*/
def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -508,7 +702,17 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b.result
}
- /** Returns a new sequence consisting of the elements of this sequence followed by `elem`.
+ /** Appends an element to this $coll.
+ * $willNotTerminateInf
+ * @param elem the appended element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of
+ * all elements of this $coll followed by `elem`.
+ * @usecase def :+(elem: A): $Coll[A]
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by `elem`.
*/
def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -517,11 +721,19 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b.result
}
-
-
-
- /** Returns a new sequence of given length containing the elements of this sequence followed by zero
- * or more occurrences of given elements.
+ /** Appends an element value to this $coll until a given target length is reached.
+ * @param len the target length
+ * @param elem the padding value
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting collection has a length of at least `len`.
+ * @usecase def padTo(len: Int, elem: A): $Coll[A]
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting $coll has a length of at least `len`.
*/
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -530,66 +742,106 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b ++= thisCollection
while (diff > 0) {
b += elem
- diff -=1
+ diff -= 1
}
b.result
}
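
A usage sketch of the element insertion, replacement and padding methods above, on assumed example values:

{{{
val xs = List(1, 2, 3, 4, 5)
xs.patch(1, List(10, 20), 3)  // List(1, 10, 20, 5)
xs.updated(2, 99)             // List(1, 2, 99, 4, 5)
0 +: xs                       // List(0, 1, 2, 3, 4, 5)
xs :+ 6                       // List(1, 2, 3, 4, 5, 6)
xs.padTo(8, 0)                // List(1, 2, 3, 4, 5, 0, 0, 0)
}}}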
- /** Sort the sequence according to the comparison function
- * <code>lt(e1: a, e2: a) =&gt; Boolean</code>,
- * which should be true iff <code>e1</code> precedes
- * <code>e2</code> in the desired ordering.
- * The sort is stable. That is elements that are equal wrt `lt` appear in the
+ /** Tests whether every element of this $coll relates to the
+ * corresponding element of another sequence by satisfying a test predicate.
+ *
+ * @param that the other sequence
+ * @param p the test predicate, which relates elements from both sequences
+ * @tparam B the type of the elements of `that`
+ * @return `true` if both sequences have the same length and
+ * `p(x, y)` is `true` for all corresponding elements `x` of this $coll
+ * and `y` of `that`, otherwise `false`.
+ */
+ def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = {
+ val i = this.iterator
+ val j = that.iterator
+ while (i.hasNext && j.hasNext)
+ if (!p(i.next, j.next))
+ return false
+
+ !i.hasNext && !j.hasNext
+ }
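
For instance, `corresponds` compares two sequences element-wise (assumed example values):

{{{
val xs = List(1, 2, 3)
xs.corresponds(List("a", "bb", "ccc"))((i, s) => s.length == i)  // true
xs.corresponds(List("a", "bb"))((i, s) => s.length == i)         // false: the lengths differ
}}}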
+
+ /** Sorts this $coll according to a comparison function.
+ * $willNotTerminateInf
+ *
+ * The sort is stable. That is, elements that are equal wrt `lt` appear in the
* same order in the sorted sequence as in the original.
*
- * @param lt the comparison function
- * @return a sequence sorted according to the comparison function
- * <code>lt(e1: a, e2: a) =&gt; Boolean</code>.
- * @ex <pre>
- * List("Steve", "Tom", "John", "Bob")
- * .sortWith((e1, e2) => (e1 compareTo e2) &lt; 0) =
- * List("Bob", "John", "Steve", "Tom")</pre>
+ * @param lt the comparison function which tests whether
+ * its first argument precedes its second argument in
+ * the desired ordering.
+ * @return a $coll consisting of the elements of this $coll
+ * sorted according to the comparison function `lt`.
+ * @example {{{
+ * List("Steve", "Tom", "John", "Bob").sortWith(_.compareTo(_) < 0) =
+ * List("Bob", "John", "Steve", "Tom")
+ * }}}
+ */
+ def sortWith(lt: (A, A) => Boolean): Repr = sorted(Ordering fromLessThan lt)
+
+ /** Sorts this $coll according to the Ordering which results from transforming
+ * an implicitly given Ordering with a transformation function.
+ * @see scala.math.Ordering
+ * $willNotTerminateInf
+ * @param f the transformation function mapping elements
+ * to some other domain `B`.
+ * @param ord the ordering assumed on domain `B`.
+ * @tparam B the target type of the transformation `f`, and the type where
+ * the ordering `ord` is defined.
+ * @return a $coll consisting of the elements of this $coll
+ * sorted according to the ordering where `x < y` if
+ * `ord.lt(f(x), f(y))`.
+ *
+ * @example {{{
+ * val words = "The quick brown fox jumped over the lazy dog".split(' ')
+ * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]]
+ * words.sortBy(x => (x.length, x.head))
+ * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped)
+ * }}}
*/
- def sortWith(lt: (A, A) => Boolean): Repr = sortWith(Ordering fromLessThan lt)
+ def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = sorted(ord on f)
- def sortWith[B >: A](ord: Ordering[B]): Repr = {
- val arr = new GenericArray[A](this.length)
+ /** Sorts this $coll according to an Ordering.
+ *
+ * The sort is stable. That is, elements that are equal (as determined by `ord`) appear in the
+ * same order in the sorted sequence as in the original.
+ *
+ * @see scala.math.Ordering
+ *
+ * @param ord the ordering to be used to compare elements.
+ * @return a $coll consisting of the elements of this $coll
+ * sorted according to the ordering `ord`.
+ */
+ def sorted[B >: A](implicit ord: Ordering[B]): Repr = {
+ val arr = new ArraySeq[A](this.length)
var i = 0
for (x <- this) {
arr(i) = x
i += 1
}
- java.util.Arrays.sort(
- arr.array, ord.asInstanceOf[Ordering[Object]])
+ java.util.Arrays.sort(arr.array, ord.asInstanceOf[Ordering[Object]])
val b = newBuilder
+ b.sizeHint(this)
for (x <- arr) b += x
b.result
}
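
A sketch of the three sorting variants on assumed example values:

{{{
val xs = List(3, 1, 2)
xs.sorted                                // List(1, 2, 3), via the implicit Ordering[Int]
xs.sortWith(_ > _)                       // List(3, 2, 1)
List("bb", "a", "ccc").sortBy(_.length)  // List("a", "bb", "ccc")
}}}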
- /** Sort the sequence according to the Ordering which results from transforming
- * the implicitly given Ordering[B] to an Ordering[A]. For example:
+ /** Converts this $coll to a sequence.
+ * $willNotTerminateInf
*
- * <code>
- * val words = "The quick brown fox jumped over the lazy dog".split(' ')
- * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]]
- * words.sortBy(x => (x.length, x.head))
- * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped)
- * </code>
- *
- * @param f the transformation function A => B
- * @param ord the Ordering[B]
- * @return the sorted representation
- */
- def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = sortWith(ord on f)
-
- /**
* Overridden for efficiency.
- *
- * @return the sequence itself
*/
override def toSeq: Seq[A] = thisCollection
- /** The range of all indices of this sequence.
+ /** Produces the range of all indices of this sequence.
+ *
+ * @return a `Range` value from `0` to one less than the length of this $coll.
*/
def indices: Range = 0 until length
@@ -602,44 +854,37 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
override def view(from: Int, until: Int) = view.slice(from, until)
- override def hashCode() = (Seq.hashSeed /: this)(_ * 41 + _.hashCode)
+ /** Hashcodes for $Coll produce a value from the hashcodes of all the
+ * elements of the $coll.
+ */
+ override def hashCode() = (Seq.hashSeed /: this)(_ * 41 + _.##)
override def equals(that: Any): Boolean = that match {
- case that: Seq[_] => (that canEqual this) && (this sameElements that)
- case _ => false
+ case that: Seq[_] => (that canEqual this) && (this sameElements that)
+ case _ => false
}
- /** Need to override string, so that it's not the Function1's string that gets mixed in.
+ /* Need to override string, so that it's not the Function1's string that gets mixed in.
*/
override def toString = super[IterableLike].toString
- /** Returns index of the last element satisying a predicate, or -1. */
- @deprecated("use `lastIndexWhere' instead")
+ /** Returns index of the last element satisfying a predicate, or -1.
+ */
+ @deprecated("use `lastIndexWhere` instead")
def findLastIndexOf(p: A => Boolean): Int = lastIndexWhere(p)
- /** A sub-sequence starting at index <code>from</code>
- * and extending up to the length of the current sequence
- *
- * @param from The index of the first element of the slice
- * @throws IndexOutOfBoundsException if <code>from &lt; 0</code>
- */
- @deprecated("use `drop' instead")
- def slice(from: Int): Seq[A] = toCollection(slice(from, length))
-
- @deprecated("Should be replaced by <code>(s1, s2) forall { case (x, y) => f(x, y) }</code>")
- def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = {
- val i = this.iterator
- val j = that.iterator
- while (i.hasNext && j.hasNext)
- if (!f(i.next, j.next))
- return false
-
- !i.hasNext && !j.hasNext
- }
-
- /** Is <code>that</code> a slice in this? */
- @deprecated("Should be replaced by <code>indexOfSeq(that) != -1</code>")
- def containsSlice[B](that: Seq[B]): Boolean = indexOfSeq(that) != -1
+ /** Tests whether every element of this $coll relates to the
+ * corresponding element of another sequence by satisfying a test predicate.
+ *
+ * @param that the other sequence
+ * @param p the test predicate, which relates elements from both sequences
+ * @tparam B the type of the elements of `that`
+ * @return `true` if both sequences have the same length and
+ * `p(x, y)` is `true` for all corresponding elements `x` of this $coll
+ * and `y` of `that`, otherwise `false`.
+ */
+ @deprecated("use `corresponds` instead")
+ def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = corresponds(that)(f)
/**
* returns a projection that can be used to call non-strict <code>filter</code>,
diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala
index d2e4c0a7b7..392ed15d79 100644
--- a/src/library/scala/collection/SeqProxy.scala
+++ b/src/library/scala/collection/SeqProxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index df4ea77f56..c3095c8516 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -23,11 +22,12 @@ import generic._
* @version 2.8
* @since 2.8
*/
-trait SeqProxyLike[+A, +This <: SeqLike[A, This] with Seq[A]] extends SeqLike[A, This] with IterableProxyLike[A, This] {
+trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, Repr] with IterableProxyLike[A, Repr] {
+ override def size = self.size
+ override def toSeq: Seq[A] = self.toSeq
override def length: Int = self.length
override def apply(idx: Int): A = self.apply(idx)
override def lengthCompare(len: Int): Int = self.lengthCompare(len)
- override def size = self.size
override def isDefinedAt(x: Int): Boolean = self.isDefinedAt(x)
override def segmentLength(p: A => Boolean, from: Int): Int = self.segmentLength(p, from)
override def prefixLength(p: A => Boolean) = self.prefixLength(p)
@@ -40,24 +40,34 @@ trait SeqProxyLike[+A, +This <: SeqLike[A, This] with Seq[A]] extends SeqLike[A,
override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem ==, end)
override def lastIndexWhere(p: A => Boolean): Int = self.lastIndexWhere(p, length - 1)
override def lastIndexWhere(p: A => Boolean, end: Int): Int = self.lastIndexWhere(p)
- override def reverse: This = self.reverse
+ override def reverse: Repr = self.reverse
+ override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.reverseMap(f)(bf)
override def reverseIterator: Iterator[A] = self.reverseIterator
override def startsWith[B](that: Seq[B], offset: Int): Boolean = self.startsWith(that, offset)
override def startsWith[B](that: Seq[B]): Boolean = self.startsWith(that)
override def endsWith[B](that: Seq[B]): Boolean = self.endsWith(that)
- override def indexOfSeq[B >: A](that: Seq[B]): Int = self.indexOfSeq(that)
+ override def indexOfSlice[B >: A](that: Seq[B]): Int = self.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = self.indexOfSlice(that, from)
+ override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = self.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: Seq[B]): Boolean = self.indexOfSlice(that) != -1
override def contains(elem: Any): Boolean = self.contains(elem)
- override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.union(that)(bf)
- override def diff[B >: A, That](that: Seq[B]): This = self.diff(that)
- override def intersect[B >: A, That](that: Seq[B]): This = self.intersect(that)
- override def removeDuplicates: This = self.removeDuplicates
- override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = self.patch(from, patch, replaced)(bf)
- override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = self.padTo(len, elem)(bf)
+ override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
+ override def diff[B >: A](that: Seq[B]): Repr = self.diff(that)
+ override def intersect[B >: A](that: Seq[B]): Repr = self.intersect(that)
+ override def distinct: Repr = self.distinct
+ override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf)
+ override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.updated(index, elem)(bf)
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.+:(elem)(bf)
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.:+(elem)(bf)
+ override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.padTo(len, elem)(bf)
+ override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p)
+ override def sortWith(lt: (A, A) => Boolean): Repr = self.sortWith(lt)
+ override def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = self.sortBy(f)(ord)
+ override def sorted[B >: A](implicit ord: Ordering[B]): Repr = self.sorted(ord)
override def indices: Range = self.indices
override def view = self.view
override def view(from: Int, until: Int) = self.view(from, until)
- override def findLastIndexOf(p: A => Boolean): Int = self.lastIndexWhere(p)
- override def slice(from: Int): Seq[A] = self.drop(from)
- override def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = (self zip that) forall { case (x,y) => f(x,y) }
- override def containsSlice[B](that: Seq[B]): Boolean = self.indexOfSeq(that) != -1
}
+
+
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index 89a96d6a2b..8198abfeab 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,13 +13,14 @@ package scala.collection
import generic._
import TraversableView.NoBuilder
-/** A non-strict projection of an iterable.
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
+/** A base trait for non-strict views of sequences.
+ * $seqViewInfo
*/
trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]]
+/** An object containing the necessary implicit definitions to make
+ * `SeqView`s work. Its definitions are generally not accessed directly by clients.
+ */
object SeqView {
type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]}
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] =
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 189e93406e..3231720bb3 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,14 +14,23 @@ import generic._
import Seq.fill
import TraversableView.NoBuilder
-/** A template trait for a non-strict view of a sequence.
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
+/** A template trait for non-strict views of sequences.
+ * $seqViewInfo
+ *
+ * @define seqViewInfo
+ * $viewInfo
+ * All views for sequences are defined by re-interpreting the `length` and `apply` methods.
+ *
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ * @tparam A the element type of the view
+ * @tparam Coll the type of the underlying collection containing the elements.
+ * @tparam This the type of the view itself
*/
trait SeqViewLike[+A,
- +Coll,
- +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
+ +Coll,
+ +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This]
{ self =>
@@ -31,6 +39,11 @@ trait SeqViewLike[+A,
override def apply(idx: Int): B
}
+ trait Forced[B] extends Transformed[B] with super.Forced[B] {
+ override def length = forced.length
+ override def apply(idx: Int) = forced.apply(idx)
+ }
+
trait Sliced extends Transformed[A] with super.Sliced {
override def length = ((until min self.length) - from) max 0
override def apply(idx: Int): A =
@@ -104,7 +117,8 @@ trait SeqViewLike[+A,
trait Zipped[B] extends Transformed[(A, B)] with super.Zipped[B] {
protected[this] lazy val thatSeq = other.toSeq
- override def length: Int = self.length min thatSeq.length
+ /* Have to be careful here - other may be an infinite sequence. */
+ override def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
override def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
}
@@ -117,10 +131,16 @@ trait SeqViewLike[+A,
}
trait Reversed extends Transformed[A] {
- override def iterator: Iterator[A] = self.reverseIterator
+ override def iterator: Iterator[A] = createReversedIterator
override def length: Int = self.length
override def apply(idx: Int): A = self.apply(length - 1 - idx)
override def stringPrefix = self.stringPrefix+"R"
+
+ private def createReversedIterator = {
+ var lst = List[A]()
+ for (elem <- self) lst ::= elem
+ lst.iterator
+ }
}
trait Patched[B >: A] extends Transformed[B] {
@@ -137,9 +157,20 @@ trait SeqViewLike[+A,
override def stringPrefix = self.stringPrefix+"P"
}
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: B
+ override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
+ override def length: Int = 1 + self.length
+ override def apply(idx: Int): B =
+ if (idx == 0) fst
+ else self.apply(idx - 1)
+ override def stringPrefix = self.stringPrefix+"A"
+ }
+
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
+ protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
@@ -151,6 +182,7 @@ trait SeqViewLike[+A,
protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
protected def newReversed: Transformed[A] = new Reversed { }
protected def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new Patched[B] { val from = _from; val patch = _patch; val replaced = _replaced }
+ protected def newPrepended[B >: A](elem: B): Transformed[B] = new Prepended[B] { protected[this] val fst = elem }
override def reverse: This = newReversed.asInstanceOf[This]
@@ -161,11 +193,35 @@ trait SeqViewLike[+A,
// else super.patch[B, That](from, patch, replaced)(bf)
}
- //TR TODO: updated, +: ed :+ ed
-
override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
patch(length, fill(len - length)(elem), 0)
+ override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ reverse.map(f)
+
+ override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = {
+ require(0 <= index && index < length)
+ patch(index, List(elem), 1)(bf)
+ }
+
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(elem).asInstanceOf[That]
+
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ ++(Iterator.single(elem))(bf)
+
+ override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newForced(thisSeq union that).asInstanceOf[That]
+
+ override def diff[B >: A](that: Seq[B]): This =
+ newForced(thisSeq diff that).asInstanceOf[This]
+
+ override def intersect[B >: A](that: Seq[B]): This =
+ newForced(thisSeq intersect that).asInstanceOf[This]
+
+ override def sorted[B >: A](implicit ord: Ordering[B]): This =
+ newForced(thisSeq sorted ord).asInstanceOf[This]
+
override def stringPrefix = "SeqView"
}
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index f58aff5356..61cfd7e27d 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -1,25 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
-/** <p>
- * A set is a collection that includes at most one of any object.
- * </p>
+/** A base trait for all sets, mutable as well as immutable.
*
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.8
- * @since 1
+ * $setNote
+ * $setNote2
+ * $setTags
+ * @since 1.0
+ * @author Matthias Zenger
+ * @define setNote2
+ * '''Implementation note:''' If your additions and mutations return the same kind of set as the set
+ * you are defining, you should inherit from `SetLike` as well.
*/
trait Set[A] extends (A => Boolean)
with Iterable[A]
@@ -28,13 +29,14 @@ trait Set[A] extends (A => Boolean)
override def companion: GenericCompanion[Set] = Set
}
-/** Factory object for <code>Set</code> class.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is one of `EmptySet`, `Set1`, `Set2`, `Set3`, `Set4` in
+ * class `immutable.Set` for sets of sizes up to 4, and an `immutable.HashSet` for sets of larger sizes.
+ * @define coll set
+ * @define Coll Set
*/
object Set extends SetFactory[Set] {
+ def newBuilder[A] = immutable.Set.newBuilder[A]
override def empty[A]: Set[A] = immutable.Set.empty[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
}
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 0752f4185f..2e9a1ec2a2 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,49 +13,70 @@ import generic._
import mutable.{Builder, AddingBuilder}
import PartialFunction._
-/** <p>
- * A generic template for sets of type <code>A</code>.<br/>
+/** A template trait for sets.
+ *
+ * $setNote
+ * $setTags
+ * @since 2.8
+ *
+ * @define setNote
+ *
+ * A set is a collection that contains no duplicate elements.
+ *
+ * '''Implementation note:'''
+ * This trait provides most of the operations of a `Set` independently of its representation.
+ * It is typically inherited by concrete implementations of sets.
+ *
* To implement a concrete set, you need to provide implementations of the
- * following methods (where <code>This</code> is the type of the set in
- * question):
- * </p>
- * <pre>
- * <b>def</b> contains(key: A): Boolean
- * <b>def</b> iterator: Iterator[A]
- * <b>def</b> +(elem: A): This
- * <b>def</b> -(elem: A): This</pre>
- * <p>
- * If you wish that methods <code>like</code>, <code>take</code>, <code>drop</code>,
- * <code>filter</code> return the same kind of set, you should also override:
- * </p>
- * <pre>
- * <b>def</b> empty: This</pre>
- * <p>
- * It is also good idea to override methods <code>foreach</code> and
- * <code>size</code> for efficiency.
- * </p>
+ * following methods:
+ * {{{
+ * def contains(key: A): Boolean
+ * def iterator: Iterator[A]
+ * def +(elem: A): This
+ * def -(elem: A): This
+ * }}}
+ * If you wish that methods like `take`, `drop`,
+ * `filter` return the same kind of set, you should also override:
+ * {{{
+ * def empty: This
+ * }}}
+ * It is also a good idea to override methods `foreach` and
+ * `size` for efficiency.
+ *
+ * @define setTags
+ * @tparam A the type of the elements of the set
+ * @tparam This the type of the set itself.
*
* @author Martin Odersky
* @version 2.8
+ *
+ * @define coll set
+ * @define Coll Set
+ * @define willNotTerminateInf
+ * @define mayNotTerminateInf
*/
-trait SetLike[A, +This <: SetLike[A, This] with Set[A]] extends IterableLike[A, This] with Addable[A, This] with Subtractable[A, This] {
+trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
+extends IterableLike[A, This]
+ with Addable[A, This]
+ with Subtractable[A, This] {
self =>
- /* The empty set of the dame type as this set */
+ /** The empty set of the same type as this set
+ * @return an empty set of type `This`.
+ */
def empty: This
- /** A common implementation of <code>newBuilder</code> for all sets in terms
- * of <code>empty</code>. Overridden for mutable sets in
+ /** A common implementation of `newBuilder` for all sets in terms
+ * of `empty`. Overridden for mutable sets in
* <a href="mutable/SetLike.html" target="ContentFrame">
- * <code>mutable.SetLike</code></a>.
+ * `mutable.SetLike`</a>.
*/
override protected[this] def newBuilder: Builder[A, This] = new AddingBuilder[A, This](empty)
- /** Checks if this set contains element <code>elem</code>.
+ /** Tests if some element is contained in this set.
*
- * @param elem the element to check for membership.
- * @return <code>true</code> iff <code>elem</code> is contained in
- * this set.
+ * @param elem the element to test for membership.
+ * @return `true` if `elem` is contained in this set, `false` otherwise.
*/
def contains(elem: A): Boolean
@@ -64,119 +84,121 @@ self =>
* already present.
*
* @param elem the element to be added
+ * @return a new set that contains all elements of this set and that also
+ * contains `elem`.
*/
def + (elem: A): This
- /** Creates a new set with given element removed from this set, unless the
- * element is not present.
+ /** Creates a new set with a given element removed from this set.
*
* @param elem the element to be removed
+ * @return a new set that contains all elements of this set but that does not
+ * contain `elem`.
*/
def - (elem: A): This
- /** Checks if this set is empty.
+ /** Tests if this set is empty.
*
- * @return <code>true</code> iff there is no element in the set.
+ * @return `true` if there is no element in the set, `false` otherwise.
*/
override def isEmpty: Boolean = size == 0
- /** This method allows sets to be interpreted as predicates.
- * It returns <code>true</code>, iff this set contains element
- * <code>elem</code>.
+ /** Tests if some element is contained in this set.
*
- * @param elem the element to check for membership.
- * @return <code>true</code> iff <code>elem</code> is contained in
- * this set.
+ * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
+ * @param elem the element to test for membership.
+ * @return `true` if `elem` is contained in this set, `false` otherwise.
*/
def apply(elem: A): Boolean = contains(elem)
- /** Returns a new set consisting of all elements that are both in the current
- * set and in the argument set.
+ /** Computes the intersection between this set and another set.
*
- * @param that the set to intersect with.
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
*/
def intersect(that: Set[A]): This = filter(that.contains)
- /** Returns a new set consisting of all elements that are both in the current
- * set and in the argument set.
+ /** Computes the intersection between this set and another set.
*
- * @param that the set to intersect with.
- * @note same as <code>intersect</code>.
+ * '''Note:''' Same as `intersect`.
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
*/
def &(that: Set[A]): This = intersect(that)
- /** This method is an alias for <code>intersect</code>.
- * It computes an intersection with set <code>that</code>.
- * It removes all the elements that are not present in <code>that</code>.
+ /** This method is an alias for `intersect`.
+ * It computes an intersection with set `that`.
+ * It removes all the elements that are not present in `that`.
*
* @param that the set to intersect with
*/
@deprecated("use & instead") def ** (that: Set[A]): This = intersect(that)
- /** The union of this set and the given set <code>that</code>.
+ /** Computes the union between this set and another set.
*
- * @param that the set of elements to add
- * @return a set containing the elements of this
- * set and those of the given set <code>that</code>.
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
*/
def union(that: Set[A]): This = this.++(that)
- /** The union of this set and the given set <code>that</code>.
+ /** Computes the union between this set and another set.
*
- * @param that the set of elements to add
- * @return a set containing the elements of this
- * set and those of the given set <code>that</code>.
- * @note same as <code>union</code>.
+ * '''Note:''' Same as `union`.
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
*/
def | (that: Set[A]): This = union(that)
- /** The difference of this set and the given set <code>that</code>.
+ /** Computes the difference of this set and another set.
*
- * @param that the set of elements to remove
+ * @param that the set of elements to exclude.
* @return a set containing those elements of this
- * set that are not also contained in the given set <code>that</code>.
+ * set that are not also contained in the given set `that`.
*/
def diff(that: Set[A]): This = --(that)
- /** The difference of this set and the given set <code>that</code>.
+ /** The difference of this set and another set.
*
- * @param that the set of elements to remove
+ * '''Note:''' Same as `diff`.
+ * @param that the set of elements to exclude.
* @return a set containing those elements of this
- * set that are not also contained in the given set <code>that</code>.
- * @note same as <code>diff</code>.
+ * set that are not also contained in the given set `that`.
*/
def &~(that: Set[A]): This = diff(that)
- /** Checks if this set is a subset of set <code>that</code>.
+ /** Tests whether this set is a subset of another set.
*
- * @param that another set.
- * @return <code>true</code> iff the other set is a superset of
- * this set.
- * todo: rename to isSubsetOf
+ * @param that the set to test.
+ * @return `true` if this set is a subset of `that`, i.e. if
+ * every element of this set is also an element of `that`.
*/
def subsetOf(that: Set[A]): Boolean = forall(that.contains)
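
The set algebra above can be illustrated on assumed example sets (element order in the results is unspecified):

{{{
val s = Set(1, 2, 3)
val t = Set(2, 3, 4)
s & t                 // Set(2, 3)
s | t                 // Set(1, 2, 3, 4)
s &~ t                // Set(1)
Set(2, 3) subsetOf s  // true
}}}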
- /** Defines the prefix of this object's <code>toString</code> representation.
+ /** Defines the prefix of this object's `toString` representation.
+ * @return a string which begins the result of `toString` applied to this set.
+ * Unless overridden this is simply `"Set"`.
*/
override def stringPrefix: String = "Set"
- /** Need to override string, so that it's not the Function1's string that gets mixed in.
- */
override def toString = super[IterableLike].toString
-
override def hashCode() = this map (_.hashCode) sum
- /** Compares this set with another object and returns true, iff the
- * other object is also a set which contains the same elements as
- * this set.
+ /** Compares this set with another object for equality.
*
+ * '''Note:''' This operation contains an unchecked cast: if `that`
+ * is a set, it will assume with an unchecked cast
+ * that it has the same element type as this set.
+ * Any subsequent ClassCastException is treated as a `false` result.
* @param that the other object
- * @note not necessarily run-time type safe.
- * @return <code>true</code> iff this set and the other set
- * contain the same elements.
+ * @return `true` if `that` is a set which contains the same elements
+ * as this set.
*/
override def equals(that: Any): Boolean = that match {
- case that: Set[A] =>
+ case that: Set[_] =>
(this eq that) ||
(that canEqual this) &&
(this.size == that.size) &&
diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala
index dfa8fe0926..03ea4fbbb3 100644
--- a/src/library/scala/collection/SetProxy.scala
+++ b/src/library/scala/collection/SetProxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index b4bb67e876..cf7f3b845c 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,10 +21,11 @@ import generic._
*/
trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A, This] with IterableProxyLike[A, This]
{
+ def empty: This
// def empty: This
- // def + (elem: A): This
- // def - (elem: A): This
override def contains(elem: A): Boolean = self.contains(elem)
+ override def + (elem: A) = self.+(elem)
+ override def - (elem: A) = self.-(elem)
override def isEmpty: Boolean = self.isEmpty
override def apply(elem: A): Boolean = self.apply(elem)
override def intersect(that: Set[A]) = self.intersect(that)
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index 692cf670e6..e30ad37924 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
@@ -21,9 +20,9 @@ import mutable.Builder
*/
trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B]] {
/** Needs to be overridden in subclasses. */
- override def empty = SortedMap.empty[A, B]
+ override def empty: SortedMap[A, B] = SortedMap.empty[A, B]
- override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] =
+ override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] =
immutable.SortedMap.newBuilder[A, B]
}
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index c7a8ebbd62..a5386802b6 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala
index 7d18bf79a9..2d59d12f0d 100644
--- a/src/library/scala/collection/SortedSet.scala
+++ b/src/library/scala/collection/SortedSet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 1c41ee3a5a..4542925a21 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 2cf66e80a2..dcea1a8c2f 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,20 +14,8 @@ import generic._
import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
import scala.util.control.Breaks
-/** <p>
- * A template trait for traversable collections.
- * </p>
- * <p>
- * Collection classes mixing in this trait provide a method
- * <code>foreach</code> which traverses all the
- * elements contained in the collection, applying a given procedure to each.
- * They also provide a method <code>newBuilder</code>
- * which creates a builder for collections of the same kind.
- * </p>
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A trait for traversable collections.
+ * $traversableInfo
*/
trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
with GenericTraversableTemplate[A, Traversable] {
@@ -39,8 +26,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def isEmpty: Boolean
override def size: Int
override def hasDefiniteSize
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def filter(p: A => Boolean): Traversable[A]
@@ -81,7 +67,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def toIterable: Iterable[A]
override def toSeq: Seq[A]
override def toStream: Stream[A]
-// override def sortWith(lt : (A,A) => Boolean): Traversable[A]
+ override def sortWith(lt : (A,A) => Boolean): Traversable[A]
override def mkString(start: String, sep: String, end: String): String
override def mkString(sep: String): String
override def mkString: String
@@ -95,17 +81,17 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
*/
}
-/** Factory methods and utilities for instances of type <code>Traversable</code>.
- *
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `Vector`.
*/
object Traversable extends TraversableFactory[Traversable] { self =>
- /** provide break functionality separate from client code */
+ /** Provides break functionality separate from client code */
private[collection] val breaks: Breaks = new Breaks
+ /** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = new GenericCanBuildFrom[A]
+
def newBuilder[A]: Builder[A, Traversable[A]] = immutable.Traversable.newBuilder[A]
}
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index a0e330831f..a307ccbf3f 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,78 +15,127 @@ import scala.reflect.ClassManifest
import mutable.{Builder, StringBuilder, Buffer, ArrayBuffer, ListBuffer}
import immutable.{List, Stream, Nil, ::}
-/** <p>
- * A template trait for traversable collections.
- * This is a base trait of all kinds of Scala collections. It implements
- * the behavior common to all collections, in terms of a method
- * <code>foreach</code> with signature:
- * </p><pre>
- * <b>def</b> foreach[U](f: Elem => U): Unit</pre>
- * <p>
- * Collection classes mixing in this trait provide a concrete
- * <code>foreach</code> method which traverses all the
- * elements contained in the collection, applying a given function to each.
- * They also need to provide a method <code>newBuilder</code>
- * which creates a builder for collections of the same kind.
- * </p>
- * <p>
- * A traversable class might or might not have two properties: strictness
- * and orderedness. Neither is represented as a type.
- * </p>
- * <p>
- * The instances of a strict collection class have all their elements
- * computed before they can be used as values. By contrast, instances of
- * a non-strict collection class may defer computation of some of their
- * elements until after the instance is available as a value.
- * A typical example of a non-strict collection class is a
- * <a href="../immutable/Stream.html" target="ContentFrame">
- * <code>scala.collection.immutable.Stream</code></a>.
- * A more general class of examples are <code>TraversableViews</code>.
- * </p>
- * <p>
- * If a collection is an instance of an ordered collection class, traversing
- * its elements with <code>foreach</code> will always visit elements in the
- * same order, even for different runs of the program. If the class is not
- * ordered, <code>foreach</code> can visit elements in different orders for
- * different runs (but it will keep the same order in the same run).<br/>
- * A typical example of a collection class which is not ordered is a
- * <code>HashMap</code> of objects. The traversal order for hash maps will
- * depend on the hash codes of its elements, and these hash codes might
- * differ from one run to the next. By contrast, a <code>LinkedHashMap</code>
- * is odered because it's <code>foreach</code> method visits elements in the
- * order they were inserted into the <code>HashMap</code>.
- * </p>
+/** A template trait for traversable collections of type `Traversable[A]`.
+ * $traversableInfo
+ * @define mutability
+ * @define traversableInfo
+ * This is a base trait of all kinds of $mutability Scala collections. It implements
+ * the behavior common to all collections, in terms of a method
+ * `foreach` with signature:
+ * {{{
+ * def foreach[U](f: Elem => U): Unit
+ * }}}
+ * Collection classes mixing in this trait provide a concrete
+ * `foreach` method which traverses all the
+ * elements contained in the collection, applying a given function to each.
+ * They also need to provide a method `newBuilder`
+ * which creates a builder for collections of the same kind.
+ *
+ * A traversable class might or might not have two properties: strictness
+ * and orderedness. Neither is represented as a type.
+ *
+ * The instances of a strict collection class have all their elements
+ * computed before they can be used as values. By contrast, instances of
+ * a non-strict collection class may defer computation of some of their
+ * elements until after the instance is available as a value.
+ * A typical example of a non-strict collection class is a
+ * <a href="../immutable/Stream.html" target="ContentFrame">
+ * `scala.collection.immutable.Stream`</a>.
+ * A more general class of examples are `TraversableViews`.
+ *
+ * If a collection is an instance of an ordered collection class, traversing
+ * its elements with `foreach` will always visit elements in the
+ * same order, even for different runs of the program. If the class is not
+ * ordered, `foreach` can visit elements in different orders for
+ * different runs (but it will keep the same order in the same run).
+ *
+ * A typical example of a collection class which is not ordered is a
+ * `HashMap` of objects. The traversal order for hash maps will
+ * depend on the hash codes of its elements, and these hash codes might
+ * differ from one run to the next. By contrast, a `LinkedHashMap`
+ * is ordered because its `foreach` method visits elements in the
+ * order they were inserted into the `HashMap`.
*
* @author Martin Odersky
* @version 2.8
* @since 2.8
+ * @tparam A the element type of the collection
+ * @tparam Repr the type of the actual collection containing the elements.
+ *
+ * @define Coll Traversable
+ * @define coll traversable collection
+ * @define thatinfo the class of the returned collection. Where possible, `That` is
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `B` being admissible for that class,
+ * which means that an implicit instance of type `CanBuildFrom[Repr, B, That]`
+ * is found.
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr` and
+ * the new element type `B`.
+ * @define orderDependent
+ *
+ * Note: might return different results for different runs, unless the underlying collection type is ordered.
+ * @define orderDependentFold
+ *
+ * Note: might return different results for different runs, unless the underlying collection type is ordered
+ * or the operator is associative and commutative.
+ * @define mayNotTerminateInf
+ *
+ * Note: may not terminate for infinite-sized collections.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
*/
-trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr] {
-self =>
+trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
+ with FilterMonadic[A, Repr]
+ with TraversableOnce[A] {
+ self =>
import Traversable.breaks._
+ /** The type implementing this traversable */
+ protected type Self = Repr
+
+ /** The collection of type $coll underlying this `TraversableLike` object.
+ * By default this is implemented as the `TraversableLike` object itself, but this can be overridden.
+ */
def repr: Repr = this.asInstanceOf[Repr]
+ /** The underlying collection seen as an instance of `$Coll`.
+ * By default this is implemented as the current collection object itself,
+ * but this can be overridden.
+ */
protected[this] def thisCollection: Traversable[A] = this.asInstanceOf[Traversable[A]]
+
+ /** A conversion from collections of type `Repr` to `$Coll` objects.
+ * By default this is implemented as just a cast, but this can be overridden.
+ */
protected[this] def toCollection(repr: Repr): Traversable[A] = repr.asInstanceOf[Traversable[A]]
- /** Create a new builder for this collection type.
+ /** Creates a new builder for this collection type.
*/
protected[this] def newBuilder: Builder[A, Repr]
- /** Apply a function <code>f</code> to all elements of this
- * traversable object.
+ /** Applies a function `f` to all elements of this $coll.
+ *
+ * Note: this method underlies the implementation of most other bulk operations.
+ * It's important to implement this method in an efficient way.
*
- * @param f A function that is applied for its side-effect to every element.
- * The result (of arbitrary type U) of function `f` is discarded.
*
- * @note This method underlies the implementation of most other bulk operations.
- * It's important to implement this method in an efficient way.
+ * @param f the function that is applied for its side-effect to every element.
+ * The result of function `f` is discarded.
+ *
+ * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`,
+ * but this is not necessary.
+ *
+ * @usecase def foreach(f: A => Unit): Unit
*/
def foreach[U](f: A => U): Unit
- /** Does this collection contain no elements?
+ /** Tests whether this $coll is empty.
+ *
+ * @return `true` if the $coll contains no elements, `false` otherwise.
*/
def isEmpty: Boolean = {
var result = true
@@ -100,67 +148,79 @@ self =>
result
}
- /** Does this collection contain some elements?
+ /** Tests whether this $coll can be repeatedly traversed.
+ * @return `true`
*/
- def nonEmpty: Boolean = !isEmpty
+ final def isTraversableAgain = true
- /** The number of elements in this collection
- */
- def size: Int = {
- var result = 0
- for (x <- this) result += 1
- result
- }
-
- /** Returns true if this collection is known to have finite size.
- * This is the case if the collection type is strict, or if the
- * collection type is non-strict (e.g. it's a Stream), but all
- * collection elements have been computed.
- * Many methods in this trait will not work on collections of
- * infinite sizes.
+ /** Tests whether this $coll is known to have a finite size.
+ * All strict collections are known to have finite size. For a non-strict collection
+ * such as `Stream`, the predicate returns `true` if all elements have been computed.
+ * It returns `false` if the stream is not yet evaluated to the end.
+ *
+ * Note: many collection methods will not work on collections of infinite sizes.
+ *
+ * @return `true` if this collection is known to have finite size, `false` otherwise.
*/
def hasDefiniteSize = true
- /** Creates a new traversable of type `That` which contains all elements of this traversable
- * followed by all elements of another traversable.
+ /** Concatenates this $coll with the elements of a traversable collection.
*
- * @param that The traversable to append
- */
- def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
- val b = bf(repr)
- b ++= thisCollection
- b ++= that
- b.result
- }
-
- /** Creates a new traversable of type `That` which contains all elements of this traversable
- * followed by all elements of an iterator.
+ * @param that the traversable to append.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` which contains all elements of this $coll
+ * followed by all elements of `that`.
*
- * @param that The iterator to append
+ * @usecase def ++(that: TraversableOnce[A]): $Coll[A]
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
- def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
+ if (that.isInstanceOf[IndexedSeqLike[_, _]]) b.sizeHint(this, that.size)
b ++= thisCollection
b ++= that
b.result
}
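(Editorial example, not part of this patch, of the unified `++` signature: because the argument is now a `TraversableOnce`, both collections and iterators can be appended.)

    List(1, 2) ++ List(3, 4)       // List(1, 2, 3, 4)
    List(1, 2) ++ Iterator(3, 4)   // List(1, 2, 3, 4)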
- /** Returns the traversable that results from applying the given function
- * <code>f</code> to each element of this traversable and collecting the results
- * in a traversable of type `That`.
+ /** Builds a new collection by applying a function to all elements of this $coll.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
*
- * @param f function to apply to each element.
+ * @usecase def map[B](f: A => B): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
*/
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
+ b.sizeHint(this)
for (x <- this) b += f(x)
b.result
}
- /** Applies the given function <code>f</code> to each element of
- * this traversable, then concatenates the results in a traversable of type That.
+ /** Builds a new collection by applying a function to all elements of this $coll
+ * and concatenating the results.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
*
- * @param f the function to apply on each element.
+ * @usecase def flatMap[B](f: A => Traversable[B]): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
*/
def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -168,10 +228,11 @@ self =>
b.result
}
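(Editorial examples, not part of this patch: `map` transforms each element, while `flatMap` transforms and concatenates the intermediate collections.)

    List(1, 2, 3) map (_ * 2)                  // List(2, 4, 6)
    List(1, 2, 3) flatMap (x => List(x, -x))   // List(1, -1, 2, -2, 3, -3)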
- /** Returns all the elements of this traversable that satisfy the
- * predicate <code>p</code>. The order of the elements is preserved.
- * @param p the predicate used to filter the traversable.
- * @return the elements of this traversable satisfying <code>p</code>.
+ /** Selects all elements of this $coll which satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return a new $coll consisting of all elements of this $coll that satisfy the given
+ * predicate `p`. The order of the elements is preserved.
*/
def filter(p: A => Boolean): Repr = {
val b = newBuilder
@@ -180,42 +241,72 @@ self =>
b.result
}
- /** Returns a traversable with all elements of this traversable which do not satisfy the predicate
- * <code>p</code>.
+ /** Selects all elements of this $coll which do not satisfy a predicate.
*
- * @param p the predicate used to test elements
- * @return the traversable without all elements that satisfy <code>p</code>
+ * @param p the predicate used to test elements.
+ * @return a new $coll consisting of all elements of this $coll that do not satisfy the given
+ * predicate `p`. The order of the elements is preserved.
*/
def filterNot(p: A => Boolean): Repr = filter(!p(_))
- /** Returns a new traversable based on the partial function <code>pf</code>,
- * containing pf(x) for all the elements which are defined on pf.
- * The order of the elements is preserved.
- * @param pf the partial function which filters and maps the traversable.
- * @return the new traversable.
- */
- def partialMap[B, That](pf: PartialFunction[Any, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ /** Builds a new collection by applying a partial function to all elements of this $coll
+ * on which the function is defined.
+ *
+ * @param pf the partial function which filters and maps the $coll.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ *
+ * @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ */
+ def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- this) if (pf.isDefinedAt(x)) b += pf(x)
b.result
}
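(Editorial sketch, not part of this patch, of `collect`: the partial function both filters and maps, keeping only elements on which it is defined.)

    val xs: List[Any] = List(1, "two", 3)
    xs collect { case i: Int => i + 1 }   // List(2, 4)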
- /** Returns a traversable with all elements of this traversable which do not satisfy the predicate
- * <code>p</code>.
+ /** Builds a new collection by applying an option-valued function to all elements of this $coll
+ * on which the function is defined.
+ *
+ * @param f the option-valued function which filters and maps the $coll.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the option-valued function
+ * `f` to each element and collecting all defined results.
+ * The order of the elements is preserved.
*
- * @param p the predicate used to test elements
- * @return the traversable without all elements that satisfy <code>p</code>
+ * @usecase def filterMap[B](f: A => Option[B]): $Coll[B]
+ *
+ * @param f the option-valued function which filters and maps the $coll.
+ * @return a new $coll resulting from applying the given option-valued function
+ * `f` to each element and collecting all defined results.
+ * The order of the elements is preserved.
+ def filterMap[B, That](f: A => Option[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val b = bf(repr)
+ for (x <- this)
+ f(x) match {
+ case Some(y) => b += y
+ case _ =>
+ }
+ b.result
+ }
*/
- @deprecated("use `filterNot' instead")
- def remove(p: A => Boolean): Repr = filterNot(p)
- /** Partitions this traversable in two traversables according to a predicate.
+ /** Partitions this $coll in two ${coll}s according to a predicate.
*
- * @param p the predicate on which to partition
- * @return a pair of traversables: the traversable that satisfies the predicate
- * <code>p</code> and the traversable that does not.
- * The relative order of the elements in the resulting traversables
- * is the same as in the original traversable.
+ * @param p the predicate on which to partition.
+ * @return a pair of ${coll}s: the first $coll consists of all elements that
+ * satisfy the predicate `p` and the second $coll consists of all elements
+ * that don't. The relative order of the elements in the resulting ${coll}s
+ * is the same as in the original $coll.
*/
def partition(p: A => Boolean): (Repr, Repr) = {
val l, r = newBuilder
@@ -223,32 +314,43 @@ self =>
(l.result, r.result)
}
- /** Partition this traversable into a map of traversables
- * according to some discriminator function.
- * @invariant (xs partition f)(k) = xs filter (x => f(x) == k)
+ /** Partitions this $coll into a map of ${coll}s according to some discriminator function.
*
- * @note This method is not re-implemented by views. This means
+ * Note: this method is not re-implemented by views. This means
* when applied to a view it will always force the view and
- * return a new collection.
+ * return a new $coll.
+ *
+ * @param f the discriminator function.
+ * @tparam K the type of keys returned by the discriminator function.
+ * @return A map from keys to ${coll}s such that the following invariant holds:
+ * {{{
+ * (xs partition f)(k) = xs filter (x => f(x) == k)
+ * }}}
+ * That is, every key `k` is bound to a $coll of those elements `x`
+ * for which `f(x)` equals `k`.
+ *
*/
- def groupBy[K](f: A => K): Map[K, Repr] = {
- var m = Map[K, Builder[A, Repr]]()
+ def groupBy[K](f: A => K): immutable.Map[K, Repr] = {
+ val m = mutable.Map.empty[K, Builder[A, Repr]]
for (elem <- this) {
val key = f(elem)
- val bldr = m get key match {
- case None => val b = newBuilder; m = m updated (key, b); b
- case Some(b) => b
- }
+ val bldr = m.getOrElseUpdate(key, newBuilder)
bldr += elem
}
- m mapValues (_.result)
+ val b = immutable.Map.newBuilder[K, Repr]
+ for ((k, v) <- m)
+ b += ((k, v.result))
+
+ b.result
}
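(Editorial example, not part of this patch, of the invariant stated above; the result is now an `immutable.Map` regardless of the receiver type.)

    List(1, 2, 3, 4, 5) groupBy (_ % 2)
    // Map(1 -> List(1, 3, 5), 0 -> List(2, 4))   (key order unspecified)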
- /** Return true iff the given predicate `p` yields true for all elements
- * of this traversable.
+ /** Tests whether a predicate holds for all elements of this $coll.
*
- * @note May not terminate for infinite-sized collections.
- * @param p the predicate
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate used to test elements.
+ * @return `true` if the given predicate `p` holds for all elements
+ * of this $coll, otherwise `false`.
*/
def forall(p: A => Boolean): Boolean = {
var result = true
@@ -259,11 +361,13 @@ self =>
result
}
- /** Return true iff there is an element in this traversable for which the
- * given predicate `p` yields true.
+ /** Tests whether a predicate holds for some of the elements of this $coll.
+ *
+ * $mayNotTerminateInf
*
- * @note May not terminate for infinite-sized collections.
- * @param p the predicate
+ * @param p the predicate used to test elements.
+ * @return `true` if the given predicate `p` holds for some of the elements
+ * of this $coll, otherwise `false`.
*/
def exists(p: A => Boolean): Boolean = {
var result = false
@@ -274,28 +378,14 @@ self =>
result
}
- /** Count the number of elements in the traversable which satisfy a predicate.
+ /** Finds the first element of the $coll satisfying a predicate, if any.
*
- * @note Will not terminate for infinite-sized collections.
- * @param p the predicate for which to count
- * @return the number of elements satisfying the predicate <code>p</code>.
- */
- def count(p: A => Boolean): Int = {
- var cnt = 0
- for (x <- this) {
- if (p(x)) cnt += 1
- }
- cnt
- }
-
- /** Find and return the first element of the traversable object satisfying a
- * predicate, if any.
+ * $mayNotTerminateInf
+ * $orderDependent
*
- * @note may not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered.
- * @param p the predicate
- * @return an option containing the first element in the traversable object
- * satisfying <code>p</code>, or <code>None</code> if none exists.
+ * @param p the predicate used to test elements.
+ * @return an option value containing the first element in the $coll
+ * that satisfies `p`, or `None` if none exists.
*/
def find(p: A => Boolean): Option[A] = {
var result: Option[A] = None
@@ -306,158 +396,74 @@ self =>
result
}
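(Editorial examples, not part of this patch, of the three predicate queries above.)

    List(1, 2, 3) forall (_ > 0)    // true
    List(1, 2, 3) exists (_ > 2)    // true
    List(1, 2, 3, 4) find (_ > 2)   // Some(3)
    List(1, 2, 3) find (_ > 5)      // None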
- /** Combines the elements of this traversable object together using the binary
- * function <code>f</code>, from left to right, and starting with
- * the value <code>z</code>.
+ /** Applies an option-valued function to successive elements of this $coll
+ * until a defined value is found.
*
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- * @return <code>f(... (f(f(z, a<sub>0</sub>), a<sub>1</sub>) ...),
- * a<sub>n</sub>)</code> if the traversable is
- * <code>[a<sub>0</sub>, a<sub>1</sub>, ..., a<sub>n</sub>]</code>.
- */
- def foldLeft[B](z: B)(op: (B, A) => B): B = {
- var result = z
- for (x <- this)
- result = op(result, x)
- result
- }
-
- /** Similar to <code>foldLeft</code> but can be used as
- * an operator with the order of traversable and zero arguments reversed.
- * That is, <code>z /: xs</code> is the same as <code>xs foldLeft z</code>
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- */
- def /: [B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
-
- /** Combines the elements of this traversable together using the binary
- * function <code>f</code>, from right to left, and starting with
- * the value <code>z</code>.
- *
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- * @return <code>f(a<sub>0</sub>, f(a<sub>1</sub>, f(..., f(a<sub>n</sub>, z)...)))</code>
- * if the traversable is <code>[a<sub>0</sub>, a1, ..., a<sub>n</sub>]</code>.
- */
- def foldRight[B](z: B)(op: (A, B) => B): B = {
- var elems: List[A] = Nil
- for (x <- this) elems = x :: elems
- elems.foldLeft(z)((x, y) => op(y, x))
- }
-
- /** An alias for <code>foldRight</code>.
- * That is, <code>xs :\ z</code> is the same as <code>xs foldRight z</code>
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- */
- def :\ [B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
-
- /** Combines the elements of this traversable object together using the binary
- * operator <code>op</code>, from left to right
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- * @param op The operator to apply
- * @return <code>op(... op(a<sub>0</sub>,a<sub>1</sub>), ..., a<sub>n</sub>)</code>
- if the traversable object has elements
- * <code>a<sub>0</sub>, a<sub>1</sub>, ..., a<sub>n</sub></code>.
- * @throws Predef.UnsupportedOperationException if the traversable object is empty.
- */
- def reduceLeft[B >: A](op: (B, A) => B): B = {
- if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
- var result: B = head
- var first = true
- for (x <- this)
- if (first) first = false
- else result = op(result, x)
+ * $mayNotTerminateInf
+ * $orderDependent
+ *
+ * @param f the function to be applied to successive elements.
+ * @return an option value containing the first defined result of
+ * `f`, or `None` if `f` returns `None` for all elements.
+ def mapFind[B](f: A => Option[B]): Option[B] = {
+ var result: Option[B] = None
+ breakable {
+ for (x <- this)
+ f(x) match {
+ case s @ Some(_) => result = s; break
+ case _ =>
+ }
+ }
result
}
-
- /** Combines the elements of this traversable object together using the binary
- * operator <code>op</code>, from left to right
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- * @param op The operator to apply
- * @return If the traversable is non-empty, the result of the operations as an Option, otherwise None.
*/
- def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = {
- if (isEmpty) None else Some(reduceLeft(op))
- }
- /** Combines the elements of this traversable object together using the binary
- * operator <code>op</code>, from right to left
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- * @param op The operator to apply
- *
- * @return <code>a<sub>0</sub> op (... op (a<sub>n-1</sub> op a<sub>n</sub>)...)</code>
- * if the traversable object has elements <code>a<sub>0</sub>, a<sub>1</sub>, ...,
- * a<sub>n</sub></code>.
+ /**
+ * Produces a collection containing cumulative results of applying the operator going left to right.
+ * $willNotTerminateInf
+ * $orderDependent
*
- * @throws Predef.UnsupportedOperationException if the iterator is empty.
+ * @tparam B the type of the elements in the resulting collection
+ * @tparam That the actual type of the resulting collection
+ * @param z the initial value
+ * @param op the binary operator applied to the intermediate result and the element
+ * @param bf $bfinfo
+ * @return collection with intermediate results
*/
- def reduceRight[B >: A](op: (A, B) => B): B = {
- if (isEmpty) throw new UnsupportedOperationException("empty.reduceRight")
- var elems: List[A] = Nil
- for (x <- this) elems = x :: elems
- elems.reduceLeft[B]((x, y) => op(y, x))
+ def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val b = bf(repr)
+ b.sizeHint(this, 1)
+ var acc = z
+ b += acc
+ for (x <- this) { acc = op(acc, x); b += acc }
+ b.result
}
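(Editorial example, not part of this patch: `scanLeft` keeps every intermediate fold result, starting with the initial value, so the result has one element more than the receiver.)

    List(1, 2, 3).scanLeft(0)(_ + _)   // List(0, 1, 3, 6)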
- /** Combines the elements of this traversable object together using the binary
- * operator <code>op</code>, from right to left.
- * @note Will not terminate for infinite-sized collections.
- * @note Might return different results for different runs, unless this traversable is ordered, or
- * the operator is associative and commutative.
- * @param op The operator to apply
- * @return If the traversable is non-empty, the result of the operations as an Option, otherwise None.
+ /**
+ * Produces a collection containing cumulative results of applying the operator going right to left.
+ * $willNotTerminateInf
+ * $orderDependent
+ *
+ * @tparam B the type of the elements in the resulting collection
+ * @tparam That the actual type of the resulting collection
+ * @param z the initial value
+ * @param op the binary operator applied to the intermediate result and the element
+ * @param bf $bfinfo
+ * @return collection with intermediate results
*/
- def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = {
- if (isEmpty) None else Some(reduceRight(op))
- }
-
- /** Returns the sum of all elements with respect to the numeric operations in `num` */
- def sum[B >: A](implicit num: Numeric[B]): B = {
- var acc = num.zero
- for (x <- self) acc = num.plus(acc, x)
- acc
- }
-
- /** Returns the product of all elements with respect to the numeric operations in `num` */
- def product[B >: A](implicit num: Numeric[B]): B = {
- var acc = num.one
- for (x <- self) acc = num.times(acc, x)
- acc
- }
-
- /** Returns the minimal element with respect to the given ordering `cmp` */
- def min[B >: A](implicit cmp: Ordering[B]): A = {
- require(!self.isEmpty, "<empty>.min")
- var acc = self.head
- for (x <- self)
- if (cmp.lt(x, acc)) acc = x
- acc
- }
-
- /** Returns the maximal element with respect to the given ordering `cmp` */
- def max[B >: A](implicit cmp: Ordering[B]): A = {
- require(!self.isEmpty, "<empty>.max")
- var acc = self.head
- for (x <- self)
- if (cmp.gt(x, acc)) acc = x
- acc
+ def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val b = bf(repr)
+ b.sizeHint(this, 1)
+ var acc = z
+ b += acc
+ for (x <- reversed) { acc = op(x, acc); b += acc }
+ b.result
}
- /** The first element of this traversable.
- *
- * @note Might return different results for different runs, unless this traversable is ordered
- * @throws Predef.NoSuchElementException if the traversable is empty.
+ /** Selects the first element of this $coll.
+ * $orderDependent
+ * @return the first element of this $coll.
+ * @throws `NoSuchElementException` if the $coll is empty.
*/
def head: A = {
var result: () => A = () => throw new NoSuchElementException
@@ -470,26 +476,27 @@ self =>
result()
}
- /** Returns as an option the first element of this traversable
- * or <code>None</code> if traversable is empty.
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Optionally selects the first element.
+ * $orderDependent
+ * @return the first element of this $coll if it is nonempty, `None` if it is empty.
*/
def headOption: Option[A] = if (isEmpty) None else Some(head)
- /** a traversable consisting of all elements of this traversable
- * except the first one.
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Selects all elements except the first.
+ * $orderDependent
+ * @return a $coll consisting of all elements of this $coll
+ * except the first one.
+ * @throws `UnsupportedOperationException` if the $coll is empty.
*/
def tail: Repr = {
- require(!self.isEmpty, "<empty>.tail")
+ if (isEmpty) throw new UnsupportedOperationException("empty.tail")
drop(1)
}
- /** The last element of this traversable.
- *
- * @throws Predef.NoSuchElementException if the traversable is empty.
- * @note Might return different results for different runs, unless this traversable is ordered
- */
+ /** Selects the last element.
+ * $orderDependent
+ * @return The last element of this $coll.
+ * @throws `NoSuchElementException` if the $coll is empty. */
def last: A = {
var lst = head
for (x <- this)
@@ -497,23 +504,24 @@ self =>
lst
}
- /** Returns as an option the last element of this traversable or
- * <code>None</code> if traversable is empty.
- *
- * @return the last element as an option.
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Optionally selects the last element.
+ * $orderDependent
+ * @return the last element of this $coll if it is nonempty, `None` if it is empty.
*/
def lastOption: Option[A] = if (isEmpty) None else Some(last)
- /** a traversable consisting of all elements of this traversable except the last one.
- * @throws Predef.UnsupportedOperationException if the stream is empty.
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Selects all elements except the last.
+ * $orderDependent
+ * @return a $coll consisting of all elements of this $coll
+ * except the last one.
+ * @throws `UnsupportedOperationException` if the $coll is empty.
*/
def init: Repr = {
if (isEmpty) throw new UnsupportedOperationException("empty.init")
var lst = head
var follow = false
val b = newBuilder
+ b.sizeHint(this, -1)
for (x <- this) {
if (follow) b += lst
else follow = true
@@ -522,15 +530,15 @@ self =>
b.result
}
- /** Return a traversable consisting only of the first <code>n</code>
- * elements of this traversable, or else the whole traversable, if it has less
- * than <code>n</code> elements.
- *
- * @param n the number of elements to take
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Selects first ''n'' elements.
+ * $orderDependent
+ * @param n the number of elements to take from this $coll.
+ * @return a $coll consisting only of the first `n` elements of this $coll, or else the
+ * whole $coll, if it has less than `n` elements.
*/
def take(n: Int): Repr = {
val b = newBuilder
+ b.sizeHintBounded(n, this)
var i = 0
breakable {
for (x <- this) {
@@ -542,16 +550,15 @@ self =>
b.result
}
- /** Returns this traversable without its <code>n</code> first elements
- * If this traversable has less than <code>n</code> elements, the empty
- * traversable is returned.
- *
- * @param n the number of elements to drop
- * @return the new traversable
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Selects all elements except first ''n'' ones.
+ * $orderDependent
+ * @param n the number of elements to drop from this $coll.
+ * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the
+ * empty $coll, if this $coll has less than `n` elements.
*/
def drop(n: Int): Repr = {
val b = newBuilder
+ if (n >= 0) b.sizeHint(this, -n)
var i = 0
for (x <- this) {
if (i >= n) b += x
@@ -560,18 +567,20 @@ self =>
b.result
}
- /** A sub-traversable starting at index `from`
- * and extending up to (but not including) index `until`.
+ /** Selects an interval of elements.
*
- * @note c.slice(from, to) is equivalent to (but possibly more efficient than)
- * c.drop(from).take(to - from)
+ * Note: `c.slice(from, to)` is equivalent to (but possibly more efficient than)
+ * `c.drop(from).take(to - from)`
+ * $orderDependent
*
- * @param from The index of the first element of the returned subsequence
- * @param until The index of the element following the returned subsequence
- * @note Might return different results for different runs, unless this traversable is ordered
+ * @param from the index of the first returned element in this $coll.
+ * @param until the index one past the last returned element in this $coll.
+ * @return a $coll containing the elements starting at index `from`
+ * and extending up to (but not including) index `until` of this $coll.
*/
def slice(from: Int, until: Int): Repr = {
val b = newBuilder
+ b.sizeHintBounded(until - from, this)
var i = 0
breakable {
for (x <- this) {
@@ -583,11 +592,11 @@ self =>
b.result
}
- /** Returns the longest prefix of this traversable whose elements satisfy
- * the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Takes longest prefix of elements that satisfy a predicate.
+ * $orderDependent
+ * @param p The predicate used to test elements.
+ * @return the longest prefix of this $coll whose elements all satisfy
+ * the predicate `p`.
*/
def takeWhile(p: A => Boolean): Repr = {
val b = newBuilder
@@ -600,11 +609,11 @@ self =>
b.result
}
- /** Returns the longest suffix of this traversable whose first element
- * does not satisfy the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @note Might return different results for different runs, unless this traversable is ordered
+ /** Drops longest prefix of elements that satisfy a predicate.
+ * $orderDependent
+ * @param p The predicate used to test elements.
+ * @return the longest suffix of this $coll whose first element
+ * does not satisfy the predicate `p`.
*/
def dropWhile(p: A => Boolean): Repr = {
val b = newBuilder
@@ -616,13 +625,16 @@ self =>
b.result
}
- /** Returns a pair consisting of the longest prefix of the traversable whose
- * elements all satisfy the given predicate, and the rest of the traversable.
+ /** Splits this $coll into a prefix/suffix pair according to a predicate.
+ *
+ * Note: `c span p` is equivalent to (but possibly more efficient than)
+ * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the predicate `p`
+ * does not cause any side-effects.
+ * $orderDependent
*
* @param p the test predicate
- * @return a pair consisting of the longest prefix of the traversable whose
- * elements all satisfy <code>p</code>, and the rest of the traversable.
- * @note Might return different results for different runs, unless this traversable is ordered
+ * @return a pair consisting of the longest prefix of this $coll whose
+ * elements all satisfy `p`, and the rest of this $coll.
*/
def span(p: A => Boolean): (Repr, Repr) = {
val l, r = newBuilder
@@ -634,16 +646,19 @@ self =>
(l.result, r.result)
}
- /** Split the traversable at a given point and return the two parts thus
- * created.
+ /** Splits this $coll into two at a given position.
+ * Note: `c splitAt n` is equivalent to (but possibly more efficient than)
+ * `(c take n, c drop n)`.
+ * $orderDependent
*
- * @param n the position at which to split
- * @return a pair of traversables composed of the first <code>n</code>
- * elements, and the other elements.
- * @note Might return different results for different runs, unless this traversable is ordered
+ * @param n the position at which to split.
+ * @return a pair of ${coll}s consisting of the first `n`
+ * elements of this $coll, and the other elements.
*/
def splitAt(n: Int): (Repr, Repr) = {
val l, r = newBuilder
+ l.sizeHintBounded(n, this)
+ if (n >= 0) r.sizeHint(this, -n)
var i = 0
for (x <- this) {
(if (i < n) l else r) += x
@@ -652,23 +667,21 @@ self =>
(l.result, r.result)
}
- /** Copy all elements of this traversable to a given buffer
- * @note Will not terminate for infinite-sized collections.
- * @param dest The buffer to which elements are copied
- */
- def copyToBuffer[B >: A](dest: Buffer[B]) {
- for (x <- this) dest += x
- }
-
- /** Fills the given array <code>xs</code> with at most `len` elements of
- * this traversable starting at position `start`.
- * Copying will stop once either the end of the current traversable is reached or
- * `len` elements have been copied or the end of the array is reached.
+ /** Copies elements of this $coll to an array.
+ * Fills the given array `xs` with at most `len` elements of
+ * this $coll, starting at position `start`.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the array is reached, or `len` elements have been copied.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @param start the starting index.
+ * @param len the maximal number of elements to copy.
+ * @tparam B the type of the elements of the array.
*
- * @note Will not terminate for infinite-sized collections.
- * @param xs the array to fill.
- * @param start starting index.
- * @param len number of elements to copy
+ *
+ * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
var i = start
@@ -682,119 +695,20 @@ self =>
}
}
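(Editorial example, not part of this patch: copying stops at whichever limit is reached first, the end of the collection, the end of the array, or `len` elements.)

    val arr = new Array[Int](5)
    List(1, 2, 3) copyToArray (arr, 1, 2)
    // arr is now Array(0, 1, 2, 0, 0)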
- /** Fills the given array <code>xs</code> with the elements of
- * this traversable starting at position <code>start</code>
- * until either the end of the current traversable or the end of array `xs` is reached.
- *
- * @note Will not terminate for infinite-sized collections.
- * @param xs the array to fill.
- * @param start starting index.
- * @pre the array must be large enough to hold all elements.
- */
- def copyToArray[B >: A](xs: Array[B], start: Int) {
- copyToArray(xs, start, xs.length - start)
- }
-
- /** Converts this traversable to a fresh Array containing all elements.
- * @note Will not terminate for infinite-sized collections.
- */
- def toArray[B >: A : ClassManifest]: Array[B] = {
- val result = new Array[B](size)
- copyToArray(result, 0)
- result
- }
-
- /** Returns a list with all elements of this traversable object.
- * @note Will not terminate for infinite-sized collections.
- */
- def toList: List[A] = (new ListBuffer[A] ++= thisCollection).toList
-
- /** Returns a traversable with all elements in this traversable object.
- * @note Will not terminate for infinite-sized collections.
- */
- def toIterable: Iterable[A] = toStream
-
- /** Returns a sequence with all elements in this traversable object.
- * @note Will not terminate for infinite-sized collections.
- */
- def toSeq: Seq[A] = toList
-
- /** Returns a IndexedSeq with all elements in this traversable object.
- * @note Will not terminate for infinite-sized collections.
- */
- def toIndexedSeq[B >: A]: mutable.IndexedSeq[B] = (new ArrayBuffer[B] ++= thisCollection)
-
- /** Returns a stream with all elements in this traversable object.
- */
- def toStream: Stream[A] = toList.toStream
-
- /** Returns an immutable set with all unique elements in this traversable object.
- */
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ thisCollection
-
- /** Returns a string representation of this traversable object. The resulting string
- * begins with the string <code>start</code> and is finished by the string
- * <code>end</code>. Inside, the string representations of elements (w.r.t.
- * the method <code>toString()</code>) are separated by the string
- * <code>sep</code>.
- *
- * @ex <code>List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"</code>
- * @param start starting string.
- * @param sep separator string.
- * @param end ending string.
- * @return a string representation of this traversable object.
- */
- def mkString(start: String, sep: String, end: String): String =
- addString(new StringBuilder(), start, sep, end).toString
-
- /** Returns a string representation of this traversable object. The string
- * representations of elements (w.r.t. the method <code>toString()</code>)
- * are separated by the string <code>sep</code>.
- *
- * @param sep separator string.
- * @return a string representation of this traversable object.
- */
- def mkString(sep: String): String =
- addString(new StringBuilder(), sep).toString
-
- /** Returns a string representation of this traversable object. The string
- * representations of elements (w.r.t. the method <code>toString()</code>)
- * follow each other without any separator string.
- */
- def mkString: String =
- addString(new StringBuilder()).toString
-
- /** Write all elements of this traversable into given string builder.
- * The written text begins with the string <code>start</code> and is finished by the string
- * <code>end</code>. Inside, the string representations of elements (w.r.t.
- * the method <code>toString()</code>) are separated by the string
- * <code>sep</code>.
- */
- def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
- b append start
- var first = true
- for (x <- this) {
- if (first) first = false
- else b append sep
- b append x
- }
- b append end
- }
-
- /** Write all elements of this string into given string builder.
- * The string representations of elements (w.r.t. the method <code>toString()</code>)
- * are separated by the string <code>sep</code>.
- */
- def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "")
+ def toTraversable: Traversable[A] = thisCollection
+ def toIterator: Iterator[A] = toStream.iterator
+ def toStream: Stream[A] = toBuffer.toStream
- /** Write all elements of this string into given string builder without using
- * any separator between consecutive elements.
+ /** Converts this $coll to a string.
+ * @return a string representation of this collection. By default this
+ * string consists of the `stringPrefix` of this $coll,
+ * followed by all elements separated by commas and enclosed in parentheses.
*/
- def addString(b: StringBuilder): StringBuilder = addString(b, "")
-
override def toString = mkString(stringPrefix + "(", ", ", ")")
- /** Defines the prefix of this object's <code>toString</code> representation.
+ /** Defines the prefix of this object's `toString` representation.
+ * @return a string representation which starts the result of `toString` applied to this $coll.
+ * By default the string prefix is the simple name of the collection class $coll.
*/
def stringPrefix : String = {
var string = repr.asInstanceOf[AnyRef].getClass.getName
@@ -805,28 +719,67 @@ self =>
string
}
- /** Creates a view of this traversable @see TraversableView
+ /** Creates a non-strict view of this $coll.
+ *
+ * @return a non-strict view of this $coll.
*/
def view = new TraversableView[A, Repr] {
protected lazy val underlying = self.repr
- override def foreach[B](f: A => B) = self foreach f
+ override def foreach[U](f: A => U) = self foreach f
}
- /** A sub-traversable starting at index `from`
- * and extending up to (but not including) index `until`.
+ /** Creates a non-strict view of a slice of this $coll.
*
- * @param from The index of the first element of the slice
- * @param until The index of the element following the slice
- * @note The difference between `view` and `slice` is that `view` produces
- * a view of the current traversable, whereas `slice` produces a new traversable.
+ * Note: the difference between `view` and `slice` is that `view` produces
+ * a view of the current $coll, whereas `slice` produces a new $coll.
*
- * @note Might return different results for different runs, unless this traversable is ordered
- * @note view(from, to) is equivalent to view.slice(from, to)
+ * Note: `view(from, to)` is equivalent to `view.slice(from, to)`
+ * $orderDependent
+ *
+ * @param from the index of the first element of the view
+ * @param until the index of the element following the view
+ * @return a non-strict view of a slice of this $coll, starting at index `from`
+ * and extending up to (but not including) index `until`.
*/
def view(from: Int, until: Int): TraversableView[A, Repr] = view.slice(from, until)
- class WithFilter(p: A => Boolean) {
-
+ /** Creates a non-strict filter of this $coll.
+ *
+ * Note: the difference between `c filter p` and `c withFilter p` is that
+ * the former creates a new collection, whereas the latter only restricts
+ * the domain of subsequent `map`, `flatMap`, `foreach`, and `withFilter` operations.
+ * $orderDependent
+ *
+ * @param p the predicate used to test elements.
+ * @return an object of class `WithFilter`, which supports
+ * `map`, `flatMap`, `foreach`, and `withFilter` operations.
+ * All these operations apply to those elements of this $coll which
+ * satisfy the predicate `p`.
+ */
+ def withFilter(p: A => Boolean): FilterMonadic[A, Repr] = new WithFilter(p)
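(Editorial example, not part of this patch, contrasting `filter` and `withFilter`: the latter builds no intermediate collection, it only restricts the subsequent `map`.)

    val xs = List(1, 2, 3, 4)
    (xs filter (_ % 2 == 0)) map (_ * 10)       // List(20, 40), via an intermediate List(2, 4)
    (xs withFilter (_ % 2 == 0)) map (_ * 10)   // List(20, 40), no intermediate collection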
+
+ /** A class supporting filtered operations. Instances of this class are returned by
+ * method `withFilter`.
+ */
+ class WithFilter(p: A => Boolean) extends FilterMonadic[A, Repr] {
+
+ /** Builds a new collection by applying a function to all elements of the
+ * outer $coll containing this `WithFilter` instance that satisfy predicate `p`.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given function
+ * `f` to each element of the outer $coll that satisfies predicate `p`
+ * and collecting the results.
+ *
+ * @usecase def map[B](f: A => B): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of the outer $coll that satisfies predicate `p`
+ * and collecting the results.
+ */
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- self)
@@ -834,6 +787,21 @@ self =>
b.result
}
+ /** Builds a new collection by applying a function to all elements of the
+ * outer $coll containing this `WithFilter` instance that satisfy predicate `p` and concatenating the results.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` resulting from applying the given collection-valued function
+ * `f` to each element of the outer $coll that satisfies predicate `p` and concatenating the results.
+ *
+ * @usecase def flatMap[B](f: A => Traversable[B]): $Coll[B]
+ *
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of the outer $coll that satisfies predicate `p` and concatenating the results.
+ */
def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- self)
@@ -841,13 +809,31 @@ self =>
b.result
}
+ /** Applies a function `f` to all elements of the outer $coll containing this `WithFilter` instance
+ * that satisfy predicate `p`.
+ *
+ * @param f the function that is applied for its side-effect to every element.
+ * The result of function `f` is discarded.
+ *
+ * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`,
+ * but this is not necessary.
+ *
+ * @usecase def foreach(f: A => Unit): Unit
+ */
def foreach[U](f: A => U): Unit =
for (x <- self)
if (p(x)) f(x)
+ /** Further refines the filter for this $coll.
+ *
+ * @param q the predicate used to test elements.
+ * @return an object of class `WithFilter`, which supports
+ * `map`, `flatMap`, `foreach`, and `withFilter` operations.
+ * All these operations apply to those elements of this $coll which
+ * satisfy the predicate `q` in addition to the predicate `p`.
+ */
def withFilter(q: A => Boolean): WithFilter =
new WithFilter(x => p(x) && q(x))
}
-
- def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
}
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
new file mode 100644
index 0000000000..6656b05083
--- /dev/null
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -0,0 +1,538 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+import mutable.{ Buffer, ListBuffer, ArrayBuffer }
+
+/** A template trait for collections which can be traversed either once only
+ * or one or more times.
+ * $traversableonceinfo
+ *
+ * @tparam A the element type of the collection
+ *
+ * @define traversableonceinfo
+ * This trait is composed of those methods which can be implemented
+ * solely in terms of foreach and which do not need access to a Builder.
+ * It represents the implementations common to Iterators and
+ * Traversables, such as folds, conversions, and other operations which
+ * traverse some or all of the elements and return a derived value.
+ *
+ * @author Martin Odersky
+ * @author Paul Phillips
+ * @version 2.8
+ * @since 2.8
+ *
+ * @define coll traversable or iterator
+ * @define orderDependentFold
+ *
+ * Note: might return different results for different runs, unless the underlying collection type is ordered
+ * or the operator is associative and commutative.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ */
+trait TraversableOnce[+A] {
+ self =>
+
+ /** Self-documenting abstract methods. */
+ def foreach[U](f: A => U): Unit
+ def isEmpty: Boolean
+ def hasDefiniteSize: Boolean
+
+ /** Tests whether this $coll can be repeatedly traversed. Always
+ * true for Traversables and false for Iterators unless overridden.
+ *
+ * @return `true` if it is repeatedly traversable, `false` otherwise.
+ */
+ def isTraversableAgain: Boolean
+
+ /** Returns an Iterator over the elements in this $coll. Will return
+ * the same Iterator if this instance is already an Iterator.
+ * $willNotTerminateInf
+ * @return an Iterator containing all elements of this $coll.
+ */
+ def toIterator: Iterator[A]
+
+ /** Converts this $coll to an unspecified Traversable. Will return
+ * the same collection if this instance is already Traversable.
+ * $willNotTerminateInf
+ * @return a Traversable containing all elements of this $coll.
+ */
+ def toTraversable: Traversable[A]
+
+ /** Converts this $coll to a stream.
+ * $willNotTerminateInf
+ * @return a stream containing all elements of this $coll.
+ */
+ def toStream: Stream[A]
+
+ /** Presently these are abstract because the Traversable versions use
+ * breakable/break, and I wasn't sure enough of how that's supposed to
+ * function to consolidate them with the Iterator versions.
+ */
+ def forall(p: A => Boolean): Boolean
+ def exists(p: A => Boolean): Boolean
+ def find(p: A => Boolean): Option[A]
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
+ // def mapFind[B](f: A => Option[B]): Option[B]
+
+ // for internal use
+ protected[this] def reversed = {
+ var elems: List[A] = Nil
+ self foreach (elems ::= _)
+ elems
+ }
+
+ /** The size of this $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * @return the number of elements in this $coll.
+ */
+ def size: Int = {
+ var result = 0
+ for (x <- self) result += 1
+ result
+ }
+
+ /** Tests whether the $coll is not empty.
+ *
+ * @return `true` if the $coll contains at least one element, `false` otherwise.
+ */
+ def nonEmpty: Boolean = !isEmpty
+
+ /** Counts the number of elements in the $coll which satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return the number of elements satisfying the predicate `p`.
+ */
+ def count(p: A => Boolean): Int = {
+ var cnt = 0
+ for (x <- this)
+ if (p(x)) cnt += 1
+
+ cnt
+ }
+
+ /** Applies a binary operator to a start value and all elements of this $coll, going left to right.
+ *
+ * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as `xs foldLeft z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(op(z, x,,1,,), x,,2,,), ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
+
+ /** Applies a binary operator to all elements of this $coll and a start value, going right to left.
+ *
+ * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as `xs foldRight z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value
+ * @param op the binary operator
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
+
+ /** Applies a binary operator to a start value and all elements of this $coll, going left to right.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def foldLeft[B](z: B)(op: (B, A) => B): B = {
+ var result = z
+ this foreach (x => result = op(result, x))
+ result
+ }
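As an illustrative sketch of the two spellings documented above (not part of the change itself), a left fold over a small list:

    val xs = List(1, 2, 3, 4)
    xs.foldLeft(0)(_ + _)                               // 10, i.e. (((0 + 1) + 2) + 3) + 4
    (0 /: xs)(_ + _)                                    // 10, the same fold via the `/:` alias
    xs.foldLeft(List.empty[Int])((acc, x) => x :: acc)  // List(4, 3, 2, 1): reversal as a fold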
+
+ /** Applies a binary operator to all elements of this $coll and a start value, going right to left.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def foldRight[B](z: B)(op: (A, B) => B): B =
+ reversed.foldLeft(z)((x, y) => op(y, x))
+
+ /** Applies a binary operator to all elements of this $coll, going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right:
+ * {{{
+ * op(...(op(x,,1,,, x,,2,,), ... ) , x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty.
+ */
+ def reduceLeft[B >: A](op: (B, A) => B): B = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.reduceLeft")
+
+ var first = true
+ var acc: B = 0.asInstanceOf[B]
+
+ for (x <- self) {
+ if (first) {
+ acc = x
+ first = false
+ }
+ else acc = op(acc, x)
+ }
+ acc
+ }
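A small usage sketch: unlike `foldLeft`, `reduceLeft` has no start value, so the first element seeds the accumulator and an empty collection throws.

    List(1, 5, 3).reduceLeft(_ max _)   // 5
    List(1, 5, 3).reduceLeft(_ - _)     // -7, i.e. (1 - 5) - 3
    // List.empty[Int].reduceLeft(_ + _) throws UnsupportedOperationException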
+
+ /** Applies a binary operator to all elements of this $coll, going right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going right to left:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty.
+ */
+ def reduceRight[B >: A](op: (A, B) => B): B = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.reduceRight")
+
+ reversed.reduceLeft[B]((x, y) => op(y, x))
+ }
+
+ /** Optionally applies a binary operator to all elements of this $coll, going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
+ if (isEmpty) None else Some(reduceLeft(op))
+
+ /** Optionally applies a binary operator to all elements of this $coll, going right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceRightOption[B >: A](op: (A, B) => B): Option[B] =
+ if (isEmpty) None else Some(reduceRight(op))
+
+ /** Sums up the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `+` operator to be used in forming the sum.
+ * @tparam B the result type of the `+` operator.
+ * @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
+ *
+ * @usecase def sum: A
+ *
+ * @return the sum of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `sum`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ *
+ */
+ def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
+
+ /** Multiplies together the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `*` operator to be used in forming the product.
+ * @tparam B the result type of the `*` operator.
+ * @return the product of all elements of this $coll with respect to the `*` operator in `num`.
+ *
+ * @usecase def product: A
+ *
+ * @return the product of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `product`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ */
+ def product[B >: A](implicit num: Numeric[B]): B = foldLeft(num.one)(num.times)
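A quick sketch of the `Numeric`-based use cases above; any element type with an implicit `Numeric` instance behaves the same way:

    List(1, 2, 3, 4).sum                  // 10
    List(1, 2, 3, 4).product              // 24
    List(1.5, 2.5).sum                    // 4.0
    List(BigInt(2), BigInt(10)).product   // 20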
+
+ /** Finds the smallest element.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The type over which the ordering is defined.
+ * @return the smallest element of this $coll with respect to the ordering `cmp`.
+ *
+ * @usecase def min: A
+ * @return the smallest element of this $coll
+ */
+ def min[B >: A](implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.min")
+
+ reduceLeft((x, y) => if (cmp.lteq(x, y)) x else y)
+ }
+
+ /** Finds the largest element.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The type over which the ordering is defined.
+ * @return the largest element of this $coll with respect to the ordering `cmp`.
+ *
+ * @usecase def max: A
+ * @return the largest element of this $coll.
+ */
+ def max[B >: A](implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.max")
+
+ reduceLeft((x, y) => if (cmp.gteq(x, y)) x else y)
+ }
+
+ /** Copies all elements of this $coll to a buffer.
+ * $willNotTerminateInf
+ * @param dest The buffer to which elements are copied.
+ */
+ def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= self
+
+ /** Copies values of this $coll to an array.
+ * Fills the given array `xs` with values of this $coll, after skipping `start` values.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @param start the starting index.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A], start: Int): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B], start: Int): Unit =
+ copyToArray(xs, start, xs.length - start)
+
+ /** Copies values of this $coll to an array.
+ * Fills the given array `xs` with values of this $coll.
+ * Copying will stop once either the end of the current $coll is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A]): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B]): Unit =
+ copyToArray(xs, 0, xs.length)
+
+ /** Converts this $coll to an array.
+ * $willNotTerminateInf
+ *
+ * @tparam B the type of the elements of the array. A `ClassManifest` for this type must
+ * be available.
+ * @return an array containing all elements of this $coll.
+ *
+ * @usecase def toArray: Array[A]
+ * @return an array containing all elements of this $coll.
+ * A `ClassManifest` must be available for the element type of this $coll.
+ */
+ def toArray[B >: A : ClassManifest]: Array[B] = {
+ if (isTraversableAgain) {
+ val result = new Array[B](size)
+ copyToArray(result, 0)
+ result
+ }
+ else toBuffer.toArray
+ }
+
+ /** Converts this $coll to a list.
+ * $willNotTerminateInf
+ * @return a list containing all elements of this $coll.
+ */
+ def toList: List[A] = new ListBuffer[A] ++= self toList
+
+ /** Converts this $coll to an iterable collection. Note that
+ * the choice of target `Iterable` is lazy in this default implementation
+ * as this `TraversableOnce` may be lazy and unevaluated (i.e. it may
+ * be an iterator which is only traversable once).
+ *
+ * $willNotTerminateInf
+ * @return an `Iterable` containing all elements of this $coll.
+ */
+ def toIterable: Iterable[A] = toStream
+
+ /** Converts this $coll to a sequence. As with `toIterable`, it's lazy
+ * in this default implementation, as this `TraversableOnce` may be
+ * lazy and unevaluated.
+ *
+ * $willNotTerminateInf
+ * @return a sequence containing all elements of this $coll.
+ */
+ def toSeq: Seq[A] = toStream
+
+ /** Converts this $coll to an indexed sequence.
+ * $willNotTerminateInf
+ * @return an indexed sequence containing all elements of this $coll.
+ */
+ def toIndexedSeq[B >: A]: immutable.IndexedSeq[B] = immutable.IndexedSeq() ++ self
+
+ /** Converts this $coll to a mutable buffer.
+ * $willNotTerminateInf
+ * @return a buffer containing all elements of this $coll.
+ */
+ def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= self
+
+ /** Converts this $coll to a set.
+ * $willNotTerminateInf
+ * @return a set containing all elements of this $coll.
+ */
+ def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ self
+
+ /** Converts this $coll to a map. This method is unavailable unless
+ * the elements are members of Tuple2, each `(K, V)` becoming a key-value
+ * pair in the map. Duplicate keys will be overwritten by later keys:
+ * if this is an unordered collection, which key is in the resulting map
+ * is undefined.
+ * $willNotTerminateInf
+ * @return a map containing all elements of this $coll.
+ * @usecase def toMap: Map[K, V]
+ * @return a map of type `immutable.Map[K, V]`
+ * containing all key/value pairs of type `(K, V)` of this $coll.
+ */
+ def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
+ val b = immutable.Map.newBuilder[T, U]
+ for (x <- self)
+ b += x
+
+ b.result
+ }
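A minimal sketch of the `A <:< (T, U)` evidence in action: `toMap` is only applicable when the elements are pairs, and later keys win.

    List("a" -> 1, "b" -> 2, "a" -> 3).toMap   // Map(a -> 3, b -> 2)
    // List(1, 2, 3).toMap does not compile: no evidence that Int <:< (T, U)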
+
+ /** Displays all elements of this $coll in a string using start, end, and separator strings.
+ *
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return a string representation of this $coll. The resulting string
+ * begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
+ */
+ def mkString(start: String, sep: String, end: String): String =
+ addString(new StringBuilder(), start, sep, end).toString
+
+ /** Displays all elements of this $coll in a string using a separator string.
+ *
+ * @param sep the separator string.
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("|") = "1|2|3"`
+ */
+ def mkString(sep: String): String = mkString("", sep, "")
+
+ /** Displays all elements of this $coll in a string.
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll follow each other without any separator string.
+ */
+ def mkString: String = mkString("")
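The three `mkString` overloads differ only in which of the start, separator, and end strings are supplied; mirroring the @example lines above:

    List(1, 2, 3).mkString("(", "; ", ")")   // "(1; 2; 3)"
    List(1, 2, 3).mkString("|")              // "1|2|3"
    List(1, 2, 3).mkString                   // "123"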
+
+ /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
+ * The written text begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @param b the string builder to which elements are appended.
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
+ var first = true
+
+ b append start
+ for (x <- self) {
+ if (first) {
+ b append x
+ first = false
+ }
+ else {
+ b append sep
+ b append x
+ }
+ }
+ b append end
+
+ b
+ }
+
+ /** Appends all elements of this $coll to a string builder using a separator string.
+ * The written text consists of the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll, separated by the string `sep`.
+ *
+ * @param b the string builder to which elements are appended.
+ * @param sep the separator string.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "")
+
+ /** Appends all elements of this $coll to a string builder.
+ * The written text consists of the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll without any separator string.
+ *
+ * @param b the string builder to which elements are appended.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder): StringBuilder = addString(b, "")
+}
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 72a6b2cfce..4a26c88129 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -1,17 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
-// Methods could be printed by cat TraversibeLike.scala | egrep '^ (override )?def'
+// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
/** This trait implements a proxy for traversable objects. It forwards
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index cfccae6bcf..f2d91ded0c 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,31 +15,29 @@ import mutable.{Buffer, StringBuilder}
// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
-
-/** This trait implements a proxy for traversable objects. It forwards
- * all calls to a different traversable object
+/** This trait implements a proxy for Traversable objects. It forwards
+ * all calls to a different Traversable object.
*
* @author Martin Odersky
* @version 2.8
* @since 2.8
*/
-trait TraversableProxyLike[+A, +This <: TraversableLike[A, This] with Traversable[A]] extends TraversableLike[A, This] with Proxy {
- def self: This
+trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy {
+ def self: Repr
override def foreach[B](f: A => B): Unit = self.foreach(f)
override def isEmpty: Boolean = self.isEmpty
override def nonEmpty: Boolean = self.nonEmpty
override def size: Int = self.size
override def hasDefiniteSize = self.hasDefiniteSize
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.++(that)(bf)
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.++(that)(bf)
- override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = self.map(f)(bf)
- override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.flatMap(f)(bf)
- override def filter(p: A => Boolean): This = self.filter(p)
- override def filterNot(p: A => Boolean): This = self.filterNot(p)
- override def remove(p: A => Boolean): This = self.filterNot(p)
- override def partition(p: A => Boolean): (This, This) = self.partition(p)
- override def groupBy[K](f: A => K): Map[K, This] = self.groupBy(f)
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf)
+ override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.map(f)(bf)
+ override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf)
+ override def filter(p: A => Boolean): Repr = self.filter(p)
+ override def filterNot(p: A => Boolean): Repr = self.filterNot(p)
+ override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.collect(pf)(bf)
+ override def partition(p: A => Boolean): (Repr, Repr) = self.partition(p)
+ override def groupBy[K](f: A => K): immutable.Map[K, Repr] = self.groupBy(f)
override def forall(p: A => Boolean): Boolean = self.forall(p)
override def exists(p: A => Boolean): Boolean = self.exists(p)
override def count(p: A => Boolean): Int = self.count(p)
@@ -53,28 +50,40 @@ trait TraversableProxyLike[+A, +This <: TraversableLike[A, This] with Traversabl
override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = self.reduceLeftOption(op)
override def reduceRight[B >: A](op: (A, B) => B): B = self.reduceRight(op)
override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = self.reduceRightOption(op)
+ override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanLeft(z)(op)(bf)
+ override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanRight(z)(op)(bf)
+ override def sum[B >: A](implicit num: Numeric[B]): B = self.sum(num)
+ override def product[B >: A](implicit num: Numeric[B]): B = self.product(num)
+ override def min[B >: A](implicit cmp: Ordering[B]): A = self.min(cmp)
+ override def max[B >: A](implicit cmp: Ordering[B]): A = self.max(cmp)
override def head: A = self.head
override def headOption: Option[A] = self.headOption
- override def tail: This = self.tail
+ override def tail: Repr = self.tail
override def last: A = self.last
override def lastOption: Option[A] = self.lastOption
- override def init: This = self.init
- override def take(n: Int): This = self.take(n)
- override def drop(n: Int): This = self.drop(n)
- override def slice(from: Int, until: Int): This = self.slice(from, until)
- override def takeWhile(p: A => Boolean): This = self.takeWhile(p)
- override def dropWhile(p: A => Boolean): This = self.dropWhile(p)
- override def span(p: A => Boolean): (This, This) = self.span(p)
- override def splitAt(n: Int): (This, This) = self.splitAt(n)
+ override def init: Repr = self.init
+ override def take(n: Int): Repr = self.take(n)
+ override def drop(n: Int): Repr = self.drop(n)
+ override def slice(from: Int, until: Int): Repr = self.slice(from, until)
+ override def takeWhile(p: A => Boolean): Repr = self.takeWhile(p)
+ override def dropWhile(p: A => Boolean): Repr = self.dropWhile(p)
+ override def span(p: A => Boolean): (Repr, Repr) = self.span(p)
+ override def splitAt(n: Int): (Repr, Repr) = self.splitAt(n)
override def copyToBuffer[B >: A](dest: Buffer[B]) = self.copyToBuffer(dest)
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = self.copyToArray(xs, start, len)
override def copyToArray[B >: A](xs: Array[B], start: Int) = self.copyToArray(xs, start)
+ override def copyToArray[B >: A](xs: Array[B]) = self.copyToArray(xs)
override def toArray[B >: A: ClassManifest]: Array[B] = self.toArray
override def toList: List[A] = self.toList
override def toIterable: Iterable[A] = self.toIterable
override def toSeq: Seq[A] = self.toSeq
+ override def toIndexedSeq[B >: A] = self.toIndexedSeq
+ override def toBuffer[B >: A] = self.toBuffer
override def toStream: Stream[A] = self.toStream
override def toSet[B >: A]: immutable.Set[B] = self.toSet
+ override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = self.toMap(ev)
+ override def toTraversable: Traversable[A] = self.toTraversable
+ override def toIterator: Iterator[A] = self.toIterator
override def mkString(start: String, sep: String, end: String): String = self.mkString(start, sep, end)
override def mkString(sep: String): String = self.mkString(sep)
override def mkString: String = self.mkString
@@ -83,14 +92,7 @@ trait TraversableProxyLike[+A, +This <: TraversableLike[A, This] with Traversabl
override def addString(b: StringBuilder): StringBuilder = self.addString(b)
override def stringPrefix : String = self.stringPrefix
override def view = self.view
- override def view(from: Int, until: Int): TraversableView[A, This] = self.view(from, until)
-}
-
-private class TraversableProxyLikeConfirmation[+A, +This <: TraversableLike[A, This] with Traversable[A]]
- extends TraversableProxyLike[A, Traversable[A]]
- with interfaces.TraversableMethods[A, Traversable[A]]
-{
- def self: This = repr.asInstanceOf[This]
- protected[this] def newBuilder = scala.collection.Traversable.newBuilder[A]
- // : Builder[A, This]
+ override def view(from: Int, until: Int): TraversableView[A, Repr] = self.view(from, until)
+ // This appears difficult to override due to the type of WithFilter.
+ // override def withFilter(p: A => Boolean): WithFilter = self.withFilter(p)
}
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index b4362e2b6b..9c4f7d8a5d 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,18 +14,14 @@ import generic._
import mutable.Builder
import TraversableView.NoBuilder
-/** <p>
- * A base class for views of <a href="../Traversable.html"
- * target="ContentFrame"><code>Traversable<code></a>.<br/>
- * Every subclass has to implenment the <code>foreach</code> method.
- * </p>
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A base trait for non-strict views of traversable collections.
+ * $traversableViewInfo
*/
trait TraversableView[+A, +Coll] extends TraversableViewLike[A, Coll, TraversableView[A, Coll]]
+/** An object containing the necessary implicit definitions to make
+ * `TraversableView`s work. Its definitions are generally not accessed directly by clients.
+ */
object TraversableView {
class NoBuilder[A] extends Builder[A, Nothing] {
def +=(elem: A): this.type = this
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 8177658026..9b5be82dd6 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -1,35 +1,45 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
import generic._
-import mutable.Builder
-import Math.MAX_INT
+import mutable.{Builder, ArrayBuffer}
import TraversableView.NoBuilder
-/** <p>
- * A template trait for views of <a href="../Traversable.html"
- * target="contentFrame"><code>Traversable</code></a>.<br/>
- * Every subclass has to implement the <code>foreach</code> method.
- * </p>
- * @note Methods such as map/flatMap on this will not invoke the implicitly passed
- * Builder factory, but will return a new view directly, to preserve by-name behavior.
- * The new view is then cast to the factory's result type.
- * This means that every CanBuildFrom that takes a
- * View as its From type parameter must yield the same view (or a generic superclass of it)
- * as its result parameter. If that assumption is broken, cast errors might result.
+/** A template trait for non-strict views of traversable collections.
+ * $traversableviewinfo
*
+ * Implementation note: Methods such as `map` or `flatMap` on this view will not invoke the implicitly passed
+ * `Builder` factory, but will return a new view directly, to preserve by-name behavior.
+ * The new view is then cast to the factory's result type. This means that every `CanBuildFrom`
+ * that takes a `View` as its `From` type parameter must yield the same view (or a generic
+ * superclass of it) as its result parameter. If that assumption is broken, cast errors might result.
+ *
+ * @define viewinfo
+ * A view is a lazy version of some collection. Collection transformers such as
+ * `map` or `filter` or `++` do not traverse any elements when applied on a view.
+ * Instead they create a new view which simply records that fact that the operation
+ * needs to be applied. The collection elements are accessed, and the view operations are applied,
+ * when a non-view result is needed, or when the `force` method is called on a view.
+ * @define traversableviewinfo
+ * $viewinfo
+ *
+ * All views for traversable collections are defined by creating a new `foreach` method.
+
* @author Martin Odersky
* @version 2.8
+ * @since 2.8
+ * @tparam A the element type of the view
+ * @tparam Coll the type of the underlying collection containing the elements.
+ * @tparam This the type of the view itself
*/
trait TraversableViewLike[+A,
+Coll,
@@ -48,8 +58,23 @@ self =>
b.result()
}
+ /** The implementation base trait of this view.
+ * This trait and all its subtraits have to be re-implemented for each
+ * ViewLike class.
+ */
trait Transformed[+B] extends TraversableView[B, Coll] {
lazy val underlying = self.underlying
+ override def toString = stringPrefix+"(...)"
+ }
+
+ /** A fallback which forces everything into a sequence and then applies an operation
+ * on it. Used for those operations which do not naturally lend themselves to a view.
+ */
+ trait Forced[B] extends Transformed[B] {
+ protected[this] def forced: Seq[B]
+ private[this] lazy val forcedCache = forced
+ override def foreach[U](f: B => U) = forcedCache.foreach(f)
+ override def stringPrefix = self.stringPrefix+"C"
}
/** pre: from >= 0
@@ -57,7 +82,7 @@ self =>
trait Sliced extends Transformed[A] {
protected[this] val from: Int
protected[this] val until: Int
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
var index = 0
for (x <- self) {
if (from <= index) {
@@ -74,7 +99,7 @@ self =>
trait Mapped[B] extends Transformed[B] {
protected[this] val mapping: A => B
- override def foreach[C](f: B => C) {
+ override def foreach[U](f: B => U) {
for (x <- self)
f(mapping(x))
}
@@ -83,7 +108,7 @@ self =>
trait FlatMapped[B] extends Transformed[B] {
protected[this] val mapping: A => Traversable[B]
- override def foreach[C](f: B => C) {
+ override def foreach[U](f: B => U) {
for (x <- self)
for (y <- mapping(x))
f(y)
@@ -93,7 +118,7 @@ self =>
trait Appended[B >: A] extends Transformed[B] {
protected[this] val rest: Traversable[B]
- override def foreach[C](f: B => C) {
+ override def foreach[U](f: B => U) {
for (x <- self) f(x)
for (x <- rest) f(x)
}
@@ -102,7 +127,7 @@ self =>
trait Filtered extends Transformed[A] {
protected[this] val pred: A => Boolean
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
for (x <- self)
if (pred(x)) f(x)
}
@@ -111,7 +136,7 @@ self =>
trait TakenWhile extends Transformed[A] {
protected[this] val pred: A => Boolean
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
for (x <- self) {
if (!pred(x)) return
f(x)
@@ -122,7 +147,7 @@ self =>
trait DroppedWhile extends Transformed[A] {
protected[this] val pred: A => Boolean
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
var go = false
for (x <- self) {
if (!go && !pred(x)) go = true
@@ -135,6 +160,7 @@ self =>
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
+ protected def newForced[B](xs: => Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
protected def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
protected def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
protected def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
@@ -143,14 +169,12 @@ self =>
protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
protected def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
- newAppended(that).asInstanceOf[That]
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ newAppended(xs.toTraversable).asInstanceOf[That]
// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
// else super.++[B, That](that)(bf)
}
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[This, B, That]): That = ++[B, That](that.toStream)
-
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
newMapped(f).asInstanceOf[That]
// val b = bf(repr)
@@ -158,6 +182,9 @@ self =>
// else super.map[B, That](f)(bf)
}
+ override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ filter(pf.isDefinedAt).map(pf)(bf)
+
override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
newFlatMapped(f).asInstanceOf[That]
// was: val b = bf(repr)
@@ -165,14 +192,34 @@ self =>
// else super.flatMap[B, That](f)(bf)
}
+ protected[this] def thisSeq: Seq[A] = {
+ val buf = new ArrayBuffer[A]
+ self foreach (buf +=)
+ buf.result
+ }
+
override def filter(p: A => Boolean): This = newFiltered(p).asInstanceOf[This]
+ override def withFilter(p: A => Boolean): This = newFiltered(p).asInstanceOf[This]
+ override def partition(p: A => Boolean): (This, This) = (filter(p), filter(!p(_)))
override def init: This = newSliced(0, size - 1).asInstanceOf[This]
- override def drop(n: Int): This = newSliced(n max 0, MAX_INT).asInstanceOf[This]
+ override def drop(n: Int): This = newSliced(n max 0, Int.MaxValue).asInstanceOf[This]
override def take(n: Int): This = newSliced(0, n).asInstanceOf[This]
override def slice(from: Int, until: Int): This = newSliced(from max 0, until).asInstanceOf[This]
override def dropWhile(p: A => Boolean): This = newDroppedWhile(p).asInstanceOf[This]
override def takeWhile(p: A => Boolean): This = newTakenWhile(p).asInstanceOf[This]
override def span(p: A => Boolean): (This, This) = (takeWhile(p), dropWhile(p))
override def splitAt(n: Int): (This, This) = (take(n), drop(n))
+
+ override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ newForced(thisSeq.scanLeft(z)(op)).asInstanceOf[That]
+
+ override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That]
+
+ override def groupBy[K](f: A => K): immutable.Map[K, This] =
+ thisSeq.groupBy(f).mapValues(xs => newForced(xs).asInstanceOf[This])
+
override def stringPrefix = "TraversableView"
}
+
+
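To illustrate the by-name behaviour described in the view documentation above (an editorial sketch assuming a Scala 2.8-era library): transformers on a view only record the operation; traversal happens when a strict result is demanded, for example via `force`.

    val v = (1 to 5).view map { x => println("mapping " + x); x * 2 }
    // nothing is printed yet: the map is merely recorded
    val strict = v.force                // prints "mapping 1" .. "mapping 5" and builds the result
    val stillLazy = v filter (_ > 4)    // another view; no traversal, nothing printed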
diff --git a/src/library/scala/collection/generic/Addable.scala b/src/library/scala/collection/generic/Addable.scala
index d05258922a..8411022404 100644
--- a/src/library/scala/collection/generic/Addable.scala
+++ b/src/library/scala/collection/generic/Addable.scala
@@ -1,59 +1,55 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** This class represents collections that can be added to other
- * collections using a '+' operator.
- *
+/** This trait represents collection-like objects that can be added to
+ * using a '+' operator. It defines variants of `+` and `++`
+ * as convenience methods in terms of single-element addition `+`.
+ * @tparam A the type of the elements of the $coll
+ * @tparam Repr the type of the $coll itself
* @author Martin Odersky
- * @owner Martin Odersky
* @version 2.8
* @since 2.8
+ * @define coll collection
+ * @define Coll Addable
*/
-trait Addable[A, +This <: Addable[A, This]] { self =>
+trait Addable[A, +Repr <: Addable[A, Repr]] { self =>
- protected def repr: This
+ /** The representation object of type `Repr` which contains the collection's elements
+ */
+ protected def repr: Repr
- /** Creates a new collection with an additional element, unless the element is already present.
- * @param elem the element to be added
- * @return a fresh collection
+ /** Creates a new $coll with an additional element, unless the element is already present.
+ * @param elem the element to add
+ * @return a fresh collection with `elem` added.
*/
- def +(elem: A): This
+ def +(elem: A): Repr
- /** Adds two or more elements to this collection and returns
- * a new collection.
+ /** Creates a new $coll with additional elements.
*
+ * This method takes two or more elements to be added. Another overloaded
+ * variant of this method handles the case where a single element is
+ * added.
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
+ * @return a new $coll with the given elements added.
*/
- def + (elem1: A, elem2: A, elems: A*): This =
+ def + (elem1: A, elem2: A, elems: A*): Repr =
this + elem1 + elem2 ++ elems
- /** Adds a number of elements provided by a traversable object
- * and returns a new collection with the added elements.
- *
- * @param elems the traversable object.
- */
- def ++ (elems: Traversable[A]): This = (repr /: elems) (_ + _)
-
- /** Adds a number of elements provided by an iterator
- * and returns a new collection with the added elements.
+ /** Creates a new $coll by adding all elements contained in another collection to this $coll.
*
- * @param iter the iterator
+ * @param elems the collection containing the added elements.
+ * @return a new $coll with the given elements added.
*/
- def ++ (iter: Iterator[A]): This = (repr /: iter) (_ + _)
+ def ++ (xs: TraversableOnce[A]): Repr = (repr /: xs) (_ + _)
}
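A usage sketch of the three addition variants, using an immutable `Set`, whose `+`/`++` follow the pattern described above (illustrative only):

    val s = Set(1, 2)
    s + 3               // Set(1, 2, 3): single-element addition
    s + (3, 4, 5)       // Set(1, 2, 3, 4, 5): the varargs overload
    s ++ List(3, 4)     // Set(1, 2, 3, 4): bulk addition from another collection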
-
-
-
-
diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala
index 1432fe5932..0d89464f71 100644
--- a/src/library/scala/collection/generic/BitSetFactory.scala
+++ b/src/library/scala/collection/generic/BitSetFactory.scala
@@ -1,26 +1,34 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
import scala.collection._
-import mutable.{Builder, AddingBuilder}
+import mutable.Builder
-/**
- * @since 2.8
+/** @define coll collection
+ * @define Coll Traversable
+ * @define factoryInfo
+ * This object provides a set of operations to create `$Coll` values.
+ * @author Martin Odersky
+ * @version 2.8
+ * @define canBuildFromInfo
+ * The standard `CanBuildFrom` instance for $Coll objects.
+ * @see CanBuildFrom
+ * @define bitsetCanBuildFrom
+ * The standard `CanBuildFrom` instance for bitsets.
*/
trait BitSetFactory[Coll <: BitSet with BitSetLike[Coll]] {
- def newBuilder: Builder[Int, Coll] = new AddingBuilder[Int, Coll](empty)
def empty: Coll
+ def newBuilder: Builder[Int, Coll]
def apply(elems: Int*): Coll = (empty /: elems) (_ + _)
def bitsetCanBuildFrom = new CanBuildFrom[Coll, Int, Coll] {
def apply(from: Coll) = newBuilder
diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala
index 18f0bde2cb..4c923dca44 100644
--- a/src/library/scala/collection/generic/CanBuildFrom.scala
+++ b/src/library/scala/collection/generic/CanBuildFrom.scala
@@ -1,29 +1,46 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
import mutable.Builder
+import scala.annotation.implicitNotFound
-/** A base class for builder factories
+/** A base trait for builder factories.
*
+ * @tparam From the type of the underlying collection that requests
+ * a builder to be created.
+ * @tparam Elem the element type of the collection to be created.
+ * @tparam To the type of the collection to be created.
+ *
+ * @see Builder
+ * @author Martin Odersky
+ * @author Adriaan Moors
* @since 2.8
*/
+@implicitNotFound(msg = "Cannot construct a collection of type ${To} with elements of type ${Elem} based on a collection of type ${From}.")
trait CanBuildFrom[-From, -Elem, +To] {
- /** Creates a new builder, using `from` as a prototype
- * the resulting Builder will build the same kind of collection
+ /** Creates a new builder on request of a collection.
+ * @param from the collection requesting the builder to be created.
+ * @return a builder for collections of type `To` with element type `Elem`.
+ * The collections framework usually arranges things so
+ * that the created builder will build the same kind of collection
+ * as `from`.
*/
def apply(from: From): Builder[Elem, To]
- /** Creates a new builder from scratch */
+ /** Creates a new builder from scratch.
+ *
+ * @return a builder for collections of type `To` with element type `Elem`.
+ * @see scala.collection.breakOut
+ */
def apply(): Builder[Elem, To]
}
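A sketch of how `CanBuildFrom` instances are resolved implicitly by `map` (the instances involved are the library's own; only the values are illustrative):

    val m = Map(1 -> "a", 2 -> "b")
    m map { case (k, v) => (k, v + "!") }   // Map[Int, String]: a map-shaped CanBuildFrom is found
    m map { case (k, v) => v }              // Iterable[String]: plain elements fall back to Iterable
    // breakOut selects a CanBuildFrom based on the expected type instead of the source:
    val s: Set[Int] = List(1, 2, 2, 3).map(identity)(collection.breakOut)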
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
new file mode 100755
index 0000000000..45bba19e96
--- /dev/null
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -0,0 +1,11 @@
+package scala.collection.generic
+
+/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
+ * of trait `TraversableLike`.
+ */
+trait FilterMonadic[+A, +Repr] {
+ def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def foreach[U](f: A => U): Unit
+ def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
+}
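These four methods are exactly the ones `for` comprehensions desugar to, which is why `withFilter` returns a `FilterMonadic`; a rough sketch of the correspondence under the standard desugaring rules:

    val xs = List(-1, 1, 2)
    val ys = List("a", "b")
    for (x <- xs if x > 0; y <- ys) yield (x, y)
    // desugars (roughly) to:
    xs withFilter (_ > 0) flatMap (x => ys map (y => (x, y)))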
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index b895994497..4552867a9f 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,21 +13,38 @@ package generic
import mutable.Builder
-/**
- * @since 2.8
+/** A template class for companion objects of ``regular'' collection classes
+ * that represent an unconstrained higher-kinded type. Typically
+ * such classes inherit from trait `GenericTraversableTemplate`.
+ * @tparam CC The type constructor representing the collection class.
+ * @see GenericTraversableTemplate
+ * @author Martin Odersky
+ * @since 2.8
+ * @define coll collection
+ * @define Coll CC
*/
abstract class GenericCompanion[+CC[X] <: Traversable[X]] {
+ /** The underlying collection type with unknown element type */
type Coll = CC[_]
+ /** The default builder for `$Coll` objects.
+ * @tparam A the type of the ${coll}'s elements
+ */
def newBuilder[A]: Builder[A, CC[A]]
- /** The empty iterable of type <code>CC</code>. */
+ /** An empty collection of type `$Coll[A]`
+ * @tparam A the type of the ${coll}'s elements
+ */
def empty[A]: CC[A] = newBuilder[A].result
- /** Creates an iterable of type <code>CC</code> with specified elements. */
- def apply[A](args: A*): CC[A] = {
+ /** Creates a $coll with the specified elements.
+ * @tparam A the type of the ${coll}'s elements
+ * @param elems the elements of the created $coll
+ * @return a new $coll with elements `elems`
+ */
+ def apply[A](elems: A*): CC[A] = {
val b = newBuilder[A]
- b ++= args
+ b ++= elems
b.result
}
}
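A small sketch of what these companion methods provide for concrete collection classes; every companion deriving from `GenericCompanion` exposes the same interface:

    List.empty[Int]      // List(): newBuilder[Int].result
    List(1, 2, 3)        // apply builds the collection through newBuilder
    Vector("a", "b")     // the same pattern for any other companion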
diff --git a/src/library/scala/collection/generic/GenericSequenceFactory.scala.disabled b/src/library/scala/collection/generic/GenericSequenceFactory.scala.disabled
deleted file mode 100644
index 9132c96762..0000000000
--- a/src/library/scala/collection/generic/GenericSequenceFactory.scala.disabled
+++ /dev/null
@@ -1,17 +0,0 @@
-package scala.collection.generic
-
-/** A template for companion objects of Sequence and subclasses thereof.
- *
- * @since 2.8
- */
-abstract class GenericSequenceFactory extends GenericTraversableFactory {
-
- type Coll <: Sequence[_]
-
- /** This method is called in a pattern match { case Sequence(...) => }.
- *
- * @param x the selector value
- * @return sequence wrapped in an option, if this is a Sequence, otherwise none
- */
- def unapplySeq[A](x: Coll with Sequence[A]): Some[Coll with Sequence[A]] = Some(x)
-}
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index 53180a4d34..c381fb84df 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/generic/GenericTraversableFactory.scala.disabled b/src/library/scala/collection/generic/GenericTraversableFactory.scala.disabled
deleted file mode 100644
index c2ae7e114c..0000000000
--- a/src/library/scala/collection/generic/GenericTraversableFactory.scala.disabled
+++ /dev/null
@@ -1,188 +0,0 @@
-package scala.collection.generic
-
-/** A template for companion objects of Traversable and subclasses thereof.
- *
- * @since 2.8
- */
-abstract class GenericTraversableFactory {
-
- type Coll <: Traversable[_]
-
- /** The empty iterable of type CC */
- def empty[A, That](implicit bf: Coll => Builder[A, That, Coll]): That = apply[A, That]()
-
- /** Creates an iterable of type CC with specified elements */
- def apply[A, That](args: A*)(implicit bf: Coll => Builder[A, That, Coll]): That = {
- val b = bf(thisCollection)
- b ++= Iterable.fromOld(args)
- b.result
- }
-
- /** Concatenate all the argument lists into a single list.
- *
- * @param xss the lists that are to be concatenated
- * @return the concatenation of all the lists
- */
- def concat[A, That](xss: Traversable[A]*)(implicit bf: Coll => Builder[A, That, Coll]): That = {
- val b = bf(thisCollection)
- for (xs <- Iterable.fromOld(xss))
- b ++= xs
- b.result
- }
-
- /** An iterable that contains the results of some element computation a number of times.
- * @param n the number of elements returned
- * @param elem the element computation
- */
- def fill[A, That](n: Int)(elem: => A)(implicit bf: Coll => Builder[A, That, Coll]): That = {
- val b = bf(thisCollection)
- var i = 0
- while (i < n) {
- b += elem
- i += 1
- }
- b.result
- }
-
- /** A two-dimensional iterable that contains the results of some element computation a number of times.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param elem the element computation
- */
- def fill[A, C1, That](n1: Int, n2: Int)(elem: => A)(implicit b1: Builder[A, C1, Coll], b2: Builder[C1, That, Coll]): That =
- tabulate(n1)(_ => fill(n2)(elem))
-/*
- /** A three-dimensional iterable that contains the results of some element computation a number of times.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
- * @param elem the element computation
- */
- def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] =
- tabulate(n1)(_ => fill(n2, n3)(elem))
-
- /** A four-dimensional iterable that contains the results of some element computation a number of times.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
- * @param n4 the number of elements in the 4th dimension
- * @param elem the element computation
- */
- def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] =
- tabulate(n1)(_ => fill(n2, n3, n4)(elem))
-
- /** A five-dimensional iterable that contains the results of some element computation a number of times.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
- * @param n4 the number of elements in the 4th dimension
- * @param n5 the number of elements in the 5th dimension
- * @param elem the element computation
- */
- def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] =
- tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem))
-*/
-
- /** An iterable containing values of a given function over a range of integer values starting from 0.
- * @param n The number of elements in the iterable
- * @param f The function computing element values
- * @return An iterable consisting of elements `f(0), ..., f(n -1)`
- */
- def tabulate[A, That](n: Int)(f: Int => A)(implicit bf: Coll => Builder[A, That, Coll]) = {
- val b = bf(thisCollection)
- var i = 0
- while (i < n) {
- b += f(i)
- i += 1
- }
- b.result
- }
-
- /** A two-dimensional iterable containing values of a given function over ranges of integer values starting from 0.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param f The function computing element values
- */
- def tabulate[A, C1, That](n1: Int, n2: Int)(f: (Int, Int) => A)(implicit b1: Builder[A, C1, Coll], b2: Builder[C1, That, Coll]): That =
- tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
-
- /*
- /** A three-dimensional iterable containing values of a given function over ranges of integer values starting from 0.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
- * @param f The function computing element values
- */
- def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] =
- tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
-
- /** A four-dimensional iterable containing values of a given function over ranges of integer values starting from 0.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
- * @param n4 the number of elements in the 4th dimension
- * @param f The function computing element values
- */
- def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] =
- tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
-
- /** A five-dimensional iterable containing values of a given function over ranges of integer values starting from 0.
- * @param n1 the number of elements in the 1st dimension
- * @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
- * @param n4 the number of elements in the 4th dimension
- * @param n5 the number of elements in the 5th dimension
- * @param f The function computing element values
- */
- def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] =
- tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
-*/
-
- /** An iterable containing a sequence of increasing integers in a range.
- *
- * @param from the start value of the iterable
- * @param end the end value of the iterable (the first value NOT returned)
- * @return the iterable with values in range `start, start + 1, ..., end - 1`
- * up to, but exclusding, `end`.
- */
- def range[That](start: Int, end: Int)(implicit bf: Coll => Builder[Int, That, Coll]): That =
- range(start, end, 1)
-
- /** An iterable containing equally spaced values in some integer interval.
-
- * @param start the start value of the iterable
- * @param end the end value of the iterable (the first value NOT returned)
- * @param step the increment value of the iterable (must be positive or negative)
- * @return the iterable with values in `start, start + step, ...` up to, but excluding `end`
- */
- def range[That](start: Int, end: Int, step: Int)(implicit bf: Coll => Builder[Int, That, Coll]): That = {
- val b = bf(thisCollection)
- if (step == 0) throw new IllegalArgumentException("zero step")
- var i = start
- while (if (step < 0) end < i else i < end) {
- b += i
- i += step
- }
- b.result
- }
-
- /** An iterable containing repeated applications of a function to a start value.
- *
- * @param start the start value of the iterable
- * @param len the number of elements returned by the iterable
- * @param f the function that's repeatedly applied
- * @return the iterable returning `len` values in the sequence `start, f(start), f(f(start)), ...`
- */
- def iterate[That](start: Int, len: Int)(f: Int => Int)(implicit bf: Coll => Builder[Int, That, Coll]): That = {
- val b = bf(thisCollection)
- var acc = start
- var i = 0
- while (i < len) {
- b += acc
- acc = f(acc)
- i += 1
- }
- b.result
- }
-}
-
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index cf4f3abdd2..5f009b6742 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,24 +14,65 @@ package generic
import mutable.Builder
import annotation.unchecked.uncheckedVariance
-/**
- * @since 2.8
+/** A template class for companion objects of ``regular'' collection classes
+ * that represent an unconstrained higher-kinded type.
+ * @tparam A The type of the collection elements.
+ * @tparam CC The type constructor representing the collection class.
+ * @author Martin Odersky
+ * @since 2.8
+ * @define coll collection
+ * @define Coll CC
*/
trait GenericTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
+ /** Applies a function `f` to all elements of this $coll.
+ *
+ * @param f the function that is applied for its side-effect to every element.
+ * The result of function `f` is discarded.
+ *
+ * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`,
+ * but this is not necessary.
+ *
+ * @usecase def foreach(f: A => Unit): Unit
+ */
def foreach[U](f: A => U): Unit
+
+ /** Selects the first element of this $coll.
+ * @return the first element of this $coll.
+ * @throws `NoSuchElementException` if the $coll is empty.
+ */
def head: A
+
+ /** Tests whether this $coll is empty.
+ *
+ * @return `true` if the $coll contains no elements, `false` otherwise.
+ */
def isEmpty: Boolean
- /** The factory companion object that builds instances of class CC */
+ /** The factory companion object that builds instances of class $Coll.
+ * (or its `Iterable` superclass where class $Coll is not a `Seq`).
+ */
def companion: GenericCompanion[CC]
- /** The builder that builds instances of CC[A] */
+ /** The builder that builds instances of type $Coll[A]
+ */
protected[this] def newBuilder: Builder[A, CC[A]] = companion.newBuilder[A]
- /** The generic builder that builds instances of CC at arbitrary element types. */
+ /** The generic builder that builds instances of $Coll
+ * at arbitrary element types.
+ */
def genericBuilder[B]: Builder[B, CC[B]] = companion.newBuilder[B]
+ /** Converts this $coll of pairs into two collections of the first and second
+ * halves of each pair.
+ * @tparam A1 the type of the first half of the element pairs
+ * @tparam A2 the type of the second half of the element pairs
+ * @param asPair an implicit conversion which asserts that the element type of this
+ * $coll is a pair.
+ * @return a pair of ${coll}s, containing the first, respectively second half
+ * of each element pair of this $coll.
+ */
def unzip[A1, A2](implicit asPair: A => /*<:<!!!*/ (A1, A2)): (CC[A1], CC[A2]) = {
val b1 = genericBuilder[A1]
val b2 = genericBuilder[A2]
@@ -44,6 +84,14 @@ trait GenericTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBui
(b1.result, b2.result)
}
+ /** Converts this $coll of traversable collections into
+ * a $coll in which all element collections are concatenated.
+ * @tparam B the type of the elements of each traversable collection.
+ * @param asTraversable an implicit conversion which asserts that the element type of this
+ * $coll is a `Traversable`.
+ * @return a new $coll resulting from concatenating all element ${coll}s.
+ * @usecase def flatten[B]: $Coll[B]
+ */
def flatten[B](implicit asTraversable: A => /*<:<!!!*/ Traversable[B]): CC[B] = {
val b = genericBuilder[B]
for (xs <- this)
@@ -51,8 +99,16 @@ trait GenericTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBui
b.result
}
+ /** Transposes this $coll of traversable collections into
+ * a $coll of ${coll}s.
+ * @tparam B the type of the elements of each traversable collection.
+ * @param asTraversable an implicit conversion which asserts that the element type of this
+ * $coll is a `Traversable`.
+ * @return a two-dimensional $coll of ${coll}s which has as ''n''th row
+ * the ''n''th column of this $coll.
+ */
def transpose[B](implicit asTraversable: A => /*<:<!!!*/ Traversable[B]): CC[CC[B] @uncheckedVariance] = {
- val bs: IndexedSeq[Builder[B, CC[B]]] = asTraversable(head).map(_ => genericBuilder[B]).toIndexedSeq
+ val bs: IndexedSeq[Builder[B, CC[B]]] = IndexedSeq.fill(asTraversable(head).size)(genericBuilder[B])
for (xs <- this) {
var i = 0
for (x <- asTraversable(xs)) {
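The `unzip`, `flatten` and `transpose` methods documented above all build their results through `genericBuilder`. A usage sketch follows (illustrative only; `List` mixes in `GenericTraversableTemplate`, so it picks up these methods, and the object name is made up here):
{{{
// Usage sketch: unzip, flatten and transpose on a concrete collection type.
object GenericTraversableTemplateDemo {
  def main(args: Array[String]): Unit = {
    val pairs = List((1, 'a'), (2, 'b'), (3, 'c'))
    val (nums, chars) = pairs.unzip      // (List(1, 2, 3), List(a, b, c))

    val nested = List(List(1, 2), List(3, 4))
    println(nested.flatten)              // List(1, 2, 3, 4)
    println(nested.transpose)            // List(List(1, 3), List(2, 4))
    println((nums, chars))
  }
}
}}}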
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index 504bceae66..3cc0421845 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -1,57 +1,54 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** This class represents collections that can be augmented using a `+=` operator
- * and that can be cleared of all elements using the `clear` method.
+/** This trait forms part of collections that can be augmented
+ * using a `+=` operator and that can be cleared of all elements using
+ * a `clear` method.
*
* @author Martin Odersky
- * @owner Martin Odersky
* @version 2.8
* @since 2.8
+ * @define coll growable collection
+ * @define Coll Growable
+ * @define add add
+ * @define Add add
*/
trait Growable[-A] {
- /** Adds a single element to this collection.
+ /** ${Add}s a single element to this $coll.
*
- * @param elem the element to add.
+ * @param elem the element to $add.
+ * @return the $coll itself
*/
def +=(elem: A): this.type
- /** Adds two or more elements to this collection.
+ /** ${Add}s two or more elements to this $coll.
*
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
+ * @param elem1 the first element to $add.
+ * @param elem2 the second element to $add.
+ * @param elems the remaining elements to $add.
+ * @return the $coll itself
*/
def +=(elem1: A, elem2: A, elems: A*): this.type = this += elem1 += elem2 ++= elems
- /** Adds a number of elements provided by an iterator to this collection.
+ /** ${Add}s all elements produced by a TraversableOnce to this $coll.
*
- * @param iter the iterator.
+ * @param xs the TraversableOnce producing the elements to $add.
+ * @return the $coll itself.
*/
- def ++=(iter: Iterator[A]): this.type = { iter foreach += ; this }
+ def ++=(xs: TraversableOnce[A]): this.type = { xs foreach += ; this }
- /** Adds a number of elements provided by an iterable object to this collection.
- *
- * @param iter the iterable object.
- */
- def ++=(iter: Traversable[A]): this.type = { iter foreach +=; this }
-
- /** Clears the collection contents.
+ /** Clears the $coll's contents. After this operation, the
+ * $coll is empty.
*/
def clear()
}
-
-
-
-
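A short usage sketch of the `Growable` contract described above (`ArrayBuffer` mixes in `Growable`; the object name is illustrative, not library code):
{{{
import scala.collection.mutable.ArrayBuffer

object GrowableDemo {
  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer[Int]()
    buf += 1                    // add a single element
    buf += (2, 3)               // add two or more elements at once
    buf ++= Iterator(4, 5)      // add everything a TraversableOnce produces
    println(buf)                // ArrayBuffer(1, 2, 3, 4, 5)
    buf.clear()                 // empty the collection
    println(buf.isEmpty)        // true
  }
}
}}}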
diff --git a/src/library/scala/collection/generic/HasNewBuilder.scala b/src/library/scala/collection/generic/HasNewBuilder.scala
index 0a422b1c5f..889f509a32 100755
--- a/src/library/scala/collection/generic/HasNewBuilder.scala
+++ b/src/library/scala/collection/generic/HasNewBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index 53d831dbfb..c0d0b1ea8b 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** A template for companion objects of <code>immutable.Map</code> and
- * subclasses thereof.
- *
- * @since 2.8
+/** A template for companion objects of `immutable.Map` and subclasses thereof.
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
*/
abstract class ImmutableMapFactory[CC[A, +B] <: immutable.Map[A, B] with immutable.MapLike[A, B, CC[A, B]]] extends MapFactory[CC]
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
new file mode 100644
index 0000000000..a551786f25
--- /dev/null
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import mutable.{ Builder, AddingBuilder }
+
+abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]]
+ extends SetFactory[CC] {
+
+ def newBuilder[A]: Builder[A, CC[A]] = new AddingBuilder[A, CC[A]](empty[A])
+}
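The factory above delegates building to `AddingBuilder`, which grows an immutable set with `+`. A minimal sketch of that idea, assuming a 2.8-era `Builder` (the class below is illustrative, not the library class):
{{{
import scala.collection.mutable.Builder

// Illustrative only: roughly what an adding builder for immutable sets does.
class SimpleAddingBuilder[A](empty: Set[A]) extends Builder[A, Set[A]] {
  private var elems: Set[A] = empty
  def +=(elem: A): this.type = { elems = elems + elem; this }
  def clear() { elems = Set.empty[A] }
  def result(): Set[A] = elems
}
}}}
Building with it is just a sequence of `+=` calls followed by `result()`, which returns the accumulated immutable set.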
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index 6f8ef7d7a3..790a881468 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -1,19 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** A template for companion objects of immutable.Map and subclasses thereof.
+/** A template for companion objects of `SortedMap` and subclasses thereof.
*
* @since 2.8
+ * @define Coll SortedMap
+ * @define coll sorted map
+ * @define factoryInfo
+ * This object provides a set of operations needed to create sorted maps of type `$Coll`.
+ * @author Martin Odersky
+ * @version 2.8
+ * @define sortedMapCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for sorted maps
*/
abstract class ImmutableSortedMapFactory[CC[A, B] <: immutable.SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]] extends SortedMapFactory[CC]
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index e9e7599bca..e60bf82857 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -1,19 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** A template for companion objects of mutable.Map and subclasses thereof.
+/** A template for companion objects of `SortedSet` and subclasses thereof.
*
* @since 2.8
+ * @define Coll immutable.SortedSet
+ * @define coll immutable sorted set
+ * @define factoryInfo
+ * This object provides a set of operations needed to create sorted sets of type `$Coll`.
+ * @author Martin Odersky
+ * @version 2.8
+ * @define sortedSetCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for sorted sets
*/
abstract class ImmutableSortedSetFactory[CC[A] <: immutable.SortedSet[A] with SortedSetLike[A, CC[A]]] extends SortedSetFactory[CC] \ No newline at end of file
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 600104938d..8075fabc01 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection.generic
@@ -22,7 +21,7 @@ import collection.mutable.Buffer
* <li><code>toString</code>, <code>hashCode</code>, <code>equals</code>,
* <code>stringPrefix</code></li>
* <li><code>newBuilder</code>, <code>view</code></li>
- * <li>all calls creating a new iterable objetc of the same kind</li>
+ * <li>all calls creating a new iterable object of the same kind</li>
* </ul>
* <p>
* The above methods are forwarded by subclass <a href="../IterableProxy.html"
@@ -41,6 +40,6 @@ trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] {
// Iterable delegates
// Iterable methods could be printed by cat IterableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def'
- override def iterator = underlying.iterator
+ override def iterator: Iterator[A] = underlying.iterator
override def sameElements[B >: A](that: Iterable[B]): Boolean = underlying.sameElements(that)
}
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index 2476142ec7..916024e476 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,21 +13,48 @@ package generic
import mutable.{Builder, MapBuilder}
-/** A template for companion objects of <code>mutable.Map</code> and
- * subclasses thereof.
+/** A template for companion objects of `Map` and subclasses thereof.
*
- * @since 2.8
+ * @define coll map
+ * @define Coll Map
+ * @define factoryInfo
+ * This object provides a set of operations needed to create `$Coll` values.
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ * @define canBuildFromInfo
+ * The standard `CanBuildFrom` instance for `$Coll` objects.
+ * @see CanBuildFrom
+ * @define mapCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for `$Coll` objects.
+ * The created value is an instance of class `MapCanBuildFrom`.
+ * @see CanBuildFrom
+ * @see GenericCanBuildFrom
*/
abstract class MapFactory[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]] {
+ /** The type constructor of the collection that can be built by this factory */
type Coll = CC[_, _]
+ /** An empty $Coll */
def empty[A, B]: CC[A, B]
+ /** A collection of type $Coll that contains given key/value bindings.
+ * @param elems the key/value pairs that make up the $coll
+ * @tparam A the type of the keys
+ * @tparam B the type of the associated values
+ * @return a new $coll consisting of key/value pairs given by `elems`.
+ */
def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result
+ /** The default builder for $Coll objects.
+ * @tparam A the type of the keys
+ * @tparam B the type of the associated values
+ */
def newBuilder[A, B]: Builder[(A, B), CC[A, B]] = new MapBuilder[A, B, CC[A, B]](empty[A, B])
+ /** The standard `CanBuildFrom` class for maps.
+ */
class MapCanBuildFrom[A, B] extends CanBuildFrom[Coll, (A, B), CC[A, B]] {
def apply(from: Coll) = newBuilder[A, B]
def apply() = newBuilder
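In user code, `Map(...)` goes through the `apply`/`newBuilder` pair documented above. A usage sketch (the object name is illustrative):
{{{
object MapFactoryDemo {
  def main(args: Array[String]): Unit = {
    // apply: builds a map from the given key/value bindings
    val m = Map("a" -> 1, "b" -> 2)

    // the same result, spelled out via the default builder
    val b = Map.newBuilder[String, Int]
    b += ("a" -> 1)
    b += ("b" -> 2)
    println(m == b.result())    // true
  }
}
}}}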
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index 603e447453..a24f41754a 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -1,22 +1,29 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-import mutable.MapBuilder
+import mutable.Builder
-/** A template for companion objects of mutable.Map and subclasses thereof.
- *
- * @since 2.8
+/** A template for companion objects of `mutable.Map` and subclasses thereof.
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
*/
abstract class MutableMapFactory[CC[A, B] <: mutable.Map[A, B] with mutable.MapLike[A, B, CC[A, B]]]
- extends MapFactory[CC]
+ extends MapFactory[CC] {
+
+ /** The default builder for $Coll objects.
+ * @tparam A the type of the keys
+ * @tparam B the type of the associated values
+ */
+ override def newBuilder[A, B]: Builder[(A, B), CC[A, B]] = empty[A, B]
+}
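The override works because, in this library design, a mutable map is a builder of itself: `empty[A, B]` can be handed out as the builder and the finished result is simply the map. A usage sketch (the object name is illustrative):
{{{
import scala.collection.mutable

object MutableMapBuilderDemo {
  def main(args: Array[String]): Unit = {
    val m = mutable.Map.empty[String, Int]   // also usable as its own builder
    m += ("a" -> 1)
    m += ("b" -> 2)
    println(m)                               // Map(a -> 1, b -> 2)
  }
}
}}}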
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
new file mode 100644
index 0000000000..28b5fdd897
--- /dev/null
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import mutable.{ Builder, GrowingBuilder }
+
+abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]]
+ extends SetFactory[CC] {
+
+ def newBuilder[A]: Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty[A])
+}
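`GrowingBuilder` plays the same role for mutable sets that `AddingBuilder` plays for immutable ones, except that it mutates the wrapped set with `+=` instead of replacing it. An illustrative sketch of that shape (not the library class):
{{{
import scala.collection.mutable
import scala.collection.mutable.Builder

// Illustrative only: a growing builder over a mutable set.
class SimpleGrowingBuilder[A](empty: mutable.Set[A]) extends Builder[A, mutable.Set[A]] {
  private val elems = empty
  def +=(elem: A): this.type = { elems += elem; this }
  def clear() { elems.clear() }
  def result(): mutable.Set[A] = elems
}
}}}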
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index fabffa0151..80515b4e2f 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index 02fdc62b65..203a1a9459 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection.generic
@@ -30,24 +29,31 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
protected override def underlying: Seq[A]
- // PartialFunction delegates
-
- override def apply(i: Int): A = underlying.apply(i)
- override def isDefinedAt(x: Int): Boolean = underlying.isDefinedAt(x)
-
- // Seq delegates
- // Seq methods could be printed by cat SeqLike.scala | sed -n '/trait Seq/,$ p' | egrep '^ (override )?def'
-
override def length: Int = underlying.length
- override def lengthCompare(l: Int) = underlying lengthCompare l
+ override def apply(idx: Int): A = underlying.apply(idx)
+ override def lengthCompare(len: Int): Int = underlying.lengthCompare(len)
+ override def isDefinedAt(x: Int): Boolean = underlying.isDefinedAt(x)
override def segmentLength(p: A => Boolean, from: Int): Int = underlying.segmentLength(p, from)
override def prefixLength(p: A => Boolean) = underlying.prefixLength(p)
+ override def indexWhere(p: A => Boolean): Int = underlying.indexWhere(p)
override def indexWhere(p: A => Boolean, from: Int): Int = underlying.indexWhere(p, from)
+ override def findIndexOf(p: A => Boolean): Int = underlying.indexWhere(p)
+ override def indexOf[B >: A](elem: B): Int = underlying.indexOf(elem)
override def indexOf[B >: A](elem: B, from: Int): Int = underlying.indexOf(elem, from)
+ override def lastIndexOf[B >: A](elem: B): Int = underlying.lastIndexOf(elem)
+ override def lastIndexOf[B >: A](elem: B, end: Int): Int = underlying.lastIndexOf(elem, end)
+ override def lastIndexWhere(p: A => Boolean): Int = underlying.lastIndexWhere(p)
+ override def lastIndexWhere(p: A => Boolean, end: Int): Int = underlying.lastIndexWhere(p, end)
override def reverseIterator: Iterator[A] = underlying.reverseIterator
override def startsWith[B](that: Seq[B], offset: Int): Boolean = underlying.startsWith(that, offset)
+ override def startsWith[B](that: Seq[B]): Boolean = underlying.startsWith(that)
override def endsWith[B](that: Seq[B]): Boolean = underlying.endsWith(that)
- override def indexOfSeq[B >: A](that: Seq[B]): Int = underlying.indexOfSeq(that)
+ override def indexOfSlice[B >: A](that: Seq[B]): Int = underlying.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = underlying.indexOfSlice(that, from)
+ override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = underlying.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: Seq[B]): Boolean = underlying.containsSlice(that)
override def contains(elem: Any): Boolean = underlying.contains(elem)
+ override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
override def indices: Range = underlying.indices
}
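With the extra delegations above, a forwarder only has to say what it wraps; every `Seq` query then goes straight to the underlying sequence. A minimal sketch, assuming the 2.8-era `scala.collection.generic.SeqForwarder` (class and object names are illustrative):
{{{
import scala.collection.generic.SeqForwarder

class WrappedSeq[A](xs: Seq[A]) extends SeqForwarder[A] {
  protected override def underlying: Seq[A] = xs
}

object SeqForwarderDemo {
  def main(args: Array[String]): Unit = {
    val s = new WrappedSeq(Seq(1, 2, 3, 2))
    println(s.length)          // 4, forwarded
    println(s.indexOf(2))      // 1, forwarded
    println(s.lastIndexOf(2))  // 3, forwarded (one of the newly delegated methods)
  }
}
}}}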
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index be864a45b1..2c7554dc5a 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -1,29 +1,42 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-import mutable.{Builder, AddingBuilder}
+import mutable.Builder
-/** A template for companion objects of <code>Set</code> and subclasses
- * thereof.
+/** A template for companion objects of `Set` and subclasses thereof.
*
- * @since 2.8
+ * @define coll set
+ * @define Coll Set
+ * @define factoryInfo
+ * This object provides a set of operations needed to create `$Coll` values.
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ * @define canBuildFromInfo
+ * The standard `CanBuildFrom` instance for `$Coll` objects.
+ * @see CanBuildFrom
+ * @define setCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for `$Coll` objects.
+ * @see CanBuildFrom
+ * @see GenericCanBuildFrom
*/
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
extends GenericCompanion[CC] {
- def newBuilder[A]: Builder[A, CC[A]] = new AddingBuilder[A, CC[A]](empty[A])
+ def newBuilder[A]: Builder[A, CC[A]]
+ /** $setCanBuildFromInfo
+ */
def setCanBuildFrom[A] = new CanBuildFrom[CC[_], A, CC[A]] {
def apply(from: CC[_]) = newBuilder[A]
def apply() = newBuilder[A]
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index 863b7fe55e..ddd12c05bc 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -1,36 +1,39 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** This class represents collections that can be reduced using a -= operator.
+/** This trait forms part of collections that can be reduced
+ * using a `-=` operator.
*
* @author Martin Odersky
- * @owner Martin Odersky
* @version 2.8
* @since 2.8
+ * @define coll shrinkable collection
+ * @define Coll Shrinkable
*/
trait Shrinkable[-A] {
- /** Removes a single element from this collection.
+ /** Removes a single element from this $coll.
*
* @param elem the element to remove.
+ * @return the $coll itself
*/
def -=(elem: A): this.type
- /** Removes two or more elements from this collection.
+ /** Removes two or more elements from this $coll.
*
* @param elem1 the first element to remove.
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
+ * @return the $coll itself
*/
def -=(elem1: A, elem2: A, elems: A*): this.type = {
this -= elem1
@@ -38,17 +41,12 @@ trait Shrinkable[-A] {
this --= elems
}
- /** Removes a number of elements provided by an iterator from this collection.
+ /** Removes all elements produced by an iterator from this $coll.
*
- * @param iter the iterator.
+ * @param xs the iterator producing the elements to remove.
+ * @return the $coll itself
*/
- def --=(iter: Iterator[A]): this.type = { iter foreach -=; this }
-
- /** Removes a number of elements provided by an iterable object from this collection.
- *
- * @param iter the iterable object.
- */
- def --=(iter: Traversable[A]): this.type = { iter foreach -=; this }
+ def --=(xs: TraversableOnce[A]): this.type = { xs foreach -= ; this }
}
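A short usage sketch of the `Shrinkable` contract (mutable buffers mix in `Shrinkable`; the object name is illustrative):
{{{
import scala.collection.mutable.ArrayBuffer

object ShrinkableDemo {
  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(1, 2, 2, 3, 4)
    buf -= 2              // removes one occurrence: ArrayBuffer(1, 2, 3, 4)
    buf --= List(3, 4)    // removes everything the TraversableOnce produces
    println(buf)          // ArrayBuffer(1, 2)
  }
}
}}}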
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index 78ffe83ec2..447df4fe72 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
@@ -16,8 +15,8 @@ package generic
* @author Sean McDirmid
* @since 2.8
*/
-trait Sorted[K, +This <: Sorted[K, This]]{
- def ordering : Ordering[K];
+trait Sorted[K, +This <: Sorted[K, This]] {
+ def ordering : Ordering[K]
/** The current collection */
protected def repr: This
@@ -25,7 +24,6 @@ trait Sorted[K, +This <: Sorted[K, This]]{
/** return as a projection the set of keys in this collection */
def keySet: SortedSet[K]
-
/** Returns the first key of the collection. */
def firstKey: K
@@ -68,24 +66,25 @@ trait Sorted[K, +This <: Sorted[K, This]]{
*/
def range(from: K, until: K): This = rangeImpl(Some(from), Some(until))
-
/** Create a range projection of this collection with no lower-bound.
* @param to The upper-bound (inclusive) of the ranged projection.
*/
def to(to: K): This = {
// tough!
- val i = keySet.from(to).iterator;
- if (!i.hasNext) return repr
- val next = i.next;
- if (next == to) {
- if (!i.hasNext) return repr
- else return until(i.next)
- } else return until(next)
+ val i = keySet.from(to).iterator
+ if (i.isEmpty) return repr
+ val next = i.next
+ if (next == to)
+ if (i.isEmpty) repr
+ else until(i.next)
+ else
+ until(next)
}
protected def hasAll(j: Iterator[K]): Boolean = {
- val i = keySet.iterator;
- if (!i.hasNext) return !j.hasNext;
+ val i = keySet.iterator
+ if (i.isEmpty) return j.isEmpty
+
var in = i.next;
while (j.hasNext) {
val jn = j.next;
@@ -99,5 +98,4 @@ trait Sorted[K, +This <: Sorted[K, This]]{
}
true
}
-
}
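The rewritten `to` above computes an inclusive upper bound in terms of `until` and the key set. A usage sketch of the four range projections, assuming a 2.8-era `SortedSet` (which mixes in `Sorted`; the object name is illustrative):
{{{
import scala.collection.immutable.SortedSet

object SortedDemo {
  def main(args: Array[String]): Unit = {
    val s = SortedSet(1, 3, 5, 7, 9)
    println(s.range(3, 7))   // elements 3 and 5
    println(s.from(5))       // elements 5, 7 and 9
    println(s.until(7))      // elements 1, 3 and 5
    println(s.to(7))         // elements 1, 3, 5 and 7 (upper bound included)
  }
}
}}}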
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index 29a92fc471..1e960bf52d 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index b4d507cfe0..9144cc8fb7 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index 9ae731bfd1..23b01376d1 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -1,56 +1,59 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** This class represents collections that can be reduced using a - operator.
- *
+/** This trait represents collection-like objects that can be reduced
+ * using a `-` operator. It defines variants of `-` and `--`
+ * as convenience methods in terms of single-element removal `-`.
+ * @tparam A the type of the elements of the $coll.
+ * @tparam Repr the type of the $coll itself
* @author Martin Odersky
- * @owner Martin Odersky
* @version 2.8
* @since 2.8
+ * @define coll collection
+ * @define Coll Subtractable
*/
-trait Subtractable[A, +This <: Subtractable[A, This]] { self =>
+trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
- protected def repr: This
+ /** The representation object of type `Repr` which contains the collection's elements
+ */
+ protected def repr: Repr
- /** Returns a new collection that contains all elements of the current collection
- * except a given element.
- *
- * @param elem the element to remove.
+ /** Creates a new $coll from this $coll with an element removed.
+ * @param elem the element to remove
+ * @return a new collection that contains all elements of the current $coll
+ * except one less occurrence of `elem`.
*/
- def -(elem: A): This
+ def -(elem: A): Repr
- /** Returns a new collection that contains all elements of the current collection
- * except a two or more given elements.
+ /** Creates a new $coll from this $coll with some elements removed.
*
+ * This method takes two or more elements to be removed. Another overloaded
+ * variant of this method handles the case where a single element is
+ * removed.
* @param elem1 the first element to remove.
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
+ * @return a new $coll that contains all elements of the current $coll
+ * except one less occurrence of each of the given elements.
*/
- def -(elem1: A, elem2: A, elems: A*): This =
+ def -(elem1: A, elem2: A, elems: A*): Repr =
this - elem1 - elem2 -- elems
- /** Returns a new collection that contains all elements of the current collection
- * except the elements provided by a traversable object
- *
- * @param elems the traversable object containing the elements that do not form part of the new collection.
- */
- def --(elems: Traversable[A]): This = (repr /: elems) (_ - _)
-
- /** Returns a new collection that contains all elements of the current collection
- * except the elements provided by an iterator
+ /** Creates a new $coll from this $coll by removing all elements of another
+ * collection.
*
- * @param elems the iterator containing the elements that do not form part of the new collection
- * @note same as --
+ * @param xs the collection containing the removed elements.
+ * @return a new $coll that contains all elements of the current $coll
+ * except one less occurrence of each of the elements of `xs`.
*/
- def --(iter: Iterator[A]): This = (repr /: iter) (_ - _)
+ def --(xs: TraversableOnce[A]): Repr = (repr /: xs) (_ - _)
}
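A usage sketch of the `Subtractable` operations (immutable `Map` mixes in `Subtractable`, with keys as the removed elements; the object name is illustrative):
{{{
object SubtractableDemo {
  def main(args: Array[String]): Unit = {
    val m = Map("a" -> 1, "b" -> 2, "c" -> 3, "d" -> 4)
    println(m - "a")                   // removes a single key
    println(m - ("a", "b"))            // the two-or-more-element overload
    println(m -- List("a", "b", "c"))  // removes every key in the collection
  }
}
}}}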
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index 3e34f9097c..c6f5ce4dde 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -1,45 +1,82 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package generic
-/** A template for companion objects of Traversable and subclasses thereof.
+/** A template for companion objects of `Traversable` and subclasses thereof.
+ * This class provides a set of operations to create `$Coll` objects.
+ * It is typically inherited by companion objects of subclasses of `Traversable`.
*
* @since 2.8
+ *
+ * @define coll collection
+ * @define Coll Traversable
+ * @define factoryInfo
+ * This object provides a set of operations to create `$Coll` values.
+ * @author Martin Odersky
+ * @version 2.8
+ * @define canBuildFromInfo
+ * The standard `CanBuildFrom` instance for $Coll objects.
+ * @see CanBuildFrom
+ * @define genericCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for $Coll objects.
+ * The created value is an instance of class `GenericCanBuildFrom`,
+ * which forwards calls to create a new builder to the
+ * `genericBuilder` method of the requesting collection.
+ * @see CanBuildFrom
+ * @see GenericCanBuildFrom
*/
abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversableTemplate[X, CC]]
extends GenericCompanion[CC] {
+ /** A generic implementation of the `CanBuildFrom` trait, which forwards
+ * all calls to `apply(from)` to the `genericBuilder` method of
+ * $coll `from`, and which forwards all calls of `apply()` to the
+ * `newBuilder` method of this factory.
+ */
class GenericCanBuildFrom[A] extends CanBuildFrom[CC[_], A, CC[A]] {
+ /** Creates a new builder on request of a collection.
+ * @param from the collection requesting the builder to be created.
+ * @return the result of invoking the `genericBuilder` method on `from`.
+ */
def apply(from: Coll) = from.genericBuilder[A]
+
+ /** Creates a new builder from scratch
+ * @return the result of invoking the `newBuilder` method of this factory.
+ */
def apply() = newBuilder[A]
}
- /** Concatenate all the argument collections into a single collection.
+ /** Concatenates all argument collections into a single $coll.
*
- * @param xss the collections that are to be concatenated
- * @return the concatenation of all the collections
+ * @param xss the collections that are to be concatenated.
+ * @return the concatenation of all the collections.
*/
def concat[A](xss: Traversable[A]*): CC[A] = {
val b = newBuilder[A]
+ // At present we're using IndexedSeq as a proxy for "has a cheap size method".
+ if (xss forall (_.isInstanceOf[IndexedSeq[_]]))
+ b.sizeHint(xss map (_.size) sum)
+
for (xs <- xss) b ++= xs
b.result
}
- /** A traversable that contains the results of some element computation a number of times.
- * @param n the number of elements returned
+ /** Produces a $coll containing the results of some element computation a number of times.
+ * @param n the number of elements contained in the $coll.
* @param elem the element computation
+ * @return A $coll that contains the results of `n` evaluations of `elem`.
*/
def fill[A](n: Int)(elem: => A): CC[A] = {
val b = newBuilder[A]
+ b.sizeHint(n)
var i = 0
while (i < n) {
b += elem
@@ -48,51 +85,56 @@ abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversabl
b.result
}
- /** A two-dimensional traversable that contains the results of some element computation a number of times.
+ /** Produces a two-dimensional $coll containing the results of some element computation a number of times.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`.
*/
def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] =
tabulate(n1)(_ => fill(n2)(elem))
- /** A three-dimensional traversable that contains the results of some element computation a number of times.
+ /** Produces a three-dimensional $coll containing the results of some element computation a number of times.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3nd dimension
* @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`.
*/
def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] =
tabulate(n1)(_ => fill(n2, n3)(elem))
- /** A four-dimensional traversable that contains the results of some element computation a number of times.
+ /** Produces a four-dimensional $coll containing the results of some element computation a number of times.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3nd dimension
* @param n4 the number of elements in the 4th dimension
* @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`.
*/
def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] =
tabulate(n1)(_ => fill(n2, n3, n4)(elem))
- /** A five-dimensional traversable that contains the results of some element computation a number of times.
+ /** Produces a five-dimensional $coll containing the results of some element computation a number of times.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3nd dimension
* @param n4 the number of elements in the 4th dimension
* @param n5 the number of elements in the 5th dimension
* @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`.
*/
def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] =
tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem))
- /** A traversable containing values of a given function over a range of integer values starting from 0.
- * @param n The number of elements in the traversable
+ /** Produces a $coll containing values of a given function over a range of integer values starting from 0.
+ * @param n The number of elements in the $coll
* @param f The function computing element values
- * @return A traversable consisting of elements `f(0), ..., f(n -1)`
+ * @return A $coll consisting of elements `f(0), ..., f(n -1)`
*/
def tabulate[A](n: Int)(f: Int => A): CC[A] = {
val b = newBuilder[A]
+ b.sizeHint(n)
var i = 0
while (i < n) {
b += f(i)
@@ -101,62 +143,70 @@ abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversabl
b.result
}
- /** A two-dimensional traversable containing values of a given function over ranges of integer values starting from 0.
+ /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2)`
+ * for `0 <= i1 < n1` and `0 <= i2 < n2`.
*/
def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] =
tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
- /** A three-dimensional traversable containing values of a given function over ranges of integer values starting from 0.
+ /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3nd dimension
* @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`.
*/
def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] =
tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
- /** A four-dimensional traversable containing values of a given function over ranges of integer values starting from 0.
+ /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3nd dimension
* @param n4 the number of elements in the 4th dimension
* @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3, i4)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`.
*/
def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
- /** A five-dimensional traversable containing values of a given function over ranges of integer values starting from 0.
+ /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3nd dimension
* @param n4 the number of elements in the 4th dimension
* @param n5 the number of elements in the 5th dimension
* @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`.
*/
def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
- /** A traversable containing a sequence of increasing integers in a range.
+ /** Produces a $coll containing a sequence of increasing integers.
*
- * @param from the start value of the traversable
- * @param end the end value of the traversable (the first value NOT returned)
- * @return the traversable with values in range `start, start + 1, ..., end - 1`
- * up to, but exclusding, `end`.
+ * @param start the first element of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
+ * @return a $coll with values `start, start + 1, ..., end - 1`
*/
def range(start: Int, end: Int): CC[Int] = range(start, end, 1)
- /** A traversable containing equally spaced values in some integer interval.
- * @param start the start value of the traversable
- * @param end the end value of the traversable (the first value NOT returned)
- * @param step the increment value of the traversable (must be positive or negative)
- * @return the traversable with values in `start, start + step, ...` up to, but excluding `end`
+ /** Produces a $coll containing equally spaced values in some integer interval.
+ * @param start the start value of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
+ * @param step the difference between successive elements of the $coll (must be positive or negative)
+ * @return a $coll with values `start, start + step, ...` up to, but excluding `end`
*/
def range(start: Int, end: Int, step: Int): CC[Int] = {
if (step == 0) throw new IllegalArgumentException("zero step")
val b = newBuilder[Int]
+ b.sizeHint(Range.count(start, end, step, false))
var i = start
while (if (step < 0) end < i else i < end) {
b += i
@@ -165,21 +215,26 @@ abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversabl
b.result
}
- /** A traversable containing repeated applications of a function to a start value.
+ /** Produces a $coll containing repeated applications of a function to a start value.
*
- * @param start the start value of the traversable
- * @param len the number of elements returned by the traversable
+ * @param start the start value of the $coll
+ * @param len the number of elements contained in the $coll
* @param f the function that's repeatedly applied
- * @return the traversable returning `len` values in the sequence `start, f(start), f(f(start)), ...`
+ * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...`
*/
def iterate[A](start: A, len: Int)(f: A => A): CC[A] = {
val b = newBuilder[A]
- var acc = start
- var i = 0
- while (i < len) {
+ if (len > 0) {
+ b.sizeHint(len)
+ var acc = start
+ var i = 1
b += acc
- acc = f(acc)
- i += 1
+
+ while (i < len) {
+ acc = f(acc)
+ i += 1
+ b += acc
+ }
}
b.result
}
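A usage sketch of the factory methods documented above, with `List`'s companion object standing in for a concrete `TraversableFactory` (the object name is illustrative):
{{{
object TraversableFactoryDemo {
  def main(args: Array[String]): Unit = {
    println(List.fill(3)("x"))                          // List(x, x, x)
    println(List.tabulate(2, 3)((i, j) => i * 10 + j))  // List(List(0, 1, 2), List(10, 11, 12))
    println(List.range(1, 10, 3))                       // List(1, 4, 7)
    println(List.iterate(1, 5)(_ * 2))                  // List(1, 2, 4, 8, 16)
  }
}
}}}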
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index efd757b1fa..3995a5258b 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection.generic
@@ -42,33 +41,48 @@ trait TraversableForwarder[+A] extends Traversable[A] {
/** The iterable object to which calls are forwarded */
protected def underlying: Traversable[A]
- // Iterable delegates
- // Iterable methods could be printed by cat TarversableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def'
-
- override def isEmpty = underlying.isEmpty
- override def nonEmpty = underlying.nonEmpty
+ override def foreach[B](f: A => B): Unit = underlying.foreach(f)
+ override def isEmpty: Boolean = underlying.isEmpty
+ override def nonEmpty: Boolean = underlying.nonEmpty
+ override def size: Int = underlying.size
override def hasDefiniteSize = underlying.hasDefiniteSize
- override def foreach[B](f: A => B) = underlying.foreach(f)
override def forall(p: A => Boolean): Boolean = underlying.forall(p)
override def exists(p: A => Boolean): Boolean = underlying.exists(p)
override def count(p: A => Boolean): Int = underlying.count(p)
override def find(p: A => Boolean): Option[A] = underlying.find(p)
override def foldLeft[B](z: B)(op: (B, A) => B): B = underlying.foldLeft(z)(op)
+ override def /: [B](z: B)(op: (B, A) => B): B = underlying./:(z)(op)
override def foldRight[B](z: B)(op: (A, B) => B): B = underlying.foldRight(z)(op)
+ override def :\ [B](z: B)(op: (A, B) => B): B = underlying.:\(z)(op)
override def reduceLeft[B >: A](op: (B, A) => B): B = underlying.reduceLeft(op)
- override def reduceRight[B >: A](op: (A, B) => B): B = underlying.reduceRight(op)
override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = underlying.reduceLeftOption(op)
+ override def reduceRight[B >: A](op: (A, B) => B): B = underlying.reduceRight(op)
override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = underlying.reduceRightOption(op)
+ override def sum[B >: A](implicit num: Numeric[B]): B = underlying.sum(num)
+ override def product[B >: A](implicit num: Numeric[B]): B = underlying.product(num)
+ override def min[B >: A](implicit cmp: Ordering[B]): A = underlying.min(cmp)
+ override def max[B >: A](implicit cmp: Ordering[B]): A = underlying.max(cmp)
+ override def head: A = underlying.head
+ override def headOption: Option[A] = underlying.headOption
+ override def last: A = underlying.last
+ override def lastOption: Option[A] = underlying.lastOption
override def copyToBuffer[B >: A](dest: Buffer[B]) = underlying.copyToBuffer(dest)
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = underlying.copyToArray(xs, start, len)
- override def toArray[B >: A : ClassManifest]: Array[B] = underlying.toArray
+ override def copyToArray[B >: A](xs: Array[B], start: Int) = underlying.copyToArray(xs, start)
+ override def copyToArray[B >: A](xs: Array[B]) = underlying.copyToArray(xs)
+ override def toArray[B >: A: ClassManifest]: Array[B] = underlying.toArray
override def toList: List[A] = underlying.toList
+ override def toIterable: Iterable[A] = underlying.toIterable
override def toSeq: Seq[A] = underlying.toSeq
+ override def toIndexedSeq[B >: A] = underlying.toIndexedSeq
+ override def toBuffer[B >: A] = underlying.toBuffer
override def toStream: Stream[A] = underlying.toStream
+ override def toSet[B >: A]: immutable.Set[B] = underlying.toSet
+ override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = underlying.toMap(ev)
override def mkString(start: String, sep: String, end: String): String = underlying.mkString(start, sep, end)
+ override def mkString(sep: String): String = underlying.mkString(sep)
+ override def mkString: String = underlying.mkString
override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = underlying.addString(b, start, sep, end)
-
- override def head: A = underlying.head
- override def last: A = underlying.last
- override def lastOption: Option[A] = underlying.lastOption
+ override def addString(b: StringBuilder, sep: String): StringBuilder = underlying.addString(b, sep)
+ override def addString(b: StringBuilder): StringBuilder = underlying.addString(b)
}
diff --git a/src/library/scala/collection/generic/TraversableView.scala.1 b/src/library/scala/collection/generic/TraversableView.scala.1
deleted file mode 100644
index bbded08091..0000000000
--- a/src/library/scala/collection/generic/TraversableView.scala.1
+++ /dev/null
@@ -1,152 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scalay.collection.generic
-
-import Math.MAX_INT
-import TraversableView.NoBuilder
-
-/** <p>
- * A base class for views of <code>Traversable</code>.
- * </p>
- * <p>
- * Every subclass has to implement the <code>foreach</code> method.
- * </p>
- *
- * @since 2.8
- */
-abstract class TraversableView[+A, +Coll <: Traversable[_]] extends Traversable[A] {
-self =>
-
- type This >: this.type <: TraversableView[A, Coll] { type This = self.This }
- protected val thisCollection: This = this
-
- protected[this] def newBuilder: Builder[A, This, This] =
- throw new UnsupportedOperationException(this+".newBuilder")
-
- def force[B >: A, That](implicit b: Builder[B, That, Coll]) = {
- b ++= this
- b.result()
- }
-
- trait Transformed[+B] extends TraversableView[B, Coll]
-
- /** pre: from >= 0
- */
- trait Sliced extends Transformed[A] {
- protected[this] val from: Int
- protected[this] val until: Int
- override def foreach(f: A => Unit) {
- var index = 0
- for (x <- self) {
- if (from <= index) {
- if (until <= index) return
- f(x)
- }
- index += 1
- }
- }
- override def stringPrefix = self.stringPrefix+"S"
- override def slice(from1: Int, until1: Int) =
- newSliced(from + (from1 max 0), from + (until1 max 0)).asInstanceOf[This]
- }
-
- trait Mapped[B] extends Transformed[B] {
- protected[this] val mapping: A => B
- override def foreach(f: B => Unit) {
- for (x <- self)
- f(mapping(x))
- }
- override def stringPrefix = self.stringPrefix+"M"
- }
-
- trait FlatMapped[B] extends Transformed[B] {
- protected[this] val mapping: A => Traversable[B]
- override def foreach(f: B => Unit) {
- for (x <- self)
- for (y <- mapping(x))
- f(y)
- }
- override def stringPrefix = self.stringPrefix+"N"
- }
-
- trait Appended[B >: A] extends Transformed[B] {
- protected[this] val rest: Traversable[B]
- override def foreach(f: B => Unit) {
- for (x <- self) f(x)
- for (x <- rest) f(x)
- }
- override def stringPrefix = self.stringPrefix+"A"
- }
-
- trait Filtered extends Transformed[A] {
- protected[this] val pred: A => Boolean
- override def foreach(f: A => Unit) {
- for (x <- self)
- if (pred(x)) f(x)
- }
- override def stringPrefix = self.stringPrefix+"F"
- }
-
- trait TakenWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- override def foreach(f: A => Unit) {
- for (x <- self) {
- if (!pred(x)) return
- f(x)
- }
- }
- override def stringPrefix = self.stringPrefix+"T"
- }
-
- trait DroppedWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- override def foreach(f: A => Unit) {
- var go = false
- for (x <- self) {
- if (!go && !pred(x)) go = true
- if (go) f(x)
- }
- }
- override def stringPrefix = self.stringPrefix+"D"
- }
-
- override def ++[B >: A, That](that: Traversable[B])(implicit b: Builder[B, That, This]): That =
- if (b.isInstanceOf[NoBuilder[_]]) newAppended(that).asInstanceOf[That]
- else super.++[B, That](that)(b)
-
- override def ++[B >: A, That](that: Iterator[B])(implicit b: Builder[B, That, This]): That = ++[B, That](that.toStream)
-
- override def map[B, That](f: A => B)(implicit b: Builder[B, That, This]): That =
- if (b.isInstanceOf[NoBuilder[_]]) newMapped(f).asInstanceOf[That]
- else super.map[B, That](f)(b)
-
- override def flatMap[B, That](f: A => Traversable[B])(implicit b: Builder[B, That, This]): That =
- if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
- else super.flatMap[B, That](f)(b)
-
- override def filter(p: A => Boolean): This = newFiltered(p).asInstanceOf[This]
- override def init: This = newSliced(0, size - 1).asInstanceOf[This]
- override def drop(n: Int): This = newSliced(n max 0, MAX_INT).asInstanceOf[This]
- override def take(n: Int): This = newSliced(0, n).asInstanceOf[This]
- override def slice(from: Int, until: Int): This = newSliced(from max 0, until).asInstanceOf[This]
- override def dropWhile(p: A => Boolean): This = newDroppedWhile(p).asInstanceOf[This]
- override def takeWhile(p: A => Boolean): This = newTakenWhile(p).asInstanceOf[This]
- override def span(p: A => Boolean): (This, This) = (takeWhile(p), dropWhile(p))
- override def splitAt(n: Int): (This, This) = (take(n), drop(n))
-}
-
-object TraversableView {
- class NoBuilder[A] extends Builder[A, Nothing, TraversableView[_, _]] {
- def +=(elem: A) {}
- def iterator: Iterator[A] = Iterator.empty
- @deprecated("use `iterator' instead") def elements = iterator
- def result() = throw new UnsupportedOperationException("TraversableView.Builder.result")
- def clear() {}
- }
- implicit def implicitBuilder[A]: Builder[A, TraversableView[A, Traversable[_]], TraversableView[_, _]] = new NoBuilder
-}
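The deleted draft above describes the view transformers (Sliced, Mapped, Filtered, ...) that live on in the real `TraversableView`. A small sketch of the behaviour they provide, assuming the 2.8-era view API: transformations stay lazy until the view is forced (the object name and counter are illustrative):
{{{
object ViewDemo {
  def main(args: Array[String]): Unit = {
    var evaluated = 0
    val v = (1 to 10).view.map { x => evaluated += 1; x * 2 }.take(3)
    println(evaluated)   // 0: the mapping has not run yet
    println(v.force)     // forcing applies the mapping to the taken prefix
    println(evaluated)   // only a small prefix was evaluated, not all ten
  }
}
}}}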
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 735e950c96..e7fd5c7f0f 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,10 +13,12 @@ package immutable
import generic._
import BitSetLike.{LogWL, updateArray}
+import mutable.{ Builder, AddingBuilder }
-/** A base class for immutable bit sets.
- *
- * @since 1
+/** A class for immutable bitsets.
+ * $bitsetinfo
+ * @define Coll immutable.BitSet
+ * @define coll immutable bitset
*/
@serializable @SerialVersionUID(1611436763290191562L)
abstract class BitSet extends Set[Int]
@@ -35,7 +36,7 @@ abstract class BitSet extends Set[Int]
/** Adds element to bitset, returning a new set.
*/
def + (elem: Int): BitSet = {
- require(elem >= 0)
+ require(elem >= 0, "bitset element must be >= 0")
if (contains(elem)) this
else {
val idx = elem >> LogWL
@@ -46,7 +47,7 @@ abstract class BitSet extends Set[Int]
/** Removes element from bitset, returning a new set
*/
def - (elem: Int): BitSet = {
- require(elem >= 0)
+ require(elem >= 0, "bitset element must be >= 0")
if (contains(elem)) {
val idx = elem >> LogWL
updateWord(idx, word(idx) & ~(1L << elem))
@@ -54,12 +55,18 @@ abstract class BitSet extends Set[Int]
}
}
-/** A factory object for bitsets */
+/** $factoryInfo
+ * @define Coll immutable.BitSet
+ * @define coll immutable bitset
+ */
object BitSet extends BitSetFactory[BitSet] {
-
/** The empty bitset */
val empty: BitSet = new BitSet1(0L)
+ /** An adding builder for immutable Sets. */
+ def newBuilder: Builder[Int, BitSet] = new AddingBuilder[Int, BitSet](empty)
+
+ /** $bitsetCanBuildFrom */
implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
/** A bitset containing all the bits in an array */
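
The visible changes to immutable.BitSet are the descriptive require messages on + and - and the AddingBuilder-backed newBuilder. A small usage sketch against the public API only (BitSetDemo is an invented name; the comments describe expected behaviour rather than guaranteed output):

import scala.collection.immutable.BitSet

object BitSetDemo {
  def main(args: Array[String]): Unit = {
    val bs = BitSet(1, 3, 64)      // assembled through the new AddingBuilder-based newBuilder
    println(bs + 5)                // a new set containing 1, 3, 5 and 64; bs is unchanged
    println(bs contains 64)        // true; bit 64 lives in the second word
    // bs + (-1) would now abort with "bitset element must be >= 0"
    // instead of a bare requirement failure.
  }
}
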
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
new file mode 100755
index 0000000000..02b469a467
--- /dev/null
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -0,0 +1,65 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package immutable
+
+import generic._
+
+/** A default map which implements the `+` and `-`
+ * methods of maps. It does so using the default builder for
+ * maps defined in the `Map` object.
+ * Instances that inherit from `DefaultMap[A, B]` still have to
+ * define:
+ *
+ * {{{
+ * def get(key: A): Option[B]
+ * def iterator: Iterator[(A, B)]
+ * }}}
+ *
+ * It refers back to the original map.
+ *
+ * It might also be advisable to override `foreach` or
+ * `size` if efficient implementations can be found.
+ *
+ * @tparam A the type of the keys contained in this map.
+ * @tparam B the type of the values associated with the keys.
+ *
+ * @since 2.8
+ */
+trait DefaultMap[A, +B] extends Map[A, B] { self =>
+
+ /** A default implementation which creates a new immutable map.
+ */
+ override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
+ val b = Map.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ /** A default implementation which creates a new immutable map.
+ */
+ override def - (key: A): Map[A, B] = {
+ val b = newBuilder
+ for (kv <- this ; if kv._1 != key) b += kv
+ b.result
+ }
+}
+
+
+
+
+
+
+
+
+
+
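
Per the new scaladoc, a class mixing in DefaultMap only has to supply get and iterator; + and - then fall back to Map's default builder. A minimal hypothetical implementation (PairMap and PairMapDemo are invented for illustration):

import scala.collection.immutable.DefaultMap

// PairMap supplies only the two abstract members and inherits + and - from DefaultMap.
class PairMap extends DefaultMap[String, Int] {
  private val kvs = List("a" -> 1, "b" -> 2)
  def get(key: String): Option[Int] = kvs.find(_._1 == key).map(_._2)
  def iterator: Iterator[(String, Int)] = kvs.iterator
}

object PairMapDemo {
  def main(args: Array[String]): Unit = {
    val m = new PairMap
    println(m + ("c" -> 3))   // built via Map.newBuilder: a plain immutable Map with a, b and c
    println(m - "a")          // a plain immutable Map containing only b
  }
}
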
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 64608d163f..01ef597d24 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,175 +14,362 @@ package immutable
import generic._
import annotation.unchecked.uncheckedVariance
-/** <p>
- * This class implements immutable maps using a hash table.
- * </p>
- * <p>
- * It is optimized for sequential accesses where the last updated table is
- * accessed most often. It supports with reasonable efficiency accesses to
- * previous versions of the table by keeping a change log that's regularly
- * compacted. It needs to synchronize most methods, so it is less suitable
- * for highly concurrent accesses.
- * </p>
+/** This class implements immutable maps using a hash trie.
*
- * @note the builder of a hash map returns specialized representations EmptyMap,Map1,..., Map4
- * for maps of size <= 4.
+ * '''Note:''' the builder of a hash map returns specialized representations `EmptyMap`, `Map1`, ..., `Map4`
+ * for maps of `size <= 4`.
+ *
+ * @tparam A the type of the keys contained in this hash map.
+ * @tparam B the type of the values associated with the keys.
*
* @author Martin Odersky
- * @version 2.0, 19/01/2007
+ * @author Tiark Rompf
+ * @version 2.8
* @since 2.3
+ * @define Coll immutable.HashMap
+ * @define coll immutable hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-@serializable @SerialVersionUID(8886909077084990906L)
-class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with mutable.HashTable[A] {
-
- type Entry = scala.collection.mutable.DefaultEntry[A, Any]
+@serializable @SerialVersionUID(2L)
+class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] {
- protected var later: HashMap[A, B @uncheckedVariance] = null
- protected var oldKey: A = _
- protected var oldValue: Option[B @uncheckedVariance] = _
- protected var deltaSize: Int = _
+ override def size: Int = 0
override def empty = HashMap.empty[A, B]
- def get(key: A): Option[B] = synchronized {
- var m: HashMap[A, _ >: B] = this
- var cnt = 0
- while (m.later != null) {
- if (key == m.oldKey) return m.oldValue.asInstanceOf[Option[B]]
- cnt += 1
- m = m.later
- }
- if (cnt > logLimit) makeCopy(m)
- val e = m.findEntry(key)
- if (e == null) None
- else Some(getValue(e))
- }
+ def iterator: Iterator[(A,B)] = Iterator.empty
- override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] = synchronized {
- makeCopyIfUpdated()
- val e = findEntry(key)
- if (e == null) {
- markUpdated(key, None, 1)
- later.addEntry(new Entry(key, value))
- } else {
- markUpdated(key, Some(getValue(e)), 0)
- e.value = value
- }
- later.asInstanceOf[HashMap[A, B1]]
- }
+ override def foreach[U](f: ((A, B)) => U): Unit = { }
+
+ def get(key: A): Option[B] =
+ get0(key, computeHash(key), 0)
+
+ override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] =
+ updated0(key, computeHash(key), 0, value, null)
+
+ override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] =
+ updated0(kv._1, computeHash(kv._1), 0, kv._2, kv)
- /** Add a key/value pair to this map.
- * @param kv the key/value pair
- * @return A new map with the new binding added to this map
- */
- override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] = updated(kv._1, kv._2)
-
- /** Adds two or more elements to this collection and returns
- * either the collection itself (if it is mutable), or a new collection
- * with the added elements.
- *
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
- */
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): HashMap[A, B1] =
this + elem1 + elem2 ++ elems
+ // TODO: optimize (might be able to use mutable updates)
- def - (key: A): HashMap[A, B] = synchronized {
- makeCopyIfUpdated()
- val e = findEntry(key)
- if (e == null) this
- else {
- markUpdated(key, Some(getValue(e)), -1)
- later removeEntry key
- later.asInstanceOf[HashMap[A, B]]
- }
+ def - (key: A): HashMap[A, B] =
+ removed0(key, computeHash(key), 0)
+
+ protected def elemHashCode(key: A) = if (key == null) 0 else key.##
+
+ protected final def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
}
- override def size: Int = synchronized {
- var m: HashMap[A, _ >: B] = this
- var cnt = 0
- var s = 0
- while (m.later != null) {
- s -= m.deltaSize
- cnt += 1
- m = m.later
- }
- s += m.tableSize
- if (cnt > logLimit) makeCopy(m)
- s
+ protected def computeHash(key: A) = improve(elemHashCode(key))
+
+ protected def get0(key: A, hash: Int, level: Int): Option[B] = None
+
+ protected def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ new HashMap.HashMap1(key, hash, value, kv)
+
+ protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this
+
+
+ protected def writeReplace(): AnyRef = new HashMap.SerializationProxy(this)
+
+}
+
+/** $factoryInfo
+ * @define Coll immutable.HashMap
+ * @define coll immutable hash map
+ *
+ * @author Tiark Rompf
+ * @since 2.3
+ */
+object HashMap extends ImmutableMapFactory[HashMap] {
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
+ def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]]
+
+ private object EmptyHashMap extends HashMap[Any,Nothing] {
+
}
- def iterator = synchronized {
- makeCopyIfUpdated()
- entriesIterator map {e => (e.key, getValue(e))}
+ // TODO: add HashMap2, HashMap3, ...
+
+ class HashMap1[A,+B](private var key: A, private[HashMap] var hash: Int, private var value: (B @uncheckedVariance), private var kv: (A,B @uncheckedVariance)) extends HashMap[A,B] {
+ override def size = 1
+
+ override def get0(key: A, hash: Int, level: Int): Option[B] =
+ if (hash == this.hash && key == this.key) Some(value) else None
+
+ override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ if (hash == this.hash && key == this.key) new HashMap1(key, hash, value, kv)
+ else {
+ if (hash != this.hash) {
+ //new HashTrieMap[A,B1](level+5, this, new HashMap1(key, hash, value, kv))
+ val m = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0) // TODO: could save array alloc
+ m.updated0(this.key, this.hash, level, this.value, this.kv).updated0(key, hash, level, value, kv)
+ } else {
+ // 32-bit hash collision (rare, but not impossible)
+ new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
+ }
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
+ if (hash == this.hash && key == this.key) HashMap.empty[A,B] else this
+
+ override def iterator: Iterator[(A,B)] = Iterator(ensurePair)
+ override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair)
+ private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
}
- private def getValue(e: Entry) =
- e.value.asInstanceOf[B]
+ private class HashMapCollision1[A,+B](private[HashMap] var hash: Int, var kvs: ListMap[A,B @uncheckedVariance]) extends HashMap[A,B] {
+ override def size = kvs.size
- private def logLimit: Int = Math.sqrt(table.length).toInt
+ override def get0(key: A, hash: Int, level: Int): Option[B] =
+ if (hash == this.hash) kvs.get(key) else None
- private[this] def markUpdated(key: A, ov: Option[B], delta: Int) {
- val lv = loadFactor
- later = new HashMap[A, B] {
- override def initialSize = 0
- override def loadFactor = lv
- table = HashMap.this.table
- tableSize = HashMap.this.tableSize
- threshold = HashMap.this.threshold
- }
- oldKey = key
- oldValue = ov
- deltaSize = delta
+ override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ if (hash == this.hash) new HashMapCollision1(hash, kvs.updated(key, value))
+ else {
+ var m: HashMap[A,B1] = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0)
+ // might be able to save some ops here, but it doesn't seem to be worth it
+ for ((k,v) <- kvs)
+ m = m.updated0(k, this.hash, level, v, null)
+ m.updated0(key, hash, level, value, kv)
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
+ if (hash == this.hash) {
+ val kvs1 = kvs - key
+ if (!kvs1.isEmpty)
+ new HashMapCollision1(hash, kvs1)
+ else
+ HashMap.empty[A,B]
+ } else this
+
+ override def iterator: Iterator[(A,B)] = kvs.iterator
+ override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f)
}
- private def makeCopy(last: HashMap[A, _ >: B]) {
- def undo(m: HashMap[A, _ >: B]) {
- if (m ne last) {
- undo(m.later)
- if (m.deltaSize == 1) removeEntry(m.oldKey)
- else if (m.deltaSize == 0) findEntry(m.oldKey).value = m.oldValue.get
- else if (m.deltaSize == -1) addEntry(new Entry(m.oldKey, m.oldValue.get))
+
+ class HashTrieMap[A,+B](private var bitmap: Int, private var elems: Array[HashMap[A,B @uncheckedVariance]],
+ private var size0: Int) extends HashMap[A,B] {
+/*
+ def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
+ this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
+ val idx1 = (m1.hash >>> level) & 0x1f
+ val idx2 = (m2.hash >>> level) & 0x1f
+ assert(idx1 != idx2, m1.hash + "==" + m2.hash + " at level " + level) // TODO
+ val elems = new Array[HashMap[A,B]](2)
+ if (idx1 < idx2) {
+ elems(0) = m1
+ elems(1) = m2
+ } else {
+ elems(0) = m2
+ elems(1) = m1
+ }
+ elems
+ }, 2)
+ }
+*/
+ override def size = size0
+
+ override def get0(key: A, hash: Int, level: Int): Option[B] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ if (bitmap == - 1) {
+ elems(index & 0x1f).get0(key, hash, level + 5)
+ } else if ((bitmap & mask) != 0) {
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
+ elems(offset).get0(key, hash, level + 5)
+ } else
+ None
+ }
+
+ override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if ((bitmap & mask) != 0) {
+ val elemsNew = new Array[HashMap[A,B1]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
+ val subNew = sub.updated0(key, hash, level + 5, value, kv)
+ elemsNew(offset) = subNew
+ new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ } else {
+ val elemsNew = new Array[HashMap[A,B1]](elems.length + 1)
+ Array.copy(elems, 0, elemsNew, 0, offset)
+ elemsNew(offset) = new HashMap1(key, hash, value, kv)
+ Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset)
+ val bitmapNew = bitmap | mask
+ new HashTrieMap(bitmapNew, elemsNew, size + 1)
}
}
- def copy(e: Entry): Entry =
- if (e == null) null
- else {
- val rest = copy(e.next)
- val result = new Entry(e.key, e.value)
- result.next = rest
- result
+
+ override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if ((bitmap & mask) != 0) {
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
+ val subNew = sub.removed0(key, hash, level + 5)
+ if (subNew.isEmpty) {
+ val bitmapNew = bitmap ^ mask
+ if (bitmapNew != 0) {
+ val elemsNew = new Array[HashMap[A,B]](elems.length - 1)
+ Array.copy(elems, 0, elemsNew, 0, offset)
+ Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
+ val sizeNew = size - sub.size
+ new HashTrieMap(bitmapNew, elemsNew, sizeNew)
+ } else
+ HashMap.empty[A,B]
+ } else {
+ val elemsNew = new Array[HashMap[A,B]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ elemsNew(offset) = subNew
+ val sizeNew = size + (subNew.size - sub.size)
+ new HashTrieMap(bitmap, elemsNew, sizeNew)
+ }
+ } else {
+ this
+ }
+ }
+
+/*
+ override def iterator = { // TODO: optimize (use a stack to keep track of pos)
+
+ def iter(m: HashTrieMap[A,B], k: => Stream[(A,B)]): Stream[(A,B)] = {
+ def horiz(elems: Array[HashMap[A,B]], i: Int, k: => Stream[(A,B)]): Stream[(A,B)] = {
+ if (i < elems.length) {
+ elems(i) match {
+ case m: HashTrieMap[A,B] => iter(m, horiz(elems, i+1, k))
+ case m: HashMap1[A,B] => new Stream.Cons(m.ensurePair, horiz(elems, i+1, k))
+ }
+ } else k
+ }
+ horiz(m.elems, 0, k)
+ }
+ iter(this, Stream.empty).iterator
+ }
+*/
+
+
+ override def iterator = new Iterator[(A,B)] {
+ private[this] var depth = 0
+ private[this] var arrayStack = new Array[Array[HashMap[A,B]]](6)
+ private[this] var posStack = new Array[Int](6)
+
+ private[this] var arrayD = elems
+ private[this] var posD = 0
+
+ private[this] var subIter: Iterator[(A,B)] = null // to traverse collision nodes
+
+ def hasNext = (subIter ne null) || depth >= 0
+
+ def next: (A,B) = {
+ if (subIter ne null) {
+ val el = subIter.next
+ if (!subIter.hasNext)
+ subIter = null
+ el
+ } else
+ next0(arrayD, posD)
+ }
+
+ @scala.annotation.tailrec private[this] def next0(elems: Array[HashMap[A,B]], i: Int): (A,B) = {
+ if (i == elems.length-1) { // reached end of level, pop stack
+ depth -= 1
+ if (depth >= 0) {
+ arrayD = arrayStack(depth)
+ posD = posStack(depth)
+ arrayStack(depth) = null
+ } else {
+ arrayD = null
+ posD = 0
+ }
+ } else
+ posD += 1
+
+ elems(i) match {
+ case m: HashTrieMap[A,B] => // push current pos onto stack and descend
+ if (depth >= 0) {
+ arrayStack(depth) = arrayD
+ posStack(depth) = posD
+ }
+ depth += 1
+ arrayD = m.elems
+ posD = 0
+ next0(m.elems, 0)
+ case m: HashMap1[A,B] => m.ensurePair
+ case m =>
+ subIter = m.iterator
+ subIter.next
+ }
+ }
+ }
+
+/*
+
+import collection.immutable._
+def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
+var mOld = OldHashMap.empty[Int,Int]
+var mNew = HashMap.empty[Int,Int]
+time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
+time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
+time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
+time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
+time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
+time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+
+*/
+
+
+ override def foreach[U](f: ((A, B)) => U): Unit = {
+ var i = 0;
+ while (i < elems.length) {
+ elems(i).foreach(f)
+ i += 1
}
- val ltable = last.table
- val s = ltable.length
- table = new scala.Array[collection.mutable.HashEntry[A, Entry]](s)
- var i = 0
- while (i < s) {
- table(i) = copy(ltable(i).asInstanceOf[Entry])
- i += 1
}
- tableSize = last.tableSize
- threshold = last.threshold
- undo(this)
- later = null
+
}
- private def makeCopyIfUpdated() {
- var m: HashMap[A, _ >: B] = this
- while (m.later != null) m = m.later
- if (m ne this) makeCopy(m)
+ @serializable @SerialVersionUID(2L) private class SerializationProxy[A,B](@transient private var orig: HashMap[A, B]) {
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ val s = orig.size
+ out.writeInt(s)
+ for ((k,v) <- orig) {
+ out.writeObject(k)
+ out.writeObject(v)
+ }
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ orig = empty
+ val s = in.readInt()
+ for (i <- 0 until s) {
+ val key = in.readObject().asInstanceOf[A]
+ val value = in.readObject().asInstanceOf[B]
+ orig = orig.updated(key, value)
+ }
+ }
+
+ private def readResolve(): AnyRef = orig
}
-}
-/** A factory object for immutable HashMaps.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.3
- */
-object HashMap extends ImmutableMapFactory[HashMap] {
- implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
- def empty[A, B]: HashMap[A, B] = new HashMap
}
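
The core of the replacement hash trie is the bitmap/popcount indexing shared by get0, updated0 and removed0: five bits of the improved hash pick a slot at each level, and Integer.bitCount maps the sparse 32-slot node onto a dense child array. A standalone sketch of just that index calculation (childOffset is an illustrative helper, not a member of HashMap):

object TrieIndexDemo {
  // Given a node's bitmap, find the position of the child responsible for `hash`
  // at trie depth `level`, or None if that slot is unoccupied.
  def childOffset(bitmap: Int, hash: Int, level: Int): Option[Int] = {
    val index = (hash >>> level) & 0x1f               // 5 bits of the hash per level
    val mask  = 1 << index
    if ((bitmap & mask) != 0)
      Some(Integer.bitCount(bitmap & (mask - 1)))     // number of occupied slots below `index`
    else
      None
  }

  def main(args: Array[String]): Unit = {
    val bitmap = (1 << 3) | (1 << 17)                 // children present at indices 3 and 17
    println(childOffset(bitmap, 17, 0))               // Some(1): second entry of the dense array
    println(childOffset(bitmap, 4, 0))                // None: no child for this hash slice
  }
}
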
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 93b9678751..08e64d6709 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -1,147 +1,350 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
import generic._
+import annotation.unchecked.uncheckedVariance
-/** <p>
- * This class implements immutable sets using a hash table.
- * </p>
- * <p>
- * It is optimized for sequential accesses where the last updated table is
- * accessed most often. It supports with reasonable efficiency accesses to
- * previous versions of the table by keeping a change log that's regularly
- * compacted. It needs to synchronize most methods, so it is less suitable
- * for highly concurrent accesses.
- * </p>
+/** This class implements immutable sets using a hash trie.
*
- * @note the builder of a hash set returns specialized representations EmptySet,Set1,..., Set4
- * for sets of size <= 4.
+ * '''Note:''' the builder of a hash set returns specialized representations `EmptySet`, `Set1`, ..., `Set4`
+ * for sets of `size <= 4`.
+ *
+ * @tparam A the type of the elements contained in this hash set.
*
* @author Martin Odersky
+ * @author Tiark Rompf
* @version 2.8
* @since 2.3
+ * @define Coll immutable.HashSet
+ * @define coll immutable hash set
*/
-@serializable @SerialVersionUID(4020728942921483037L)
+@serializable @SerialVersionUID(2L)
class HashSet[A] extends Set[A]
with GenericSetTemplate[A, HashSet]
- with SetLike[A, HashSet[A]]
- with mutable.FlatHashTable[A] {
+ with SetLike[A, HashSet[A]] {
override def companion: GenericCompanion[HashSet] = HashSet
- protected var later: HashSet[A] = null
- protected var changedElem: A = _
- protected var deleted: Boolean = _
-
- def contains(elem: A): Boolean = synchronized {
- var m = this
- var cnt = 0
- while (m.later != null) {
- if (elem == m.changedElem) return m.deleted
- cnt += 1
- m = m.later
- }
- if (cnt > logLimit) makeCopy(m)
- m.containsEntry(elem)
+ //class HashSet[A] extends Set[A] with SetLike[A, HashSet[A]] {
+
+ override def size: Int = 0
+
+ override def empty = HashSet.empty[A]
+
+ def iterator: Iterator[A] = Iterator.empty
+
+ override def foreach[U](f: A => U): Unit = { }
+
+ def contains(e: A): Boolean = get0(e, computeHash(e), 0)
+
+ override def + (e: A): HashSet[A] = updated0(e, computeHash(e), 0)
+
+ override def + (elem1: A, elem2: A, elems: A*): HashSet[A] =
+ this + elem1 + elem2 ++ elems
+ // TODO: optimize (might be able to use mutable updates)
+
+ def - (e: A): HashSet[A] =
+ removed0(e, computeHash(e), 0)
+
+ protected def elemHashCode(key: A) = if (key == null) 0 else key.##
+
+ protected final def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
}
- def + (elem: A): HashSet[A] = synchronized {
- makeCopyIfUpdated()
- if (containsEntry(elem)) this
- else {
- markUpdated(elem, false)
- later addEntry elem
- later
- }
+ protected def computeHash(key: A) = improve(elemHashCode(key))
+
+ protected def get0(key: A, hash: Int, level: Int): Boolean = false
+
+ protected def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ new HashSet.HashSet1(key, hash)
+
+ protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this
+
+ protected def writeReplace(): AnyRef = new HashSet.SerializationProxy(this)
+}
+
+/** $factoryInfo
+ * @define Coll immutable.HashSet
+ * @define coll immutable hash set
+ *
+ * @author Tiark Rompf
+ * @since 2.3
+ * @define Coll immutable.HashSet
+ * @define coll immutable hash set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+object HashSet extends ImmutableSetFactory[HashSet] {
+ /** $setCanBuildFromInfo */
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
+ override def empty[A]: HashSet[A] = EmptyHashSet.asInstanceOf[HashSet[A]]
+
+ private object EmptyHashSet extends HashSet[Any] {
}
- def - (elem: A): HashSet[A] = synchronized {
- makeCopyIfUpdated()
- if (!containsEntry(elem)) this
- else {
- markUpdated(elem, true)
- later removeEntry elem
- later
- }
+ // TODO: add HashSet2, HashSet3, ...
+
+ class HashSet1[A](private[HashSet] var key: A, private[HashSet] var hash: Int) extends HashSet[A] {
+ override def size = 1
+
+ override def get0(key: A, hash: Int, level: Int): Boolean =
+ (hash == this.hash && key == this.key)
+
+ override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash && key == this.key) this
+ else {
+ if (hash != this.hash) {
+ //new HashTrieSet[A](level+5, this, new HashSet1(key, hash))
+ val m = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) // TODO: could save array alloc
+ m.updated0(this.key, this.hash, level).updated0(key, hash, level)
+ } else {
+ // 32-bit hash collision (rare, but not impossible)
+ new HashSetCollision1(hash, ListSet.empty + this.key + key)
+ }
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash && key == this.key) HashSet.empty[A] else this
+
+ override def iterator: Iterator[A] = Iterator(key)
+ override def foreach[U](f: A => U): Unit = f(key)
}
- override def size: Int = synchronized {
- var m = this
- var cnt = 0
- var s = 0
- while (m.later != null) {
- if (m.deleted) s += 1 else s -= 1
- cnt += 1
- m = m.later
+ private class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A]) extends HashSet[A] {
+ override def size = ks.size
+
+ override def get0(key: A, hash: Int, level: Int): Boolean =
+ if (hash == this.hash) ks.contains(key) else false
+
+ override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash) new HashSetCollision1(hash, ks + key)
+ else {
+ var m: HashSet[A] = new HashTrieSet[A](0,new Array[HashSet[A]](0),0)
+ // might be able to save some ops here, but it doesn't seem to be worth it
+ for (k <- ks)
+ m = m.updated0(k, this.hash, level)
+ m.updated0(key, hash, level)
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash) {
+ val ks1 = ks - key
+ if (!ks1.isEmpty)
+ new HashSetCollision1(hash, ks1)
+ else
+ HashSet.empty[A]
+ } else this
+
+ override def iterator: Iterator[A] = ks.iterator
+ override def foreach[U](f: A => U): Unit = ks.foreach(f)
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ // this cannot work - reading things in might produce different
+ // hash codes and remove the collision. however this is never called
+ // because no references to this class are ever handed out to client code
+ // and HashTrieSet serialization takes care of the situation
+ error("cannot serialize an immutable.HashSet where all items have the same 32-bit hash code")
+ //out.writeObject(kvs)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ error("cannot deserialize an immutable.HashSet where all items have the same 32-bit hash code")
+ //kvs = in.readObject().asInstanceOf[ListSet[A]]
+ //hash = computeHash(kvs.)
}
- s += m.tableSize
- if (cnt > logLimit) makeCopy(m)
- s
- }
- override def iterator = synchronized {
- makeCopyIfUpdated()
- // note need to cache because (later versions of) set might be mutated while elements are traversed.
- val cached = new mutable.ArrayBuffer() ++= super.iterator
- cached.iterator
}
- private def logLimit: Int = Math.sqrt(table.length).toInt
- private def markUpdated(elem: A, del: Boolean) {
- val lv = loadFactor
- later = new HashSet[A] {
- override def initialSize = 0
- override def loadFactor = lv
- table = HashSet.this.table
- tableSize = HashSet.this.tableSize
- threshold = HashSet.this.threshold
+ class HashTrieSet[A](private var bitmap: Int, private var elems: Array[HashSet[A]],
+ private var size0: Int) extends HashSet[A] {
+
+ override def size = size0
+
+ override def get0(key: A, hash: Int, level: Int): Boolean = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ if (bitmap == - 1) {
+ elems(index & 0x1f).get0(key, hash, level + 5)
+ } else if ((bitmap & mask) != 0) {
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
+ elems(offset).get0(key, hash, level + 5)
+ } else
+ false
}
- changedElem = elem
- deleted = del
- }
- private def makeCopy(last: HashSet[A]) {
- def undo(m: HashSet[A]) {
- if (m ne last) {
- undo(m.later)
- if (m.deleted) addEntry(m.changedElem)
- else removeEntry(m.changedElem)
+ override def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if ((bitmap & mask) != 0) {
+ val elemsNew = new Array[HashSet[A]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
+ val subNew = sub.updated0(key, hash, level + 5)
+ elemsNew(offset) = subNew
+ new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size))
+ } else {
+ val elemsNew = new Array[HashSet[A]](elems.length + 1)
+ Array.copy(elems, 0, elemsNew, 0, offset)
+ elemsNew(offset) = new HashSet1(key, hash)
+ Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset)
+ val bitmapNew = bitmap | mask
+ new HashTrieSet(bitmapNew, elemsNew, size + 1)
+ }
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if ((bitmap & mask) != 0) {
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
+ val subNew = sub.removed0(key, hash, level + 5)
+ if (subNew.isEmpty) {
+ val bitmapNew = bitmap ^ mask
+ if (bitmapNew != 0) {
+ val elemsNew = new Array[HashSet[A]](elems.length - 1)
+ Array.copy(elems, 0, elemsNew, 0, offset)
+ Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
+ val sizeNew = size - sub.size
+ new HashTrieSet(bitmapNew, elemsNew, sizeNew)
+ } else
+ HashSet.empty[A]
+ } else {
+ val elemsNew = new Array[HashSet[A]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ elemsNew(offset) = subNew
+ val sizeNew = size + (subNew.size - sub.size)
+ new HashTrieSet(bitmap, elemsNew, sizeNew)
+ }
+ } else {
+ this
+ }
+ }
+
+
+ override def iterator = new Iterator[A] {
+ private[this] var depth = 0
+ private[this] var arrayStack = new Array[Array[HashSet[A]]](6)
+ private[this] var posStack = new Array[Int](6)
+
+ private[this] var arrayD = elems
+ private[this] var posD = 0
+
+ private[this] var subIter: Iterator[A] = null // to traverse collision nodes
+
+ def hasNext = (subIter ne null) || depth >= 0
+
+ def next: A = {
+ if (subIter ne null) {
+ val el = subIter.next
+ if (!subIter.hasNext)
+ subIter = null
+ el
+ } else
+ next0(arrayD, posD)
+ }
+
+ @scala.annotation.tailrec private[this] def next0(elems: Array[HashSet[A]], i: Int): A = {
+ if (i == elems.length-1) { // reached end of level, pop stack
+ depth -= 1
+ if (depth >= 0) {
+ arrayD = arrayStack(depth)
+ posD = posStack(depth)
+ arrayStack(depth) = null
+ } else {
+ arrayD = null
+ posD = 0
+ }
+ } else
+ posD += 1
+
+ elems(i) match {
+ case m: HashTrieSet[A] => // push current pos onto stack and descend
+ if (depth >= 0) {
+ arrayStack(depth) = arrayD
+ posStack(depth) = posD
+ }
+ depth += 1
+ arrayD = m.elems
+ posD = 0
+ next0(m.elems, 0)
+ case m: HashSet1[A] => m.key
+ case m =>
+ subIter = m.iterator
+ subIter.next
+ }
+ }
+ }
+
+/*
+
+import collection.immutable._
+def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
+var mOld = OldHashSet.empty[Int]
+var mNew = HashSet.empty[Int]
+time { for (i <- 0 until 100000) mOld = mOld + i }
+time { for (i <- 0 until 100000) mOld = mOld + i }
+time { for (i <- 0 until 100000) mOld = mOld + i }
+time { for (i <- 0 until 100000) mNew = mNew + i }
+time { for (i <- 0 until 100000) mNew = mNew + i }
+time { for (i <- 0 until 100000) mNew = mNew + i }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+
+*/
+
+
+ override def foreach[U](f: A => U): Unit = {
+ var i = 0;
+ while (i < elems.length) {
+ elems(i).foreach(f)
+ i += 1
}
}
- table = new scala.Array[AnyRef](last.table.length)
- scala.Array.copy(last.table, 0, table, 0, table.length)
- tableSize = last.tableSize
- threshold = last.threshold
- undo(this)
- later = null
}
- private def makeCopyIfUpdated() {
- var m = this
- while (m.later != null) m = m.later
- if (m ne this) makeCopy(m)
+ @serializable @SerialVersionUID(2L) private class SerializationProxy[A,B](@transient private var orig: HashSet[A]) {
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ val s = orig.size
+ out.writeInt(s)
+ for (e <- orig) {
+ out.writeObject(e)
+ }
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ orig = empty
+ val s = in.readInt()
+ for (i <- 0 until s) {
+ val e = in.readObject().asInstanceOf[A]
+ orig = orig + e
+ }
+ }
+
+ private def readResolve(): AnyRef = orig
}
-}
-/** A factory object for immutable HashSets.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.3
- */
-object HashSet extends SetFactory[HashSet] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
- override def empty[A]: HashSet[A] = new HashSet
}
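
The HashSetCollision1 node handles the "32-bit hash collision (rare, but not impossible)" case by falling back to a ListSet. Strings such as "Aa" and "BB" share a hashCode, so they exercise exactly that path; a small usage sketch (CollisionDemo is an invented name):

import scala.collection.immutable.HashSet

object CollisionDemo {
  def main(args: Array[String]): Unit = {
    println("Aa".hashCode == "BB".hashCode)  // true: both strings hash to 2112
    val s = HashSet("Aa", "BB")              // internally ends up in a HashSetCollision1 node
    println(s.size)                          // 2: both elements survive the collision
    println(s - "Aa")                        // the ListSet fallback removes only the given key
  }
}
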
diff --git a/src/library/scala/collection/immutable/ImmutableIterator.scala.disabled b/src/library/scala/collection/immutable/ImmutableIterator.scala.disabled
deleted file mode 100644
index 2326bc19f6..0000000000
--- a/src/library/scala/collection/immutable/ImmutableIterator.scala.disabled
+++ /dev/null
@@ -1,117 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-// Not clear whether this is needed. How is it different from a Stream?
-package scala.collection.immutable
-
-/** An object for creating immutable iterators.
- * @since 2.4
- */
-object ImmutableIterator {
- case object Empty extends ImmutableIterator[Nothing] {
- def hasNext = false
- def next = throw new NoSuchElementException
- }
-
- private case class NonEmpty[+A](item: A, right: () => ImmutableIterator[A]) extends ImmutableIterator[A] {
- def hasNext = true
- def next = Tuple2(item, right())
- }
-
- /** Creates an empty immutable iterator.
- */
- def empty : ImmutableIterator[Nothing] = Empty
-
- /** Creates an immutable iterator with one element.
- */
- def apply[A](item : A) : ImmutableIterator[A] = NonEmpty(item, () => Empty)
-
- /** Prepends a lazy immutable iterator (right) with an element (item).
- */
- def apply[A](item : A, right : () => ImmutableIterator[A]) : () => ImmutableIterator[A] =
- () => NonEmpty(item, right)
-
- /** Appends an immutable iterator (left) with an element (item) followed
- * by a lazy immutable iterator (right).
- */
- def apply[A](left : ImmutableIterator[A], item : A, right : () => ImmutableIterator[A]) : ImmutableIterator[A] = left match {
- case NonEmpty(first, middle) =>
- val rest = NonEmpty(item,right);
- NonEmpty(first, apply(middle, () => rest));
- case Empty => NonEmpty(item, right);
- }
-
- /** Concats a lazy immutable iterator (left) with another lazy immutable
- * iterator (right).
- */
- def apply[A](left: () => ImmutableIterator[A], right: () => ImmutableIterator[A]): () => ImmutableIterator[A] = () => (left() match {
- case Empty => right()
- case NonEmpty(item, middle) => NonEmpty(item, apply(middle, right))
- });
-}
-
-/** A stateless iterator.
- *
- * @author Sean McDirmid
- * @version 1.0
- * @since 2.4
- */
-sealed abstract class ImmutableIterator[+A] {
-
- /** queries if this iterator has an element to return.
- */
- def hasNext: Boolean
-
- /** returns the next element and immutable iterator as a pair.
- */
- def next: Tuple2[A,ImmutableIterator[A]]
-
- /** Creates a new immutable iterator that appends item to this immutable
- * iterator.
- */
- def append[B >: A](item: B): ImmutableIterator[B] = append(item, () => ImmutableIterator.Empty)
-
- /** Creates a new immutable iterator that appends item and a lazy immutable
- * iterator (right) to this immutable iterator.
- *
- * @param item ...
- * @param right ...
- * @return ...
- */
- def append[B >: A](item: B, right: () => ImmutableIterator[B]): ImmutableIterator[B] =
- ImmutableIterator[B](this, item, right)
-
- /** Creates a new immutable iterator that appends a lazy immutable
- * iterator (right) to this immutable iterator.
- */
- def append[B >: A](right: () => ImmutableIterator[B]) =
- ImmutableIterator(() => this, right)()
-
- private class Elements extends Iterator[A] {
- private[this] var cursor: ImmutableIterator[A] = ImmutableIterator.this
- def hasNext = cursor.hasNext
- def next = {
- val Tuple2(ret,cursor0) = cursor.next
- cursor = cursor0
- ret
- }
- }
-
- /** Converts this immutable iterator into a conventional iterator.
- */
- def iterator: Iterator[A] = new Elements
-
- @deprecated("use `iterator' instead") def elements = iterator
-
-}
-
-
-
-
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index 8fe50367fe..3db217a5f3 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id: IndexedSeq.scala 19072 2009-10-13 12:19:59Z rompf $
package scala.collection
package immutable
@@ -14,10 +13,9 @@ package immutable
import generic._
import mutable.{ArrayBuffer, Builder}
-/** A subtrait of <code>collection.IndexedSeq</code> which represents sequences
- * that cannot be mutated.
- *
- * @since 2.8
+/** A subtrait of `collection.IndexedSeq` which represents indexed sequences
+ * that are guaranteed immutable.
+ * $indexedSeqInfo
*/
trait IndexedSeq[+A] extends Seq[A]
with scala.collection.IndexedSeq[A]
@@ -26,8 +24,10 @@ trait IndexedSeq[+A] extends Seq[A]
override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
}
-/**
- * @since 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `Vector`.
+ * @define coll indexed sequence
+ * @define Coll IndexedSeq
*/
object IndexedSeq extends SeqFactory[IndexedSeq] {
@serializable
@@ -36,5 +36,5 @@ object IndexedSeq extends SeqFactory[IndexedSeq] {
def apply(idx: Int) = buf.apply(idx)
}
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A]
- def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer[A] mapResult (buf => new Impl(buf))
-} \ No newline at end of file
+ def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A]
+}
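
With newBuilder delegating to Vector.newBuilder, the default immutable IndexedSeq is now a Vector rather than an ArrayBuffer-backed wrapper. A quick sketch of the observable effect (IndexedSeqDemo is an invented name; output comments assume the usual toString conventions):

import scala.collection.immutable.IndexedSeq

object IndexedSeqDemo {
  def main(args: Array[String]): Unit = {
    val xs = IndexedSeq(1, 2, 3)    // now assembled by Vector.newBuilder
    println(xs)                     // prints Vector(1, 2, 3)
    println(xs.updated(1, 42))      // also a Vector, with the element at index 1 replaced
  }
}
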
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index 0ce61f93b4..d4605d3e1f 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -1,19 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable;
-/**
- * @author David MacIver
+
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.mutable.Builder
+import scala.collection.mutable.MapBuilder
+
+
+
+/** Utility class for integer maps.
+ * @author David MacIver
*/
private[immutable] object IntMapUtils {
def zero(i : Int, mask : Int) = (i & mask) == 0;
@@ -50,10 +57,16 @@ private[immutable] object IntMapUtils {
import IntMapUtils._
-/**
- * @since 2.7
+/** A companion object for integer maps.
+ * @since 2.7
*/
object IntMap {
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B] = new CanBuildFrom[IntMap[A], (Int, B), IntMap[B]] {
+ def apply(from: IntMap[A]): Builder[(Int, B), IntMap[B]] = apply()
+ def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B])
+ }
+
def empty[T] : IntMap[T] = IntMap.Nil;
def singleton[T](key : Int, value : T) : IntMap[T] = IntMap.Tip(key, value);
def apply[T](elems : (Int, T)*) : IntMap[T] =
@@ -146,12 +159,19 @@ private[immutable] class IntMapKeyIterator[V](it : IntMap[V]) extends IntMapIter
import IntMap._
-/**
- * Specialised immutable map structure for integer keys, based on
- * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
- * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers.
+/** Specialised immutable map structure for integer keys, based on
+ * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ *
+ * Note: This class is as of 2.8 largely superseded by HashMap.
+ *
+ * @tparam T type of the values associated with integer keys.
*
- * @since 2.7
+ * @since 2.7
+ * @define Coll immutable.IntMap
+ * @define coll immutable integer map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap[T]] {
override def empty: IntMap[T] = IntMap.Nil;
@@ -164,6 +184,8 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
/**
* Iterator over key, value pairs of the map in unsigned order of the keys.
+ *
+ * @return an iterator over pairs of integer keys and corresponding values.
*/
def iterator : Iterator[(Int, T)] = this match {
case IntMap.Nil => Iterator.empty;
@@ -275,15 +297,20 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
/**
* Updates the map, using the provided function to resolve conflicts if the key is already present.
- * Equivalent to
- * <pre>this.get(key) match {
- * case None => this.update(key, value);
- * case Some(oldvalue) => this.update(key, f(oldvalue, value) }
- * </pre>
*
- * @param key The key to update
- * @param value The value to use if there is no conflict
- * @param f The function used to resolve conflicts.
+ * Equivalent to:
+ * {{{
+ * this.get(key) match {
+ * case None => this.update(key, value);
+ * case Some(oldvalue) => this.update(key, f(oldvalue, value)
+ * }
+ * }}}
+ *
+ *  @tparam S     The supertype of values in this `IntMap`.
+ * @param key The key to update
+ * @param value The value to use if there is no conflict
+ * @param f The function used to resolve conflicts.
+ * @return The updated map.
*/
def updateWith[S >: T](key : Int, value : S, f : (T, S) => S) : IntMap[S] = this match {
case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this);
@@ -309,7 +336,9 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
* A combined transform and filter function. Returns an IntMap such that for each (key, value) mapping
 * in this map, if f(key, value) == None the map contains no mapping for key, and if f(key, value) == Some(x) the map contains (key, x).
*
- * @param f The transforming function.
+ *  @tparam S  The type of the values in the resulting `IntMap`.
+ * @param f The transforming function.
+ * @return The modified map.
*/
def modifyOrRemove[S](f : (Int, T) => Option[S]) : IntMap[S] = this match {
case IntMap.Bin(prefix, mask, left, right) => {
@@ -332,8 +361,10 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
/**
* Forms a union map with that map, using the combining function to resolve conflicts.
*
- * @param that the map to form a union with.
- * @param f the function used to resolve conflicts between two mappings.
+ * @tparam S The type of values in `that`, a supertype of values in `this`.
+ * @param that The map to form a union with.
+ * @param f The function used to resolve conflicts between two mappings.
+ * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
def unionWith[S >: T](that : IntMap[S], f : (Int, S, S) => S) : IntMap[S] = (this, that) match{
case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
@@ -357,12 +388,15 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
}
/**
- * Forms the intersection of these two maps with a combinining function. The resulting map is
+ * Forms the intersection of these two maps with a combining function. The resulting map is
* a map that has only keys present in both maps and has values produced from the original mappings
* by combining them with f.
*
- * @param that The map to intersect with.
- * @param f The combining function.
+ * @tparam S The type of values in `that`.
+ *  @tparam R       The type of values in the resulting `IntMap`.
+ * @param that The map to intersect with.
+ * @param f The combining function.
+ * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
def intersectionWith[S, R](that : IntMap[S], f : (Int, T, S) => R) : IntMap[R] = (this, that) match {
case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) =>
@@ -391,7 +425,9 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
* Left biased intersection. Returns the map that has all the same mappings as this but only for keys
* which are present in the other map.
*
- * @param that The map to intersect with.
+ * @tparam R The type of values in `that`.
+ * @param that The map to intersect with.
+ * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
def intersection[R](that : IntMap[R]) : IntMap[T] = this.intersectionWith(that, (key : Int, value : T, value2 : R) => value);
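
The expanded updateWith and unionWith documentation is easiest to read next to concrete calls; a rough usage sketch with invented values and combining functions (IntMapDemo is not part of the patch):

import scala.collection.immutable.IntMap

object IntMapDemo {
  def main(args: Array[String]): Unit = {
    val m = IntMap(1 -> "a", 2 -> "b")
    // No existing binding for key 3: behaves like a plain update.
    println(m.updateWith(3, "c", (old, nw) => old + nw))        // adds a binding for 3
    // Key 1 is already bound: the combining function resolves the conflict.
    println(m.updateWith(1, "x", (old, nw) => old + nw))        // key 1 maps to the combined value
    // Union, again resolving the shared key 2 with the combining function.
    println(m.unionWith(IntMap(2 -> "z"), (k, l, r) => l + r))  // key 2 maps to the combination of both values
  }
}
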
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index e18fc29be8..fa710d3bfb 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,13 +14,11 @@ package immutable
import generic._
import mutable.Builder
-/** A subtrait of scala.collection.Iterable which represents iterables
- * that cannot be mutated.
+/** A base trait for iterable collections that are guaranteed immutable.
+ * $iterableInfo
*
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+ * @define Coll immutable.Iterable
+ * @define coll immutable iterable collection
*/
trait Iterable[+A] extends Traversable[A]
with scala.collection.Iterable[A]
@@ -30,14 +27,11 @@ trait Iterable[+A] extends Traversable[A]
override def companion: GenericCompanion[Iterable] = Iterable
}
-/** A factory object for the trait <code>Iterable</code>.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** $factoryInfo
+ * @define Coll immutable.Iterable
+ * @define coll immutable iterable collection
*/
object Iterable extends TraversableFactory[Iterable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, Iterable[A]] = new mutable.ListBuffer
}
-
diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala
index 448a06378b..5858a6ce68 100644
--- a/src/library/scala/collection/immutable/LinearSeq.scala
+++ b/src/library/scala/collection/immutable/LinearSeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,10 +14,9 @@ package immutable
import generic._
import mutable.Builder
-/** A subtrait of <code>collection.LinearSeq</code> which represents sequences
- * that cannot be mutated.
- *
- * @since 2.8
+/** A subtrait of `collection.LinearSeq` which represents sequences that
+ * are guaranteed immutable.
+ * $linearSeqInfo
*/
trait LinearSeq[+A] extends Seq[A]
with scala.collection.LinearSeq[A]
@@ -27,8 +25,10 @@ trait LinearSeq[+A] extends Seq[A]
override def companion: GenericCompanion[LinearSeq] = LinearSeq
}
-/**
- * @since 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `List`.
+ * @define coll immutable linear sequence
+ * @define Coll immutable.LinearSeq
*/
object LinearSeq extends SeqFactory[LinearSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = new GenericCanBuildFrom[A]
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 94732d7ad5..7785d73175 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,76 +15,75 @@ import generic._
import mutable.{Builder, ListBuffer}
import annotation.tailrec
-/** A class representing an ordered collection of elements of type
- * <code>a</code>. This class comes with two implementing case
- * classes <code>scala.Nil</code> and <code>scala.::</code> that
- * implement the abstract members <code>isEmpty</code>,
- * <code>head</code> and <code>tail</code>.
+/** A class for immutable linked lists representing ordered collections
+ *  of elements of type `A`.
+ *
+ * This class comes with two implementing case classes `scala.Nil`
+ * and `scala.::` that implement the abstract members `isEmpty`,
+ * `head` and `tail`.
*
* @author Martin Odersky and others
* @version 2.8
- * @since 2.8
+ * @since 1.0
+ *
+ * @tparam A the type of the list's elements
+ *
+ * @define Coll List
+ * @define coll list
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `List[B]` because an implicit of type `CanBuildFrom[List, B, That]`
+ * is defined in object `List`.
+ *  @define bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `List`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
sealed abstract class List[+A] extends LinearSeq[A]
with Product
with GenericTraversableTemplate[A, List]
- with LinearSeqLike[A, List[A]] {
+ with LinearSeqOptimized[A, List[A]] {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
- /** Returns true if the list does not contain any elements.
- * @return <code>true</code>, iff the list is empty.
- */
def isEmpty: Boolean
-
- /** Returns this first element of the list.
- *
- * @return the first element of this list.
- * @throws Predef.NoSuchElementException if the list is empty.
- */
def head: A
-
- /** Returns this list without its first element.
- *
- * @return this list without its first element.
- * @throws Predef.NoSuchElementException if the list is empty.
- */
def tail: List[A]
// New methods in List
- /** <p>
- * Add an element <code>x</code> at the beginning of this list.
- * </p>
- *
+ /** Adds an element at the beginning of this list.
* @param x the element to prepend.
- * @return the list with <code>x</code> added at the beginning.
- * @ex <code>1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)</code>
+ * @return a list which contains `x` as first element and
+ * which continues with this list.
+ * @example `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)`
+ * @usecase def ::(x: A): List[A]
*/
def ::[B >: A] (x: B): List[B] =
new scala.collection.immutable.::(x, this)
- /** <p>
- * Returns a list resulting from the concatenation of the given
- * list <code>prefix</code> and this list.
- * </p>
- *
- * @param prefix the list to concatenate at the beginning of this list.
- * @return the concatenation of the two lists.
- * @ex <code>List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)</code>
+ /** Adds the elements of a given list in front of this list.
+ * @param prefix The list elements to prepend.
+ * @return a list resulting from the concatenation of the given
+ * list `prefix` and this list.
+ * @example `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)`
+ * @usecase def :::(prefix: List[A]): List[A]
*/
def :::[B >: A](prefix: List[B]): List[B] =
if (isEmpty) prefix
else (new ListBuffer[B] ++= prefix).prependToList(this)
- /** Reverse the given prefix and append the current list to that.
- * This function is equivalent to an application of <code>reverse</code>
- * on the prefix followed by a call to <code>:::</code>, but is more
- * efficient.
+ /** Adds the elements of a given list in reverse order in front of this list.
+ * `xs reverse_::: ys` is equivalent to
+ * `xs.reverse ::: ys` but is more efficient.
*
* @param prefix the prefix to reverse and then prepend
* @return the concatenation of the reversed prefix and the current list.
+ * @usecase def reverse_:::(prefix: List[A]): List[A]
*/
def reverse_:::[B >: A](prefix: List[B]): List[B] = {
var these: List[B] = this
@@ -97,17 +95,25 @@ sealed abstract class List[+A] extends LinearSeq[A]
these
}
- /** Like xs map f, but returns <code>xs</code> unchanged if function
- * <code>f</code> maps all elements to themselves (wrt ==).
- * @note Unlike `map`, `mapConserve` is not tail-recursive.
+ /** Builds a new list by applying a function to all elements of this list.
+ * Like `xs map f`, but returns `xs` unchanged if function
+ * `f` maps all elements to themselves (wrt eq).
+ *
+ * Note: Unlike `map`, `mapConserve` is not tail-recursive.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @return a list resulting from applying the given function
+ * `f` to each element of this list and collecting the results.
+ * @usecase def mapConserve(f: A => A): List[A]
*/
- def mapConserve[B >: A] (f: A => B): List[B] = {
+ def mapConserve[B >: A <: AnyRef] (f: A => B): List[B] = {
def loop(ys: List[A]): List[B] =
if (ys.isEmpty) this
else {
val head0 = ys.head
val head1 = f(head0)
- if (head1 == head0) {
+ if (head1 eq head0.asInstanceOf[AnyRef]) {
loop(ys.tail)
} else {
val ys1 = head1 :: ys.tail.mapConserve(f)
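
Switching the comparison from == to eq means mapConserve now returns the original list only when f hands back the very same object for every element; a small sketch of the observable behaviour (MapConserveDemo is an invented name):

object MapConserveDemo {
  def main(args: Array[String]): Unit = {
    val xs = List("a", "b", "c")
    val same    = xs mapConserve ((s: String) => s)                        // every result is eq to its input
    val changed = xs mapConserve ((s: String) => if (s == "b") "B" else s)
    println(same eq xs)      // true: the very same list object comes back
    println(changed)         // List(a, B, c)
    println(changed eq xs)   // false: a fresh list had to be built
  }
}
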
@@ -126,35 +132,21 @@ sealed abstract class List[+A] extends LinearSeq[A]
loop(this)
}
- // Overridden methods from IterableLike or overloaded variants of such methods
+ // Overridden methods from IterableLike and SeqLike or overloaded variants of such methods
- /** Create a new list which contains all elements of this list
- * followed by all elements of Traversable `that'
- */
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.toList).asInstanceOf[That]
else super.++(that)
}
- /** Create a new list which contains all elements of this list
- * followed by all elements of Iterator `that'
- */
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[List[A], B, That]): That =
- this ++ that.toList
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[List[A], B, That]): That = bf match {
+ case _: List.GenericCanBuildFrom[_] => (elem :: this).asInstanceOf[That]
+ case _ => super.+:(elem)(bf)
+ }
- /** Overrides the method in Iterable for efficiency.
- *
- * @return the list itself
- */
override def toList: List[A] = this
- /** Returns the <code>n</code> first elements of this list, or else the whole
- * list, if it has less than <code>n</code> elements.
-
- * @param n the number of elements to take.
- * @return the <code>n</code> first elements of this list.
- */
override def take(n: Int): List[A] = {
val b = new ListBuffer[A]
var i = 0
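
The overrides above give `+:` and `++` list-specific fast paths (cons and `:::` when the builder is list-based); observationally the results line up with the cons operators, as a sketch:

    val xs = List(2, 3)
    assert((1 +: xs) == (1 :: xs))                     // prepend takes the :: fast path
    assert((xs ++ List(4, 5)) == (xs ::: List(4, 5)))  // appending a List uses :::
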
@@ -168,12 +160,6 @@ sealed abstract class List[+A] extends LinearSeq[A]
else b.toList
}
- /** Returns the list without its <code>n</code> first elements.
- * If this list has less than <code>n</code> elements, the empty list is returned.
- *
- * @param n the number of elements to drop.
- * @return the list without its <code>n</code> first elements.
- */
override def drop(n: Int): List[A] = {
var these = this
var count = n
@@ -184,23 +170,12 @@ sealed abstract class List[+A] extends LinearSeq[A]
these
}
- /** Returns the list with elements belonging to the given index range.
- *
- * @param start the start position of the list slice.
- * @param end the end position (exclusive) of the list slice.
- * @return the list with elements belonging to the given index range.
- */
override def slice(start: Int, end: Int): List[A] = {
var len = end
if (start > 0) len -= start
drop(start) take len
}
- /** Returns the rightmost <code>n</code> elements from this list.
- *
- * @param n the number of elements to take
- * @return the suffix of length <code>n</code> of the list
- */
override def takeRight(n: Int): List[A] = {
@tailrec
def loop(lead: List[A], lag: List[A]): List[A] = lead match {
@@ -210,15 +185,8 @@ sealed abstract class List[+A] extends LinearSeq[A]
loop(drop(n), this)
}
- // dropRight is inherited from Stream
+ // dropRight is inherited from LinearSeq
- /** Split the list at a given point and return the two parts thus
- * created.
- *
- * @param n the position at which to split
- * @return a pair of lists composed of the first <code>n</code>
- * elements, and the other elements.
- */
override def splitAt(n: Int): (List[A], List[A]) = {
val b = new ListBuffer[A]
var i = 0
@@ -231,13 +199,6 @@ sealed abstract class List[+A] extends LinearSeq[A]
(b.toList, these)
}
- /** Returns the longest prefix of this list whose elements satisfy
- * the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @return the longest prefix of this list whose elements satisfy
- * the predicate <code>p</code>.
- */
override def takeWhile(p: A => Boolean): List[A] = {
val b = new ListBuffer[A]
var these = this
@@ -248,13 +209,6 @@ sealed abstract class List[+A] extends LinearSeq[A]
b.toList
}
- /** Returns the longest suffix of this list whose first element
- * does not satisfy the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @return the longest suffix of the list whose first element
- * does not satisfy the predicate <code>p</code>.
- */
override def dropWhile(p: A => Boolean): List[A] = {
@tailrec
def loop(xs: List[A]): List[A] =
@@ -264,13 +218,6 @@ sealed abstract class List[+A] extends LinearSeq[A]
loop(this)
}
- /** Returns the longest prefix of the list whose elements all satisfy
- * the given predicate, and the rest of the list.
- *
- * @param p the test predicate
- * @return a pair consisting of the longest prefix of the list whose
- * elements all satisfy <code>p</code>, and the rest of the list.
- */
override def span(p: A => Boolean): (List[A], List[A]) = {
val b = new ListBuffer[A]
var these = this
@@ -281,8 +228,6 @@ sealed abstract class List[+A] extends LinearSeq[A]
(b.toList, these)
}
- /** A list consisting of all elements of this list in reverse order.
- */
override def reverse: List[A] = {
var result: List[A] = Nil
var these = this
@@ -299,16 +244,22 @@ sealed abstract class List[+A] extends LinearSeq[A]
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
- // !!! todo: work in patch
+ /** Like `span` but with the predicate inverted.
+ */
+ @deprecated("use `span { x => !p(x) }` instead")
+ def break(p: A => Boolean): (List[A], List[A]) = span { x => !p(x) }
+
+ @deprecated("use `filterNot' instead")
+ def remove(p: A => Boolean): List[A] = filterNot(p)
/** Computes the difference between this list and the given list
- * <code>that</code>.
+ * `that`.
*
* @param that the list of elements to remove from this list.
* @return this list without the elements of the given list
- * <code>that</code>.
+ * `that`.
*/
- @deprecated("use `diff' instead")
+ @deprecated("use `list1 filterNot (list2 contains)` instead")
def -- [B >: A](that: List[B]): List[B] = {
val b = new ListBuffer[B]
var these = this
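
For reference, the replacement forms named in the deprecation messages above read as follows (a sketch with illustrative values):

    val xs = List(1, 2, 3, 4, 5)
    val (low, high) = xs span (x => !(x >= 3))       // replaces xs break (_ >= 3)
    val odds        = xs filterNot (_ % 2 == 0)      // replaces xs remove (_ % 2 == 0)
    val ys          = List(2, 4)
    val without     = xs filterNot (ys contains _)   // replaces xs -- ys
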
@@ -320,13 +271,13 @@ sealed abstract class List[+A] extends LinearSeq[A]
}
/** Computes the difference between this list and the given object
- * <code>x</code>.
+ * `x`.
*
* @param x the object to remove from this list.
* @return this list without occurrences of the given object
- * <code>x</code>.
+ * `x`.
*/
- @deprecated("use `diff' instead")
+ @deprecated("use `filterNot (_ == x)` instead")
def - [B >: A](x: B): List[B] = {
val b = new ListBuffer[B]
var these = this
@@ -337,18 +288,21 @@ sealed abstract class List[+A] extends LinearSeq[A]
b.toList
}
+ @deprecated("use `distinct' instead")
+ def removeDuplicates: List[A] = distinct
+
/** <p>
* Sort the list according to the comparison function
- * <code>lt(e1: a, e2: a) =&gt; Boolean</code>,
- * which should be true iff <code>e1</code> precedes
- * <code>e2</code> in the desired ordering.
+ * `lt(e1: a, e2: a) =&gt; Boolean`,
+ * which should be true iff `e1` precedes
+ * `e2` in the desired ordering.
* !!! todo: move sorting to IterableLike
* </p>
*
* @param lt the comparison function
* @return a list sorted according to the comparison function
- * <code>lt(e1: a, e2: a) =&gt; Boolean</code>.
- * @ex <pre>
+ * `lt(e1: a, e2: a) =&gt; Boolean`.
+ * @example <pre>
* List("Steve", "Tom", "John", "Bob")
* .sort((e1, e2) => (e1 compareTo e2) &lt; 0) =
* List("Bob", "John", "Steve", "Tom")</pre>
@@ -362,13 +316,13 @@ sealed abstract class List[+A] extends LinearSeq[A]
var left2 = l2
while (!left1.isEmpty && !left2.isEmpty) {
- if(lt(left1.head, left2.head)) {
- res += left1.head
- left1 = left1.tail
- } else {
- res += left2.head
- left2 = left2.tail
- }
+ if(lt(left1.head, left2.head)) {
+ res += left1.head
+ left1 = left1.tail
+ } else {
+ res += left2.head
+ left2 = left2.tail
+ }
}
res ++= left1
@@ -384,12 +338,12 @@ sealed abstract class List[+A] extends LinearSeq[A]
var left = lst
while (!left.isEmpty) {
- res1 += left.head
- left = left.tail
- if (!left.isEmpty) {
- res2 += left.head
- left = left.tail
- }
+ res1 += left.head
+ left = left.tail
+ if (!left.isEmpty) {
+ res2 += left.head
+ left = left.tail
+ }
}
(res1.toList, res2.toList)
@@ -399,15 +353,15 @@ sealed abstract class List[+A] extends LinearSeq[A]
/** Merge-sort the specified list */
def ms(lst: List[A]): List[A] =
lst match {
- case Nil => lst
- case x :: Nil => lst
- case x :: y :: Nil =>
- if (lt(x,y))
- lst
- else
- y :: x :: Nil
-
- case lst =>
+ case Nil => lst
+ case x :: Nil => lst
+ case x :: y :: Nil =>
+ if (lt(x,y))
+ lst
+ else
+ y :: x :: Nil
+
+ case lst =>
val (l1, l2) = split(lst)
val l1s = ms(l1)
val l2s = ms(l2)
@@ -431,8 +385,8 @@ case object Nil extends List[Nothing] {
override def head: Nothing =
throw new NoSuchElementException("head of empty list")
override def tail: List[Nothing] =
- throw new NoSuchElementException("tail of empty list")
- // Removal of equals method here might lead to an infinite recusion similar to IntMap.equals.
+ throw new UnsupportedOperationException("tail of empty list")
+ // Removal of equals method here might lead to an infinite recursion similar to IntMap.equals.
override def equals(that: Any) = that match {
case that1: Seq[_] => that1.isEmpty
case _ => false
@@ -440,7 +394,9 @@ case object Nil extends List[Nothing] {
}
/** A non empty list characterized by a head and a tail.
- *
+ * @param hd the first element of the list
+ * @param tl the list containing the remaining elements of this list after the first one.
+ * @tparam B the type of the list elements.
* @author Martin Odersky
* @version 1.0, 15/07/2003
* @since 2.8
@@ -475,18 +431,17 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
}
}
-/** This object provides methods for creating specialized lists, and for
- * transforming special kinds of lists (e.g. lists of lists).
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** $factoryInfo
+ * @define coll list
+ * @define Coll List
*/
object List extends SeqFactory[List] {
import scala.collection.{Iterable, Seq, IndexedSeq}
+ /** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] = new GenericCanBuildFrom[A]
+
def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A]
override def empty[A]: List[A] = Nil
@@ -494,15 +449,15 @@ object List extends SeqFactory[List] {
override def apply[A](xs: A*): List[A] = xs.toList
/** Create a sorted list with element values
- * <code>v<sub>n+1</sub> = step(v<sub>n</sub>)</code>
- * where <code>v<sub>0</sub> = start</code>
- * and elements are in the range between <code>start</code> (inclusive)
- * and <code>end</code> (exclusive)
+ * `v<sub>n+1</sub> = step(v<sub>n</sub>)`
+ * where `v<sub>0</sub> = start`
+ * and elements are in the range between `start` (inclusive)
+ * and `end` (exclusive)
*
* @param start the start value of the list
* @param end the end value of the list
- * @param step the increment function of the list, which given <code>v<sub>n</sub></code>,
- * computes <code>v<sub>n+1</sub></code>. Must be monotonically increasing
+ * @param step the increment function of the list, which given `v<sub>n</sub>`,
+ * computes `v<sub>n+1</sub>`. Must be monotonically increasing
* or decreasing.
* @return the sorted list of all integers in range [start;end).
*/
@@ -544,7 +499,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists that are to be concatenated
* @return the concatenation of all the lists
*/
- @deprecated("use `xss.flatten' instead")
+ @deprecated("use `xss.flatten' instead of `List.flatten(xss)'")
def flatten[A](xss: List[List[A]]): List[A] = {
val b = new ListBuffer[A]
for (xs <- xss) {
@@ -562,7 +517,7 @@ object List extends SeqFactory[List] {
* @param xs the list of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead")
+ @deprecated("use `xs.unzip' instead of `List.unzip(xs)'")
def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
val b1 = new ListBuffer[A]
val b2 = new ListBuffer[B]
@@ -580,17 +535,17 @@ object List extends SeqFactory[List] {
* @param xs the iterable of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead")
+ @deprecated("use `xs.unzip' instead of `List.unzip(xs)'")
def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
}
/**
- * Returns the <code>Left</code> values in the given <code>Iterable</code>
- * of <code>Either</code>s.
+ * Returns the `Left` values in the given `Iterable`
+ * of `Either`s.
*/
- @deprecated("use `Either.lefts' instead")
+ @deprecated("use `xs collect { case Left(x: A) => x }' instead of `List.lefts(xs)'")
def lefts[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[A]](Nil)((e, as) => e match {
case Left(a) => a :: as
@@ -598,9 +553,9 @@ object List extends SeqFactory[List] {
})
/**
- * Returns the <code>Right</code> values in the given<code>Iterable</code> of <code>Either</code>s.
+ * Returns the `Right` values in the given `Iterable` of `Either`s.
*/
- @deprecated("use `Either.rights' instead")
+ @deprecated("use `xs collect { case Right(x: B) => x }' instead of `List.rights(xs)'")
def rights[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[B]](Nil)((e, bs) => e match {
case Left(_) => bs
@@ -612,9 +567,9 @@ object List extends SeqFactory[List] {
* @param xs the iterable of Eithers to separate
* @return a pair of lists.
*/
- @deprecated("use `Either.separate' instead")
- def separate[A,B](es: Iterable[Either[A,B]]): (List[A], List[B]) =
- es.foldRight[(List[A], List[B])]((Nil, Nil)) {
+ @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead")
+ def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) =
+ es.foldRight[(List[A], List[B])]((Nil, Nil)) {
case (Left(a), (lefts, rights)) => (a :: lefts, rights)
case (Right(b), (lefts, rights)) => (lefts, b :: rights)
}
@@ -623,29 +578,29 @@ object List extends SeqFactory[List] {
*
* @param it the iterator to convert
* @return a list that contains the elements returned by successive
- * calls to <code>it.next</code>
+ * calls to `it.next`
*/
- @deprecated("use `it.toList' instead")
+ @deprecated("use `it.toList' instead of `List.toList(it)'")
def fromIterator[A](it: Iterator[A]): List[A] = it.toList
/** Converts an array into a list.
*
* @param arr the array to convert
- * @return a list that contains the same elements than <code>arr</code>
+ * @return a list that contains the same elements as `arr`
* in the same order
*/
- @deprecated("use `array.toList' instead")
+ @deprecated("use `array.toList' instead of `List.fromArray(array)'")
def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
/** Converts a range of an array into a list.
*
* @param arr the array to convert
* @param start the first index to consider
- * @param len the lenght of the range to convert
- * @return a list that contains the same elements than <code>arr</code>
+ * @param len the length of the range to convert
+ * @return a list that contains the same elements as `arr`
* in the same order
*/
- @deprecated("use `array.view(start, end).toList' instead")
+ @deprecated("use `array.view(start, end).toList' instead of `List.fromArray(array, start, end)'")
def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
var res: List[A] = Nil
var i = start + len
@@ -663,7 +618,7 @@ object List extends SeqFactory[List] {
* @param separator the separator character
* @return the list of substrings
*/
- @deprecated("use `str.split(separator).toList' instead")
+ @deprecated("use `str.split(separator).toList' instead of `List.fromString(str, separator)'")
def fromString(str: String, separator: Char): List[String] = {
var words: List[String] = Nil
var pos = str.length()
@@ -676,20 +631,12 @@ object List extends SeqFactory[List] {
words
}
- /** Returns the given string as a list of characters.
- *
- * @param str the string to convert.
- * @return the string as a list of characters.
- */
- @deprecated("use `str.toList' instead")
- def fromString(str: String): List[Char] = str.toList
-
/** Returns the given list of characters as a string.
*
* @param xs the list to convert.
* @return the list in form of a string.
*/
- @deprecated("use `xs.mkString' instead")
+ @deprecated("use `xs.mkString' instead of `List.toString(xs)'")
def toString(xs: List[Char]): String = {
val sb = new StringBuilder()
var xc = xs
@@ -700,10 +647,10 @@ object List extends SeqFactory[List] {
sb.toString()
}
- /** Like xs map f, but returns <code>xs</code> unchanged if function
- * <code>f</code> maps all elements to themselves.
+ /** Like xs map f, but returns `xs` unchanged if function
+ * `f` maps all elements to themselves.
*/
- @deprecated("use `xs.mapConserve(f)' instead")
+ @deprecated("use `xs.mapConserve(f)' instead of `List.mapConserve(xs, f)'")
def mapConserve[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
def loop(ys: List[A]): List[A] =
if (ys.isEmpty) xs
@@ -729,15 +676,15 @@ object List extends SeqFactory[List] {
loop(xs)
}
- /** Returns the list resulting from applying the given function <code>f</code>
+ /** Returns the list resulting from applying the given function `f`
* to corresponding elements of the argument lists.
*
* @param f function to apply to each pair of elements.
- * @return <code>[f(a0,b0), ..., f(an,bn)]</code> if the lists are
- * <code>[a0, ..., ak]</code>, <code>[b0, ..., bl]</code> and
- * <code>n = min(k,l)</code>
+ * @return `[f(a0,b0), ..., f(an,bn)]` if the lists are
+ * `[a0, ..., ak]`, `[b0, ..., bl]` and
+ * `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).map(f)' instead")
+ @deprecated("use `(xs, ys).zipped.map(f)' instead of `List.map2(xs, ys)(f)'")
def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
val b = new ListBuffer[C]
var xc = xs
@@ -751,17 +698,17 @@ object List extends SeqFactory[List] {
}
/** Returns the list resulting from applying the given function
- * <code>f</code> to corresponding elements of the argument lists.
+ * `f` to corresponding elements of the argument lists.
*
* @param f function to apply to each pair of elements.
- * @return <code>[f(a<sub>0</sub>,b<sub>0</sub>,c<sub>0</sub>),
- * ..., f(a<sub>n</sub>,b<sub>n</sub>,c<sub>n</sub>)]</code>
- * if the lists are <code>[a<sub>0</sub>, ..., a<sub>k</sub>]</code>,
- * <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code>,
- * <code>[c<sub>0</sub>, ..., c<sub>m</sub>]</code> and
- * <code>n = min(k,l,m)</code>
- */
- @deprecated("use `(xs, ys, zs).map(f)' instead")
+ * @return `[f(a<sub>0</sub>,b<sub>0</sub>,c<sub>0</sub>),
+ * ..., f(a<sub>n</sub>,b<sub>n</sub>,c<sub>n</sub>)]`
+ * if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
+ * `[b<sub>0</sub>, ..., b<sub>l</sub>]`,
+ * `[c<sub>0</sub>, ..., c<sub>m</sub>]` and
+ * `n = min(k,l,m)`
+ */
+ @deprecated("use `(xs, ys, zs).zipped.map(f)' instead of `List.map3(xs, ys, zs)(f)'")
def map3[A,B,C,D](xs: List[A], ys: List[B], zs: List[C])(f: (A, B, C) => D): List[D] = {
val b = new ListBuffer[D]
var xc = xs
@@ -776,17 +723,17 @@ object List extends SeqFactory[List] {
b.toList
}
- /** Tests whether the given predicate <code>p</code> holds
+ /** Tests whether the given predicate `p` holds
* for all corresponding elements of the argument lists.
*
* @param p function to apply to each pair of elements.
- * @return <code>(p(a<sub>0</sub>,b<sub>0</sub>) &amp;&amp;
- * ... &amp;&amp; p(a<sub>n</sub>,b<sub>n</sub>))]</code>
- * if the lists are <code>[a<sub>0</sub>, ..., a<sub>k</sub>]</code>;
- * <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code>
- * and <code>n = min(k,l)</code>
+ * @return `(p(a<sub>0</sub>,b<sub>0</sub>) &amp;&amp;
+ * ... &amp;&amp; p(a<sub>n</sub>,b<sub>n</sub>))]`
+ * if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`;
+ * `[b<sub>0</sub>, ..., b<sub>l</sub>]`
+ * and `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).forall(f)' instead")
+ @deprecated("use `(xs, ys).zipped.forall(f)' instead of `List.forall2(xs, ys)(f)'")
def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -798,17 +745,17 @@ object List extends SeqFactory[List] {
true
}
- /** Tests whether the given predicate <code>p</code> holds
+ /** Tests whether the given predicate `p` holds
* for some corresponding elements of the argument lists.
*
* @param p function to apply to each pair of elements.
- * @return <code>n != 0 &amp;&amp; (p(a<sub>0</sub>,b<sub>0</sub>) ||
- * ... || p(a<sub>n</sub>,b<sub>n</sub>))]</code> if the lists are
- * <code>[a<sub>0</sub>, ..., a<sub>k</sub>]</code>,
- * <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code> and
- * <code>n = min(k,l)</code>
+ * @return `n != 0 &amp;&amp; (p(a<sub>0</sub>,b<sub>0</sub>) ||
+ * ... || p(a<sub>n</sub>,b<sub>n</sub>))` if the lists are
+ * `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
+ * `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
+ * `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).exists(f)' instead")
+ @deprecated("use `(xs, ys).zipped.exists(f)' instead of `List.exists2(xs, ys)(f)'")
def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -826,7 +773,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists
* @return the transposed list of lists
*/
- @deprecated("use `xss.transpose' instead")
+ @deprecated("use `xss.transpose' instead of `List.transpose(xss)'")
def transpose[A](xss: List[List[A]]): List[List[A]] = {
val buf = new ListBuffer[List[A]]
var yss = xss
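
The `.zipped` forms suggested by the deprecation messages on `map2`, `forall2` and `exists2` above read as follows (a sketch):

    val xs = List(1, 2, 3)
    val ys = List(10, 20, 30)
    val sums  = (xs, ys).zipped map (_ + _)        // was List.map2(xs, ys)(_ + _)
    val allLt = (xs, ys).zipped forall (_ < _)     // was List.forall2(xs, ys)(_ < _)
    val anyEq = (xs, ys).zipped exists (_ == _)    // was List.exists2(xs, ys)(_ == _)
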
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index f56993241d..6721d5bbf0 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,25 +13,34 @@ package immutable
import generic._
-/** The canonical factory of <a href="ListMap.html">ListMap</a>'s.
- *
+/** $factoryInfo
* @since 1
+ * @define Coll immutable.ListMap
+ * @define coll immutable list map
*/
object ListMap extends ImmutableMapFactory[ListMap] {
+ /** $mapCanBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] =
new MapCanBuildFrom[A, B]
def empty[A, B]: ListMap[A, B] = new ListMap
}
/** This class implements immutable maps using a list-based data
- * structure. Instances of <code>ListMap</code> represent
+ * structure. Instances of `ListMap` represent
* empty maps; they can be either created by calling the constructor
- * directly, or by applying the function <code>ListMap.empty</code>.
+ * directly, or by applying the function `ListMap.empty`.
+ *
+ * @tparam A the type of the keys in this list map.
+ * @tparam B the type of the values associated with the keys.
*
* @author Matthias Zenger
- * @author Martin Oderskty
+ * @author Martin Odersky
* @version 2.0, 01/01/2007
* @since 1
+ * @define Coll immutable.ListMap
+ * @define coll immutable list map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @SerialVersionUID(301002838095710379L)
class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] {
@@ -45,7 +53,7 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] {
*/
override def size: Int = 0
- /** Checks if this map maps <code>key</code> to a value and return the
+ /** Checks if this map maps `key` to a value and return the
* value if it exists.
*
* @param key the key of the mapping of interest
@@ -104,6 +112,8 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] {
protected def value: B = throw new NoSuchElementException("empty map")
protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
+ /** This class represents an entry in the `ListMap`.
+ */
@serializable @SerialVersionUID(-6453056603889598734L)
protected class Node[B1 >: B](override protected val key: A,
override protected val value: B1) extends ListMap[A, B1] {
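
A minimal usage sketch of the list-backed map described above:

    import scala.collection.immutable.ListMap
    val m = ListMap.empty[String, Int] + ("one" -> 1) + ("two" -> 2)
    assert(m("two") == 2 && m.size == 2)   // lookups walk the underlying list
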
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 7767e42224..4268e742b0 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,24 +13,32 @@ package immutable
import generic._
-/** The canonical factory of <a href="ListSet.html">ListSet</a>'s
- *
+/** $factoryInfo
+ * @define Coll immutable.ListSet
+ * @define coll immutable list set
* @since 1
*/
-object ListSet extends SetFactory[ListSet] {
+object ListSet extends ImmutableSetFactory[ListSet] {
+ /** $setCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = setCanBuildFrom[A]
override def empty[A] = new ListSet[A]
}
/** This class implements immutable sets using a list-based data
- * structure. Instances of <code>ListSet</code> represent
+ * structure. Instances of `ListSet` represent
* empty sets; they can be either created by calling the constructor
- * directly, or by applying the function <code>ListSet.empty</code>.
+ * directly, or by applying the function `ListSet.empty`.
+ *
+ * @tparam A the type of the elements contained in this list set.
*
* @author Matthias Zenger
* @version 1.0, 09/07/2003
* @since 1
+ * @define Coll immutable.ListSet
+ * @define coll immutable list set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable
class ListSet[A] extends Set[A]
@@ -85,6 +92,8 @@ class ListSet[A] extends Set[A]
*/
protected def next: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
+ /** Represents an entry in the `ListSet`.
+ */
@serializable
protected class Node(override protected val elem: A) extends ListSet[A] {
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index e527712475..dcdc6e948f 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -1,8 +1,25 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala.collection
package immutable
-/**
- * @author David MacIver
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.mutable.Builder
+import scala.collection.mutable.MapBuilder
+
+
+
+/** Utility object for long maps.
+ * @author David MacIver
*/
private[immutable] object LongMapUtils{
def zero(i : Long, mask : Long) = (i & mask) == 0L;
@@ -40,10 +57,16 @@ private[immutable] object LongMapUtils{
import LongMapUtils._
-/**
- * @since 2.7
+/** A companion object for long maps.
+ * @since 2.7
*/
object LongMap{
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B] = new CanBuildFrom[LongMap[A], (Long, B), LongMap[B]] {
+ def apply(from: LongMap[A]): Builder[(Long, B), LongMap[B]] = apply()
+ def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B])
+ }
+
def empty[T] : LongMap[T] = LongMap.Nil;
def singleton[T](key : Long, value : T) : LongMap[T] = LongMap.Tip(key, value);
def apply[T](elems : (Long, T)*) : LongMap[T] =
@@ -134,11 +157,19 @@ private[immutable] class LongMapKeyIterator[V](it : LongMap[V]) extends LongMapI
import LongMap._;
/**
- * Specialised immutable map structure for long keys, based on
- * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
- * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers.
+ * Specialised immutable map structure for long keys, based on
+ * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ *
+ * Note: This class is as of 2.8 largely superseded by HashMap.
*
- * @since 2.7
+ * @tparam T type of the values associated with the long keys.
+ *
+ * @since 2.7
+ * @define Coll immutable.LongMap
+ * @define coll immutable long integer map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, LongMap[T]] {
override def empty: LongMap[T] = LongMap.Nil;
@@ -151,6 +182,8 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
/**
* Iterator over key, value pairs of the map in unsigned order of the keys.
+ *
+ * @return an iterator over pairs of long keys and corresponding values.
*/
def iterator: Iterator[(Long, T)] = this match {
case LongMap.Nil => Iterator.empty;
@@ -262,15 +295,20 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
/**
* Updates the map, using the provided function to resolve conflicts if the key is already present.
+ *
* Equivalent to
- * <pre>this.get(key) match {
- * case None => this.update(key, value);
- * case Some(oldvalue) => this.update(key, f(oldvalue, value) }
- * </pre>
+ * {{{
+ * this.get(key) match {
+ * case None => this.update(key, value);
+ * case Some(oldvalue) => this.update(key, f(oldvalue, value))
+ * }
+ * }}}
*
- * @param key The key to update
- * @param value The value to use if there is no conflict
- * @param f The function used to resolve conflicts.
+ * @tparam S The supertype of values in this `LongMap`.
+ * @param key The key to update.
+ * @param value The value to use if there is no conflict.
+ * @param f The function used to resolve conflicts.
+ * @return The updated map.
*/
def updateWith[S >: T](key : Long, value : S, f : (T, S) => S) : LongMap[S] = this match {
case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this);
@@ -296,7 +334,9 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* A combined transform and filter function. Returns an LongMap such that for each (key, value) mapping
* in this map, if f(key, value) == None the map contains no mapping for key, and if <code>f(key, value)
*
- * @param f The transforming function.
+ * @tparam S The type of the values in the resulting `LongMap`.
+ * @param f The transforming function.
+ * @return The modified map.
*/
def modifyOrRemove[S](f : (Long, T) => Option[S]) : LongMap[S] = this match {
case LongMap.Bin(prefix, mask, left, right) => {
@@ -319,8 +359,10 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
/**
* Forms a union map with that map, using the combining function to resolve conflicts.
*
- * @param that the map to form a union with.
- * @param f the function used to resolve conflicts between two mappings.
+ * @tparam S The type of values in `that`, a supertype of values in `this`.
+ * @param that The map to form a union with.
+ * @param f The function used to resolve conflicts between two mappings.
+ * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
def unionWith[S >: T](that : LongMap[S], f : (Long, S, S) => S) : LongMap[S] = (this, that) match{
case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
@@ -344,12 +386,15 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
}
/**
- * Forms the intersection of these two maps with a combinining function. The resulting map is
+ * Forms the intersection of these two maps with a combining function. The resulting map is
* a map that has only keys present in both maps and has values produced from the original mappings
* by combining them with f.
*
- * @param that The map to intersect with.
- * @param f The combining function.
+ * @tparam S The type of values in `that`.
+ * @tparam R The type of values in the resulting `LongMap`.
+ * @param that The map to intersect with.
+ * @param f The combining function.
+ * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
def intersectionWith[S, R](that : LongMap[S], f : (Long, T, S) => R) : LongMap[R] = (this, that) match {
case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) =>
@@ -378,7 +423,9 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* Left biased intersection. Returns the map that has all the same mappings as this but only for keys
* which are present in the other map.
*
- * @param that The map to intersect with.
+ * @tparam R The type of values in `that`.
+ * @param that The map to intersect with.
+ * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
def intersection[R](that : LongMap[R]) : LongMap[T] = this.intersectionWith(that, (key : Long, value : T, value2 : R) => value);
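
The combining operations documented above, in use (a sketch with illustrative keys and values; the explicit parameter types are only there to help inference):

    import scala.collection.immutable.LongMap
    val a = LongMap(1L -> "a", 2L -> "b")
    val b = LongMap(2L -> "B", 3L -> "c")
    val merged = a.unionWith(b, (key: Long, l: String, r: String) => l + r)         // duplicate key 2L resolved by f
    val shared = a.intersectionWith(b, (key: Long, l: String, r: String) => l + r)  // only the shared key 2L remains
    val bumped = a.updateWith(1L, "A", (old: String, nw: String) => old + nw)       // 1L already present: f("a", "A")
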
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index e1d0abb928..c65fb170cc 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,6 +14,16 @@ package immutable
import generic._
/**
+ * A generic trait for immutable maps. Concrete classes have to provide
+ * functionality for the abstract methods in `Map`:
+ *
+ * {{{
+ * def get(key: A): Option[B]
+ * def iterator: Iterator[(A, B)]
+ * def + [B1 >: B](kv: (A, B1)): Map[A, B1]
+ * def -(key: A): Map[A, B]
+ * }}}
+ *
* @since 1
*/
trait Map[A, +B] extends Iterable[(A, B)]
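
As an illustration of the contract above, a hypothetical minimal concrete map that supplies just the four abstract members (real implementations, like the Map1 and Map2 classes further down in this file, also override foreach and size for efficiency):

    import scala.collection.immutable.Map
    // purely illustrative one-entry map
    class SingletonMap[A, +B](k: A, v: B) extends Map[A, B] {
      def get(key: A): Option[B]               = if (key == k) Some(v) else None
      def iterator: Iterator[(A, B)]           = Iterator((k, v))
      def + [B1 >: B](kv: (A, B1)): Map[A, B1] = Map(k -> v, kv)
      def - (key: A): Map[A, B]                = if (key == k) Map.empty else this
    }
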
@@ -38,13 +47,16 @@ trait Map[A, +B] extends Iterable[(A, B)]
def withDefaultValue[B1 >: B](d: B1): Map[A, B1] = new Map.WithDefault[A, B1](this, x => d)
}
-/**
- * @since 1
+/** $factoryInfo
+ * @define Coll immutable.Map
+ * @define coll immutable map
*/
object Map extends ImmutableMapFactory[Map] {
+
+ /** $mapCanBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
- def empty[A, B]: Map[A, B] = new EmptyMap[A, B]
+ def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]]
class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends Map[A, B] {
override def size = underlying.size
@@ -58,12 +70,22 @@ object Map extends ImmutableMapFactory[Map] {
}
@serializable
- class EmptyMap[A, +B] extends Map[A, B] {
+ private object EmptyMap extends Map[Any, Nothing] {
+ override def size: Int = 0
+ def get(key: Any): Option[Nothing] = None
+ def iterator: Iterator[(Any, Nothing)] = Iterator.empty
+ override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
+ def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2)
+ def - (key: Any): Map[Any, Nothing] = this
+ }
+
+ @serializable @deprecated("use `Map.empty' instead")
+ class EmptyMap[A,B] extends Map[A,B] {
override def size: Int = 0
def get(key: A): Option[B] = None
def iterator: Iterator[(A, B)] = Iterator.empty
- override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = new Map1(key, value)
- def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
+ override def updated [B1] (key: A, value: B1): Map[A, B1] = new Map1(key, value)
+ def + [B1](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
def - (key: A): Map[A, B] = this
}
@@ -78,7 +100,7 @@ object Map extends ImmutableMapFactory[Map] {
else new Map2(key1, value1, key, value)
def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
def - (key: A): Map[A, B] =
- if (key == key1) empty else this
+ if (key == key1) Map.empty else this
override def foreach[U](f: ((A, B)) => U): Unit = {
f((key1, value1))
}
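
With `EmptyMap` reduced to a private singleton above, empty immutable maps are obtained through the factory; a small sketch:

    val e = scala.collection.immutable.Map.empty[String, Int]
    assert(e.isEmpty && (e + ("k" -> 1)).size == 1)   // adding to the empty map yields a Map1
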
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 8ea7fc2a97..bbad195687 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,35 +13,42 @@ package immutable
import generic._
-/** <p>
- * A generic template for immutable maps from keys of type <code>A</code>
- * to values of type <code>B</code>.<br/>
- * To implement a concrete map, you need to provide implementations of the
- * following methods (where <code>This</code> is the type of the map in
- * question):
- * </p>
- * <pre>
- * <b>def</b> get(key: A): Option[B]
- * <b>def</b> iterator: Iterator[(A, B)]
- * <b>def</b> + [B1 >: B](kv: (A, B)): Map[A, B1]
- * <b>def</b> - (key: A): This</pre>
- * <p>
- * If you wish that methods <code>like</code>, <code>take</code>, <code>drop</code>,
- * <code>filter</code> return the same kind of map, you should also override:
- * </p>
- * <pre>
- * <b>def</b> empty: This</pre>
- * <p>
- * It is also good idea to override methods <code>foreach</code> and
- * <code>size</code> for efficiency.
- * </p>
+/**
+ * A generic template for immutable maps from keys of type `A`
+ * to values of type `B`.
+ * To implement a concrete map, you need to provide implementations of the
+ * following methods (where `This` is the type of the actual map implementation):
+ *
+ * {{{
+ * def get(key: A): Option[B]
+ * def iterator: Iterator[(A, B)]
+ * def + [B1 >: B](kv: (A, B1)): Map[A, B1]
+ * def - (key: A): This
+ * }}}
+ *
+ * If you wish that transformer methods like `take`, `drop`, `filter` return the
+ * same kind of map, you should also override:
+ *
+ * {{{
+ * def empty: This
+ * }}}
+ *
+ * It is also good idea to override methods `foreach` and
+ * `size` for efficiency.
+ *
+ * @tparam A the type of the keys contained in this collection.
+ * @tparam B the type of the values associated with the keys.
+ * @tparam This the type of the actual map implementation.
*
* @author Martin Odersky
* @version 2.8
* @since 2.8
+ * @define Coll immutable.Map
+ * @define coll immutable map
*/
-trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] extends scala.collection.MapLike[A, B, This] {
-self =>
+trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
+ extends scala.collection.MapLike[A, B, This]
+{ self =>
import scala.collection.Traversable
@@ -54,8 +60,8 @@ self =>
override def updated [B1 >: B](key: A, value: B1): immutable.Map[A, B1] = this + ((key, value))
/** Add a key/value pair to this map, returning a new map.
- * @param kv the key/value pair
- * @return A new map with the new binding added to this map
+ * @param kv the key/value pair.
+ * @return A new map with the new binding added to this map.
*/
def + [B1 >: B] (kv: (A, B1)): immutable.Map[A, B1]
@@ -65,6 +71,7 @@ self =>
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
+ * @return A new map with the new bindings added to this map.
*/
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): immutable.Map[A, B1] =
this + elem1 + elem2 ++ elems
@@ -72,21 +79,39 @@ self =>
/** Adds a number of elements provided by a traversable object
* and returns a new collection with the added elements.
*
- * @param elems the traversable object.
+ * @param xs the traversable object consisting of key-value pairs.
+ * @return a new immutable map with the bindings of this map and those from `xs`.
*/
- override def ++[B1 >: B](elems: Traversable[(A, B1)]): immutable.Map[A, B1] =
- ((repr: immutable.Map[A, B1]) /: elems) (_ + _)
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): immutable.Map[A, B1] =
+ ((repr: immutable.Map[A, B1]) /: xs) (_ + _)
- /** Adds a number of elements provided by an iterator
- * and returns a new collection with the added elements.
- *
- * @param iter the iterator
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- override def ++[B1 >: B] (iter: Iterator[(A, B1)]): immutable.Map[A, B1] =
- ((repr: immutable.Map[A, B1]) /: iter) (_ + _)
+ override def filterKeys(p: A => Boolean): Map[A, B] = new DefaultMap[A, B] {
+ override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+ def iterator = self.iterator.filter(kv => p(kv._1))
+ override def contains(key: A) = self.contains(key) && p(key)
+ def get(key: A) = if (!p(key)) None else self.get(key)
+ }
+
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ override def mapValues[C](f: B => C): Map[A, C] = new DefaultMap[A, C] {
+ override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
+ def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
+ override def size = self.size
+ override def contains(key: A) = self.contains(key)
+ def get(key: A) = self.get(key).map(f)
+ }
/** This function transforms all the values of mappings contained
- * in this map with function <code>f</code>.
+ * in this map with function `f`.
*
* @param f A function over keys and values
* @return the updated map
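
The two methods added above return wrappers over the original map rather than copies; a sketch of how they read in use:

    val m = scala.collection.immutable.Map("a" -> 1, "b" -> 2, "c" -> 3)
    val noB     = m filterKeys (_ != "b")   // a view over m, filtered on access
    val doubled = m mapValues (_ * 2)       // a view over m, transformed on access
    assert(!(noB contains "b") && doubled("c") == 6)
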
@@ -97,23 +122,6 @@ self =>
b.result
}
- /** Returns a new map with all key/value pairs for which the predicate
- * <code>p</code> returns <code>true</code>.
- *
- * @param p A predicate over key-value pairs
- * @note This method works by successively removing elements fro which the
- * predicate is false from this set.
- * If removal is slow, or you expect that most elements of the set$
- * will be removed, you might consider using <code>filter</code>
- * with a negated predicate instead.
- */
- override def filterNot(p: ((A, B)) => Boolean): This = {
- var res: This = repr
- for (kv <- this)
- if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! concrete overrides abstract problem
- res
- }
-
@deprecated("use `updated' instead")
def update[B1 >: B](key: A, value: B1): immutable.Map[A, B1] = updated(key, value).asInstanceOf[immutable.Map[A, B1]]
}
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index b998747f58..fa0b097070 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -1,32 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
-/** <p>
- * This is a simple wrapper class for <a href="Map.html"
- * target="contentFrame"><code>scala.collection.mutable.Map</code></a>.
- * </p>
- * <p>
- * It is most useful for assembling customized map abstractions
- * dynamically using object composition and forwarding.
- * </p>
+/**
+ * This is a simple wrapper class for <a href="Map.html"
+ * target="contentFrame">`scala.collection.immutable.Map`</a>.
+ *
+ * It is most useful for assembling customized map abstractions
+ * dynamically using object composition and forwarding.
*
* @author Matthias Zenger, Martin Odersky
* @version 2.0, 31/12/2006
* @since 2.8
*/
-trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]]
-{
+trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
override def repr = this
private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] =
new MapProxy[A, B1] { val self = newSelf }
@@ -34,8 +30,11 @@ trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]]
override def empty = newProxy(self.empty)
override def updated [B1 >: B](key: A, value: B1) = newProxy(self.updated(key, value))
- override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
- override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) =
- newProxy(self.+(elem1, elem2, elems: _*))
override def -(key: A) = newProxy(self - key)
+ override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
+ override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*))
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]) = newProxy(self ++ xs)
+
+ override def filterKeys(p: A => Boolean) = self.filterKeys(p)
+ override def mapValues[C](f: B => C) = self.mapValues(f)
}
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 9c70ba3ca6..db44e9ffa0 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id: NumericRange.scala 18987 2009-10-08 18:31:44Z odersky $
package scala.collection
package immutable
@@ -14,33 +13,45 @@ package immutable
import mutable.{ Builder, ListBuffer }
import generic._
-/** <p>
- * <code>NumericRange</code> is a more generic version of the
- * <code>Range</code> class which works with arbitrary types.
- * It must be supplied with an Integral implementation of the
- * range type.
+/** `NumericRange` is a more generic version of the
+ * `Range` class which works with arbitrary types.
+ * It must be supplied with an `Integral` implementation of the
+ * range type.
*
- * Factories for likely types include Range.BigInt, Range.Long,
- * and Range.BigDecimal. Range.Int exists for completeness, but
- * the Int-based scala.Range should be more performant.
- * </p><pre>
- * <b>val</b> r1 = new Range(0, 100, 1)
- * <b>val</b> veryBig = Math.MAX_INT.toLong + 1
- * <b>val</b> r2 = Range.Long(veryBig, veryBig + 100, 1)
+ * Factories for likely types include `Range.BigInt`, `Range.Long`,
+ * and `Range.BigDecimal`. `Range.Int` exists for completeness, but
+ * the `Int`-based `scala.Range` should be more performant.
+ *
+ * {{{
+ * val r1 = new Range(0, 100, 1)
+ * val veryBig = Int.MaxValue.toLong + 1
+ * val r2 = Range.Long(veryBig, veryBig + 100, 1)
* assert(r1 sameElements r2.map(_ - veryBig))
- * </pre>
+ * }}}
*
* @author Paul Phillips
* @version 2.8
+ * @define Coll NumericRange
+ * @define coll numeric range
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-abstract class NumericRange[+T]
+@serializable
+abstract class NumericRange[T]
(val start: T, val end: T, val step: T, val isInclusive: Boolean)
(implicit num: Integral[T])
extends IndexedSeq[T]
{
+ /** Note that NumericRange must be invariant so that constructs
+ * such as
+ *
+ * 1L to 10 by 5
+ *
+ * do not infer the range type as AnyVal.
+ */
import num._
- private def fail(msg: String) = throw new UnsupportedOperationException(msg)
+ private def fail(msg: String) = throw new IllegalArgumentException(msg)
if (step equiv zero)
fail("NumericRange step cannot be zero.")
@@ -48,39 +59,37 @@ extends IndexedSeq[T]
// todo? - we could lift the length restriction by implementing a range as a sequence of
// subranges and limiting the subranges to MAX_INT. There's no other way around it because
// the generics we inherit assume integer-based indexing (as well they should.)
- // The second condition is making sure type T can meaningfully be compared to Math.MAX_INT.
- if (genericLength > fromInt(Math.MAX_INT) && (Math.MAX_INT == toInt(fromInt(Math.MAX_INT))))
- fail("Implementation restricts ranges to Math.MAX_INT elements.")
+ // The second condition is making sure type T can meaningfully be compared to Int.MaxValue.
+ if (genericLength > fromInt(Int.MaxValue) && (Int.MaxValue == toInt(fromInt(Int.MaxValue))))
+ fail("Implementation restricts ranges to Int.MaxValue elements.")
// inclusive/exclusiveness captured this way because we do not have any
// concept of a "unit", we can't just add an epsilon to an exclusive
// endpoint to make it inclusive (as can be done with the int-based Range.)
- protected def limitTest[U >: T](x: U)(implicit unum: Integral[U]) =
- !isEmpty && isInclusive && unum.equiv(x, end)
+ protected def limitTest(x: T) = !isEmpty && isInclusive && equiv(x, end)
protected def underlying = collection.immutable.IndexedSeq.empty[T]
/** Create a new range with the start and end values of this range and
- * a new <code>step</code>.
+ * a new `step`.
*/
- def by[U >: T](newStep: U)(implicit unum: Integral[U]): NumericRange[U] =
- copy(start, end, newStep)
+ def by(newStep: T): NumericRange[T] = copy(start, end, newStep)
/** Create a copy of this range.
*/
- def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): NumericRange[U]
+ def copy(start: T, end: T, step: T): NumericRange[T]
override def foreach[U](f: T => U) {
var i = start
if (step > zero) {
while (i < end) {
f(i)
- i = i + step
+ i += step
}
} else {
while (i > end) {
f(i)
- i = i + step
+ i += step
}
}
if (limitTest(i)) f(i)
@@ -100,7 +109,7 @@ extends IndexedSeq[T]
}
def length: Int = toInt(genericLength)
- override def isEmpty =
+ override def isEmpty: Boolean =
if (step > zero)
if (isInclusive) end < start
else end <= start
@@ -114,9 +123,8 @@ extends IndexedSeq[T]
}
// a well-typed contains method.
- def containsTyped[U >: T](x: U)(implicit unum: Integral[U]): Boolean = {
- import unum._
- def divides(d: U, by: U) = equiv(d % by, zero)
+ def containsTyped(x: T): Boolean = {
+ def divides(d: T, by: T) = equiv(d % by, zero)
limitTest(x) || (
if (step > zero)
@@ -153,7 +161,7 @@ extends IndexedSeq[T]
// XXX This may be incomplete.
new NumericRange[A](fm(start), fm(end), fm(step), isInclusive) {
- def copy[A1 >: A](start: A1, end: A1, step: A1)(implicit unum: Integral[A1]): NumericRange[A1] =
+ def copy(start: A, end: A, step: A): NumericRange[A] =
if (isInclusive) NumericRange.inclusive(start, end, step)
else NumericRange(start, end, step)
@@ -161,8 +169,7 @@ extends IndexedSeq[T]
override def foreach[U](f: A => U) { underlyingRange foreach (x => f(fm(x))) }
override def isEmpty = underlyingRange.isEmpty
override def apply(idx: Int): A = fm(underlyingRange(idx))
- override def containsTyped[A1 >: A](el: A1)(implicit unum: Integral[A1]) =
- underlyingRange exists (x => fm(x) == el)
+ override def containsTyped(el: A) = underlyingRange exists (x => fm(x) == el)
}
}
@@ -196,10 +203,12 @@ extends IndexedSeq[T]
}
}
+/** A companion object for numeric ranges.
+ */
object NumericRange {
class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T])
extends NumericRange(start, end, step, true) {
- def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): Inclusive[U] =
+ def copy(start: T, end: T, step: T): Inclusive[T] =
NumericRange.inclusive(start, end, step)
def exclusive: Exclusive[T] = NumericRange(start, end, step)
@@ -207,7 +216,7 @@ object NumericRange {
class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T])
extends NumericRange(start, end, step, false) {
- def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): Exclusive[U] =
+ def copy(start: T, end: T, step: T): Exclusive[T] =
NumericRange(start, end, step)
def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step)
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 72e09829d4..9cb1040f95 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,13 +14,13 @@ package immutable
import java.io._
import scala.util.matching.Regex
-/** The PagedSeq object defines a lazy implementations of
+/** The `PagedSeq` object defines a lazy implementation of
* a random access sequence.
*
* @since 2.7
*/
object PagedSeq {
- final val UndeterminedEnd = Math.MAX_INT
+ final val UndeterminedEnd = Int.MaxValue
/** Constructs a character sequence from a character iterator */
def fromIterator[T: ClassManifest](source: Iterator[T]): PagedSeq[T] =
@@ -108,8 +107,14 @@ import PagedSeq._
/** An implementation of lazily computed sequences, where elements are stored
* in ``pages'', i.e. arrays of fixed size.
*
- * @author Martin Odersky
- * @since 2.7
+ * @tparam T the type of the elements contained in this paged sequence, with a `ClassManifest` context bound.
+ *
+ * @author Martin Odersky
+ * @since 2.7
+ * @define Coll PagedSeq
+ * @define coll paged sequence
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
class PagedSeq[T: ClassManifest] protected(
more: (Array[T], Int, Int) => Int,
@@ -182,7 +187,7 @@ extends scala.collection.IndexedSeq[T]
/** the subsequence from index `start' up to the
* length of the current sequence.
*/
- override def slice(start: Int) = slice(start, UndeterminedEnd)
+ def slice(start: Int): PagedSeq[T] = slice(start, UndeterminedEnd)
/** Convert sequence to string */
override def toString = {
@@ -202,7 +207,7 @@ private class Page[T: ClassManifest](val num: Int) {
/** The next page in the sequence */
var next : Page[T] = null
- /** A later page in the sequence, serves a cachae for pointing to last page */
+ /** A later page in the sequence; serves as a cache for pointing to the last page */
var later : Page[T] = this
/** The number of characters read into this page */
@@ -218,11 +223,11 @@ private class Page[T: ClassManifest](val num: Int) {
/** The index of the first character in this page relative to the whole sequence */
final def start = num * PageSize
- /** The index of the character following the last charcater in this page relative
+ /** The index of the character following the last character in this page relative
* to the whole sequence */
final def end = start + filled
- /** The currently last page in the sequence; might change as more charcaters are appended */
+ /** The currently last page in the sequence; might change as more characters are appended */
final def latest: Page[T] = {
if (later.next != null) later = later.next.latest
later
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 079c3f3a3d..f5cfd83643 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -1,45 +1,48 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
-import scala.annotation.tailrec
+import generic._
+import mutable.{ Builder, ListBuffer }
+import annotation.tailrec
-object Queue {
- val Empty: Queue[Nothing] = new Queue(Nil, Nil)
- def apply[A](elems: A*) = new Queue(Nil, elems.toList)
-}
-
-/** <code>Queue</code> objects implement data structures that allow to
+/** `Queue` objects implement data structures that allow you to
* insert and retrieve elements in a first-in-first-out (FIFO) manner.
*
* @author Erik Stenman
* @version 1.0, 08/07/2003
* @since 1
+ * @define Coll immutable.Queue
+ * @define coll immutable queue
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable
@SerialVersionUID(-7622936493364270175L)
-class Queue[+A] protected(
- protected val in: List[A],
- protected val out: List[A]) extends Seq[A]
-{
- /** Returns the <code>n</code>-th element of this queue.
+class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
+ extends LinearSeq[A]
+ with GenericTraversableTemplate[A, Queue]
+ with LinearSeqLike[A, Queue[A]] {
+
+ override def companion: GenericCompanion[Queue] = Queue
+
+ /** Returns the `n`-th element of this queue.
* The first element is at position 0.
*
* @param n index of the element to return
- * @return the element at position <code>n</code> in this queue.
+ * @return the element at position `n` in this queue.
* @throws Predef.NoSuchElementException if the queue is too short.
*/
- def apply(n: Int): A = {
+ override def apply(n: Int): A = {
val len = out.length
if (n < len) out.apply(n)
else {
@@ -59,9 +62,19 @@ class Queue[+A] protected(
*/
override def isEmpty: Boolean = in.isEmpty && out.isEmpty
+ override def head: A =
+ if (out.nonEmpty) out.head
+ else if (in.nonEmpty) in.last
+ else throw new NoSuchElementException("head on empty queue")
+
+ override def tail: Queue[A] =
+ if (out.nonEmpty) new Queue(in, out.tail)
+ else if (in.nonEmpty) new Queue(Nil, in.reverse.tail)
+ else throw new NoSuchElementException("tail on empty queue")
+
/** Returns the length of the queue.
*/
- def length = in.length + out.length
+ override def length = in.length + out.length
/** Creates a new queue with element added at the end
* of the old queue.
@@ -98,7 +111,7 @@ class Queue[+A] protected(
* @param iter an iterable object
*/
def enqueue[B >: A](iter: Iterable[B]) =
- new Queue(iter.iterator.toList.reverse ::: in, out)
+ new Queue(iter.toList.reverse ::: in, out)
/** Returns a tuple with the first element in the queue,
* and a new queue with this element removed.
@@ -118,12 +131,24 @@ class Queue[+A] protected(
* @throws Predef.NoSuchElementException
* @return the first element.
*/
- def front: A =
- if (!out.isEmpty) out.head
- else if (!in.isEmpty) in.last
- else throw new NoSuchElementException("front on empty queue")
+ def front: A = head
/** Returns a string representation of this queue.
*/
override def toString() = mkString("Queue(", ", ", ")")
}
+
+/** $factoryInfo
+ * @define Coll immutable.Queue
+ * @define coll immutable queue
+ */
+object Queue extends SeqFactory[Queue] {
+ /** $genericCanBuildFromInfo */
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x.toList))
+ override def empty[A]: Queue[A] = new Queue[A](Nil, Nil)
+ override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList)
+
+ @deprecated("Use Queue.empty instead")
+ val Empty: Queue[Nothing] = Queue()
+}
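
A minimal usage sketch (illustrative only, not part of the patch; the object name QueueDemo is invented) showing the FIFO behaviour of the rebuilt immutable Queue through its public API:

  object QueueDemo extends Application {
    import scala.collection.immutable.Queue

    val q = Queue.empty[Int].enqueue(1).enqueue(2).enqueue(3)
    println(q.head)                    // 1 -- oldest element comes out first (FIFO)

    val (first, rest) = q.dequeue
    println(first)                     // 1
    println(rest)                      // Queue(2, 3)

    // enqueue(iter) appends a whole collection at once
    println(q.enqueue(List(4, 5)))     // Queue(1, 2, 3, 4, 5)
  }

The two-list (in/out) split is purely an internal amortisation trick; none of it leaks into the API exercised above.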
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index d9a7725d1a..68b50fd09f 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -1,62 +1,84 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id: Range.scala 18987 2009-10-08 18:31:44Z odersky $
package scala.collection.immutable
-/** <p>
- * The <code>Range</code> class represents integer values in range
- * <code>[start;end)</code> with non-zero step value <code>step</code>.
- * It's a special case of an indexed sequence.
- * For example:
- * </p><pre>
- * <b>val</b> r1 = 0 until 10
- * <b>val</b> r2 = r1.start until r1.end by r1.step + 1
+/** The `Range` class represents integer values in range
+ * ''[start;end)'' with non-zero step value `step`.
+ * It's a special case of an indexed sequence.
+ * For example:
+ *
+ * {{{
+ * val r1 = 0 until 10
+ * val r2 = r1.start until r1.end by r1.step + 1
* println(r2.length) // = 5
- * </pre>
+ * }}}
+ *
+ * @param start the start of this range.
+ * @param end the exclusive end of the range.
+ * @param step the step for the range.
*
* @author Martin Odersky
* @version 2.8
* @since 2.5
+ * @define Coll Range
+ * @define coll range
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define doesNotUseBuilders
+ * '''Note:''' this method does not use builders to construct a new range,
+ * and its complexity is O(1).
*/
+@serializable @SerialVersionUID(7618862778670199309L)
class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int] {
require(step != 0)
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
- /** Create a new range with the start and end values of this range and
- * a new <code>step</code>.
+ /** Create a new range with the `start` and `end` values of this range and
+ * a new `step`.
+ *
+ * @return a new range with a different step
*/
def by(step: Int): Range = copy(start, end, step)
def isInclusive = false
- protected def limit = end
-
- override def foreach[U](f: Int => U) {
- var i = start
- while (if (step > 0) i < limit else i > limit) {
+ override def foreach[@specialized(Unit) U](f: Int => U) {
+ if (fullLength > 0) {
+ val last = this.last
+ var i = start
+ while (i != last) {
+ f(i)
+ i += step
+ }
f(i)
- i += step
}
}
- lazy val length: Int = {
- def plen(start: Int, limit: Int, step: Int) =
- if (limit <= start) 0 else (limit - start - 1) / step + 1
- if (step > 0) plen(start, limit, step)
- else plen(limit, start, -step)
+ override def last: Int = if (step == 1 || step == -1) {
+ end - step
+ } else {
+ val size = end.toLong - start.toLong
+ val inclusiveLast = (size / step.toLong * step.toLong + start.toLong).toInt
+ if (size % step == 0) inclusiveLast - step else inclusiveLast
}
- final override def isEmpty =
- if (step > 0) start >= limit else start <= limit
+ def length: Int = fullLength.toInt
+
+ protected def fullLength: Long = if (end > start == step > 0 && start != end)
+ ((last.toLong - start.toLong) / step.toLong + 1)
+ else
+ 0
+
+ final override def isEmpty = length == 0
@inline
final def apply(idx: Int): Int = {
@@ -64,24 +86,73 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
start + idx * step
}
- final override def take(n: Int): Range = {
- val limit1 = start + step * (n max 0)
- if (step > 0) Range(start, limit1 min limit, step)
- else Range(start, limit1 max limit, step)
+ // take and drop have to be tolerant of large values without overflowing
+ // warning! this is buggy, and gives wrong answers on boundary cases.
+ // The known bugs are avoided by drop not calling it in those cases.
+ // See ticket #3529. It should be revised.
+ private def locationAfterN(n: Int) = if (n > 0) {
+ if (step > 0)
+ ((start.toLong + step.toLong * n.toLong) min last.toLong).toInt
+ else
+ ((start.toLong + step.toLong * n.toLong) max last.toLong).toInt
+ } else {
+ start
}
+ /** Creates a new range containing the first `n` elements of this range.
+ *
+ * $doesNotUseBuilders
+ *
+ * @param n the number of elements to take.
+ * @return a new range consisting of the first `n` elements.
+ */
+ final override def take(n: Int): Range =
+ if (n > 0 && length > 0)
+ Range(start, locationAfterN(n - 1), step).inclusive
+ else
+ Range(start, start, step)
+
+ /** Creates a new range containing all the elements of this range except the first `n` elements.
+ *
+ * $doesNotUseBuilders
+ *
+ * @param n the number of elements to drop.
+ * @return a new range consisting of all the elements of this range except the first `n` elements.
+ */
final override def drop(n: Int): Range =
- copy(start + step * (n max 0), end, step)
+ if (n >= length) {
+ if (step > 0) copy(end + 1, end, step)
+ else copy(end - 1, end, step)
+ }
+ else copy(locationAfterN(n), end, step)
+ /** Creates a new range containing all the elements of this range except the last one.
+ *
+ * $doesNotUseBuilders
+ *
+ * @return a new range consisting of all the elements of this range except the last one.
+ */
final override def init: Range =
take(length - 1)
+ /** Creates a new range contained in the specified slice of this range.
+ *
+ * $doesNotUseBuilders
+ *
+ * @param from the start of the slice.
+ * @param until the end of the slice.
+ * @return a new range consisting of all the elements of this range contained in the specified slice.
+ */
final override def slice(from: Int, until: Int): Range =
drop(from).take(until - from)
private def skip(p: Int => Boolean): Int = {
var s = start
- while ((if (step > 0) s < limit else s > limit) && p(s)) s += step
+ if (length > 0) {
+ val last = this.last
+ while ((if (step > 0) s <= last else s >= last) && p(s))
+ s += step
+ }
s
}
@@ -93,22 +164,41 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
(Range(start, split, step), copy(split, end, step))
}
+ /** Creates a pair of new ranges, the first consisting of the elements before index `n`, and the second
+ * of those from index `n` onwards.
+ *
+ * $doesNotUseBuilders
+ */
final override def splitAt(n: Int) = (take(n), drop(n))
+ /** Creates a new range consisting of the last `n` elements of the range.
+ *
+ * $doesNotUseBuilders
+ */
final override def takeRight(n: Int): Range = drop(length - n)
+ /** Creates a new range consisting of the initial `length - n` elements of the range.
+ *
+ * $doesNotUseBuilders
+ */
final override def dropRight(n: Int): Range = take(length - n)
- final override def reverse: Range = new Range.Inclusive(last, start, -step)
+ /** Returns the reverse of this range.
+ *
+ * $doesNotUseBuilders
+ */
+ final override def reverse: Range = if (length > 0) new Range.Inclusive(last, start, -step) else this
/** Make range inclusive.
- * @pre if (step > 0) end != MaxInt else end != MinInt
*/
def inclusive = new Range.Inclusive(start, end, step)
- def contains(x: Int): Boolean =
- if (step > 0) start <= x && x < limit && (x - start) % step == 0
- else start >= x && x > limit && (start - x) % step == 0
+ final def contains(x: Int): Boolean = if (length > 0) {
+ if (step > 0) start <= x && x <= last && (x - start) % step == 0
+ else start >= x && x >= last && (start - x) % step == 0
+ } else {
+ false
+ }
override def equals(other: Any) = other match {
case x: Range =>
@@ -130,42 +220,69 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
}
}
+/** A companion object for the `Range` class.
+ */
object Range {
private[immutable] val MAX_PRINT = 512 // some arbitrary value
+ /** Calculates the number of elements in a range given start, end, step, and
+ * whether or not it is inclusive. Returns -1 if parameters are invalid.
+ */
+ def count(start: Int, end: Int, step: Int): Int = count(start, end, step, false)
+ def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = {
+ def last =
+ if (isInclusive && step < 0) end - 1
+ else if (isInclusive && step > 0) end + 1
+ else end
+
+ if (step == 0) -1
+ else if (start == end) { if (isInclusive) 1 else 0 }
+ else if (end > start != step > 0) -1
+ else if (step == 1 || step == -1) last - start
+ else ((last - start - 1) / step) + 1
+ }
+
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
override def isInclusive = true
- override protected val limit = end + Math.signum(step)
override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step)
+ override def last: Int = if (step == 1 || step == -1)
+ end
+ else
+ ((end.toLong - start.toLong) / step.toLong * step.toLong + start.toLong).toInt
+ protected override def fullLength: Long = if (end > start == step > 0 || start == end)
+ ((last.toLong - start.toLong) / step.toLong + 1)
+ else
+ 0
}
- /** Make a range from `start` until `end` (exclusive) with step value 1.
+ /** Make a range from `start` until `end` (exclusive) with given step value.
+ * @note step != 0
*/
def apply(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
 /** Make a range from `start` until `end` (exclusive) with step value 1.
- * @pre end != MaxInt
*/
def apply(start: Int, end: Int): Range with ByOne = new Range(start, end, 1) with ByOne
/** Make an inclusive range from start to end with given step value.
- * @pre step != 0
- * @pre if (step > 0) end != MaxInt else end != MinInt
+ * @note step != 0
*/
def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
/** Make an inclusive range from start to end with step value 1.
- * @pre end != MaxInt
*/
def inclusive(start: Int, end: Int): Range.Inclusive with ByOne = new Inclusive(start, end, 1) with ByOne
trait ByOne extends Range {
- override final def foreach[U](f: Int => U) {
- var i = start
- val l = limit
- while (i < l) {
+ override final def foreach[@specialized(Unit) U](f: Int => U) {
+ if (length > 0) {
+ val last = this.last
+ var i = start
+ while (i != last) {
+ f(i)
+ i += 1
+ }
f(i)
- i += 1
}
}
}
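
An illustrative sketch (not part of the patch; RangeDemo is an invented name) of the behaviour the new last/fullLength/locationAfterN arithmetic is meant to deliver:

  object RangeDemo extends Application {
    // For a positive step, the last element is start + step * ((end - start - 1) / step),
    // which is what the `last`/`fullLength` pair above computes via Longs to avoid overflow.
    val r = 0 until 10 by 3
    println(r.last)                 // 9
    println(r.length)               // 4
    println(r.take(2))              // Range(0, 3)
    println(r.drop(2))              // Range(6, 9)
    println(r contains 6)           // true
    println((0 until 0).isEmpty)    // true -- empty ranges are handled without touching last
  }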
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 337e2d51dd..ecd7a65889 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
-/**
- * @since 2.3
+/** A base class containing the implementations for `TreeMap` and `TreeSet`.
+ *
+ * @since 2.3
*/
@serializable @SerialVersionUID(8691885935445612921L)
abstract class RedBlack[A] {
@@ -33,7 +33,7 @@ abstract class RedBlack[A] {
def isBlack: Boolean
def lookup(x: A): Tree[B]
def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v))
- def delete(k: A): Tree[B] = del(k)
+ def delete(k: A): Tree[B] = blacken(del(k))
def foreach[U](f: (A, B) => U)
@deprecated("use `foreach' instead")
def visit[T](input: T)(f: (T, A, B) => (Boolean, T)): (Boolean, T)
@@ -80,16 +80,77 @@ abstract class RedBlack[A] {
else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v))
else mkTree(isBlack, k, v, left, right)
}
+ // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+ // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
def del(k: A): Tree[B] = {
- if (isSmaller(k, key)) mkTree(isBlack, key, value, left.del(k), right)
- else if (isSmaller(key, k)) mkTree(isBlack, key, value, left, right.del(k))
- else if (left.isEmpty) right
- else if (right.isEmpty) left
- else {
- val s = right.smallest
- mkTree(isBlack, s.key, s.value, left, right.del(s.key))
+ def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
+ case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) =>
+ RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d))
+ case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) =>
+ RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d))
+ case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) =>
+ RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d))
+ case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) =>
+ RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
+ case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) =>
+ RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d))
+ case (a, b) =>
+ BlackTree(x, xv, a, b)
+ }
+ def subl(t: Tree[B]) = t match {
+ case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b)
+ case _ => error("Defect: invariance violation; expected black, got "+t)
+ }
+ def balLeft(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
+ case (RedTree(y, yv, a, b), c) =>
+ RedTree(x, xv, BlackTree(y, yv, a, b), c)
+ case (bl, BlackTree(y, yv, a, b)) =>
+ balance(x, xv, bl, RedTree(y, yv, a, b))
+ case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) =>
+ RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c)))
+ case _ => error("Defect: invariance violation at "+right)
+ }
+ def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
+ case (a, RedTree(y, yv, b, c)) =>
+ RedTree(x, xv, a, BlackTree(y, yv, b, c))
+ case (BlackTree(y, yv, a, b), bl) =>
+ balance(x, xv, RedTree(y, yv, a, b), bl)
+ case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) =>
+ RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl))
+ case _ => error("Defect: invariance violation at "+left)
+ }
+ def delLeft = left match {
+ case _: BlackTree[_] => balLeft(key, value, left.del(k), right)
+ case _ => RedTree(key, value, left.del(k), right)
+ }
+ def delRight = right match {
+ case _: BlackTree[_] => balRight(key, value, left, right.del(k))
+ case _ => RedTree(key, value, left, right.del(k))
+ }
+ def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match {
+ case (Empty, t) => t
+ case (t, Empty) => t
+ case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) =>
+ append(b, c) match {
+ case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d))
+ case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d))
+ }
+ case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) =>
+ append(b, c) match {
+ case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d))
+ case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d))
+ }
+ case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c)
+ case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c))
+ }
+ // RedBlack is neither A : Ordering[A], nor A <% Ordered[A]
+ k match {
+ case _ if isSmaller(k, key) => delLeft
+ case _ if isSmaller(key, k) => delRight
+ case _ => append(left, right)
}
}
+
def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest
def toStream: Stream[(A,B)] =
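
RedBlack itself is internal, but TreeMap and TreeSet sit directly on top of it, so the rewritten del (plus the blacken wrapped around it in delete) can be exercised through them. A small sketch using only the public TreeMap API (RedBlackDemo is an invented name):

  object RedBlackDemo extends Application {
    import scala.collection.immutable.TreeMap

    val full   = TreeMap((1 to 100).map(i => (i, i.toString)): _*)
    val pruned = (full /: (1 to 100 by 2))(_ - _)                    // delete every odd key
    println(pruned.size)                                             // 50
    println(pruned.firstKey + " .. " + pruned.lastKey)               // 2 .. 100
    println(pruned.keysIterator.toList == (2 to 100 by 2).toList)    // true: still sorted
  }

The re-blackening matters because the new balancing helpers may hand back a red-rooted tree; blacken(del(k)) restores the invariant that the root is black.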
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 91c38eeaf8..18d3b9b263 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,10 +14,12 @@ package immutable
import generic._
import mutable.Builder
-/** A subtrait of collection.Seq which represents sequences
- * that cannot be mutated.
+/** A subtrait of `collection.Seq` which represents sequences
+ * that are guaranteed immutable.
*
- * @since 2.8
+ * $seqInfo
+ * @define Coll immutable.Seq
+ * @define coll immutable sequence
*/
trait Seq[+A] extends Iterable[A]
with scala.collection.Seq[A]
@@ -27,10 +28,12 @@ trait Seq[+A] extends Iterable[A]
override def companion: GenericCompanion[Seq] = Seq
}
-/**
- * @since 2.8
+/** $factoryInfo
+ * @define Coll immutable.Seq
+ * @define coll immutable sequence
*/
object Seq extends SeqFactory[Seq] {
+ /** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, Seq[A]] = new mutable.ListBuffer
}
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index cc935afe93..30f0d86139 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,23 +13,16 @@ package immutable
import generic._
-/** <p>
- * A generic trait for immutable sets. Concrete set implementations have
- * to provide functionality for the abstract methods in <code>Set</code>:
- * </p>
- * <pre>
- * <b>def</b> contains(elem: A): Boolean
- * <b>def</b> iterator: Iterator[A]
- * <b>def</b> + (elem: A): This
- * <b>def</b> - (elem: A): This</pre>
- * <p>
- * where <code>This</code> is the type of the set.
- * </p>
+/** A generic trait for immutable sets.
+ *
+ * $setnote
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
+ * @define Coll immutable.Set
+ * @define coll immutable set
*/
trait Set[A] extends Iterable[A]
with scala.collection.Set[A]
@@ -39,14 +31,29 @@ trait Set[A] extends Iterable[A]
override def companion: GenericCompanion[Set] = Set
}
-object Set extends SetFactory[Set] {
+/** $factoryInfo
+ * @define Coll immutable.Set
+ * @define coll immutable set
+ */
+object Set extends ImmutableSetFactory[Set] {
+ /** $setCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
- override def empty[A]: Set[A] = new EmptySet[A]
+ override def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]]
private val hashSeed = "Set".hashCode
/** An optimized representation for immutable empty sets */
@serializable
+ private object EmptySet extends Set[Any] {
+ override def size: Int = 0
+ def contains(elem: Any): Boolean = false
+ def + (elem: Any): Set[Any] = new Set1(elem)
+ def - (elem: Any): Set[Any] = this
+ def iterator: Iterator[Any] = Iterator.empty
+ override def foreach[U](f: Any => U): Unit = {}
+ }
+
+ @serializable @deprecated("use `Set.empty' instead")
class EmptySet[A] extends Set[A] {
override def size: Int = 0
def contains(elem: A): Boolean = false
@@ -57,7 +64,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 1 */
- @serializable
+ @serializable @SerialVersionUID(1233385750652442003L)
class Set1[A](elem1: A) extends Set[A] {
override def size: Int = 1
def contains(elem: A): Boolean =
@@ -66,7 +73,7 @@ object Set extends SetFactory[Set] {
if (contains(elem)) this
else new Set2(elem1, elem)
def - (elem: A): Set[A] =
- if (elem == elem1) new EmptySet[A]
+ if (elem == elem1) Set.empty
else this
def iterator: Iterator[A] =
Iterator(elem1)
@@ -76,7 +83,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 2 */
- @serializable
+ @serializable @SerialVersionUID(-6443011234944830092L)
class Set2[A](elem1: A, elem2: A) extends Set[A] {
override def size: Int = 2
def contains(elem: A): Boolean =
@@ -96,7 +103,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 3 */
- @serializable
+ @serializable @SerialVersionUID(-3590273538119220064L)
class Set3[A](elem1: A, elem2: A, elem3: A) extends Set[A] {
override def size: Int = 3
def contains(elem: A): Boolean =
@@ -117,7 +124,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 4 */
- @serializable
+ @serializable @SerialVersionUID(-3622399588156184395L)
class Set4[A](elem1: A, elem2: A, elem3: A, elem4: A) extends Set[A] {
override def size: Int = 4
def contains(elem: A): Boolean =
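
A sketch (SmallSetDemo is an invented name) of what the shared empty-set singleton and the size-specialised Set1..Set4 look like from the outside:

  object SmallSetDemo extends Application {
    import scala.collection.immutable.Set

    val s1 = Set.empty[Int] + 1
    val s4 = s1 + 2 + 3 + 4
    println(s1.getClass.getName)           // ...immutable.Set$Set1
    println(s4.getClass.getName)           // ...immutable.Set$Set4
    println((s4 + 5).getClass.getName)     // a HashSet variant once the size exceeds 4
    // Removing the last element hands back the shared EmptySet singleton.
    println((s1 - 1) eq Set.empty[Int])    // true
  }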
diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala
index d5d3bee87b..3c285b2c81 100644
--- a/src/library/scala/collection/immutable/SetProxy.scala
+++ b/src/library/scala/collection/immutable/SetProxy.scala
@@ -1,30 +1,27 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
-/** <p>
- * This is a simple wrapper class for <a href="Set.html"
- * target="contentFrame"><code>scala.collection.immutable.Set</code></a>.
- * </p>
- * <p>
- * It is most useful for assembling customized set abstractions
- * dynamically using object composition and forwarding.
- * </p>
+/** This is a simple wrapper class for <a href="Set.html"
+ * target="contentFrame">`scala.collection.immutable.Set`</a>.
+ *
+ * It is most useful for assembling customized set abstractions
+ * dynamically using object composition and forwarding.
+ *
+ * @tparam A type of the elements contained in this set proxy.
*
* @since 2.8
*/
-trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]]
-{
+trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] {
override def repr = this
private def newProxy[B >: A](newSelf: Set[B]): SetProxy[B] =
new SetProxy[B] { val self = newSelf }
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 5d0806c5e3..2f0749c37a 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -18,10 +17,15 @@ import annotation.unchecked.uncheckedVariance
/** A map whose keys are sorted.
*
+ * @tparam A the type of the keys contained in this sorted map.
+ * @tparam B the type of the values associated with the keys.
+ *
* @author Sean McDirmid
* @author Martin Odersky
* @version 2.8
* @since 2.4
+ * @define Coll immutable.SortedMap
+ * @define coll immutable sorted map
*/
trait SortedMap[A, +B] extends Map[A, B]
with scala.collection.SortedMap[A, B]
@@ -31,6 +35,8 @@ trait SortedMap[A, +B] extends Map[A, B]
override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] =
SortedMap.newBuilder[A, B]
+ override def empty: SortedMap[A, B] = SortedMap.empty
+
override def updated [B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value))
/** Add a key/value pair to this map.
@@ -56,22 +62,16 @@ trait SortedMap[A, +B] extends Map[A, B]
*
* @param elems the traversable object.
*/
- override def ++[B1 >: B](elems: scala.collection.Traversable[(A, B1)]): SortedMap[A, B1] =
- ((repr: SortedMap[A, B1]) /: elems) (_ + _)
-
- /** Adds a number of elements provided by an iterator
- * and returns a new collection with the added elements.
- *
- * @param iter the iterator
- */
- override def ++[B1 >: B] (iter: Iterator[(A, B1)]): SortedMap[A, B1] =
- ((repr: SortedMap[A, B1]) /: iter) (_ + _)
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs) (_ + _)
}
-/**
- * @since 2.4
+/** $factoryInfo
+ * @define Coll immutable.SortedMap
+ * @define coll immutable sorted map
*/
object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
+ /** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
}
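
A small sketch of the widened ++, which now accepts any TraversableOnce, so an Iterator works as well as a collection (SortedMapDemo is an invented name):

  object SortedMapDemo extends Application {
    import scala.collection.immutable.SortedMap

    val m  = SortedMap(3 -> "c", 1 -> "a")
    val m2 = m ++ Iterator(2 -> "b", 4 -> "d")   // iterators are fine now
    println(m2.keys.toList)                      // List(1, 2, 3, 4) -- ordering is preserved
    println(m2(4))                               // d
  }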
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 6fa233cad7..d0168b7b22 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,22 +14,27 @@ package immutable
import generic._
import mutable.Builder
-/** A sorted set.
+/** A subtrait of `collection.SortedSet` which represents sorted sets
+ * that cannot be mutated.
*
* @author Sean McDirmid
* @author Martin Odersky
* @version 2.8
* @since 2.4
+ * @define Coll immutable.SortedSet
+ * @define coll immutable sorted set
*/
trait SortedSet[A] extends Set[A] with scala.collection.SortedSet[A] with SortedSetLike[A, SortedSet[A]] {
/** Needs to be overridden in subclasses. */
override def empty: SortedSet[A] = SortedSet.empty[A]
}
-/**
- * @since 2.4
+/** $factoryInfo
+ * @define Coll immutable.SortedSet
+ * @define coll immutable sorted set
*/
object SortedSet extends ImmutableSortedSetFactory[SortedSet] {
+ /** $sortedSetCanBuildFromInfo */
implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
}
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 2813bc6656..58a37a8136 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -1,39 +1,57 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
-import scala.annotation.tailrec
+import generic._
+import mutable.{ ArrayBuffer, Builder }
-object Stack {
- val Empty: Stack[Nothing] = new Stack(Nil)
- def apply[A](elems: A*) = new Stack(elems.toList)
+/** $factoryInfo
+ * @define Coll immutable.Stack
+ * @define coll immutable stack
+ */
+object Stack extends SeqFactory[Stack] {
+ /** $genericCanBuildFromInfo */
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, Stack[A]] = new ArrayBuffer[A] mapResult (buf => new Stack(buf.toList))
+
+ @deprecated("Use Stack.empty instead")
+ val Empty: Stack[Nothing] = Stack()
}
/** This class implements immutable stacks using a list-based data
- * structure. Instances of <code>Stack</code> represent
- * empty stacks; they can be either created by calling the constructor
- * directly, or by applying the function <code>Stack.Empty</code>.
+ * structure.
*
- * @note This class exists only for historical reason and as an
- * analogue of mutable stacks
+ * '''Note:''' This class exists only for historical reasons and as an
+ * analogue of mutable stacks.
* Instead of an immutable stack you can just use a list.
*
+ * @tparam A the type of the elements contained in this stack.
+ *
* @author Matthias Zenger
* @version 1.0, 10/07/2003
* @since 1
+ * @define Coll immutable.Stack
+ * @define coll immutable stack
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @SerialVersionUID(1976480595012942526L)
-class Stack[+A] protected (protected val elems: List[A]) extends Seq[A] {
+class Stack[+A] protected (protected val elems: List[A])
+ extends LinearSeq[A]
+ with GenericTraversableTemplate[A, Stack]
+ with LinearSeqOptimized[A, Stack[A]] {
+ override def companion: GenericCompanion[Stack] = Stack
def this() = this(Nil)
@@ -43,9 +61,8 @@ class Stack[+A] protected (protected val elems: List[A]) extends Seq[A] {
*/
override def isEmpty: Boolean = elems.isEmpty
- /** The number of elements in the stack
- */
- def length: Int = elems.length
+ override def head = elems.head
+ override def tail = new Stack(elems.tail)
/** Push an element on the stack.
*
@@ -63,26 +80,15 @@ class Stack[+A] protected (protected val elems: List[A]) extends Seq[A] {
def push[B >: A](elem1: B, elem2: B, elems: B*): Stack[B] =
this.push(elem1).push(elem2).pushAll(elems)
- /** Push all elements provided by the given iterator object onto
- * the stack. The last element returned by the iterable object
- * will be on top of the new stack.
- *
- * @param elems the iterator object.
- * @return the stack with the new elements on top.
- * @deprecated
- */
- def pushAll[B >: A](elems: Iterator[B]): Stack[B] =
- ((this: Stack[B]) /: elems)(_ push _)
-
/** Push all elements provided by the given traversable object onto
- * the stack. The last element returned by the iterable object
+ * the stack. The last element returned by the traversable object
* will be on top of the new stack.
*
- * @param elems the iterable object.
+ * @param xs the traversable object.
* @return the stack with the new elements on top.
*/
- def pushAll[B >: A](elems: scala.collection.Traversable[B]): Stack[B] =
- ((this: Stack[B]) /: elems)(_ push _)
+ def pushAll[B >: A](xs: TraversableOnce[B]): Stack[B] =
+ ((this: Stack[B]) /: xs.toIterator)(_ push _)
/** Returns the top element of the stack. An error is signaled if
* there is no element on the stack.
@@ -91,7 +97,7 @@ class Stack[+A] protected (protected val elems: List[A]) extends Seq[A] {
* @return the top element.
*/
def top: A =
- if (!elems.isEmpty) elems.head
+ if (!isEmpty) elems.head
else throw new NoSuchElementException("top of empty stack")
/** Removes the top element from the stack.
@@ -101,31 +107,14 @@ class Stack[+A] protected (protected val elems: List[A]) extends Seq[A] {
* @return the new stack without the former top element.
*/
def pop: Stack[A] =
- if (!elems.isEmpty) new Stack(elems.tail)
+ if (!isEmpty) new Stack(elems.tail)
else throw new NoSuchElementException("pop of empty stack")
def pop2: (A, Stack[A]) =
- if (!elems.isEmpty) (elems.head, new Stack(elems.tail))
+ if (!isEmpty) (elems.head, new Stack(elems.tail))
else throw new NoSuchElementException("pop of empty stack")
- /** Returns the n-th element of this stack. The bottom element has index
- * 0, elements above are indexed with increasing numbers.
- *
- * @throws Predef.NoSuchElementException
- * @param n the index number.
- * @return the n-th element on the stack.
- */
- def apply(n: Int): A = {
- @tailrec
- def walk(i: Int, xs: List[A]): A =
- (i == 0, xs.isEmpty) match {
- case (_, true) => throw new NoSuchElementException("index out of range")
- case (true, _) => xs.head
- case (false, _) => walk(i - 1, xs.tail)
- }
-
- walk(n, elems)
- }
+ override def reverse: Stack[A] = new Stack(elems.reverse)
/** Returns an iterator over all elements on the stack. The iterator
* issues elements in the reversed order they were inserted into the
@@ -133,23 +122,10 @@ class Stack[+A] protected (protected val elems: List[A]) extends Seq[A] {
*
* @return an iterator over all stack elements.
*/
- override def iterator: Iterator[A] = new Iterator[A] {
- private var cur = Stack.this
- def hasNext: Boolean = !cur.isEmpty
- def next: A = { val r = top; cur = cur.pop; r }
- }
-
- /** Returns the hash code for this stack.
- *
- * @return the hash code of the stack.
- */
- override def hashCode(): Int = //"Stack".hashCode
- if (isEmpty) 0
- else pop2 match { case (x,y) => x.hashCode + y.hashCode }
+ override def iterator: Iterator[A] = elems.iterator
/** Returns a string representation of this stack.
*/
override def toString() = elems.mkString("Stack(", ", ", ")")
-
}
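
An illustrative sketch (StackDemo is an invented name) of the LIFO behaviour after the rewrite; note that iterator now simply delegates to the underlying list:

  object StackDemo extends Application {
    import scala.collection.immutable.Stack

    val s = Stack.empty[Int].push(1).push(2).push(3)
    println(s.top)                            // 3 -- the element pushed last
    println(s.pop)                            // Stack(2, 1)
    println(s.pushAll(Iterator(4, 5)).top)    // 5 -- pushAll now takes any TraversableOnce
    println(s.iterator.toList)                // List(3, 2, 1), top to bottom
  }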
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 6a57c3596d..40f71aa845 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,31 +15,39 @@ import generic._
import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
import scala.annotation.tailrec
-/**
- * <p>The class <code>Stream</code> implements lazy lists where elements
- * are only evaluated when they are needed. Here is an example:</p>
- * <pre>
- * <b>object</b> Main <b>extends</b> Application {
+
+
+/** The class `Stream` implements lazy lists where elements
+ * are only evaluated when they are needed. Here is an example:
*
- * <b>def</b> from(n: Int): Stream[Int] =
- * Stream.cons(n, from(n + 1))
+ * {{{
+ * object Main extends Application {
*
- * <b>def</b> sieve(s: Stream[Int]): Stream[Int] =
- * Stream.cons(s.head, sieve(s.tail filter { _ % s.head != 0 }))
+ * def from(n: Int): Stream[Int] =
+ * Stream.cons(n, from(n + 1))
*
- * <b>def</b> primes = sieve(from(2))
+ * def sieve(s: Stream[Int]): Stream[Int] =
+ * Stream.cons(s.head, sieve(s.tail filter { _ % s.head != 0 }))
*
- * primes take 10 print
- * }
- * </pre>
+ * def primes = sieve(from(2))
*
- * @author Martin Odersky, Matthias Zenger
- * @version 1.1 08/08/03
- * @since 2.8
+ * primes take 10 print
+ * }
+ * }}}
+ *
+ * @tparam A the type of the elements contained in this stream.
+ *
+ * @author Martin Odersky, Matthias Zenger
+ * @version 1.1 08/08/03
+ * @since 2.8
+ * @define Coll Stream
+ * @define coll stream
+ * @define orderDependent
+ * @define orderDependentFold
*/
abstract class Stream[+A] extends LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
- with LinearSeqLike[A, Stream[A]] {
+ with LinearSeqOptimized[A, Stream[A]] {
self =>
override def companion: GenericCompanion[Stream] = Stream
@@ -68,21 +75,22 @@ self =>
/** The stream resulting from the concatenation of this stream with the argument stream.
* @param rest The stream that gets appended to this stream
+ * @return The stream containing the elements of this stream followed by those of `rest`.
*/
def append[B >: A](rest: => Traversable[B]): Stream[B] =
if (isEmpty) rest.toStream else new Stream.Cons(head, tail append rest)
- /** Force evaluation of the whole stream and return it */
+ /** Forces evaluation of the whole stream and returns it. */
def force: Stream[A] = {
var these = this
while (!these.isEmpty) these = these.tail
this
}
- /** Prints elements of this stream one by one, separated by commas */
+ /** Prints elements of this stream one by one, separated by commas. */
def print() { print(", ") }
- /** Prints elements of this stream one by one, separated by <code>sep</code>
+ /** Prints elements of this stream one by one, separated by `sep`.
* @param sep The separator string printed between consecutive elements.
*/
def print(sep: String) {
@@ -97,6 +105,22 @@ self =>
loop(this, "")
}
+ override def length: Int = {
+ var len = 0
+ var left = this
+ while (!left.isEmpty) {
+ len += 1
+ left = left.tail
+ }
+ len
+ }
+
+ /** It's an imperfect world, but at least we can bottle up the
+ * imperfection in a capsule.
+ */
+ @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That]
+ @inline private def asStream[B](x: AnyRef): Stream[B] = x.asInstanceOf[Stream[B]]
+
// Overridden methods from Traversable
override def toStream: Stream[A] = this
@@ -107,40 +131,48 @@ self =>
}
/** Create a new stream which contains all elements of this stream
- * followed by all elements of Traversable `that'
+ * followed by all elements of Traversable `that`.
* @note It's subtle why this works. We know that if the target type
* of the Builder That is either a Stream, or one of its supertypes, or undefined,
* then StreamBuilder will be chosen for the implicit.
* we recognize that fact and optimize to get more laziness.
*/
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
+ override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[A]
- (if (isEmpty) that.toStream
- else new Stream.Cons(head, (tail ++ that).asInstanceOf[Stream[A]])).asInstanceOf[That]
- }
+ asThat[That](
+ if (isEmpty) that.toStream
+ else new Stream.Cons(head, asStream[A](tail ++ that))
+ )
- /** Create a new stream which contains all elements of this stream
- * followed by all elements of Iterator `that'
+ /**
+ * Create a new stream which contains all intermediate results of applying the operator
+ * to subsequent elements left to right.
+ * @note This works because the target type of the Builder That is a Stream.
*/
- override def++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
- this ++ that.toStream
+ override final def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ asThat[That](
+ if (isEmpty) Stream(z)
+ else new Stream.Cons(z, asStream[B](tail.scanLeft(op(z, head))(op)))
+ )
/** Returns the stream resulting from applying the given function
- * <code>f</code> to each element of this stream.
+ * `f` to each element of this stream.
*
* @param f function to apply to each element.
* @return <code>f(a<sub>0</sub>), ..., f(a<sub>n</sub>)</code> if this
* sequence is <code>a<sub>0</sub>, ..., a<sub>n</sub></code>.
*/
- override final def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- (if (isEmpty) Stream.Empty
- else new Stream.Cons(f(head), (tail map f).asInstanceOf[Stream[B]])).asInstanceOf[That]
- }
+ override final def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ asThat[That](
+ if (isEmpty) Stream.Empty
+ else new Stream.Cons(f(head), asStream[B](tail map f))
+ )
- /** Applies the given function <code>f</code> to each element of
+ /** Applies the given function `f` to each element of
* this stream, then concatenates the results.
*
- * @param f the function to apply on each element.
+ * @param f the function to apply on each element.
+ * @param bf $bfinfo
* @return <code>f(a<sub>0</sub>) ::: ... ::: f(a<sub>n</sub>)</code> if
* this stream is <code>[a<sub>0</sub>, ..., a<sub>n</sub>]</code>.
*/
@@ -148,20 +180,22 @@ self =>
// we assume there is no other builder factory on streams and therefore know that That = Stream[B]
// optimisations are not for speed, but for functionality
// see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala)
- (if (isEmpty) Stream.Empty
- else {
- // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty
- var nonEmptyPrefix = this
- var prefix = f(nonEmptyPrefix.head).toStream
- while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) {
- nonEmptyPrefix = nonEmptyPrefix.tail
- if(!nonEmptyPrefix.isEmpty)
- prefix = f(nonEmptyPrefix.head).toStream
+ asThat[That](
+ if (isEmpty) Stream.Empty
+ else {
+ // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty
+ var nonEmptyPrefix = this
+ var prefix = f(nonEmptyPrefix.head).toStream
+ while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) {
+ nonEmptyPrefix = nonEmptyPrefix.tail
+ if(!nonEmptyPrefix.isEmpty)
+ prefix = f(nonEmptyPrefix.head).toStream
+ }
+
+ if (nonEmptyPrefix.isEmpty) Stream.empty
+ else prefix append asStream[B](nonEmptyPrefix.tail flatMap f)
}
-
- if(nonEmptyPrefix.isEmpty) Stream.empty
- else prefix append (nonEmptyPrefix.tail flatMap f).asInstanceOf[Stream[B]]
- }).asInstanceOf[That]
+ )
/** Returns all the elements of this stream that satisfy the
* predicate <code>p</code>. The order of the elements is preserved.
@@ -169,11 +203,46 @@ self =>
* @param p the predicate used to filter the stream.
* @return the elements of this stream satisfying <code>p</code>.
*/
- override final def filter(p: A => Boolean): Stream[A] = {
+ override def filter(p: A => Boolean): Stream[A] = {
// optimization: drop leading prefix of elems for which f returns false
- var rest = this dropWhile (!p(_))
- if (rest.isEmpty) Stream.Empty
- else new Stream.Cons(rest.head, rest.tail filter p)
+ // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
+ var rest = this
+ while (!rest.isEmpty && !p(rest.head)) rest = rest.tail
+ // private utility func to avoid `this` on stack (would be needed for the lazy arg)
+ if (rest.nonEmpty) Stream.filteredTail(rest, p)
+ else Stream.Empty
+ }
+
+ override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p)
+
+ /** A lazier implementation of WithFilter than TraversableLike's.
+ */
+ final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
+
+ override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
+ def tailMap = asStream[B](tail withFilter p map f)
+ asThat[That](
+ if (isEmpty) Stream.Empty
+ else if (p(head)) new Stream.Cons(f(head), tailMap)
+ else tailMap
+ )
+ }
+
+ override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
+ def tailFlatMap = asStream[B](tail withFilter p flatMap f)
+ asThat[That](
+ if (isEmpty) Stream.Empty
+ else if (p(head)) f(head).toStream append tailFlatMap
+ else tailFlatMap
+ )
+ }
+
+ override def foreach[B](f: A => B) =
+ for (x <- self)
+ if (p(x)) f(x)
+
+ override def withFilter(q: A => Boolean): StreamWithFilter =
+ new StreamWithFilter(x => p(x) && q(x))
}
/** Apply the given function <code>f</code> to each element of this linear sequence
@@ -220,11 +289,12 @@ self =>
* <code>Stream(a<sub>0</sub>, ..., a<sub>m</sub>)
* zip Stream(b<sub>0</sub>, ..., b<sub>n</sub>)</code> is invoked.
*/
- override final def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That = {
+ override final def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)]
- (if (this.isEmpty || that.isEmpty) Stream.Empty
- else new Stream.Cons((this.head, that.head), (this.tail zip that.tail).asInstanceOf[Stream[(A1, B)]])).asInstanceOf[That]
- }
+ asThat[That](
+ if (this.isEmpty || that.isEmpty) Stream.Empty
+ else new Stream.Cons((this.head, that.head), asStream[(A1, B)](this.tail zip that.tail))
+ )
/** Zips this iterable with its indices. `s.zipWithIndex` is equivalent to
* `s zip s.indices`
@@ -280,6 +350,8 @@ self =>
if (n <= 0 || isEmpty) Stream.Empty
else new Stream.Cons(head, if (n == 1) Stream.empty else tail take (n-1))
+ override def splitAt(n: Int): (Stream[A], Stream[A]) = (take(n), drop(n))
+
/** A substream starting at index `from`
* and extending up to (but not including) index `until`.
*
@@ -344,9 +416,9 @@ self =>
/** Builds a new stream from this stream in which any duplicates (wrt to ==) removed.
* Among duplicate elements, only the first one is retained in the result stream
*/
- override def removeDuplicates: Stream[A] =
+ override def distinct: Stream[A] =
if (isEmpty) this
- else new Stream.Cons(head, tail.filter(head !=).removeDuplicates)
+ else new Stream.Cons(head, tail.filter(head !=).distinct)
/** Returns a new sequence of given length containing the elements of this sequence followed by zero
* or more occurrences of given elements.
@@ -355,7 +427,8 @@ self =>
def loop(len: Int, these: Stream[A]): Stream[B] =
if (these.isEmpty) Stream.fill(len)(elem)
else new Stream.Cons(these.head, loop(len - 1, these.tail))
- loop(len, this).asInstanceOf[That]
+
+ asThat[That](loop(len, this))
// was: if (bf.isInstanceOf[Stream.StreamCanBuildFrom[_]]) loop(len, this).asInstanceOf[That]
// else super.padTo(len, elem)
}
@@ -420,12 +493,12 @@ object Stream extends SeqFactory[Stream] {
import scala.collection.{Iterable, Seq, IndexedSeq}
/** A builder for streams
- * @note: This builder is lazy only in the sense that it does not go downs the spine
- * of traversables that are added as a whole. If more laziness can be achieved,
- * this builder should be bypassed.
+ * @note This builder is lazy only in the sense that it does not go down the spine
+ * of traversables that are added as a whole. If more laziness can be achieved,
+ * this builder should be bypassed.
*/
class StreamBuilder[A] extends scala.collection.mutable.LazyBuilder[A, Stream[A]] {
- def result: Stream[A] = (for (xs <- parts.iterator; x <- xs.toIterable.iterator) yield x).toStream
+ def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
object Empty extends Stream[Nothing] {
@@ -480,13 +553,18 @@ object Stream extends SeqFactory[Stream] {
}
/** A lazy cons cell, from which streams are built. */
+ @serializable @SerialVersionUID(-602202424901551803L)
final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] {
override def isEmpty = false
override def head = hd
- private[this] var tlVal: Stream[A] = _
- def tailDefined = tlVal ne null
+ @volatile private[this] var tlVal: Stream[A] = _
+ def tailDefined: Boolean = tlVal ne null
override def tail: Stream[A] = {
- if (!tailDefined) { tlVal = tl }
+ if (!tailDefined)
+ synchronized {
+ if (!tailDefined) tlVal = tl
+ }
+
tlVal
}
}
@@ -535,8 +613,8 @@ object Stream extends SeqFactory[Stream] {
if (n <= 0) Empty else new Cons(elem, fill(n-1)(elem))
override def tabulate[A](n: Int)(f: Int => A): Stream[A] = {
- def loop(i: Int) =
- if (i >= n) Empty else new Cons(f(i), tabulate(i+1)(f))
+ def loop(i: Int): Stream[A] =
+ if (i >= n) Empty else new Cons(f(i), loop(i+1))
loop(0)
}
@@ -544,6 +622,10 @@ object Stream extends SeqFactory[Stream] {
if (if (step < 0) start <= end else end <= start) Empty
else new Cons(start, range(start + step, end, step))
+ private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean) = {
+ new Stream.Cons(stream.head, stream.tail filter p)
+ }
+
/** A stream containing all elements of a given iterator, in the order they are produced.
* @param it The iterator producing the stream's elements
*/
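
A sketch (StreamDemo is an invented name) of the two properties the Cons changes above are about: the by-name tail is forced at most once (now also under concurrent access, thanks to the volatile field and the double-checked synchronized block), and filter stays lazy via filteredTail:

  object StreamDemo extends Application {
    var forced = 0
    def tl: Stream[Int] = { forced += 1; Stream(2, 3) }

    val s = new Stream.Cons(1, tl)
    s.tail; s.tail; s.tail
    println(forced)                  // 1 -- the tail is memoised after the first force

    val evens = Stream.from(1) filter (_ % 2 == 0)
    println(evens.head)              // 2 -- only the matching prefix is forced
    println(evens.take(5).toList)    // List(2, 4, 6, 8, 10)
  }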
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index e993a412fc..6fe6b4555d 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,9 +14,10 @@ package immutable
import generic._
import mutable.Builder
import scala.util.matching.Regex
+import scala.math.ScalaNumber
-/**
- * @since 2.8
+/** A companion object for `StringLike` containing some constants.
+ * @since 2.8
*/
object StringLike {
@@ -30,10 +30,19 @@ object StringLike {
import StringLike._
-/**
- * @since 2.8
+/** A trait describing stringlike collections.
+ *
+ * @tparam Repr The type of the actual collection inheriting `StringLike`.
+ *
+ * @since 2.8
+ * @define Coll String
+ * @define coll string
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-trait StringLike[+Repr] extends IndexedSeqLike[Char, Repr] with Ordered[String] {
+trait StringLike[+Repr] extends IndexedSeqOptimized[Char, Repr] with Ordered[String] {
self =>
/** Creates a string builder buffer as builder for this class */
@@ -48,7 +57,7 @@ self =>
override def mkString = toString
- /** return n times the current string
+ /** Return the current string concatenated `n` times.
*/
def * (n: Int): String = {
val buf = new StringBuilder
@@ -60,18 +69,16 @@ self =>
private def isLineBreak(c: Char) = c == LF || c == FF
- /** <p>
- * Strip trailing line end character from this string if it has one.
- * A line end character is one of
- * </p>
+ /**
+ * Strip trailing line end character from this string if it has one.
+ *
+ * A line end character is one of
* <ul style="list-style-type: none;">
* <li>LF - line feed (0x0A hex)</li>
* <li>FF - form feed (0x0C hex)</li>
* </ul>
- * <p>
- * If a line feed character LF is preceded by a carriage return CR
- * (0x0D hex), the CR character is also stripped (Windows convention).
- * </p>
+ * If a line feed character LF is preceded by a carriage return CR
+ * (0x0D hex), the CR character is also stripped (Windows convention).
*/
def stripLineEnd: String = {
val len = toString.length
@@ -85,19 +92,18 @@ self =>
}
}
- /** <p>
+ /**
* Return all lines in this string in an iterator, including trailing
* line end characters.
- * </p>
- * <p>
+ *
* The number of strings returned is one greater than the number of line
* end characters in this string. For an empty string, a single empty
* line is returned. A line end character is one of
- * </p>
- * <ul style="list-style-type: none;">
- * <li>LF - line feed (0x0A hex)</li>
- * <li>FF - form feed (0x0C hex)</li>
- * </ul>
+ *
+ * <ul style="list-style-type: none;">
+ * <li>LF - line feed (0x0A hex)</li>
+ * <li>FF - form feed (0x0C hex)</li>
+ * </ul>
*/
def linesWithSeparators: Iterator[String] = new Iterator[String] {
val str = self.toString
@@ -114,15 +120,15 @@ self =>
}
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e. apply <code>.stripLineEnd</code> to all lines
- * returned by <code>linesWithSeparators</code>.
+ * end characters, i.e. apply `.stripLineEnd` to all lines
+ * returned by `linesWithSeparators`.
*/
def lines: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e. apply <code>.stripLineEnd</code> to all lines
- * returned by <code>linesWithSeparators</code>.
+ * end characters, i.e. apply `.stripLineEnd` to all lines
+ * returned by `linesWithSeparators`.
*/
def linesIterator: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
@@ -137,23 +143,24 @@ self =>
new String(chars)
}
- /** Returns this string with the given <code>prefix</code> stripped. */
+ /** Returns this string with the given `prefix` stripped. */
def stripPrefix(prefix: String) =
if (toString.startsWith(prefix)) toString.substring(prefix.length)
else toString
- /** Returns this string with the given <code>suffix</code> stripped. */
+ /** Returns this string with the given `suffix` stripped. If this string does not
+ * end with `suffix`, it is returned unchanged. */
def stripSuffix(suffix: String) =
if (toString.endsWith(suffix)) toString.substring(0, toString.length() - suffix.length)
else toString
- /** <p>
+ /**
* For every line in this string:
- * </p>
- * <blockquote>
- * Strip a leading prefix consisting of blanks or control characters
- * followed by <code>marginChar</code> from the line.
- * </blockquote>
+ *
+ * <blockquote>
+ * Strip a leading prefix consisting of blanks or control characters
+ * followed by `marginChar` from the line.
+ * </blockquote>
*/
def stripMargin(marginChar: Char): String = {
val buf = new StringBuilder
@@ -167,13 +174,13 @@ self =>
buf.toString
}
- /** <p>
+ /**
* For every line in this string:
- * </p>
- * <blockquote>
- * Strip a leading prefix consisting of blanks or control characters
- * followed by <code>|</code> from the line.
- * </blockquote>
+ *
+ * <blockquote>
+ * Strip a leading prefix consisting of blanks or control characters
+ * followed by `|` from the line.
+ * </blockquote>
*/
def stripMargin: String = stripMargin('|')
@@ -220,38 +227,48 @@ self =>
}
*/
- /** <p>
+ private def unwrapArg(arg: Any): AnyRef = arg match {
+ case x: ScalaNumber => x.underlying
+ case x => x.asInstanceOf[AnyRef]
+ }
+
+ /**
* Uses the underlying string as a pattern (in a fashion similar to
* printf in C), and uses the supplied arguments to fill in the
* holes.
- * </p>
- * <p>
+ *
* The interpretation of the formatting patterns is described in
* <a href="" target="contentFrame" class="java/util/Formatter">
- * <code>java.util.Formatter</code></a>.
- * </p>
+ * `java.util.Formatter`</a>, with the addition that
+ * classes deriving from `ScalaNumber` (such as `scala.BigInt` and
+ * `scala.BigDecimal`) are unwrapped to pass a type which `Formatter`
+ * understands.
+ *
*
 * @param args the arguments used to instantiate the pattern.
* @throws java.lang.IllegalArgumentException
*/
- def format(args : Any*) : String =
- java.lang.String.format(toString, args.asInstanceOf[scala.collection.Seq[AnyRef]]: _*)
-
- /** <p>
- * Like format(args*) but takes an initial Locale parameter
- * which influences formatting as in java.lang.String's format.
- * </p>
- * <p>
+ def format(args : Any*): String =
+ java.lang.String.format(toString, args map unwrapArg: _*)
+
+ /**
+ * Like `format(args*)` but takes an initial `Locale` parameter
+ * which influences formatting as in `java.lang.String`'s format.
+ *
+ *
* The interpretation of the formatting patterns is described in
* <a href="" target="contentFrame" class="java/util/Formatter">
- * <code>java.util.Formatter</code></a>.
- * </p>
+ * `java.util.Formatter`</a>, with the addition that
+ * classes deriving from `ScalaNumber` (such as `scala.BigInt` and
+ * `scala.BigDecimal`) are unwrapped to pass a type which `Formatter`
+ * understands.
+ *
*
- * @param locale an instance of java.util.Locale
+ * @param locale an instance of `java.util.Locale`
 * @param args the arguments used to instantiate the pattern.
* @throws java.lang.IllegalArgumentException
*/
- def format(l: java.util.Locale, args: Any*): String =
- java.lang.String.format(l, toString, args.asInstanceOf[scala.collection.Seq[AnyRef]]: _*)
+ def formatLocal(l: java.util.Locale, args: Any*): String =
+ java.lang.String.format(l, toString, args map unwrapArg: _*)
}
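
A sketch of the ScalaNumber unwrapping and the format → formatLocal rename (FormatDemo is an invented name; the locale-dependent output assumes a standard JDK Formatter):

  object FormatDemo extends Application {
    import java.util.Locale

    // BigInt/BigDecimal extend ScalaNumber, so they are unwrapped to their
    // java.math counterparts before reaching java.util.Formatter.
    println("%d apples".format(BigInt(3)))             // 3 apples
    println("%.2f".format(BigDecimal("2.5")))          // 2.50
    println("%.2f".formatLocal(Locale.GERMANY, 2.5))   // 2,50
  }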
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index a31e98335e..0d8f5f6b83 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,9 +14,21 @@ package immutable
import mutable.StringBuilder
/**
- * @since 2.8
+ * This class serves as a wrapper providing `String`s with all the operations
+ * found in indexed sequences. Where needed, instances of `String` object
+ * are implicitly converted into this class.
+ *
+ * The difference between this class and `WrappedString` is that calling transformer
+ * methods such as `filter` and `map` will yield a `String` object, whereas a
+ * `WrappedString` will remain a `WrappedString`.
+ *
+ * @param repr the actual representation of this string operations object.
+ *
+ * @since 2.8
+ * @define Coll StringOps
+ * @define coll string
*/
-class StringOps(override val repr: String) extends StringLike[String] {
+final class StringOps(override val repr: String) extends StringLike[String] {
override protected[this] def thisCollection: WrappedString = new WrappedString(repr)
override protected[this] def toCollection(repr: String): WrappedString = new WrappedString(repr)
@@ -25,5 +36,16 @@ class StringOps(override val repr: String) extends StringLike[String] {
/** Creates a string builder buffer as builder for this class */
override protected[this] def newBuilder = new StringBuilder
+ override def slice(from: Int, until: Int): String = {
+ /** Slice must be forgiving on all out-of-bounds indices,
+ * whereas substring is not.
+ */
+ val start = from max 0
+ val end = until min repr.length
+
+ if (start >= end) ""
+ else repr.substring(start, end)
+ }
+
override def toString = repr
}
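
The doc comment above distinguishes `StringOps` from `WrappedString`, and the new `slice` clamps out-of-range indices instead of throwing. A small behavioural sketch, assuming the usual implicit `String` to `StringOps` conversion from `Predef` (not part of the patch):

    val s = "Hello, world"
    val letters: String = s.filter(_.isLetter)   // transformers yield a String again
    val tail = s.slice(7, 100)                   // until is clamped to the length: "world"
    val none = s.slice(10, 3)                    // start >= end gives "" instead of throwing
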
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 239706ba71..bab1a71a97 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,13 +14,9 @@ package immutable
import generic._
import mutable.Builder
-/** A subtrait of <code>collection.Traversable</code> which represents
- * traversables that cannot be mutated.
- *
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A trait for traversable collections that are guaranteed immutable.
+ * $traversableInfo
+ * @define mutability immutable
*/
trait Traversable[+A] extends scala.collection.Traversable[A]
with GenericTraversableTemplate[A, Traversable]
@@ -30,11 +25,10 @@ trait Traversable[+A] extends scala.collection.Traversable[A]
override def companion: GenericCompanion[Traversable] = Traversable
}
-/** A factory object for the trait <code>Traversable</code>.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `Vector`.
+ * @define coll immutable traversable collection
+ * @define Coll immutable.Traversable
*/
object Traversable extends TraversableFactory[Traversable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = new GenericCanBuildFrom[A]
diff --git a/src/library/scala/collection/immutable/Tree.scala.disabled b/src/library/scala/collection/immutable/Tree.scala.disabled
deleted file mode 100644
index 1c1eb17acc..0000000000
--- a/src/library/scala/collection/immutable/Tree.scala.disabled
+++ /dev/null
@@ -1,440 +0,0 @@
-o/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-// This is probably no longer needed; replaced by RedBlack.scala
-*
-/* General Balanced Trees - highly efficient functional dictionaries.
-**
-** This is a scala version of gb_trees.erl which is
-** copyrighted (C) 1999-2001 by Sven-Olof Nystrom, and Richard Carlsson
-**
-** An efficient implementation of Prof. Arne Andersson's General
-** Balanced Trees. These have no storage overhead compared to plain
-** unbalanced binary trees, and their performance is in general better
-** than AVL trees.
-**
-** NOTE: This code was until 2007-04-01 under a GPL license. The author has
-** given permission to remove that license, so that now this code is under
-** the general license for the Scala libraries.
-*/
-
-package scala.collection.immutable
-
-
-/** <p>
- * General Balanced Trees - highly efficient functional dictionaries.
- * </p>
- * <p>
- * An efficient implementation of Prof. Arne Andersson's
- * <a href="http://citeseer.ist.psu.edu/andersson99general.html"
- * target="_top">General Balanced Trees</a>. These have no storage overhead
- * compared to plain unbalanced binary trees, and their performance is in
- * general better than AVL trees.
- * </p>
- * <p>
- * This implementation does not balance the trees after deletions.
- * Since deletions don't increase the height of a tree, this should
- * be OK in most applications. A balance method is provided for those
- * cases where rebalancing is needed.
- * </p>
- * <p>
- * The tree consists of entries conatining a key with an order.
- * </p>
- * <p>
- * When instanciating the tree an order for the keys has to be
- * supplied.
- * </p>
- *
- * @author Erik Stenman, Michel Schinz
- * @version 1.1, 2005-01-20
- * @since 1
- */
-
-@serializable
-abstract class Tree[A <% Ordered[A], B]() extends AnyRef {
- /* Data structure:
- ** - size:Int - the number of elements in the tree.
- ** - tree:T, which is composed of nodes of the form:
- ** - GBNode(key: A, entry:B, smaller:T, bigger:T),
- ** - and the "empty tree" node GBLeaf.
- **
- ** Original balance condition h(T) <= ceil(c * log(|T|)) has been
- ** changed to the similar (but not quite equivalent) condition
- ** 2 ^ h(T) <= |T| ^ c.
- **
- */
-
- /** The type returned when creating a new tree.
- * This type should be defined by concrete implementations
- * e.g. <pre>
- * class C[T](...) extends Tree[A,B](...) {
- * type This = C[T];
- * </pre>
- */
- protected type This <: Tree[A,B]
- protected def getThis: This
-
- /**
- * The type of nodes that the tree is build from.
- */
- protected type aNode = GBTree[A,B]
-
- /** The nodes in the tree.
- */
- protected def tree: aNode = GBLeaf[A,B]()
-
- /** <p>
- * This abstract method should be defined by a concrete implementation
- * <code>C[T]</code> as something like:
- * </p>
- * <pre>
- * <b>override def</b> New(sz: Int, t: aNode): This {
- * <b>new</b> C[T](order) {
- * <b>override def</b> size = sz
- * <b>override protected def</b> tree: aNode = t
- * }
- * </pre>
- * <p>
- * The concrete implementation should also override the def of This
- * <code>override type This = C[T];</code>
- * </p>
- */
- protected def New(sz: Int, t: aNode): This
-
- /** The size of the tree, returns 0 (zero) if the tree is empty.
- *
- * @return The number of nodes in the tree as an integer.
- */
- def size: Int = 0
-
- /** A new tree with the entry added is returned,
- * assuming that key is <em>not</em> in the tree.
- *
- * @param key ...
- * @param entry ...
- * @return ...
- */
- protected def add(key: A, entry: B): This = {
- val newSize = size + 1
- New(newSize, tree.insert(key, entry, newSize * newSize).node)
- }
-
- /** A new tree with the entry added is returned,
- * if key is <em>not</em> in the tree, otherwise
- * the key is updated with the new entry.
- */
- protected def updateOrAdd(key: A, entry: B): This =
- if (tree.isDefinedAt(key))
- New(size,tree.update(key,entry))
- else
- add(key,entry)
-
- /** Removes the key from the tree.
- *
- * @param key ...
- * @return ...
- */
- protected def deleteAny(key: A): This =
- if (tree.isDefinedAt(key))
- delete(key)
- else
- getThis
-
- /** Removes the key from the tree, assumimg that key is present.
- *
- * @param key ...
- * @return ...
- */
- private def delete(key: A): This =
- New(size - 1, tree.delete(key))
-
- /** Check if this map maps <code>key</code> to a value and return the
- * value if it exists.
- *
- * @param key the key of the mapping of interest
- * @return the value of the mapping, if it exists
- */
- protected def findValue(key: A): Option[B] =
- tree.get(key)
-
- /** Gives you an iterator over all elements in the tree.
- * The iterator structure corresponds to
- * the call stack of an in-order traversal.
- *
- * Note: The iterator itself has a state, i.e., it is not functional.
- */
- protected def entries: Iterator[B] =
- new Iterator[B] {
- var iter = tree.mk_iter(scala.Nil)
- def hasNext = !iter.isEmpty
- def next = iter match {
- case GBNode(_,v,_,t)::iter_tail =>
- iter = t.mk_iter(iter_tail)
- v
- case scala.Nil =>
- throw new NoSuchElementException("next on empty iterator")
- }
- }
-
- /** Create a new balanced tree from the tree. Might be useful to call
- * after many deletions, since deletion does not rebalance the tree.
- */
- def balance: This =
- New(size, tree.balance(size))
-}
-
-protected abstract class InsertTree[A <% Ordered[A],B]() extends AnyRef {
- def insertLeft(k: A, v: B, t: GBTree[A,B]): InsertTree[A,B]
- def insertRight(k: A, v: B, t: GBTree[A,B]): InsertTree[A,B]
- def node: GBTree[A,B]
-}
-
-/**
- * <code>ITree</code> is an internal class used by
- * <a href="Tree.html" target="contentFrame"><code>Tree</code></a>.
- */
-private case class ITree[A <% Ordered[A],B](t: GBTree[A,B])
- extends InsertTree[A,B] {
- def insertLeft(key: A, value: B, bigger: GBTree[A,B]) =
- ITree(GBNode(key, value, t, bigger))
- def insertRight(key: A, value: B, smaller: GBTree[A,B]) =
- ITree(GBNode(key, value, smaller, t))
- def node = t
-}
-
-/**
- * <code>INode</code> is an internal class used by
- * <a href="Tree.html" target="contentFrame"><code>Tree</code></a>.
- */
-private case class INode[A <% Ordered[A],B](t1: GBTree[A,B],
- height: Int,
- size: Int)
- extends InsertTree[A,B] {
- def insertLeft(key: A, value: B, bigger: GBTree[A,B]) =
- balance_p(GBNode(key, value, t1, bigger), bigger);
- def insertRight(key: A, value: B, smaller: GBTree[A,B]) =
- balance_p(GBNode(key, value, smaller, t1),smaller);
- protected def balance_p(t:GBTree[A,B],subtree:GBTree[A,B]):InsertTree[A,B] = {
- val (subHeight, subSize) = subtree.count
- val totalHeight = 2 * Math.max(height, subHeight)
- val totalSize = size + subSize + 1
- val BalanceHeight = totalSize * totalSize
- if (totalHeight > BalanceHeight) ITree(t.balance(totalSize))
- else INode(t, totalHeight, totalSize)
- }
- def node = t1
-}
-
-/**
- * <code>GBTree</code> is an internal class used by
- * <a href="Tree.html" target="contentFrame"><code>Tree</code></a>.
- *
- * @author Erik Stenman
- * @version 1.0, 2005-01-20
- */
-@serializable
-protected abstract class GBTree[A <% Ordered[A],B] extends AnyRef {
- type aNode = GBTree[A,B]
- type anInsertTree = InsertTree[A,B]
-
- /** Calculates 2^h, and size, where h is the height of the tree
- * and size is the number of nodes in the tree.
- */
- def count: (Int,Int)
- def isDefinedAt(Key: A): Boolean
- def get(key: A): Option[B]
- def apply(key: A): B
- def update(key: A, value: B): aNode
- def insert(key: A, value: B, size: Int): anInsertTree
- def toList(acc: List[(A,B)]): List[(A,B)]
- def mk_iter(iter_tail: List[aNode]): List[aNode]
- def delete(key: A): aNode
- def merge(t: aNode): aNode
- def takeSmallest: (A,B,aNode)
- def balance(s: Int): GBTree[A,B]
-}
-
-private case class GBLeaf[A <% Ordered[A],B]() extends GBTree[A,B] {
- def count = (1, 0)
- def isDefinedAt(key: A) = false
- def get(_key: A) = None
- def apply(key: A) = throw new NoSuchElementException("key " + key + " not found")
- def update(key: A, value: B) = throw new NoSuchElementException("key " + key + " not found")
- def insert(key: A, value: B, s: Int): anInsertTree = {
- if (s == 0)
- INode(GBNode(key, value, this, this), 1, 1)
- else
- ITree(GBNode(key, value, this, this))
- }
- def toList(acc: List[(A,B)]): List[(A,B)] = acc
- def mk_iter(iter_tail: List[GBTree[A,B]]) = iter_tail
- def merge(larger: GBTree[A,B]) = larger
- def takeSmallest: (A,B, GBTree[A,B]) =
- throw new NoSuchElementException("takeSmallest on empty tree")
- def delete(_key: A) = throw new NoSuchElementException("Delete on empty tree.")
- def balance(s: Int) = this
- override def hashCode() = 0
-}
-
-private case class GBNode[A <% Ordered[A],B](key: A,
- value: B,
- smaller: GBTree[A,B],
- bigger: GBTree[A,B])
- extends GBTree[A,B] {
- def count: (Int,Int) = {
- val (sHeight, sSize) = smaller.count
- val (bHeight, bSize) = bigger.count
- val mySize = sSize + bSize + 1
- if (mySize == 1)
- (1, mySize)
- else
- (2 * Math.max(sHeight, bHeight), mySize)
- }
-
- def isDefinedAt(sKey: A): Boolean =
- if (sKey < key) smaller.isDefinedAt(sKey)
- else if (sKey > key) bigger.isDefinedAt(sKey)
- else true
-
- def get(sKey: A): Option[B] =
- if (sKey < key) smaller.get(sKey)
- else if (sKey > key) bigger.get(sKey)
- else Some(value)
-
- def apply(sKey: A): B =
- if (sKey < key) smaller.apply(sKey)
- else if (sKey > key) bigger.apply(sKey)
- else value
-
- def update(newKey: A, newValue: B): aNode =
- if (newKey < key)
- GBNode(key, value, smaller.update(newKey,newValue), bigger)
- else if (newKey > key)
- GBNode(key, value, smaller, bigger.update(newKey,newValue))
- else
- GBNode(newKey, newValue, smaller, bigger)
-
- def insert(newKey: A, newValue: B, s: Int): anInsertTree = {
- if (newKey < key)
- smaller.insert(newKey, newValue, s / 2).insertLeft(key, value, bigger)
- else if (newKey > key)
- bigger.insert(newKey, newValue, s / 2).insertRight(key, value, smaller)
- else
- throw new NoSuchElementException("Key exists: " + newKey)
- }
-
- def toList(acc: List[(A,B)]): List[(A,B)] =
- smaller.toList((key, value) :: bigger.toList(acc))
-
- def mk_iter(iter_tail:List[aNode]):List[aNode] =
- smaller.mk_iter(this :: iter_tail)
-
- def delete(sKey:A):aNode = {
- if (sKey < key)
- GBNode(key, value, smaller.delete(sKey), bigger)
- else if (sKey > key)
- GBNode(key, value, smaller, bigger.delete(sKey))
- else
- smaller.merge(bigger)
- }
-
- def merge(larger: aNode): GBTree[A,B] = larger match {
- case GBLeaf() =>
- this
- case _ =>
- val (key1, value1, larger1) = larger.takeSmallest
- GBNode(key1, value1, this, larger1)
- }
-
- def takeSmallest: (A, B, aNode) = smaller match {
- case GBLeaf() =>
- (key, value, bigger)
- case _ =>
- val (key1, value1, smaller1) = smaller.takeSmallest
- (key1, value1, GBNode(key, value, smaller1, bigger))
- }
-
- /**
- * @param s ...
- * @return ...
- */
- def balance(s: Int): GBTree[A,B] =
- balance_list(toList(scala.Nil), s)
-
- protected def balance_list(list: List[(A,B)], s: Int): GBTree[A,B] = {
- val empty = GBLeaf[A,B]();
- def bal(list: List[(A,B)], s: Int): (aNode, List[(A,B)]) = {
- if (s > 1) {
- val sm = s - 1
- val s2 = sm / 2
- val s1 = sm - s2
- val (t1, (k, v) :: l1) = bal(list, s1)
- val (t2, l2) = bal(l1, s2)
- val t = GBNode(k, v, t1, t2)
- (t, l2)
- } else if (s == 1) {
- val (k,v) :: rest = list
- (GBNode(k, v, empty, empty), rest)
- } else
- (empty, list)
- }
- bal(list, s)._1
- }
-
- override def hashCode() =
- value.hashCode() + smaller.hashCode() + bigger.hashCode()
-}
-
-/* Here is the e-mail where the Author agreed to the change in license.
-
-from Erik Stenman <happi.stenman@gmail.com>
-to martin odersky <martin.odersky@epfl.ch>,
-date Tue, Apr 29, 2008 at 3:31 PM
-subject Re: test
-mailed-by chara.epfl.ch
-signed-by gmail.com
-
-Hi Martin,
-
-I am fine with that change, and since I don't have a scala distribution handy,
-I am also fine with you doing the change yourself. Is that OK?
-
-Sorry for my dead home address, I'll add an English response to it at some time.
-
-I am doing fine, and my family is also doing fine.
-Hope all is well with you too.
-
-Cheers,
-Erik
-- Hide quoted text -
-
-On Tue, Apr 29, 2008 at 3:13 PM, martin odersky <martin.odersky@epfl.ch> wrote:
-
- Hi Erik,
-
- I tried to send mail to happi@home.se, but got a response n swedish. I
- was sort of guessing from the response that it contained an
- alternative e-mail address and tried to send it there.
-
- Anyway, I hope things are going well with you!
-
- There was some discussion recently about the license of Tree.scala in
- package collection.immutable. It's GPL, whereas the rest of the Scala
- library is BSD. It seems this poses problems with Scala being packaged
- with Fedora. Would it be OK with you to change the license to the
- general one of Scala libraries? You could simply remove the references
- to the GPL
- license in the code and send it back to me if that's OK with you. On
- the other hand, if there's a problem we'll try something else instead.
-
- All the best
-
- -- Martin
-*/
diff --git a/src/library/scala/collection/immutable/TreeHashMap.scala b/src/library/scala/collection/immutable/TreeHashMap.scala
index d991df196f..a7de5bf8d1 100644
--- a/src/library/scala/collection/immutable/TreeHashMap.scala
+++ b/src/library/scala/collection/immutable/TreeHashMap.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
@@ -55,7 +54,7 @@ class TreeHashMap[Key, +Value] private (private val underlying : IntMap[AssocMap
def empty[V] = TreeHashMap.empty[Key, V]
private def hash(key : Key) = {
- var h = key.hashCode;
+ var h = key.##
h ^= ((h >>> 20) ^ (h >>> 12));
h ^ (h >>> 7) ^ (h >>> 4);
}
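
The switch from `hashCode` to `##` matters because `##` is consistent with `==` across boxed numeric types (and is intended to be null-safe), while the bit-spreading below it is unchanged. A sketch of the difference, not taken from the patch:

    val a: Any = 1
    val b: Any = 1.0
    println(a == b)                    // true: Scala's == compares the numeric values
    println(a.hashCode == b.hashCode)  // false: Integer(1) and Double(1.0) hash differently
    println(a.## == b.##)              // true: ## hashes equal numbers to the same value
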
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 9d3f93cb7d..07212f8f5c 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,22 +14,33 @@ package immutable
import generic._
import mutable.Builder
-/** The canonical factory of <a href="TreeMap.html">TreeMap</a>'s.
- *
- * @since 1
+/** $factoryInfo
+ * @define Coll immutable.TreeMap
+ * @define coll immutable tree map
*/
object TreeMap extends ImmutableSortedMapFactory[TreeMap] {
def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord)
+ /** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B]
private def make[A, B](s: Int, t: RedBlack[A]#Tree[B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord)
}
/** This class implements immutable maps using a tree.
*
+ * @tparam A the type of the keys contained in this tree map.
+ * @tparam B the type of the values associated with the keys.
+ * @param ordering the implicit ordering used to compare objects of type `A`.
+ *
* @author Erik Stenman
* @author Matthias Zenger
* @version 1.1, 03/05/2004
* @since 1
+ * @define Coll immutable.TreeMap
+ * @define coll immutable tree map
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable
class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit val ordering: Ordering[A])
@@ -65,18 +75,20 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* if key is <em>not</em> in the TreeMap, otherwise
* the key is updated with the new entry.
*
- * @param key ...
- * @param value ...
- * @return ...
+ * @tparam B1 type of the value of the new binding which is a supertype of `B`
+ * @param key the key that should be updated
+ * @param value the value to be associated with `key`
+ * @return a new $coll with the updated binding
*/
override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
val newsize = if (tree.lookup(key).isEmpty) size + 1 else size
TreeMap.make(newsize, tree.update(key, value))
}
- /** Add a key/value pair to this map.
- * @param kv the key/value pair
- * @return A new map with the new binding added to this map
+ /** Add a key/value pair to this map.
+ * @tparam B1 type of the value of the new binding, a supertype of `B`
+ * @param kv the key/value pair
+ * @return A new $coll with the new binding added to this map
*/
override def + [B1 >: B] (kv: (A, B1)): TreeMap[A, B1] = updated(kv._1, kv._2)
@@ -84,15 +96,22 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* either the collection itself (if it is mutable), or a new collection
* with the added elements.
*
+ * @tparam B1 type of the values of the new bindings, a supertype of `B`
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
+ * @return a new $coll with the updated bindings
*/
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): TreeMap[A, B1] =
this + elem1 + elem2 ++ elems
/** A new TreeMap with the entry added is returned,
* assuming that key is <em>not</em> in the TreeMap.
+ *
+ * @tparam B1 type of the values of the new bindings, a supertype of `B`
+ * @param key the key to be inserted
+ * @param value the value to be associated with `key`
+ * @return a new $coll with the inserted binding, if it wasn't present in the map
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
assert(tree.lookup(key).isEmpty)
@@ -103,11 +122,11 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
if (tree.lookup(key).isEmpty) this
else TreeMap.make(size - 1, tree.delete(key))
- /** Check if this map maps <code>key</code> to a value and return the
+ /** Check if this map maps `key` to a value and return the
* value if it exists.
*
* @param key the key of the mapping of interest
- * @return the value of the mapping, if it exists
+ * @return the value of the mapping, if it exists
*/
override def get(key: A): Option[B] = tree.lookup(key) match {
case n: NonEmpty[b] => Some(n.value)
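
The rewritten doc comments above describe `updated`, `+` and `insert`; the following usage sketch (illustrative only, not part of the patch) shows how they relate:

    import scala.collection.immutable.TreeMap

    val m = TreeMap(3 -> "three", 1 -> "one")
    val m2 = m.updated(2, "two")     // new map with the binding added or replaced
    val m3 = m2 + (4 -> "four")      // + simply delegates to updated
    println(m3.get(1))               // Some(one)
    println(m3.get(7))               // None
    val m4 = m3.insert(5, "five")    // insert asserts that the key is absent
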
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 3600392d7c..9eee10f165 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -1,26 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package immutable
import generic._
-import mutable.{Builder, AddingBuilder}
+import mutable.{ Builder, AddingBuilder }
-/** The canonical factory of <a href="TreeSet.html">TreeSet</a>'s.
- *
- * @since 1
+/** $factoryInfo
+ * @define Coll immutable.TreeSet
+ * @define coll immutable tree set
*/
-object TreeSet extends SortedSetFactory[TreeSet]{
-
+object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
implicit def implicitBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = newBuilder[A](ordering)
override def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] =
new AddingBuilder(empty[A](ordering))
@@ -28,14 +26,22 @@ object TreeSet extends SortedSetFactory[TreeSet]{
/** The empty set of this type
*/
def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]
-
}
/** This class implements immutable sets using a tree.
*
+ * @tparam A the type of the elements contained in this tree set
+ * @param ordering the implicit ordering used to compare objects of type `A`
+ *
* @author Martin Odersky
* @version 2.0, 02/01/2007
* @since 1
+ * @define Coll immutable.TreeSet
+ * @define coll immutable tree set
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @SerialVersionUID(-234066569443569402L)
class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
@@ -56,29 +62,40 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
*/
override def empty = TreeSet.empty
- /** A new TreeSet with the entry added is returned,
+ /** Creates a new `TreeSet` with the entry added.
+ *
+ * @param elem a new element to add.
+ * @return a new $coll containing `elem` and all the elements of this $coll.
*/
def + (elem: A): TreeSet[A] = {
val newsize = if (tree.lookup(elem).isEmpty) size + 1 else size
newSet(newsize, tree.update(elem, ()))
}
- /** A new TreeSet with the entry added is returned,
+ /** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
+ *
+ * @param elem a new element to add.
+ * @return a new $coll containing `elem` and all the elements of this $coll.
*/
def insert(elem: A): TreeSet[A] = {
assert(tree.lookup(elem).isEmpty)
newSet(size + 1, tree.update(elem, ()))
}
+ /** Creates a new `TreeSet` with the entry removed.
+ *
+ * @param elem the element to remove.
+ * @return a new $coll containing all the elements of this $coll except `elem`.
+ */
def - (elem:A): TreeSet[A] =
if (tree.lookup(elem).isEmpty) this
else newSet(size - 1, tree delete elem)
- /** Checks if this set contains element <code>elem</code>.
+ /** Checks if this set contains element `elem`.
*
* @param elem the element to check for membership.
- * @return true, iff <code>elem</code> is contained in this set.
+ * @return true, iff `elem` is contained in this set.
*/
def contains(elem: A): Boolean = !tree.lookup(elem).isEmpty
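
A corresponding usage sketch for the documented `TreeSet` operations (illustrative only, not from the patch):

    import scala.collection.immutable.TreeSet

    val s = TreeSet(3, 1, 2)
    val s2 = s + 4                // new set containing the element
    val s3 = s2 - 1               // new set without the element
    println(s3.contains(2))       // true
    println(s3)                   // TreeSet(2, 3, 4) -- iteration follows the ordering
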
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index e0a73fe427..9faab71158 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id: Vector.scala 19072 2009-10-13 12:19:59Z rompf $
package scala.collection
package immutable
@@ -19,24 +18,25 @@ import scala.collection.mutable.Builder
object Vector extends SeqFactory[Vector] {
- /*private[immutable]*/ val BF = new GenericCanBuildFrom[Nothing] {
+ private[immutable] val BF = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
@inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
BF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]]
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
- /*private[immutable]*/ val NIL = new Vector[Nothing](0, 0, 0)
+ private[immutable] val NIL = new Vector[Nothing](0, 0, 0)
@inline override def empty[A]: Vector[A] = NIL
}
-// TODO: most members are still public -> restrict access (caveat: private prevents inlining)
+// In principle, most members should be private. However, access privileges must
+// be chosen carefully so as not to prevent method inlining.
@serializable
-final class Vector[+A](startIndex: Int, endIndex: Int, focus: Int) extends Seq[A]
+final class Vector[+A](startIndex: Int, endIndex: Int, focus: Int) extends IndexedSeq[A]
with GenericTraversableTemplate[A, Vector]
- with SeqLike[A, Vector[A]]
- with VectorPointer[A @uncheckedVariance] {
+ with IndexedSeqLike[A, Vector[A]]
+ with VectorPointer[A @uncheckedVariance] { self =>
override def companion: GenericCompanion[Vector] = Vector
@@ -45,7 +45,7 @@ override def companion: GenericCompanion[Vector] = Vector
//assert(focus >= 0, focus+"<0")
//assert(focus <= endIndex, focus+">"+endIndex)
- /*private*/ var dirty = false
+ private[immutable] var dirty = false
def length = endIndex - startIndex
@@ -60,20 +60,35 @@ override def companion: GenericCompanion[Vector] = Vector
s
}
+
+ // can still be improved
+ override /*SeqLike*/
+ def reverseIterator: Iterator[A] = new Iterator[A] {
+ private var i = self.length
+ def hasNext: Boolean = 0 < i
+ def next: A =
+ if (0 < i) {
+ i -= 1
+ self(i)
+ } else Iterator.empty.next
+ }
+
+ // TODO: reverse
+
// TODO: check performance of foreach/map etc. should override or not?
// Ideally, clients will inline calls to map all the way down, including the iterator/builder methods.
// In principle, escape analysis could even remove the iterator/builder allocations and do it
// with local variables exclusively. But we're not quite there yet ...
- @inline def foreach0[U](f: A => U): Unit = iterator.foreach0(f)
- @inline def map0[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
+ @deprecated("this method is experimental and will be removed in a future release")
+ @inline def foreachFast[U](f: A => U): Unit = iterator.foreachFast(f)
+ @deprecated("this method is experimental and will be removed in a future release")
+ @inline def mapFast[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
val b = bf(repr)
- foreach0(x => b += f(x))
+ foreachFast(x => b += f(x))
b.result
}
- // TODO: reverse
- // TODO: reverseIterator
def apply(index: Int): A = {
val idx = checkRangeConvert(index)
@@ -108,41 +123,71 @@ override def companion: GenericCompanion[Vector] = Vector
}
override def take(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (startIndex + n < endIndex) {
+ if (n <= 0)
+ Vector.empty
+ else if (startIndex + n < endIndex)
dropBack0(startIndex + n)
- } else
+ else
this
}
override def drop(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (startIndex + n < endIndex) {
+ if (n <= 0)
+ this
+ else if (startIndex + n < endIndex)
dropFront0(startIndex + n)
- } else
+ else
Vector.empty
}
override def takeRight(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (endIndex - n > startIndex) {
+ if (n <= 0)
+ Vector.empty
+ else if (endIndex - n > startIndex)
dropFront0(endIndex - n)
- } else
+ else
this
}
override def dropRight(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (endIndex - n > startIndex) {
+ if (n <= 0)
+ this
+ else if (endIndex - n > startIndex)
dropBack0(endIndex - n)
- } else
+ else
Vector.empty
}
+ override /*IterableLike*/ def head: A = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.head")
+ apply(0)
+ }
+
+ override /*TraversableLike*/ def tail: Vector[A] = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.tail")
+ drop(1)
+ }
+
+ override /*TraversableLike*/ def last: A = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.last")
+ apply(length-1)
+ }
+
+ override /*TraversableLike*/ def init: Vector[A] = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.init")
+ dropRight(1)
+ }
+
+ override /*IterableLike*/ def slice(from: Int, until: Int): Vector[A] =
+ take(until).drop(from)
+
+ override /*IterableLike*/ def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n))
+
+
// semi-private api
- def updateAt[B >: A](index: Int, elem: B): Vector[B] = {
+ private[immutable] def updateAt[B >: A](index: Int, elem: B): Vector[B] = {
val idx = checkRangeConvert(index)
val s = new Vector[B](startIndex, endIndex, idx)
s.initFrom(this)
@@ -153,7 +198,6 @@ override def companion: GenericCompanion[Vector] = Vector
}
-
private def gotoPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) {
gotoPosWritable1(oldIndex, newIndex, xor)
} else {
@@ -168,7 +212,7 @@ override def companion: GenericCompanion[Vector] = Vector
dirty = true
}
- def appendFront[B>:A](value: B): Vector[B] = {
+ private[immutable] def appendFront[B>:A](value: B): Vector[B] = {
if (endIndex != startIndex) {
var blockIndex = (startIndex - 1) & ~31
var lo = (startIndex - 1) & 31
@@ -263,7 +307,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
}
- def appendBack[B>:A](value: B): Vector[B] = {
+ private[immutable] def appendBack[B>:A](value: B): Vector[B] = {
// //println("------- append " + value)
// debug()
if (endIndex != startIndex) {
@@ -361,22 +405,22 @@ override def companion: GenericCompanion[Vector] = Vector
display5 = copyRange(display5, oldLeft, newLeft)
}
- def zeroLeft(array: Array[AnyRef], index: Int): Unit = {
+ private def zeroLeft(array: Array[AnyRef], index: Int): Unit = {
var i = 0; while (i < index) { array(i) = null; i+=1 }
}
- def zeroRight(array: Array[AnyRef], index: Int): Unit = {
+ private def zeroRight(array: Array[AnyRef], index: Int): Unit = {
var i = index; while (i < array.length) { array(i) = null; i+=1 }
}
- def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = {
+ private def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = {
// if (array eq null)
// println("OUCH!!! " + right + "/" + depth + "/"+startIndex + "/" + endIndex + "/" + focus)
val a2 = new Array[AnyRef](array.length)
Platform.arraycopy(array, 0, a2, 0, right)
a2
}
- def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = {
+ private def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = {
val a2 = new Array[AnyRef](array.length)
Platform.arraycopy(array, left, a2, left, a2.length - left)
a2
@@ -564,7 +608,7 @@ final class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterato
private var lo: Int = _startIndex & 31
private var endIndex: Int = _endIndex
- private var endLo = Math.min(endIndex - blockIndex, 32)
+ private var endLo = math.min(endIndex - blockIndex, 32)
def hasNext = _hasNext
@@ -582,7 +626,7 @@ final class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterato
gotoNextBlockStart(newBlockIndex, blockIndex ^ newBlockIndex)
blockIndex = newBlockIndex
- endLo = Math.min(endIndex - blockIndex, 32)
+ endLo = math.min(endIndex - blockIndex, 32)
lo = 0
} else {
_hasNext = false
@@ -592,18 +636,17 @@ final class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterato
res
}
- // TODO: take
- // TODO: drop
+ // TODO: drop (important?)
- // TODO: remove!
- @inline def foreach0[U](f: A => U) { while (hasNext) f(next()) }
+ @deprecated("this method is experimental and will be removed in a future release")
+ @inline def foreachFast[U](f: A => U) { while (hasNext) f(next()) }
}
final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A @uncheckedVariance] {
- // TODO: possible alternative: start with display0 = null, blockIndex = -32, lo = 32
- // to avoid allocation initial array if the result will be empty anyways
+ // possible alternative: start with display0 = null, blockIndex = -32, lo = 32
+ // to avoid allocating the initial array if the result will be empty anyway
display0 = new Array[AnyRef](32)
depth = 1
@@ -612,7 +655,7 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
private var lo = 0
def += (elem: A): this.type = {
- if (lo == 32) {
+ if (lo >= display0.length) {
val newBlockIndex = blockIndex+32
gotoNextBlockStartWritable(newBlockIndex, blockIndex ^ newBlockIndex)
blockIndex = newBlockIndex
@@ -624,11 +667,12 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
}
def result: Vector[A] = {
- if (blockIndex + lo == 0)
+ val size = blockIndex + lo
+ if (size == 0)
return Vector.empty
- val s = new Vector[A](0, blockIndex + lo, 0) // TODO: should focus front or back?
+ val s = new Vector[A](0, size, 0) // should focus front or back?
s.initFrom(this)
- if (depth > 1) s.gotoPos(0, blockIndex + lo)
+ if (depth > 1) s.gotoPos(0, size - 1) // we're currently focused to size - 1, not size!
s
}
@@ -643,18 +687,18 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
private[immutable] trait VectorPointer[T] {
- var depth: Int = _
- var display0: Array[AnyRef] = _
- var display1: Array[AnyRef] = _
- var display2: Array[AnyRef] = _
- var display3: Array[AnyRef] = _
- var display4: Array[AnyRef] = _
- var display5: Array[AnyRef] = _
+ private[immutable] var depth: Int = _
+ private[immutable] var display0: Array[AnyRef] = _
+ private[immutable] var display1: Array[AnyRef] = _
+ private[immutable] var display2: Array[AnyRef] = _
+ private[immutable] var display3: Array[AnyRef] = _
+ private[immutable] var display4: Array[AnyRef] = _
+ private[immutable] var display5: Array[AnyRef] = _
// used
- final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth)
+ private[immutable] final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth)
- final def initFrom[U](that: VectorPointer[U], depth: Int) = {
+ private[immutable] final def initFrom[U](that: VectorPointer[U], depth: Int) = {
this.depth = depth
(depth - 1) match {
case -1 =>
@@ -690,7 +734,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is at pos oldIndex = xor ^ index
- final def getElem(index: Int, xor: Int): T = {
+ private[immutable] final def getElem(index: Int, xor: Int): T = {
if (xor < (1 << 5)) { // level = 0
display0(index & 31).asInstanceOf[T]
} else
@@ -717,7 +761,7 @@ private[immutable] trait VectorPointer[T] {
// go to specific position
// requires structure is at pos oldIndex = xor ^ index,
// ensures structure is at pos index
- final def gotoPos(index: Int, xor: Int): Unit = {
+ private[immutable] final def gotoPos(index: Int, xor: Int): Unit = {
if (xor < (1 << 5)) { // level = 0 (could maybe removed)
} else
if (xor < (1 << 10)) { // level = 1
@@ -754,7 +798,7 @@ private[immutable] trait VectorPointer[T] {
// USED BY ITERATOR
// xor: oldIndex ^ index
- final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos
+ private[immutable] final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos
if (xor < (1 << 10)) { // level = 1
display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
} else
@@ -787,7 +831,7 @@ private[immutable] trait VectorPointer[T] {
// USED BY BUILDER
// xor: oldIndex ^ index
- final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos
+ private[immutable] final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos
if (xor < (1 << 10)) { // level = 1
if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth+=1}
display0 = new Array(32)
@@ -841,14 +885,15 @@ private[immutable] trait VectorPointer[T] {
// STUFF BELOW USED BY APPEND / UPDATE
- final def copyOf(a: Array[AnyRef]) = {
+ private[immutable] final def copyOf(a: Array[AnyRef]) = {
//println("copy")
+ if (a eq null) println ("NULL")
val b = new Array[AnyRef](a.length)
Platform.arraycopy(a, 0, b, 0, a.length)
b
}
- final def nullSlotAndCopy(array: Array[AnyRef], index: Int) = {
+ private[immutable] final def nullSlotAndCopy(array: Array[AnyRef], index: Int) = {
//println("copy and null")
val x = array(index)
array(index) = null
@@ -860,7 +905,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is at pos index
// ensures structure is clean and at pos index and writable at all levels except 0
- final def stabilize(index: Int) = (depth - 1) match {
+ private[immutable] final def stabilize(index: Int) = (depth - 1) match {
case 5 =>
display5 = copyOf(display5)
display4 = copyOf(display4)
@@ -901,16 +946,13 @@ private[immutable] trait VectorPointer[T] {
-
-
-
/// USED IN UPDATE AND APPEND BACK
// prepare for writing at an existing position
// requires structure is clean and at pos oldIndex = xor ^ newIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match {
+ private[immutable] final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match {
case 5 =>
display5 = copyOf(display5)
display4 = nullSlotAndCopy(display5, (newIndex >> 25) & 31).asInstanceOf[Array[AnyRef]]
@@ -943,7 +985,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is dirty and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
+ private[immutable] final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
if (xor < (1 << 5)) { // level = 0
display0 = copyOf(display0)
} else
@@ -1009,9 +1051,9 @@ private[immutable] trait VectorPointer[T] {
// USED IN DROP
- final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = {
+ private[immutable] final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = {
val elems = new Array[AnyRef](32)
- Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - Math.max(newLeft,oldLeft))
+ Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft,oldLeft))
elems
}
@@ -1023,7 +1065,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is clean and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos
+ private[immutable] final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos
if (xor < (1 << 5)) { // level = 0
//println("XXX clean with low xor")
} else
@@ -1103,7 +1145,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is dirty and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
+ private[immutable] final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
stabilize(oldIndex)
gotoFreshPosWritable0(oldIndex, newIndex, xor)
}
@@ -1113,7 +1155,7 @@ private[immutable] trait VectorPointer[T] {
// DEBUG STUFF
- def debug(): Unit = {
+ private[immutable] def debug(): Unit = {
return
/*
//println("DISPLAY 5: " + display5 + " ---> " + (if (display5 ne null) display5.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null"))
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index 5b46e5b332..8fe3a2bc5c 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -17,7 +16,18 @@ import mutable.{Builder, StringBuilder}
import scala.util.matching.Regex
/**
- * @since 2.8
+ * This class serves as a wrapper augmenting `String`s with all the operations
+ * found in indexed sequences.
+ *
+ * The difference between this class and `StringOps` is that calling transformer
+ * methods such as `filter` and `map` will yield an object of type `WrappedString`
+ * rather than a `String`.
+ *
+ * @param self a string contained within this wrapped string
+ *
+ * @since 2.8
+ * @define Coll WrappedString
+ * @define coll wrapped string
*/
class WrappedString(override val self: String) extends IndexedSeq[Char] with StringLike[WrappedString] with Proxy {
@@ -26,10 +36,14 @@ class WrappedString(override val self: String) extends IndexedSeq[Char] with Str
/** Creates a string builder buffer as builder for this class */
override protected[this] def newBuilder = WrappedString.newBuilder
+
+ override def slice(from: Int, until: Int): WrappedString =
+ new WrappedString(self.substring(from max 0, until min self.length))
}
-/**
- * @since 2.8
+/** A companion object for wrapped strings.
+ *
+ * @since 2.8
*/
object WrappedString {
def newBuilder: Builder[Char, WrappedString] = new StringBuilder() mapResult (new WrappedString(_))
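
To contrast with `StringOps` above: transformers on a `WrappedString` keep the wrapper type, and the new `slice` clamps its bounds against the string length. An illustrative sketch (not part of the patch):

    import scala.collection.immutable.WrappedString

    val w = new WrappedString("Hello, world")
    val letters: WrappedString = w.filter(_.isLetter)  // stays a WrappedString
    val tail = w.slice(7, 100)                         // bounds clamped, wraps "world"
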
diff --git a/src/library/scala/collection/interfaces/IterableMethods.scala b/src/library/scala/collection/interfaces/IterableMethods.scala
index fe44c0a380..6c22a298c3 100644
--- a/src/library/scala/collection/interfaces/IterableMethods.scala
+++ b/src/library/scala/collection/interfaces/IterableMethods.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/interfaces/MapMethods.scala b/src/library/scala/collection/interfaces/MapMethods.scala
index 74c4229d3d..fd6e7ad2a7 100644
--- a/src/library/scala/collection/interfaces/MapMethods.scala
+++ b/src/library/scala/collection/interfaces/MapMethods.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,15 +30,15 @@ with SubtractableMethods[A, This]
def apply(key: A): B
def contains(key: A): Boolean
def isDefinedAt(key: A): Boolean
- def keySet: Set[A]
+ def keys: Iterable[A]
def keysIterator: Iterator[A]
- def valuesIterable: Iterable[B]
+ def keySet: Set[A]
+ def values: Iterable[B]
def valuesIterator: Iterator[B]
def default(key: A): B
def filterKeys(p: A => Boolean): DefaultMap[A, B]
def mapValues[C](f: B => C): DefaultMap[A, C]
def updated [B1 >: B](key: A, value: B1): Map[A, B1]
def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1]
- def ++[B1 >: B](elems: Traversable[(A, B1)]): Map[A, B1]
- def ++[B1 >: B] (iter: Iterator[(A, B1)]): Map[A, B1]
+ def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1]
}
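
The two `++` overloads for `Traversable` and `Iterator` arguments collapse into one taking `TraversableOnce`. A sketch of the unified call sites (not part of the patch):

    val m = Map(1 -> "a", 2 -> "b")
    val viaCollection = m ++ List(3 -> "c")        // a Traversable is a TraversableOnce
    val viaIterator   = m ++ Iterator(4 -> "d")    // and so is an Iterator
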
diff --git a/src/library/scala/collection/interfaces/SeqMethods.scala b/src/library/scala/collection/interfaces/SeqMethods.scala
index 8256c5304c..401c5e6c55 100644
--- a/src/library/scala/collection/interfaces/SeqMethods.scala
+++ b/src/library/scala/collection/interfaces/SeqMethods.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,8 +25,8 @@ trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMeth
def contains(elem: Any): Boolean
def diff[B >: A, That](that: Seq[B]): This
def endsWith[B](that: Seq[B]): Boolean
- def indexOfSeq[B >: A](that: Seq[B]): Int
- def indexOfSeq[B >: A](that: Seq[B], fromIndex: Int): Int
+ def indexOfSlice[B >: A](that: Seq[B]): Int
+ def indexOfSlice[B >: A](that: Seq[B], fromIndex: Int): Int
def indexOf[B >: A](elem: B): Int
def indexOf[B >: A](elem: B, from: Int): Int
def indexWhere(p: A => Boolean): Int
@@ -34,8 +34,8 @@ trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMeth
def indices: Range
def intersect[B >: A, That](that: Seq[B]): This
def isDefinedAt(x: Int): Boolean
- def lastIndexOfSeq[B >: A](that: Seq[B]): Int
- def lastIndexOfSeq[B >: A](that: Seq[B], fromIndex: Int): Int
+ def lastIndexOfSlice[B >: A](that: Seq[B]): Int
+ def lastIndexOfSlice[B >: A](that: Seq[B], fromIndex: Int): Int
def lastIndexOf[B >: A](elem: B): Int
def lastIndexOf[B >: A](elem: B, end: Int): Int
def lastIndexWhere(p: A => Boolean): Int
@@ -44,11 +44,10 @@ trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMeth
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
def prefixLength(p: A => Boolean): Int
- def removeDuplicates: This
+ def distinct: This
def reverse: This
def reverseIterator: Iterator[A]
def segmentLength(p: A => Boolean, from: Int): Int
- def slice(from: Int): Seq[A]
def startsWith[B](that: Seq[B]): Boolean
def startsWith[B](that: Seq[B], offset: Int): Boolean
def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That
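
Several `Seq` methods are renamed here: `indexOfSeq`/`lastIndexOfSeq` become `indexOfSlice`/`lastIndexOfSlice`, and `removeDuplicates` becomes `distinct`. A sketch using the new names (not part of the patch):

    val xs = Seq(1, 2, 2, 3, 1, 2, 3)
    xs.distinct                     // keeps 1, 2, 3 -- formerly removeDuplicates
    xs.indexOfSlice(Seq(2, 3))      // 2 -- formerly indexOfSeq
    xs.lastIndexOfSlice(Seq(2, 3))  // 5
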
diff --git a/src/library/scala/collection/interfaces/SetMethods.scala b/src/library/scala/collection/interfaces/SetMethods.scala
index c7a7addec3..453143b790 100644
--- a/src/library/scala/collection/interfaces/SetMethods.scala
+++ b/src/library/scala/collection/interfaces/SetMethods.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,8 +21,7 @@ trait AddableMethods[A, +This <: Addable[A, This]] {
protected def repr: This
def +(elem: A): This
def + (elem1: A, elem2: A, elems: A*): This
- def ++ (elems: Traversable[A]): This
- def ++ (iter: Iterator[A]): This
+ def ++ (xs: TraversableOnce[A]): This
}
/**
@@ -32,8 +31,7 @@ trait SubtractableMethods[A, +This <: Subtractable[A, This]] {
protected def repr: This
def -(elem: A): This
def -(elem1: A, elem2: A, elems: A*): This
- def --(elems: Traversable[A]): This
- def --(iter: Iterator[A]): This
+ def --(xs: TraversableOnce[A]): This
}
/**
diff --git a/src/library/scala/collection/interfaces/TraversableMethods.scala b/src/library/scala/collection/interfaces/TraversableMethods.scala
index c42ec00f0e..d381b1dc24 100644
--- a/src/library/scala/collection/interfaces/TraversableMethods.scala
+++ b/src/library/scala/collection/interfaces/TraversableMethods.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,11 +24,12 @@ trait TraversableMethods[+A, +This <: TraversableLike[A, This] with Traversable[
// maps/iteration
def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def partialMap[B, That](pf: PartialFunction[Any, B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That // could be fold or new collection too - where to put it?
+ def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That
// new collections
- def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
def copyToArray[B >: A](xs: Array[B], start: Int): Unit
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
def copyToBuffer[B >: A](dest: Buffer[B]): Unit
@@ -40,7 +41,8 @@ trait TraversableMethods[+A, +This <: TraversableLike[A, This] with Traversable[
def toSeq: Seq[A]
def toSet[B >: A]: immutable.Set[B]
def toStream: Stream[A]
- def toIndexedSeq[B >: A]: mutable.IndexedSeq[B]
+ def toIndexedSeq[B >: A]: immutable.IndexedSeq[B]
+ def toBuffer[B >: A]: mutable.Buffer[B]
// strings
def addString(b: StringBuilder): StringBuilder
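
`partialMap` is renamed to `collect` (now typed on the element type `A`) and `scanLeft`/`scanRight` join the interface. A sketch of the three on a concrete collection (not part of the patch):

    val xs = List(1, 2, 3, 4)
    xs.collect { case n if n % 2 == 0 => n * 10 }  // List(20, 40) -- formerly partialMap
    xs.scanLeft(0)(_ + _)                          // List(0, 1, 3, 6, 10) -- running sums
    xs.scanRight(0)(_ + _)                         // List(10, 9, 7, 4, 0)
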
diff --git a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
new file mode 100644
index 0000000000..1e71215efd
--- /dev/null
+++ b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
@@ -0,0 +1,69 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package interfaces
+
+import mutable.Buffer
+
+trait TraversableOnceMethods[+A] {
+ self: TraversableOnce[A] =>
+
+ def foreach[U](f: A => U): Unit
+ protected[this] def reversed: TraversableOnce[A]
+
+ // tests
+ def isEmpty: Boolean
+ def nonEmpty: Boolean
+ def hasDefiniteSize: Boolean
+ def isTraversableAgain: Boolean
+
+ // applying a predicate
+ def forall(p: A => Boolean): Boolean
+ def exists(p: A => Boolean): Boolean
+ def find(p: A => Boolean): Option[A]
+ def count(p: A => Boolean): Int
+
+ // folds
+ def /:[B](z: B)(op: (B, A) => B): B
+ def :\[B](z: B)(op: (A, B) => B): B
+ def foldLeft[B](z: B)(op: (B, A) => B): B
+ def foldRight[B](z: B)(op: (A, B) => B): B
+ def reduceLeft[B >: A](op: (B, A) => B): B
+ def reduceRight[B >: A](op: (A, B) => B): B
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B]
+ def reduceRightOption[B >: A](op: (A, B) => B): Option[B]
+
+ def sum[B >: A](implicit num: Numeric[B]): B
+ def product[B >: A](implicit num: Numeric[B]): B
+ def min[B >: A](implicit cmp: Ordering[B]): A
+ def max[B >: A](implicit cmp: Ordering[B]): A
+
+ // copies and conversions
+ def copyToBuffer[B >: A](dest: Buffer[B]): Unit
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
+ def copyToArray[B >: A](xs: Array[B], start: Int): Unit
+ def copyToArray[B >: A](xs: Array[B]): Unit
+
+ def toArray[B >: A : ClassManifest]: Array[B]
+ def toIterable: Iterable[A]
+ def toIterator: Iterator[A]
+ def toList: List[A]
+ def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U]
+ def toSet[B >: A]: immutable.Set[B]
+ def toStream: Stream[A]
+ def toTraversable: Traversable[A]
+
+ def mkString(start: String, sep: String, end: String): String
+ def mkString(sep: String): String
+ def mkString: String
+
+ def addString(buf: StringBuilder, start: String, sep: String, end: String): StringBuilder
+ def addString(buf: StringBuilder, sep: String): StringBuilder
+ def addString(buf: StringBuilder): StringBuilder
+}
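
The new trait spells out what a single-pass `TraversableOnce` supports. As a sketch (not part of the patch), an `Iterator` provides all of the folds and string conversions listed above, but only for one traversal:

    val it = Iterator(1, 2, 3, 4)
    val total = it.foldLeft(0)(_ + _)               // 10
    println(it.hasNext)                             // false -- the single pass is consumed
    println(List(1, 2, 3).mkString("[", ", ", "]")) // [1, 2, 3]
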
diff --git a/src/library/scala/collection/mutable/AddingBuilder.scala b/src/library/scala/collection/mutable/AddingBuilder.scala
index 2496329c3c..5dbf4813bf 100644
--- a/src/library/scala/collection/mutable/AddingBuilder.scala
+++ b/src/library/scala/collection/mutable/AddingBuilder.scala
@@ -1,29 +1,36 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import generic._
-/** The canonical builder for collections that are addable, i.e. that support an efficient + method
+/** The canonical builder for collections that are addable, i.e. that support an efficient `+` method
* which adds an element to the collection.
- * Collections are built from their empty element using this + method.
- * @param empty The empty element of the collection.
*
+ * Collections are built from their empty element using this `+` method.
+ * @param empty the empty element of the collection.
+ * @tparam Elem the type of elements that get added to the builder.
+ * @tparam To the type of the built collection.
+ *
+ * @note "efficient `+`" is not idle talk. Do not use this on mutable collections or any others
+ * for which `+` may perform an unshared copy! See GrowingBuilder comments for more.
+ *
+ * @author Martin Odersky
+ * @version 2.8
* @since 2.8
*/
-class AddingBuilder[A, Coll <: Addable[A, Coll] with scala.collection.Iterable[A] with scala.collection.IterableLike[A, Coll]](empty: Coll)
-extends Builder[A, Coll] {
- protected var elems: Coll = empty
- def +=(x: A): this.type = { elems = elems + x; this }
+class AddingBuilder[Elem, To <: Addable[Elem, To] with collection.Iterable[Elem] with collection.IterableLike[Elem, To]](empty: To)
+extends Builder[Elem, To] {
+ protected var elems: To = empty
+ def +=(x: Elem): this.type = { elems = elems + x; this }
def clear() { elems = empty }
- def result: Coll = elems
+ def result: To = elems
}
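
`AddingBuilder` builds a collection from its empty value by repeated `+`, which is why the note above warns against collections whose `+` copies. A usage sketch with the immutable `TreeSet`, mirroring how its companion's `newBuilder` (earlier in this diff) uses it:

    import scala.collection.immutable.TreeSet
    import scala.collection.mutable.AddingBuilder

    // Illustrative sketch (not part of the patch).
    val b = new AddingBuilder[Int, TreeSet[Int]](TreeSet.empty[Int])
    b += 3
    b += 1
    b += 2
    println(b.result)   // TreeSet(1, 2, 3)
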
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 80c2719df2..e9955b5563 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,7 +13,7 @@ package mutable
import generic._
-/** An implementation of the <code>Buffer</code> class using an array to
+/** An implementation of the `Buffer` class using an array to
* represent the assembled sequence internally. Append, update and random
* access take constant time (amortized time). Prepends and removes are
* linear in the buffer size.
@@ -23,13 +22,29 @@ import generic._
* @author Martin Odersky
* @version 2.8
* @since 1
+ *
+ * @tparam A the type of this arraybuffer's elements.
+ *
+ * @define Coll ArrayBuffer
+ * @define coll arraybuffer
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]`
+ * is defined in object `ArrayBuffer`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `ArrayBuffer`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @SerialVersionUID(1529165946227428979L)
class ArrayBuffer[A](override protected val initialSize: Int)
extends Buffer[A]
with GenericTraversableTemplate[A, ArrayBuffer]
with BufferLike[A, ArrayBuffer[A]]
- with IndexedSeqLike[A, ArrayBuffer[A]]
+ with IndexedSeqOptimized[A, ArrayBuffer[A]]
with Builder[A, ArrayBuffer[A]]
with ResizableArray[A] {
@@ -43,16 +58,17 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def sizeHint(len: Int) {
if (len > size && len >= 1) {
- val newarray = new Array[AnyRef](len min 1)
- Array.copy(array, 0, newarray, 0, size0)
+ val newarray = new Array[AnyRef](len)
+ compat.Platform.arraycopy(array, 0, newarray, 0, size0)
array = newarray
}
}
/** Appends a single element to this buffer and returns
- * the identity of the buffer. It takes constant time.
+ * the identity of the buffer. It takes constant amortized time.
*
* @param elem the element to append.
+ * @return the updated buffer.
*/
def +=(elem: A): this.type = {
ensureSize(size0 + 1)
@@ -61,14 +77,13 @@ class ArrayBuffer[A](override protected val initialSize: Int)
this
}
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
- * buffer is returned.
+ /** Appends a number of elements provided by a traversable object.
+ * The identity of the buffer is returned.
*
- * @param iter the iterfable object.
+ * @param xs the traversable object.
* @return the updated buffer.
*/
- override def ++=(iter: Traversable[A]): this.type = iter match {
+ override def ++=(xs: TraversableOnce[A]): this.type = xs match {
case v: IndexedSeq[_] =>
val n = v.length
ensureSize(size0 + n)
@@ -76,10 +91,10 @@ class ArrayBuffer[A](override protected val initialSize: Int)
size0 += n
this
case _ =>
- super.++=(iter)
+ super.++=(xs)
}
- /** Prepends a single element to this buffer and return
+ /** Prepends a single element to this buffer and returns
* the identity of the buffer. It takes time linear in
* the buffer size.
*
@@ -94,26 +109,24 @@ class ArrayBuffer[A](override protected val initialSize: Int)
this
}
- /** Prepends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
- * buffer is returned.
+ /** Prepends a number of elements provided by a traversable object.
+ * The identity of the buffer is returned.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
* @return the updated buffer.
*/
- override def ++=:(iter: Traversable[A]): this.type = { insertAll(0, iter); this }
+ override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this }
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a
- * one. Instead, it will insert a new element at index <code>n</code>.
+ /** Inserts new elements at the index `n`. Opposed to method
+ * `update`, this method will not replace an element with a
+ * one. Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
- * @param iter the iterable object providing all elements to insert.
- * @throws Predef.IndexOutOfBoundsException if <code>n</code> is out of bounds.
+ * @param seq the traversable object providing all elements to insert.
+ * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
- if ((n < 0) || (n > size0))
- throw new IndexOutOfBoundsException(n.toString)
+ if (n < 0 || n > size0) throw new IndexOutOfBoundsException(n.toString)
val xs = seq.toList
val len = xs.length
ensureSize(size0 + len)
@@ -125,21 +138,21 @@ class ArrayBuffer[A](override protected val initialSize: Int)
/** Removes the element on a given index position. It takes time linear in
* the buffer size.
*
- * @param n the index which refers to the first element to delete.
- * @param count the number of elemenets to delete
- * @throws Predef.IndexOutOfBoundsException if <code>n</code> is out of bounds.
+ * @param n the index which refers to the first element to delete.
+ * @param count the number of elements to delete
+ * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
override def remove(n: Int, count: Int) {
- if ((n < 0) || (n >= size0) && count > 0)
- throw new IndexOutOfBoundsException(n.toString)
+ require(count >= 0, "removing negative number of elements")
+ if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException(n.toString)
copy(n + count, n, size0 - (n + count))
- size0 -= count
+ reduceToSize(size0 - count)
}
- /** Removes the element on a given index position
+ /** Removes the element at a given index position.
*
* @param n the index which refers to the element to delete.
- * @return The element that was formerly at position `n`
+ * @return the element that was formerly at position `n`.
*/
def remove(n: Int): A = {
val result = apply(n)
@@ -149,7 +162,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
/** Return a clone of this buffer.
*
- * @return an <code>ArrayBuffer</code> with the same elements.
+ * @return an `ArrayBuffer` with the same elements.
*/
override def clone(): ArrayBuffer[A] = new ArrayBuffer[A] ++= this
@@ -160,13 +173,14 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def stringPrefix: String = "ArrayBuffer"
}
-/** Factory object for <code>ArrayBuffer</code> class.
+/** Factory object for the `ArrayBuffer` class.
*
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+ * $factoryInfo
+ * @define coll array buffer
+ * @define Coll ArrayBuffer
*/
object ArrayBuffer extends SeqFactory[ArrayBuffer] {
+ /** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayBuffer[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, ArrayBuffer[A]] = new ArrayBuffer[A]
}
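
A short sketch of the `ArrayBuffer` behaviour documented in the hunks above (2.8 API assumed): `sizeHint` pre-allocates the backing array, `+=` appends in amortized constant time, and `++=` takes the single array-copy fast path when its argument is an `IndexedSeq`.

{{{
import scala.collection.mutable.ArrayBuffer

val buf = new ArrayBuffer[Int]
buf.sizeHint(100)               // grow the backing array once, up front
buf += 1                        // amortized O(1) append
buf ++= Vector(2, 3, 4)         // IndexedSeq argument: copied in one step
buf.insertAll(1, List(10, 11))  // linear-time insert at index 1
buf.remove(0, 2)                // removes the two elements at indices 0 and 1
}}}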
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 05ad865862..ec1351f671 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,20 +14,33 @@ package mutable
import generic._
import scala.reflect.ClassManifest
-/** A builder class for arrays
+/** A builder class for arrays.
*
- * @since 2.8
+ * @since 2.8
+ *
+ * @tparam T the type of the elements for the builder.
*/
+@serializable
abstract class ArrayBuilder[T] extends Builder[T, Array[T]]
-/**
- * @since 2.8
+/** A companion object for array builders.
+ *
+ * @since 2.8
*/
object ArrayBuilder {
+ /** Creates a new arraybuilder of type `T`.
+ *
+ * @tparam T type of the elements for the array builder, with a `ClassManifest` context bound.
+ * @return a new empty array builder.
+ */
def make[T: ClassManifest](): ArrayBuilder[T] =
implicitly[ClassManifest[T]].newArrayBuilder()
+ /** A class for array builders for arrays of reference types.
+ *
+ * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassManifest` context bound.
+ */
class ofRef[T <: AnyRef : ClassManifest] extends ArrayBuilder[T] {
private var elems: Array[T] = _
@@ -51,9 +63,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -66,7 +77,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[T]): this.type = (xs: AnyRef) match {
+ override def ++=(xs: TraversableOnce[T]): this.type = (xs: AnyRef) match {
case xs: WrappedArray.ofRef[_] =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -84,8 +95,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofRef[_] => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofRef"
}
+ /** A class for array builders for arrays of `byte`s. */
class ofByte extends ArrayBuilder[Byte] {
private var elems: Array[Byte] = _
@@ -108,9 +127,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -123,7 +141,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Byte]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Byte]): this.type = xs match {
case xs: WrappedArray.ofByte =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -141,8 +159,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofByte => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofByte"
}
+ /** A class for array builders for arrays of `short`s. */
class ofShort extends ArrayBuilder[Short] {
private var elems: Array[Short] = _
@@ -165,9 +191,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -180,7 +205,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Short]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Short]): this.type = xs match {
case xs: WrappedArray.ofShort =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -198,8 +223,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofShort => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofShort"
}
+ /** A class for array builders for arrays of `char`s. */
class ofChar extends ArrayBuilder[Char] {
private var elems: Array[Char] = _
@@ -222,9 +255,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -237,7 +269,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Char]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Char]): this.type = xs match {
case xs: WrappedArray.ofChar =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -255,8 +287,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofChar => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofChar"
}
+ /** A class for array builders for arrays of `int`s. */
class ofInt extends ArrayBuilder[Int] {
private var elems: Array[Int] = _
@@ -279,9 +319,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -294,7 +333,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Int]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Int]): this.type = xs match {
case xs: WrappedArray.ofInt =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -312,8 +351,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofInt => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofInt"
}
+ /** A class for array builders for arrays of `long`s. */
class ofLong extends ArrayBuilder[Long] {
private var elems: Array[Long] = _
@@ -336,9 +383,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -351,7 +397,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Long]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Long]): this.type = xs match {
case xs: WrappedArray.ofLong =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -369,8 +415,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofLong => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofLong"
}
+ /** A class for array builders for arrays of `float`s. */
class ofFloat extends ArrayBuilder[Float] {
private var elems: Array[Float] = _
@@ -393,9 +447,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -408,7 +461,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Float]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Float]): this.type = xs match {
case xs: WrappedArray.ofFloat =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -426,8 +479,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofFloat => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofFloat"
}
+ /** A class for array builders for arrays of `double`s. */
class ofDouble extends ArrayBuilder[Double] {
private var elems: Array[Double] = _
@@ -450,9 +511,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -465,7 +525,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Double]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Double]): this.type = xs match {
case xs: WrappedArray.ofDouble =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -483,8 +543,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofDouble => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofDouble"
}
+ /** A class for array builders for arrays of `boolean`s. */
class ofBoolean extends ArrayBuilder[Boolean] {
private var elems: Array[Boolean] = _
@@ -507,9 +575,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -522,7 +589,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Boolean]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Boolean]): this.type = xs match {
case xs: WrappedArray.ofBoolean =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -540,8 +607,16 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofBoolean => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofBoolean"
}
+ /** A class for array builders for arrays of `Unit` type. */
class ofUnit extends ArrayBuilder[Unit] {
private var elems: Array[Unit] = _
@@ -564,9 +639,8 @@ object ArrayBuilder {
}
private def ensureSize(size: Int) {
- if (capacity == 0) resize(16)
- if (capacity < size) {
- var newsize = capacity * 2
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
while (newsize < size) newsize *= 2
resize(newsize)
}
@@ -579,7 +653,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Unit]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Unit]): this.type = xs match {
case xs: WrappedArray.ofUnit =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -597,5 +671,12 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofUnit => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofUnit"
}
}
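
The growth policy introduced above (start at 16, then double until the requested size fits) is shared by all the specialised builders. A brief sketch of the public entry point, assuming the API in this patch:

{{{
import scala.collection.mutable.ArrayBuilder

// `make` picks the primitive specialisation (here ofInt) via the ClassManifest bound.
val b = ArrayBuilder.make[Int]()
b += 1
b += 2
b ++= Array(3, 4, 5)           // wrapped-array argument: bulk Array.copy fast path
val arr: Array[Int] = b.result // Array(1, 2, 3, 4, 5)
}}}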
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index b60c757070..ceb1377620 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -1,33 +1,42 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import generic._
-/** A subtrait of collection.IndexedSeq which represents sequences
- * that can be mutated.
+/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out most
+ * operations on arrays and wrapped arrays.
*
- * @since 2.8
+ * @tparam A type of the elements contained in the array like object.
+ * @tparam Repr the type of the actual collection containing the elements.
+ *
+ * @define Coll ArrayLike
+ * @version 2.8
+ * @since 2.8
*/
-trait ArrayLike[A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
+trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
- /** Creates a possible nested IndexedSeq which consists of all the elements
- * of this array. If the elements are arrays themselves, the `deep' transformation
- * is applied recursively to them. The stringPrefix of the IndexedSeq is
- * "Array", hence the IndexedSeq prints like an array with all its
+ /** Creates a possible nested `IndexedSeq` which consists of all the elements
+ * of this array. If the elements are arrays themselves, the `deep` transformation
+ * is applied recursively to them. The `stringPrefix` of the `IndexedSeq` is
+ * "Array", hence the `IndexedSeq` prints like an array with all its
* elements shown, and the same recursively for any subarrays.
*
- * Example: Array(Array(1, 2), Array(3, 4)).deep.toString
- * prints: Array(Array(1, 2), Array(3, 4))
+ * Example:
+ * {{{
+ * Array(Array(1, 2), Array(3, 4)).deep.toString
+ * }}}
+ * prints: `Array(Array(1, 2), Array(3, 4))`
+ *
+ * @return A possibly nested indexed sequence consisting of all the elements of the array.
*/
def deep: scala.collection.IndexedSeq[Any] = new scala.collection.IndexedSeq[Any] {
def length = self.length
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 52d069ea2d..00e8697b53 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -1,21 +1,36 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
+import compat.Platform.arraycopy
import scala.reflect.ClassManifest
-/**
- * @since 2.8
+/** This class serves as a wrapper for `Array`s with all the operations found in
+ * indexed sequences. Where needed, instances of arrays are implicitly converted
+ * into this class.
+ *
+ * The difference between this class and `WrappedArray` is that calling transformer
+ * methods such as `filter` and `map` will yield an array, whereas a `WrappedArray`
+ * will remain a `WrappedArray`.
+ *
+ * @since 2.8
+ *
+ * @tparam T type of the elements contained in this array.
+ *
+ * @define Coll ArrayOps
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
@@ -24,8 +39,25 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
ClassManifest.fromClass(
repr.getClass.getComponentType.getComponentType.asInstanceOf[Predef.Class[U]]))
+ override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
+ var l = len
+ if (repr.length < l) l = repr.length
+ if (xs.length - start < l) l = xs.length - start max 0
+ Array.copy(repr, 0, xs, start, l)
+ }
+
+ override def toArray[U >: T : ClassManifest]: Array[U] =
+ if (implicitly[ClassManifest[U]].erasure eq repr.getClass.getComponentType)
+ repr.asInstanceOf[Array[U]]
+ else
+ super.toArray[U]
+
/** Flattens a two-dimensional array by concatenating all its rows
- * into a single array
+ * into a single array.
+ *
+ * @tparam U Type of row elements.
+ * @param asArray A function that converts elements of this array to rows - arrays of type `U`.
+ * @return An array obtained by concatenating rows of this array.
*/
def flatten[U](implicit asArray: T => /*<:<!!!*/ Array[U]): Array[U] = {
val b = rowBuilder[U]
@@ -34,7 +66,11 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
b.result
}
- /** Transposes a two dimensional array
+ /** Transposes a two dimensional array.
+ *
+ * @tparam U Type of row elements.
+ * @param asArray A function that converts elements of this array to rows - arrays of type `U`.
+ * @return An array obtained by replacing the elements of this array with the rows they represent.
*/
def transpose[U](implicit asArray: T => Array[U]): Array[Array[U]] = {
val bs = asArray(head) map (_ => rowBuilder[U])
@@ -54,10 +90,13 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
}
/**
+ * A companion object for `ArrayOps`.
+ *
* @since 2.8
*/
object ArrayOps {
+ /** A class of `ArrayOps` for arrays containing reference types. */
class ofRef[T <: AnyRef](override val repr: Array[T]) extends ArrayOps[T] with ArrayLike[T, Array[T]] {
override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr)
@@ -70,6 +109,7 @@ object ArrayOps {
def update(index: Int, elem: T) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `byte`s. */
class ofByte(override val repr: Array[Byte]) extends ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
override protected[this] def thisCollection: WrappedArray[Byte] = new WrappedArray.ofByte(repr)
@@ -81,6 +121,7 @@ object ArrayOps {
def update(index: Int, elem: Byte) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `short`s. */
class ofShort(override val repr: Array[Short]) extends ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
override protected[this] def thisCollection: WrappedArray[Short] = new WrappedArray.ofShort(repr)
@@ -92,6 +133,7 @@ object ArrayOps {
def update(index: Int, elem: Short) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `char`s. */
class ofChar(override val repr: Array[Char]) extends ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
override protected[this] def thisCollection: WrappedArray[Char] = new WrappedArray.ofChar(repr)
@@ -103,6 +145,7 @@ object ArrayOps {
def update(index: Int, elem: Char) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `int`s. */
class ofInt(override val repr: Array[Int]) extends ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
override protected[this] def thisCollection: WrappedArray[Int] = new WrappedArray.ofInt(repr)
@@ -114,6 +157,7 @@ object ArrayOps {
def update(index: Int, elem: Int) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `long`s. */
class ofLong(override val repr: Array[Long]) extends ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
override protected[this] def thisCollection: WrappedArray[Long] = new WrappedArray.ofLong(repr)
@@ -125,6 +169,7 @@ object ArrayOps {
def update(index: Int, elem: Long) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `float`s. */
class ofFloat(override val repr: Array[Float]) extends ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
override protected[this] def thisCollection: WrappedArray[Float] = new WrappedArray.ofFloat(repr)
@@ -136,6 +181,7 @@ object ArrayOps {
def update(index: Int, elem: Float) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `double`s. */
class ofDouble(override val repr: Array[Double]) extends ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
override protected[this] def thisCollection: WrappedArray[Double] = new WrappedArray.ofDouble(repr)
@@ -147,6 +193,7 @@ object ArrayOps {
def update(index: Int, elem: Double) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays containing `boolean`s. */
class ofBoolean(override val repr: Array[Boolean]) extends ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
override protected[this] def thisCollection: WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
@@ -158,6 +205,7 @@ object ArrayOps {
def update(index: Int, elem: Boolean) { repr(index) = elem }
}
+ /** A class of `ArrayOps` for arrays of `Unit` types. */
class ofUnit(override val repr: Array[Unit]) extends ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
override protected[this] def thisCollection: WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
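
A sketch of the `ArrayOps`/`WrappedArray` distinction and the bulk operations described above, under the 2.8 API shown here: transformer methods called through `ArrayOps` give back plain arrays, `toArray` returns the receiver itself when the component types line up, and `flatten`/`transpose` act on two-dimensional arrays.

{{{
val doubled: Array[Int] = Array(1, 2, 3) map (_ * 2)   // stays an Array, not a WrappedArray
val same:    Array[Int] = doubled.toArray                // same component type: no copy made

val grid: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))
val flat: Array[Int]        = grid.flatten               // Array(1, 2, 3, 4)
val cols: Array[Array[Int]] = grid.transpose             // Array(Array(1, 3), Array(2, 4))
}}}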
diff --git a/src/library/scala/collection/mutable/GenericArray.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 84341bacd4..1a287ed4cc 100644
--- a/src/library/scala/collection/mutable/GenericArray.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,20 +13,37 @@ package mutable
import generic._
-/** This class is used internally to implement data structures that
- * are based on resizable arrays.
+/** A class for polymorphic arrays of elements that's represented
+ * internally by an array of objects. This means that elements of
+ * primitive types are boxed.
*
- * @author Matthias Zenger, Burak Emir
* @author Martin Odersky
* @version 2.8
* @since 2.8
+ *
+ * @tparam A type of the elements contained in this array sequence.
+ * @param length the length of the underlying array.
+ *
+ * @define Coll ArraySeq
+ * @define coll array sequence
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `ArraySeq[B]` because an implicit of type `CanBuildFrom[ArraySeq, B, ArraySeq[B]]`
+ * is defined in object `ArraySeq`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `ArraySeq`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-class GenericArray[A](override val length: Int)
+class ArraySeq[A](override val length: Int)
extends IndexedSeq[A]
- with GenericTraversableTemplate[A, GenericArray]
- with IndexedSeqLike[A, GenericArray[A]] {
+ with GenericTraversableTemplate[A, ArraySeq]
+ with IndexedSeqOptimized[A, ArraySeq[A]] {
- override def companion: GenericCompanion[GenericArray] = GenericArray
+ override def companion: GenericCompanion[ArraySeq] = ArraySeq
val array: Array[AnyRef] = new Array[AnyRef](length)
@@ -49,7 +65,7 @@ extends IndexedSeq[A]
}
}
- /** Fills the given array <code>xs</code> with at most `len` elements of
+ /** Fills the given array `xs` with at most `len` elements of
* this traversable starting at position `start`.
* Copying will stop once either the end of the current traversable is reached or
* `len` elements have been copied or the end of the array is reached.
@@ -64,11 +80,16 @@ extends IndexedSeq[A]
}
}
-object GenericArray extends SeqFactory[GenericArray] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenericArray[A]] = new GenericCanBuildFrom[A]
- def newBuilder[A]: Builder[A, GenericArray[A]] =
+/** $factoryInfo
+ * @define coll array sequence
+ * @define Coll ArraySeq
+ */
+object ArraySeq extends SeqFactory[ArraySeq] {
+ /** $genericCanBuildFromInfo */
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArraySeq[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, ArraySeq[A]] =
new ArrayBuffer[A] mapResult { buf =>
- val result = new GenericArray[A](buf.length)
+ val result = new ArraySeq[A](buf.length)
buf.copyToArray(result.array.asInstanceOf[Array[Any]], 0)
result
}
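
The renaming of `GenericArray` to `ArraySeq` above keeps the same boxed representation; a small sketch of what that provides, assuming the 2.8 API in this patch:

{{{
import scala.collection.mutable.ArraySeq

val xs = ArraySeq(1, 2, 3)   // elements are stored boxed in an Array[AnyRef]
xs(0) = 42                    // constant-time update
val ys = xs map (_ + 1)       // result is again an ArraySeq, here ArraySeq(43, 3, 4)
}}}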
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 0059d5ff6b..fdabead34a 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -28,14 +27,22 @@ private object Utils {
}
}
-/**
- * Simple stack class backed by an array. Should be significantly faster
- * than the standard mutable stack.
+/** Simple stack class backed by an array. Should be significantly faster
+ * than the standard mutable stack.
*
- * @author David MacIver
- * @since 2.7
+ * @author David MacIver
+ * @since 2.7
+ *
+ * @tparam T type of the elements contained in this array stack.
+ *
+ * @define Coll ArrayStack
+ * @define coll array stack
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-@cloneable
+@cloneable @serializable @SerialVersionUID(8565219180626620510L)
class ArrayStack[T] private(private var table : Array[AnyRef],
private var index : Int) extends scala.collection.Seq[T] with Cloneable[ArrayStack[T]] {
def this() = this(new Array[AnyRef](1), 0)
@@ -47,10 +54,9 @@ class ArrayStack[T] private(private var table : Array[AnyRef],
/** The number of elements in the stack */
def length = index
- /**
- * Push an element onto the stack.
+ /** Push an element onto the stack.
*
- * @param x The element to push
+ * @param x The element to push
*/
def push(x: T) {
if (index == table.length) table = Utils.growArray(table)
@@ -58,8 +64,9 @@ class ArrayStack[T] private(private var table : Array[AnyRef],
index += 1
}
- /**
- * Pop the top element off the stack.
+ /** Pop the top element off the stack.
+ *
+ * @return the element on top of the stack
*/
def pop: T = {
if (index == 0) error("Stack empty")
@@ -73,76 +80,73 @@ class ArrayStack[T] private(private var table : Array[AnyRef],
@deprecated("use top instead")
def peek = top
- /**
- * View the top element of the stack.
+ /** View the top element of the stack.
+ *
+ * Does not remove the element on the top. If the stack is empty,
+ * an exception is thrown.
+ *
+ * @return the element on top of the stack.
*/
def top: T = table(index - 1).asInstanceOf[T]
- /**
- * Duplicate the top element of the stack.
+ /** Duplicate the top element of the stack.
+ *
+ * After calling this method, the stack will have an additional element at
+ * the top equal to the element that was previously at the top.
+ * If the stack is empty, an exception is thrown.
*/
def dup = push(top)
- /**
- * Empties the stack.
- */
+ /** Empties the stack. */
def clear {
index = 0
table = new Array(1)
}
- /**
- * Empties the stack, passing all elements on it in FIFO order to the
- * provided function.
+ /** Empties the stack, passing all elements on it in LIFO order to the
+ * provided function.
*
- * @param f The function to drain to.
+ * @param f The function to drain to.
*/
def drain(f: T => Unit) = while (!isEmpty) f(pop)
- /**
- * Pushes all the provided elements onto the stack.
+ /** Pushes all the provided elements in the traversable object onto the stack.
*
- * @param x The source of elements to push
+ * @param x The source of elements to push.
+ * @return A reference to this stack.
*/
- def ++=(x: scala.collection.Iterable[T]): this.type = { x.foreach(this +=(_)); this }
+ def ++=(xs: TraversableOnce[T]): this.type = { xs foreach += ; this }
-
- /**
- * Pushes all the provided elements onto the stack.
+ /** Does the same as `push`, but returns the updated stack.
*
- * @param x The source of elements to push
- */
- def ++=(x: Iterator[T]): this.type = { x.foreach(this +=(_)); this }
-
- /**
- * Alias for push.
- *
- * @param x The element to push
+ * @param x The element to push.
+ * @return A reference to this stack.
*/
def +=(x: T): this.type = { push(x); this }
- /**
- * Pop the top two elements off the stack, apply f to them and push the result
- * back on to the stack.
+ /** Pop the top two elements off the stack, apply `f` to them and push the result
+ * back on to the stack.
+ *
+ * This function will throw an exception if the stack contains fewer than 2 elements.
*
- * @param f The combining function
+ * @param f The function to apply to the top two elements.
*/
- def combine(f: (T, T) => T) = push(f(pop, pop));
+ def combine(f: (T, T) => T): Unit = push(f(pop, pop))
- /**
- * Repeatedly combine the top elements of the stack until the stack contains only
- * one element.
+ /** Repeatedly combine the top elements of the stack until the stack contains only
+ * one element.
+ *
+ * @param f The function to apply repeatedly to topmost elements.
*/
- def reduceWith(f: (T, T) => T) = while(size > 1) combine(f)
+ def reduceWith(f: (T, T) => T): Unit = while(size > 1) combine(f)
override def size = index
- /**
- * Evaluates the expression, preserving the contents of the stack so that
- * any changes the evaluation makes to the stack contents will be undone after
- * it completes.
+ /** Evaluates the expression, preserving the contents of the stack so that
+ * any changes the evaluation makes to the stack contents will be undone after
+ * it completes.
*
- * @param action The action to run.
+ * @param action The action to run.
*/
def preserving[T](action: => T) = {
val oldIndex = index
@@ -158,8 +162,8 @@ class ArrayStack[T] private(private var table : Array[AnyRef],
override def isEmpty: Boolean = index == 0
- /**
- * Iterates over the stack in LIFO order.
+ /** Creates an iterator over the stack in LIFO order.
+ * @return an iterator over the elements of the stack.
*/
def iterator: Iterator[T] = new Iterator[T] {
var currentIndex = index
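
The stack operations documented above compose as in this short sketch (2.8 API assumed):

{{{
import scala.collection.mutable.ArrayStack

val st = new ArrayStack[Int]
st ++= List(1, 2, 3)     // pushes 1, 2, 3 in order, so 3 ends up on top
st.dup                    // stack is now 3, 3, 2, 1 (top first)
st.combine(_ + _)         // pop two, push their sum: 6, 2, 1
st.reduceWith(_ + _)      // keep combining until one element is left: 9
val total = st.pop        // 9; the stack is empty again
}}}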
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index cdec8c90b4..41977e51a9 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -17,7 +16,21 @@ import BitSetLike.{LogWL, updateArray}
/** A class for mutable bitsets.
*
- * @since 1
+ * $bitsetinfo
+ *
+ * @define Coll BitSet
+ * @define coll bitset
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `BitSet[B]` because an implicit of type `CanBuildFrom[BitSet, B, BitSet]`
+ * is defined in object `BitSet`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `BitSet`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @SerialVersionUID(8483111450368547763L)
class BitSet(protected var elems: Array[Long]) extends Set[Int]
@@ -27,6 +40,10 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
override def empty = BitSet.empty
+ /** Creates the bitset of a certain initial size.
+ *
+ * @param initSize initial size of the bitset.
+ */
def this(initSize: Int) = this(new Array[Long]((initSize + 63) >> 6 max 1))
def this() = this(0)
@@ -48,9 +65,6 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
protected def fromArray(words: Array[Long]): BitSet = new BitSet(words)
- /** Adds element to bitset,
- * @return element was already present.
- */
override def add(elem: Int): Boolean = {
require(elem >= 0)
if (contains(elem)) false
@@ -61,9 +75,6 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
}
}
- /** Removes element from bitset.
- * @return element was already present.
- */
override def remove(elem: Int): Boolean = {
require(elem >= 0)
if (contains(elem)) {
@@ -79,6 +90,14 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
override def clear() {
elems = new Array[Long](elems.length)
}
+
+ /** Wraps this bitset as an immutable bitset backed by the array of bits
+ * of this bitset.
+ *
+ * @note Subsequent changes in this bitset will be reflected in the returned immutable bitset.
+ *
+ * @return an immutable set containing all the elements of this set.
+ */
def toImmutable = immutable.BitSet.fromArray(elems)
override def clone(): BitSet = {
@@ -88,8 +107,16 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
}
}
-/** A factory object for mutable bitsets */
+/** $factoryInfo
+ * @define coll bitset
+ * @define Coll BitSet
+ */
object BitSet extends BitSetFactory[BitSet] {
def empty: BitSet = new BitSet
+
+ /** A growing builder for mutable Sets. */
+ def newBuilder: Builder[Int, BitSet] = new GrowingBuilder[Int, BitSet](empty)
+
+ /** $bitsetCanBuildFrom */
implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
}
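
A short sketch of the mutable `BitSet` surface touched above, including the `toImmutable` wrapper whose backing words remain shared with this set (2.8 API assumed):

{{{
import scala.collection.mutable

val bits = new mutable.BitSet(64)  // room for 64 bits in a single Long word
bits += 3
bits += 70                          // the word array grows as needed
bits -= 3
val frozen = bits.toImmutable       // immutable view over the same words
bits += 5                           // note: this change is visible through `frozen` too
}}}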
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala
index 8f9bebce7c..48d877233e 100644
--- a/src/library/scala/collection/mutable/Buffer.scala
+++ b/src/library/scala/collection/mutable/Buffer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -18,12 +17,17 @@ import generic._
* appending, prepending, or inserting new elements. It is also
* possible to access and modify elements in a random access fashion
* via the index of the element in the current sequence.
- *
+ *
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
- */
+ *
+ * @tparam A type of the elements contained in this buffer.
+ *
+ * @define Coll Buffer
+ * @define coll buffer
+ */
@cloneable
trait Buffer[A] extends Seq[A]
with GenericTraversableTemplate[A, Buffer]
@@ -31,7 +35,9 @@ trait Buffer[A] extends Seq[A]
override def companion: GenericCompanion[Buffer] = Buffer
}
-/** Factory object for <code>Buffer</code> trait.
+/** $factoryInfo
+ * @define coll buffer
+ * @define Coll Buffer
*/
object Buffer extends SeqFactory[Buffer] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Buffer[A]] = new GenericCanBuildFrom[A]
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3a92f4f7da..80a8824a3b 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,96 +13,110 @@ package mutable
import generic._
import script._
+import annotation.migration
-/** Buffers are used to create sequences of elements incrementally by
+/** A template trait for buffers of type `Buffer[A]`.
+ *
+ * Buffers are used to create sequences of elements incrementally by
* appending, prepending, or inserting new elements. It is also
* possible to access and modify elements in a random access fashion
* via the index of the element in the current sequence.
- *
- * @author Matthias Zenger
- * @author Martin Odersky
+ *
+ * @tparam A the type of the elements of the buffer
+ * @tparam This the type of the buffer itself.
+ *
+ * $buffernote
+ *
+ * @author Martin Odersky
+ * @author Matthias Zenger
* @version 2.8
* @since 2.8
- */
+ * @define buffernote @note
+ * This trait provides most of the operations of a `Buffer` independently of its representation.
+ * It is typically inherited by concrete implementations of buffers.
+ *
+ * To implement a concrete buffer, you need to provide implementations of the
+ * following methods:
+ * {{{
+ * def apply(idx: Int): A
+ * def update(idx: Int, elem: A)
+ * def length: Int
+ * def clear()
+ * def +=(elem: A): this.type
+ * def +=:(elem: A): this.type
+ * def insertAll(n: Int, iter: Traversable[A])
+ * def remove(n: Int): A
+ * }}}
+ * @define coll buffer
+ * @define Coll Buffer
+ * @define add append
+ * @define Add Append
+ * @define willNotTerminateInf
+ * @define mayNotTerminateInf
+ * @define compatMutate
+ * Note that for backward compatibility reasons, this method
+ * mutates the collection in place, unlike similar but
+ * undeprecated methods throughout the collections hierarchy.
+ */
@cloneable
trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
extends Growable[A]
with Shrinkable[A]
with Scriptable[A]
- with Addable[A, This]
with Subtractable[A, This]
with Cloneable[This]
with SeqLike[A, This]
{ self : This =>
+ // Note this does not extend Addable because `+` is being phased out of
+ // all Seq-derived classes.
+
import scala.collection.{Iterable, Traversable}
-// Abstract methods from IndexedSeq:
+ // Abstract methods from IndexedSeq:
- /** Return element at index `n`
- * @throws IndexOutofBoundsException if the index is not valid
- */
def apply(n: Int): A
-
- /** Replace element at index <code>n</code> with the new element
- * <code>newelem</code>.
- *
- * @param n the index of the element to replace.
- * @param newelem the new element.
- * @throws IndexOutofBoundsException if the index is not valid
- */
def update(n: Int, newelem: A)
-
- /** Return number of elements in the buffer
- */
def length: Int
-// Abstract methods from Appendabl
+ // Abstract methods from Growable:
- /** Append a single element to this buffer.
- *
- * @param elem the element to append.
- */
def +=(elem: A): this.type
-
- /** Clears the buffer contents.
- */
def clear()
-// Abstract methods new in this class
+ // Abstract methods new in this class:
- /** Prepend a single element to this buffer and return
- * the identity of the buffer.
+ /** Prepends a single element to this buffer.
* @param elem the element to prepend.
+ * @return the buffer itself.
*/
- def +=:(elem: A): This
+ def +=:(elem: A): this.type
- @deprecated("use `+=:' instead")
- final def +:(elem: A): This = +=:(elem)
-
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a
- * one. Instead, it will insert a new element at index <code>n</code>.
- *
- * @param n the index where a new element will be inserted.
- * @param iter the iterable object providing all elements to insert.
- * @throws IndexOutofBoundsException if the index is not valid
- */
- def insertAll(n: Int, iter: Traversable[A])
-
-
- /** Removes the element on a given index position.
+ /** Inserts new elements at a given index into this buffer.
*
- * @param n the index which refers to the element to delete.
- * @return the previous element
+ * @param n the index where new elements are inserted.
+ * @param elems the traversable collection containing the elements to insert.
+ * @throws IndexOutofBoundsException if the index `n` is not in the valid range
+ * `0 <= n <= length`.
*/
+ def insertAll(n: Int, elems: Traversable[A])
+
+ /** Removes the element at a given index from this buffer.
+ *
+ * @param n the index which refers to the element to delete.
+ * @return the previous element at index `n`
+ * @throws IndexOutofBoundsException if the index `n` is not in the valid range
+ * `0 <= n < length`.
+ */
def remove(n: Int): A
- /** Removes a number of elements from a given index position.
+ /** Removes a number of elements from a given index position.
*
- * @param n the index which refers to the element to delete.
- * @param count the number of elements to delete
- * @throws IndexOutofBoundsException if the index is not valid
+ * @param n the index which refers to the first element to remove.
+ * @param count the number of elements to remove.
+ * @throws IndexOutofBoundsException if the index `n` is not in the valid range
+ * `0 <= n <= length - count`.
+ * @throws IllegalArgumentException if `count < 0`.
*/
def remove(n: Int, count: Int) {
for (i <- 0 until count) remove(n)
@@ -113,6 +126,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* If the buffer does not contain that element, it is unchanged.
*
* @param x the element to remove.
+ * @return the buffer itself
*/
def -= (x: A): this.type = {
val i = indexOf(x)
@@ -120,80 +134,51 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
this
}
- /** Prepends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
- * buffer is returned.(repr /: elems) (_ plus _)
+ /** Prepends elements to this buffer.
*
- * @param iter the iterable object.
+ * @param xs the TraversableOnce containing the elements to prepend.
+ * @return the buffer itself.
*/
- def ++=:(iter: Traversable[A]): This = { insertAll(0, iter); this }
+ def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this }
- @deprecated("use ++=: instead")
- final def ++:(iter: Traversable[A]): This = ++=:(iter)
-
- /** Prepends a number of elements provided by an iterator
- * The identity of the buffer is returned.
- *
- * @param iter the iterator
- * @return the updated buffer.
- */
- def ++=:(iter: Iterator[A]): This = { insertAll(0, iter.toSeq); this }
-
- @deprecated("use ++=: instead")
- final def ++:(iter: Iterator[A]): This = ++=:(iter)
-
- /** Appends elements to this buffer.
+ /** Appends the given elements to this buffer.
*
* @param elems the elements to append.
*/
- def append(elems: A*) { this ++= elems }
-
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method.
- *
- * @param iter the iterable object.
- */
- def appendAll(iter: Traversable[A]) { this ++= iter }
+ def append(elems: A*) { appendAll(elems) }
- /** Prepend given elements to this list.
- *
- * @param elem the element to prepend.
+ /** Appends the elements contained in a traversable object to this buffer.
+ * @param xs the traversable object containing the elements to append.
*/
- def prepend(elems: A*) { elems ++=: this }
+ def appendAll(xs: TraversableOnce[A]) { this ++= xs }
- /** Prepends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
- * buffer is returned.
- *
- * @param iter the iterable object.
+ /** Prepends given elements to this buffer.
+ * @param elems the elements to prepend.
*/
- def prependAll(iter: Traversable[A]) { iter ++=: this }
+ def prepend(elems: A*) { prependAll(elems) }
- /** Prepends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
- * buffer is returned.
- *
- * @param iter the iterable object.
+ /** Prepends the elements contained in a traversable object to this buffer.
+ * @param xs the traversable object containing the elements to prepend.
*/
- def prependAll(iter: Iterator[A]) { iter ++=: this }
+ def prependAll(xs: TraversableOnce[A]) { xs ++=: this }
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a
- * one. Instead, it will insert the new elements at index <code>n</code>.
+ /** Inserts new elements at a given index into this buffer.
*
- * @param n the index where a new element will be inserted.
- * @param elems the new elements to insert.
+ * @param n the index where new elements are inserted.
+ * @param elems the traversable collection containing the elements to insert.
+ * @throws IndexOutofBoundsException if the index `n` is not in the valid range
+ * `0 <= n <= length`.
*/
def insert(n: Int, elems: A*) { insertAll(n, elems) }
- /** Removes the first <code>n</code> elements.
+ /** Removes the first ''n'' elements of this buffer.
*
* @param n the number of elements to remove from the beginning
* of this buffer.
*/
def trimStart(n: Int) { remove(0, n) }
- /** Removes the last <code>n</code> elements.
+ /** Removes the last ''n'' elements of this buffer.
*
* @param n the number of elements to remove from the end
* of this buffer.
@@ -224,17 +209,24 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
case _ => throw new UnsupportedOperationException("message " + cmd + " not understood")
}
- /** Defines the prefix of the string representation.
+ /** Defines the prefix of this object's `toString` representation.
+ * @return a string representation which starts the result of `toString` applied to this buffer.
+ * Unless overridden this is simply `"Buffer"`.
*/
override def stringPrefix: String = "Buffer"
+ /** Provide a read-only view of this buffer as a sequence
+ * @return A sequence which refers to this buffer for all its operations.
+ */
+ def readOnly: scala.collection.Seq[A] = toSeq
+
/** Adds a number of elements in an array
*
* @param src the array
* @param start the first element to append
* @param len the number of elements to append
*/
- @deprecated("replace by: <code>buf ++= src.view(start, end)</code>")
+ @deprecated("replace by: `buf ++= src.view(start, end)`")
def ++=(src: Array[A], start: Int, len: Int) {
var i = start
val end = i + len
@@ -244,102 +236,108 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
}
}
+
+ /** This method prepends elements to the buffer and
+ * returns the same buffer.
+ *
+ * $compatMutate
+ * You are strongly recommended to use `++=:` instead.
+ *
+ * @param xs elements to prepend
+ * @return this buffer
+ */
+ @deprecated("use ++=: instead")
+ final def ++:(xs: Traversable[A]): This = ++=:(xs)
+
+ /** This method prepends elements to the buffer and
+ * returns the same buffer.
+ *
+ * $compatMutate
+ * You are strongly recommended to use `+=:` instead.
+ *
+ * @param xs elements to prepend
+ * @return this buffer
+ */
+ @deprecated("use `+=:' instead")
+ final def +:(elem: A): This = +=:(elem)
+
/** Adds a single element to this collection and returns
* the collection itself.
*
+ * $compatMutate
+ * You are strongly recommended to use '+=' instead.
+ *
* @param elem the element to add.
*/
@deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
- override def + (elem: A): This = { +=(elem); repr }
+ "Use `clone() +=' if you intend to create a new collection.")
+ def + (elem: A): This = { +=(elem); repr }
/** Adds two or more elements to this collection and returns
* the collection itself.
*
+ * $compatMutate
+ * You are strongly recommended to use '++=' instead.
+ *
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
*/
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
+ @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
"Use `clone() ++=' if you intend to create a new collection.")
- override def + (elem1: A, elem2: A, elems: A*): This = {
+ def + (elem1: A, elem2: A, elems: A*): This = {
this += elem1 += elem2 ++= elems
repr
}
- /** Adds a number of elements provided by a traversable object and returns
- * either the collection itself.
+ /** Creates a new collection containing both the elements of this collection and the provided
+ * traversable object.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
+ * @return a new collection consisting of all the elements of this collection and `xs`.
*/
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=` if you intend to create a new collection.")
- override def ++(iter: Traversable[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, ++ always creates a new collection, even on Buffers.\n"+
+ "Use ++= instead if you intend to add by side effect to an existing collection.\n"
+ )
+ def ++(xs: TraversableOnce[A]): This = clone() ++= xs
- /** Adds a number of elements provided by an iterator and returns
- * the collection itself.
- *
- * @param iter the iterator
- */
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=` if you intend to create a new collection.")
- override def ++ (iter: Iterator[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
-
- /** Removes a single element from this collection and returns
- * the collection itself.
+ /** Creates a new collection with all the elements of this collection except `elem`.
*
* @param elem the element to remove.
+ * @return a new collection consisting of all the elements of this collection except `elem`.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=` if you intend to create a new collection.")
- override def -(elem: A): This = { -=(elem); repr }
-
- /** Removes two or more elements from this collection and returns
- * the collection itself.
+ @migration(2, 8,
+ "As of 2.8, - always creates a new collection, even on Buffers.\n"+
+ "Use -= instead if you intend to remove by side effect from an existing collection.\n"
+ )
+ override def -(elem: A): This = clone() -= elem
+
+ /** Creates a new collection with all the elements of this collection except the two
+ * or more specified elements.
*
* @param elem1 the first element to remove.
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
+ * @return a new collection consisting of all the elements of this collection except
+ * `elem1`, `elem2` and those in `elems`.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=` if you intend to create a new collection.")
- override def -(elem1: A, elem2: A, elems: A*): This = {
- this -= elem1 -= elem2 --= elems
- repr
- }
-
- /** Removes a number of elements provided by a Traversable object and returns
- * the collection itself.
- *
- * @param iter the Traversable object.
- */
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=` if you intend to create a new collection.")
- override def --(iter: Traversable[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
-
- /** Removes a number of elements provided by an iterator and returns
- * the collection itself.
+ @migration(2, 8,
+ "As of 2.8, - always creates a new collection, even on Buffers.\n"+
+ "Use -= instead if you intend to remove by side effect from an existing collection.\n"
+ )
+ override def -(elem1: A, elem2: A, elems: A*): This = clone() -= elem1 -= elem2 --= elems
+
+ /** Creates a new collection with all the elements of this collection except those
+ * provided by the specified traversable object.
*
- * @param iter the iterator
+ * @param xs the traversable object.
+ * @return a new collection with all the elements of this collection except
+ * those in `xs`
*/
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=` if you intend to create a new collection.")
- override def --(iter: Iterator[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
-
- def readOnly: scala.collection.Seq[A] = toSeq
+ @migration(2, 8,
+ "As of 2.8, -- always creates a new collection, even on Buffers.\n"+
+ "Use --= instead if you intend to remove by side effect from an existing collection.\n"
+ )
+ override def --(xs: TraversableOnce[A]): This = clone() --= xs
}
-
-
-
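
A minimal usage sketch of the 2.8 behaviour change documented above, assuming `ArrayBuffer` as the concrete `Buffer`: `++` and `--` now build a new collection, while `++=` and `--=` keep mutating in place.

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2, 3)
    val more  = buf ++ List(4, 5)   // new collection ArrayBuffer(1, 2, 3, 4, 5); buf unchanged
    buf ++= List(4, 5)              // side effect: buf is now ArrayBuffer(1, 2, 3, 4, 5)
    val fewer = buf -- List(1, 2)   // new collection ArrayBuffer(3, 4, 5); buf unchanged
    buf --= List(1, 2)              // side effect: buf is now ArrayBuffer(3, 4, 5)
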
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index a31beda57b..5d2e7fd86d 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,13 +15,18 @@ import generic._
import script._
/** This is a simple proxy class for <a href="Buffer.html"
- * target="contentFrame"><code>scala.collection.mutable.Buffer</code></a>.
+ * target="contentFrame">`scala.collection.mutable.Buffer`</a>.
* It is most useful for assembling customized set abstractions
* dynamically using object composition and forwarding.
*
* @author Matthias Zenger
* @version 1.0, 16/04/2004
* @since 1
+ *
+ * @tparam A type of the elements the buffer proxy contains.
+ *
+ * @define Coll BufferProxy
+ * @define coll buffer proxy
*/
trait BufferProxy[A] extends Buffer[A] with Proxy {
@@ -52,23 +56,23 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
override def readOnly = self.readOnly
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
+ /** Appends a number of elements provided by a traversable object
+ * via its <code>foreach</code> method. The identity of the
* buffer is returned.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
* @return the updated buffer.
*/
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
"Use `clone() ++=` if you intend to create a new collection.")
- def ++(iter: scala.collection.Iterable[A]): Buffer[A] = self.++(iter)
+ override def ++(xs: TraversableOnce[A]): Buffer[A] = self.++(xs)
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method.
+ /** Appends a number of elements provided by a traversable object.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
+ * @return a reference to this $coll.
*/
- def ++=(iter: scala.collection.Iterable[A]): this.type = { self.++=(iter); this }
+ override def ++=(xs: TraversableOnce[A]): this.type = { self.++=(xs); this }
/** Appends a sequence of elements to this buffer.
*
@@ -76,22 +80,21 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
override def append(elems: A*) { self.++=(elems) }
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method.
+ /** Appends a number of elements provided by a traversable object.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
*/
- def appendAll(iter: scala.collection.Iterable[A]) { self.appendAll(iter) }
+ override def appendAll(xs: TraversableOnce[A]) { self.appendAll(xs) }
/** Prepend a single element to this buffer and return
* the identity of the buffer.
*
* @param elem the element to append.
+ * @return a reference to this $coll.
*/
- def +=:(elem: A): Buffer[A] = self.+=:(elem)
+ def +=:(elem: A): this.type = { self.+=:(elem); this }
- override def ++=:(iter: scala.collection.Traversable[A]): Buffer[A] = self.++=:(iter)
- override def ++=:(iter: scala.collection.Iterator[A]): Buffer[A] = self.++=:(iter)
+ override def ++=:(xs: TraversableOnce[A]): this.type = { self.++=:(xs); this }
/** Prepend an element to this list.
*
@@ -99,41 +102,43 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
override def prepend(elems: A*) { self.prependAll(elems) }
- /** Prepends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
- * buffer is returned.
+ /** Prepends a number of elements provided by a traversable object.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
*/
- def prependAll(elems: scala.collection.Iterable[A]) { self.prependAll(elems) }
+ override def prependAll(xs: TraversableOnce[A]) { self.prependAll(xs) }
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a
- * one. Instead, it will insert the new elements at index <code>n</code>.
+ /** Inserts new elements at the index `n`. As opposed to method
+ * `update`, this method will not replace an element with a new
+ * one. Instead, it will insert the new elements at index `n`.
*
* @param n the index where a new element will be inserted.
* @param elems the new elements to insert.
*/
override def insert(n: Int, elems: A*) { self.insertAll(n, elems) }
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a
- * one. Instead, it will insert a new element at index <code>n</code>.
+ /** Inserts new elements at the index `n`. As opposed to method
+ * `update`, this method will not replace an element with a new
+ * one. Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
* @param iter the iterable object providing all elements to insert.
*/
- def insertAll(n: Int, iter: scala.collection.Iterable[A]): Unit = self.insertAll(n, iter)
+ def insertAll(n: Int, iter: scala.collection.Iterable[A]) {
+ self.insertAll(n, iter)
+ }
- override def insertAll(n: Int, iter: scala.collection.Traversable[A]): Unit = self.insertAll(n, iter)
+ override def insertAll(n: Int, iter: scala.collection.Traversable[A]) {
+ self.insertAll(n, iter)
+ }
- /** Replace element at index <code>n</code> with the new element
- * <code>newelem</code>.
+ /** Replace element at index `n` with the new element `newelem`.
*
* @param n the index of the element to replace.
* @param newelem the new element.
*/
- def update(n: Int, newelem: A): Unit = self.update(n, newelem)
+ def update(n: Int, newelem: A) { self.update(n, newelem) }
/** Removes the element on a given index position.
*
@@ -153,7 +158,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
/** Return a clone of this buffer.
*
- * @return a <code>Buffer</code> with the same elements.
+ * @return a `Buffer` with the same elements.
*/
override def clone(): Buffer[A] = new BufferProxy[A] {
def self = BufferProxy.this.self.clone()
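
To illustrate the forwarding pattern above, here is a hedged sketch of a hypothetical proxy (the name `CountingBuffer` and its counter are illustrative, not part of the library): only `self` must be supplied, and any method not overridden is delegated to it.

    import scala.collection.mutable.{ ArrayBuffer, Buffer, BufferProxy }

    // Hypothetical: counts how many single elements were ever appended.
    class CountingBuffer[A](underlying: Buffer[A]) extends BufferProxy[A] {
      def self = underlying
      var appended = 0
      override def +=(elem: A): this.type = { appended += 1; self += elem; this }
    }

    val cb = new CountingBuffer(ArrayBuffer[Int]())
    cb += 1
    cb += 2      // cb.appended == 2; the underlying buffer now holds 1 and 2
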
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 655f1d12ac..701e39e36f 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
@@ -15,43 +14,92 @@ import generic._
/** The base trait of all builders.
* A builder lets one construct a collection incrementally, by adding
- * elements to the builder with += and then converting to the required
+ * elements to the builder with `+=` and then converting to the required
* collection type with `result`.
*
+ * @tparam Elem the type of elements that get added to the builder.
+ * @tparam To the type of collection that is produced.
+ *
* @since 2.8
*/
trait Builder[-Elem, +To] extends Growable[Elem] {
/** Adds a single element to the builder.
- * @param elem The element to be added
+ * @param elem the element to be added.
+ * @return the builder itself.
*/
def +=(elem: Elem): this.type
- /** Clear the contents of this builder
+ /** Clears the contents of this builder.
+ * After execution of this method the builder will contain no elements.
*/
def clear()
- /** Returns collection resulting from this builder. The buffer's contents
- * are undefined afterwards.
+ /** Produces a collection from the added elements.
+ * The builder's contents are undefined after this operation.
+ * @return a collection containing the elements added to this builder.
*/
def result(): To
- /** Give a hint how many elements are expected to be added
- * when the next `result` is called.
+ /** Gives a hint how many elements are expected to be added
+ * when the next `result` is called. Some builder classes
+ * will optimize their representation based on the hint. However,
+ * builder implementations are still required to work correctly even if the hint is
+ * wrong, i.e. a different number of elements is added.
+ *
+ * @param size the hint how many elements will be added.
*/
def sizeHint(size: Int) {}
- /** Create a new builder which is the same as the current builder except
- * that a given function is applied to the current builder's result.
- * @param f the function to apply to the builder's result
+ /** Gives a hint that one expects the `result` of this builder
+ * to have the same size as the given collection, plus some delta. This will
+ * provide a hint only if the collection is known to have a cheap
+ * `size` method. Currently this is assumed to be the case if and only if
+ * the collection is of type `IndexedSeqLike`.
+ * Some builder classes
+ * will optimize their representation based on the hint. However,
+ * builder implementations are still required to work correctly even if the hint is
+ * wrong, i.e. a different number of elements is added.
+ *
+ * @param coll the collection which serves as a hint for the result's size.
+ * @param delta a correction to add to `coll.size` to produce the size hint.
+ */
+ def sizeHint(coll: TraversableLike[_, _], delta: Int = 0) {
+ if (coll.isInstanceOf[IndexedSeqLike[_,_]]) {
+ sizeHint(coll.size + delta)
+ }
+ }
+
+ /** Gives a hint how many elements are expected to be added
+ * when the next `result` is called, together with an upper bound
+ * given by the size of some other collection. Some builder classes
+ * will optimize their representation based on the hint. However,
+ * builder implementations are still required to work correctly even if the hint is
+ * wrong, i.e. a different number of elements is added.
+ *
+ * @param size the hint how many elements will be added.
+ * @param boundingColl the bounding collection. If it is
+ * an IndexedSeqLike, then sizes larger
+ * than the collection's size are reduced.
+ */
+ def sizeHintBounded(size: Int, boundingColl: TraversableLike[_, _]) {
+ if (boundingColl.isInstanceOf[IndexedSeqLike[_,_]])
+ sizeHint(size min boundingColl.size)
+ }
+
+ /** Creates a new builder by applying a transformation function to
+ * the results of this builder.
+ * @param f the transformation function.
+ * @tparam NewTo the type of collection returned by `f`.
+ * @return a new builder which is the same as the current builder except
+ * that a transformation function is applied to this builder's result.
*/
def mapResult[NewTo](f: To => NewTo): Builder[Elem, NewTo] =
new Builder[Elem, NewTo] with Proxy {
val self = Builder.this
def +=(x: Elem): this.type = { self += x; this }
def clear() = self.clear()
- override def ++=(xs: Iterator[Elem]): this.type = { self ++= xs; this }
- override def ++=(xs:scala.collection.Traversable[Elem]): this.type = { self ++= xs; this }
+ override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this }
def result: NewTo = f(self.result)
}
}
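
A short sketch of how the hints and `mapResult` compose, assuming `ArrayBuffer` as the underlying builder: the size hint is purely advisory, and `mapResult` only post-processes the finished collection.

    import scala.collection.mutable.{ ArrayBuffer, Builder }

    val b: Builder[Int, ArrayBuffer[Int]] = ArrayBuffer.newBuilder[Int]
    b.sizeHint(3)                           // advisory; correctness does not depend on it
    val listBuilder = b.mapResult(_.toList)
    listBuilder += 1 += 2 += 3
    val xs: List[Int] = listBuilder.result  // List(1, 2, 3)
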
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index 553c4ad7d6..b77172ae16 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,7 +13,9 @@ package mutable
/** A trait for cloneable collections.
*
- * @since 2.8
+ * @since 2.8
+ *
+ * @tparam A Type of the elements contained in the collection, covariant and with reference types as upperbound.
*/
@cloneable
trait Cloneable[+A <: AnyRef] {
diff --git a/src/library/scala/collection/mutable/CloneableCollection.scala b/src/library/scala/collection/mutable/CloneableCollection.scala
index 11fd15d709..625b849821 100644
--- a/src/library/scala/collection/mutable/CloneableCollection.scala
+++ b/src/library/scala/collection/mutable/CloneableCollection.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** The J2ME version of the library defined this trait with a <code>clone</code>
- * method to substitute for the lack of <code>Object.clone</code> there.
+/** The J2ME version of the library defined this trait with a `clone`
+ * method to substitute for the lack of `Object.clone` there.
*
* @since 2.6
*/
diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala
new file mode 100644
index 0000000000..f9505f694b
--- /dev/null
+++ b/src/library/scala/collection/mutable/ConcurrentMap.scala
@@ -0,0 +1,68 @@
+package scala.collection
+package mutable
+
+
+/** A template trait for mutable maps that allow concurrent access.
+ *
+ * $concurrentmapinfo
+ *
+ * @since 2.8
+ *
+ * @tparam A the key type of the map
+ * @tparam B the value type of the map
+ *
+ * @define Coll ConcurrentMap
+ * @define coll concurrent map
+ * @define concurrentmapinfo
+ * This is a base trait for all Scala concurrent map implementations. It
+ * provides all of the methods a `Map` does, with the difference that all the
+ * changes are atomic. It also describes methods specific to concurrent maps.
+ * Note: The concurrent maps do not accept `null` for keys or values.
+ *
+ * @define atomicop
+ * This is done atomically.
+ */
+trait ConcurrentMap[A, B] extends Map[A, B] {
+
+ /**
+ * Associates the given key with a given value, unless the key was already associated with some other value.
+ * $atomicop
+ *
+ * @param k key with which the specified value is to be associated
+ * @param v value to be associated with the specified key
+ * @return `Some(oldvalue)` if there was a value `oldvalue` previously associated with the
+ * specified key, or `None` if there was no mapping for the specified key
+ */
+ def putIfAbsent(k: A, v: B): Option[B]
+
+ /**
+ * Removes the entry for the specified key if it is currently mapped to the specified value.
+ * $atomicop
+ *
+ * @param k key for which the entry should be removed
+ * @param v value expected to be associated with the specified key if the removal is to take place
+ * @return `true` if the removal took place, `false` otherwise
+ */
+ def remove(k: A, v: B): Boolean
+
+ /**
+ * Replaces the entry for the given key only if it was previously mapped to a given value.
+ * $atomicop
+ *
+ * @param k key for which the entry should be replaced
+ * @param oldvalue value expected to be associated with the specified key if replacing is to happen
+ * @param newvalue value to be associated with the specified key
+ * @return `true` if the entry was replaced, `false` otherwise
+ */
+ def replace(k: A, oldvalue: B, newvalue: B): Boolean
+
+ /**
+ * Replaces the entry for the given key only if it was previously mapped to some value.
+ * $atomicop
+ *
+ * @param k key for which the entry should be replaced
+ * @param v value to be associated with the specified key
+ * @return `Some(oldvalue)` if the given key was previously mapped to some value `oldvalue`, or `None` otherwise
+ */
+ def replace(k: A, v: B): Option[B]
+}
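
A hedged usage sketch of the four atomic operations, written against the trait only; `cmap` stands for some concrete `ConcurrentMap` implementation (for example a wrapped `java.util.concurrent.ConcurrentHashMap`), which this trait does not itself provide.

    import scala.collection.mutable.ConcurrentMap

    def demo(cmap: ConcurrentMap[String, Int]) {
      cmap.putIfAbsent("a", 1)    // Some(old) if "a" was already present, None otherwise
      cmap.replace("a", 1, 2)     // true only if "a" was mapped to 1
      cmap.remove("a", 2)         // true only if "a" was mapped to 2
      cmap.replace("b", 7)        // Some(old) only if "b" was already present
    }
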
diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala b/src/library/scala/collection/mutable/DefaultEntry.scala
index ec17dab027..5144f4f590 100644
--- a/src/library/scala/collection/mutable/DefaultEntry.scala
+++ b/src/library/scala/collection/mutable/DefaultEntry.scala
@@ -1,19 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/**
- * @since 2.3
+
+
+/** Class used internally for default map model.
+ * @since 2.3
*/
@serializable
final class DefaultEntry[A, B](val key: A, var value: B)
diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala
index 345540435d..226a09697a 100644
--- a/src/library/scala/collection/mutable/DefaultMapModel.scala
+++ b/src/library/scala/collection/mutable/DefaultMapModel.scala
@@ -1,20 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** This class is used internally. It implements the mutable <code>Map</code>
- * class in terms of three functions: <code>findEntry</code>,
- * <code>addEntry</code>, and <code>entries</code>.
+/** This class is used internally. It implements the mutable `Map`
+ * class in terms of three functions: `findEntry`, `addEntry`, and `entries`.
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 6a4476d743..d3c86953c8 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
-* */
+\* */
-// $Id$
package scala.collection
@@ -14,37 +13,77 @@ package mutable
import generic._
-/** This class implements single linked lists where both the head (<code>elem</code>)
- * and the tail (<code>next</code>) are mutable.
+/** This class implements double linked lists where both the head (`elem`),
+ * the tail (`next`) and a reference to the previous node (`prev`) are mutable.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
+ *
+ * @tparam A the type of the elements contained in this double linked list.
+ *
+ * @define Coll DoubleLinkedList
+ * @define coll double linked list
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `DoubleLinkedList[B]` because an implicit of type `CanBuildFrom[DoubleLinkedList, B, DoubleLinkedList[B]]`
+ * is defined in object `DoubleLinkedList`.
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `DoubleLinkedList`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-@serializable @SerialVersionUID(419155950203746706L)
-class DoubleLinkedList[A](_elem: A, _next: DoubleLinkedList[A]) extends LinearSeq[A]
- with GenericTraversableTemplate[A, DoubleLinkedList]
- with DoubleLinkedListLike[A, DoubleLinkedList[A]] {
- elem = _elem
- next = _next
+@serializable @SerialVersionUID(-8144992287952814767L)
+class DoubleLinkedList[A]() extends LinearSeq[A]
+ with GenericTraversableTemplate[A, DoubleLinkedList]
+ with DoubleLinkedListLike[A, DoubleLinkedList[A]] {
+ next = this
+
+ /** Creates a node for the double linked list.
+ *
+ * @param elem the element this node contains.
+ * @param next the next node in the double linked list.
+ */
+ def this(elem: A, next: DoubleLinkedList[A]) {
+ this()
+ if (next != null) {
+ this.elem = elem
+ this.next = next
+ }
+ }
+
override def companion: GenericCompanion[DoubleLinkedList] = DoubleLinkedList
}
+/** $factoryInfo
+ * @define coll double linked list
+ * @define Coll DoubleLinkedList
+ */
object DoubleLinkedList extends SeqFactory[DoubleLinkedList] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = new GenericCanBuildFrom[A] //new CanBuildFrom[Coll, A, DoubleLinkedList[A]] { : Coll) = from.traversableBuilder[A] }
+ /** $genericCanBuildFrom */
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, DoubleLinkedList[A]] =
new Builder[A, DoubleLinkedList[A]] {
var current: DoubleLinkedList[A] = _
+ val emptyList = new DoubleLinkedList[A]()
+ if(null == current)
+ current = emptyList
+
def +=(elem: A): this.type = {
- val tmp = new DoubleLinkedList(elem, null)
- if (current != null)
- current.insert(tmp)
+ if (current.nonEmpty)
+ current.insert(new DoubleLinkedList(elem, emptyList))
else
- current = tmp
+ current = new DoubleLinkedList(elem, emptyList)
this
}
- def clear() { current = null }
+
+ def clear() {
+ current = emptyList
+ }
def result() = current
}
}
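
A small sketch of the two constructors introduced above: the no-argument constructor produces a sentinel node whose `next` points back to itself, and the auxiliary constructor fills in `elem` and `next` only when `next` is non-null.

    import scala.collection.mutable.DoubleLinkedList

    val empty = new DoubleLinkedList[Int]()     // sentinel node: next eq empty
    val tail  = new DoubleLinkedList(2, empty)
    val head  = new DoubleLinkedList(1, tail)   // head.elem == 1, head.next.elem == 2
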
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index 6d89b67719..9112ade5af 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -1,28 +1,34 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
/** This extensible class may be used as a basis for implementing double
- * linked lists. Type variable <code>A</code> refers to the element type
- * of the list, type variable <code>This</code> is used to model self
+ * linked lists. Type variable `A` refers to the element type
+ * of the list, type variable `This` is used to model self
* types of linked lists.
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 2.8
+ *
+ * @tparam A type of the elements contained in the double linked list
+ * @tparam This the type of the actual linked list holding the elements
+ *
+ * @define Coll DoubleLinkedList
+ * @define coll double linked list
*/
trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends LinkedListLike[A, This] { self =>
+ /** A reference to the node in the linked list preceding the current node. */
var prev: This = _
override def append(that: This): This =
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 1d55933050..d46c59173b 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -1,18 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/**
- * @since 2.3
+
+/** An implementation class backing a `HashSet`.
+ *
+ * This trait is used internally. It can be mixed in with various collections relying on
+ * a hash table as an implementation.
+ *
+ * @since 2.3
+ *
+ * @tparam A the type of the elements contained in the flat hash table.
*/
trait FlatHashTable[A] {
@@ -27,19 +33,65 @@ trait FlatHashTable[A] {
private final val tableDebug = false
+ @transient private[collection] var _loadFactor = loadFactor
+
/** The actual hash table.
*/
- protected var table: Array[AnyRef] =
- if (initialSize == 0) null else new Array(initialSize)
+ @transient protected var table: Array[AnyRef] = new Array(initialCapacity)
/** The number of mappings contained in this hash table.
*/
- protected var tableSize = 0
+ @transient protected var tableSize = 0
/** The next size value at which to resize (capacity * load factor).
*/
- protected var threshold: Int = newThreshold(initialSize)
+ @transient protected var threshold: Int = newThreshold(initialCapacity)
+
+ import HashTable.powerOfTwo
+ private def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ private def initialCapacity = capacity(initialSize)
+
+ /**
+ * Initializes the collection from the input stream. `f` will be called for each element
+ * read from the input stream in the order determined by the stream. This is useful for
+ * structures where iteration order is important (e.g. LinkedHashSet).
+ *
+ * The serialization format expected is the one produced by `serializeTo`.
+ */
+ private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) {
+ in.defaultReadObject
+
+ _loadFactor = in.readInt
+ assert(_loadFactor > 0)
+
+ val size = in.readInt
+ assert(size >= 0)
+
+ table = new Array(capacity(size * loadFactorDenum / _loadFactor))
+ threshold = newThreshold(table.size)
+
+ var index = 0
+ while (index < size) {
+ val elem = in.readObject.asInstanceOf[A]
+ f(elem)
+ addEntry(elem)
+ index += 1
+ }
+ }
+
+ /**
+ * Serializes the collection to the output stream by saving the load factor, collection
+ * size and collection elements. `foreach` determines the order in which the elements are saved
+ * to the stream. To deserialize, `init` should be used.
+ */
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ out.writeInt(_loadFactor)
+ out.writeInt(tableSize)
+ iterator.foreach(out.writeObject)
+ }
+ /** Finds an entry in the hash table if such an element exists. */
def findEntry(elem: A): Option[A] = {
var h = index(elemHashCode(elem))
var entry = table(h)
@@ -50,6 +102,7 @@ trait FlatHashTable[A] {
if (null == entry) None else Some(entry.asInstanceOf[A])
}
+ /** Checks whether an element is contained in the hash table. */
def containsEntry(elem: A): Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
@@ -60,8 +113,8 @@ trait FlatHashTable[A] {
null != entry
}
- /** Add entry if not yet in table
- * Return whether a new entry was added
+ /** Add entry if not yet in table.
+ * @return `true` if a new entry was added, `false` otherwise.
*/
def addEntry(elem: A) : Boolean = {
var h = index(elemHashCode(elem))
@@ -77,6 +130,7 @@ trait FlatHashTable[A] {
true
}
+ /** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
def removeEntry(elem: A) : Option[A] = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
@@ -139,10 +193,10 @@ trait FlatHashTable[A] {
private def checkConsistent() {
for (i <- 0 until table.length)
if (table(i) != null && !containsEntry(table(i).asInstanceOf[A]))
- assert(false, i+" "+table(i)+" "+table.toString)
+ assert(false, i+" "+table(i)+" "+table.mkString)
}
- protected def elemHashCode(elem: A) = elem.hashCode()
+ protected def elemHashCode(elem: A) = if (elem == null) 0 else elem.hashCode()
protected final def improve(hcode: Int) = {
var h: Int = hcode + ~(hcode << 9)
@@ -154,7 +208,7 @@ trait FlatHashTable[A] {
protected final def index(hcode: Int) = improve(hcode) & (table.length - 1)
private def newThreshold(size: Int) = {
- val lf = loadFactor
+ val lf = _loadFactor
assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5")
(size.toLong * lf / loadFactorDenum ).toInt
}
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
new file mode 100644
index 0000000000..781753c24d
--- /dev/null
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -0,0 +1,38 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+import generic._
+
+/** The canonical builder for collections that are growable, i.e. that support an
+ * efficient `+=` method which adds an element to the collection. It is
+ * almost identical to `AddingBuilder`, but necessitated by the existence of
+ * classes which are `Growable` but not `Addable`, which is a result of covariance
+ * interacting surprisingly with any2stringadd thus driving '+' out of the `Seq`
+ * hierarchy. The tendrils of original sin should never be underestimated.
+ *
+ * Addendum: of even greater significance is that '+' on mutable collections now
+ * creates a new collection. This means using AddingBuilder on them will create
+ * a new intermediate collection for every element given to the builder, taking
+ * '+' from an O(1) to O(n) operation.
+ *
+ * @author Paul Phillips
+ * @version 2.8
+ * @since 2.8
+ *
+ * @define Coll GrowingBuilder
+ * @define coll growing builder
+ */
+class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] {
+ protected var elems: To = empty
+ def +=(x: Elem): this.type = { elems += x; this }
+ def clear() { elems = empty }
+ def result: To = elems
+}
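
A minimal sketch of the builder in use, with a mutable `HashSet` as the `Growable` target; every `+=` grows the same underlying set, so no intermediate copies are made.

    import scala.collection.mutable.{ GrowingBuilder, HashSet }

    val b = new GrowingBuilder[Int, HashSet[Int]](HashSet.empty[Int])
    b += 1
    b += 2
    val set = b.result    // the HashSet containing 1 and 2
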
diff --git a/src/library/scala/collection/mutable/HashEntry.scala b/src/library/scala/collection/mutable/HashEntry.scala
index 426798e8f2..7c62dc6281 100644
--- a/src/library/scala/collection/mutable/HashEntry.scala
+++ b/src/library/scala/collection/mutable/HashEntry.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,8 +8,8 @@
package scala.collection
package mutable
-/**
- * @since 2.8
+/** Class used internally.
+ * @since 2.8
*/
trait HashEntry [A, E] {
val key: A
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index b2f259e4e9..1d605fe444 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,10 +13,29 @@ package mutable
import generic._
-/**
- * @since 1
+
+/** This class implements mutable maps using a hashtable.
+ *
+ * @since 1
+ *
+ * @tparam A the type of the keys contained in this hash map.
+ * @tparam B the type of the values assigned to keys in this hash map.
+ *
+ * @define Coll mutable.HashMap
+ * @define coll mutable hash map
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `HashMap[A, B]` if the elements contained in the resulting collection are
+ * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[HashMap, (A, B), HashMap[A, B]]`
+ * is defined in object `HashMap`. Otherwise, `That` resolves to the most specific type that doesn't have
+ * to contain pairs of type `(A, B)`, which is `Iterable`.
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `HashMap`.
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-@serializable @SerialVersionUID(-8682987922734091219L)
+@serializable @SerialVersionUID(1L)
class HashMap[A, B] extends Map[A, B]
with MapLike[A, B, HashMap[A, B]]
with HashTable[A] {
@@ -58,13 +76,45 @@ class HashMap[A, B] extends Map[A, B]
def -=(key: A): this.type = { removeEntry(key); this }
def iterator = entriesIterator map {e => (e.key, e.value)}
+
+ override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
+
+ /* Override to avoid tuple allocation in foreach */
+ override def keySet: collection.Set[A] = new DefaultKeySet {
+ override def foreach[C](f: A => C) = foreachEntry(e => f(e.key))
+ }
+
+ /* Override to avoid tuple allocation in foreach */
+ override def values: collection.Iterable[B] = new DefaultValuesIterable {
+ override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
+ }
+
+ /* Override to avoid tuple allocation */
+ override def keysIterator: Iterator[A] = new Iterator[A] {
+ val iter = entriesIterator
+ def hasNext = iter.hasNext
+ def next = iter.next.key
+ }
+
+ /* Override to avoid tuple allocation */
+ override def valuesIterator: Iterator[B] = new Iterator[B] {
+ val iter = entriesIterator
+ def hasNext = iter.hasNext
+ def next = iter.next.value
+ }
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, _.value)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ init[B](in, new Entry(_, _))
+ }
}
-/** This class implements mutable maps using a hashtable.
- *
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * @define Coll mutable.HashMap
+ * @define coll mutable hash map
*/
object HashMap extends MutableMapFactory[HashMap] {
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
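
A brief caller-side sketch of the overrides above: `keysIterator` and `valuesIterator` no longer build an `(A, B)` pair per element, and `foreach` walks the entries directly instead of going through the tuple-producing `iterator`.

    import scala.collection.mutable.HashMap

    val m = HashMap("a" -> 1, "b" -> 2)
    m foreach { case (k, v) => println(k + " -> " + v) }   // traverses the entries directly
    val keys   = m.keysIterator.toList                     // keys without building pairs
    val values = m.valuesIterator.toList                   // likewise for values
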
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index 9144a4be88..ebfeaa29ad 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -20,8 +19,22 @@ import generic._
* @author Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
+ *
+ * @tparam A the type of the elements contained in this set.
+ *
+ * @define Coll mutable.HashSet
+ * @define coll mutable hash set
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `HashSet[B]` because an implicit of type `CanBuildFrom[HashSet, B, HashSet[B]]`
+ * is defined in object `HashSet`.
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `HashSet`.
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-@serializable
+@serializable @SerialVersionUID(1L)
class HashSet[A] extends Set[A]
with GenericSetTemplate[A, HashSet]
with SetLike[A, HashSet[A]]
@@ -50,11 +63,22 @@ class HashSet[A] extends Set[A]
}
}
- override def clone(): Set[A] = new HashSet[A] ++= this
+ override def clone() = new HashSet[A] ++= this
+
+ private def writeObject(s: java.io.ObjectOutputStream) {
+ serializeTo(s)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ init(in, x => x)
+ }
}
-/** Factory object for `HashSet` class */
-object HashSet extends SetFactory[HashSet] {
+/** $factoryInfo
+ * @define Coll mutable.HashSet
+ * @define coll mutable hash set
+ */
+object HashSet extends MutableSetFactory[HashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
override def empty[A]: HashSet[A] = new HashSet[A]
}
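
A short sketch around the `clone` change above: the copy is an independent `HashSet`, and the `writeObject`/`readObject` hooks mean instances round-trip through Java serialization via `serializeTo` and `init`.

    import scala.collection.mutable.HashSet

    val s  = HashSet(1, 2, 3)
    val s2 = s.clone()     // now statically a HashSet[Int], not just Set[Int]
    s2 += 4                // the original s is left untouched
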
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 9dd8a7aeb0..b924a38dde 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -1,36 +1,38 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
/** This class can be used to construct data structures that are based
- * on hashtables. Class <code>HashTable[A]</code> implements a hashtable
- * that maps keys of type <code>A</code> to values of the fully abstract
- * member type <code>Entry</code>. Classes that make use of <code>HashTable</code>
- * have to provide an implementation for <code>Entry</code>
+ * on hashtables. Class `HashTable[A]` implements a hashtable
+ * that maps keys of type `A` to values of the fully abstract
+ * member type `Entry`. Classes that make use of `HashTable`
+ * have to provide an implementation for `Entry`.
*
* There are mainly two parameters that affect the performance of a hashtable:
* the <i>initial size</i> and the <i>load factor</i>. The <i>size</i>
* refers to the number of <i>buckets</i> in the hashtable, and the <i>load
* factor</i> is a measure of how full the hashtable is allowed to get before
* its size is automatically doubled. Both parameters may be changed by
- * overriding the corresponding values in class <code>HashTable</code>.
+ * overriding the corresponding values in class `HashTable`.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
+ *
+ * @tparam A type of the elements contained in this hash table.
*/
trait HashTable[A] {
+ import HashTable._
protected type Entry >: Null <: HashEntry[A, Entry]
@@ -47,35 +49,67 @@ trait HashTable[A] {
*/
protected def initialThreshold: Int = newThreshold(initialCapacity)
+ @transient private[collection] var _loadFactor = loadFactor
+
/** The actual hash table.
*/
- protected var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity)
+ @transient protected var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity)
- private def initialCapacity = if (initialSize == 0) 1 else powerOfTwo(initialSize)
+ /** The number of mappings contained in this hash table.
+ */
+ @transient protected var tableSize: Int = 0
+
+ /** The next size value at which to resize (capacity * load factor).
+ */
+ @transient protected var threshold: Int = initialThreshold
+
+ private def initialCapacity = capacity(initialSize)
/**
- * Returns a power of two >= `target`.
+ * Initializes the collection from the input stream. `f` will be called for each key/value pair
+ * read from the input stream in the order determined by the stream. This is useful for
+ * structures where iteration order is important (e.g. LinkedHashMap).
*/
- private def powerOfTwo(target: Int): Int = {
- /* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
- var c = target - 1;
- c |= c >>> 1;
- c |= c >>> 2;
- c |= c >>> 4;
- c |= c >>> 8;
- c |= c >>> 16;
- c + 1;
+ private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
+ in.defaultReadObject
+
+ _loadFactor = in.readInt
+ assert(_loadFactor > 0)
+
+ val size = in.readInt
+ assert(size >= 0)
+
+ table = new Array(capacity(size * loadFactorDenum / _loadFactor))
+ threshold = newThreshold(table.size)
+
+ var index = 0
+ while (index < size) {
+ addEntry(f(in.readObject.asInstanceOf[A], in.readObject.asInstanceOf[B]))
+ index += 1
+ }
}
- /** The number of mappings contained in this hash table.
+ /**
+ * Serializes the collection to the output stream by saving the load factor, collection
+ * size, collection keys and collection values. `value` is responsible for providing a value
+ * from an entry.
+ *
+ * `foreach` determines the order in which the key/value pairs are saved to the stream. To
+ * deserialize, `init` should be used.
*/
- protected var tableSize: Int = 0
+ private[collection] def serializeTo[B](out: java.io.ObjectOutputStream, value: Entry => B) {
+ out.defaultWriteObject
+ out.writeInt(loadFactor)
+ out.writeInt(tableSize)
+ foreachEntry { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(value(entry))
+ }
+ }
- /** The next size value at which to resize (capacity * load factor).
- */
- protected var threshold: Int = initialThreshold
+ private def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
- /** Find entry with given key in table, null if not found
+ /** Find entry with given key in table, null if not found.
*/
protected def findEntry(key: A): Entry = {
val h = index(elemHashCode(key))
@@ -96,7 +130,7 @@ trait HashTable[A] {
resize(2 * table.length)
}
- /** Remove entry from table if present
+ /** Remove entry from table if present.
*/
protected def removeEntry(key: A) : Entry = {
val h = index(elemHashCode(key))
@@ -122,7 +156,7 @@ trait HashTable[A] {
null
}
- /** An iterator returning all entries
+ /** An iterator returning all entries.
*/
protected def entriesIterator: Iterator[Entry] = new Iterator[Entry] {
val iterTable = table
@@ -144,6 +178,21 @@ trait HashTable[A] {
}
}
+ /*
+ * We should implement this as a primitive operation over the underlying array, but it can
+ * cause a behaviour change in edge cases where:
+ * - Someone modifies a map during iteration
+ * - The insertion point is close to the iteration point.
+ *
+ * The reason this happens is that the iterator prefetches the following element before
+ * returning from next (to simplify the implementation of hasNext) while the natural
+ * implementation of foreach does not.
+ *
+ * It should be mentioned that modifying a map during iteration leads to unpredictable
+ * results with either implementation.
+ */
+ protected final def foreachEntry[C](f: Entry => C) { entriesIterator.foreach(f) }
+
/** An iterator returning all entries */
@deprecated("use entriesIterator instead")
protected def entries: Iterator[Entry] = entriesIterator
@@ -157,9 +206,9 @@ trait HashTable[A] {
}
private def newThreshold(size: Int) =
- ((size.toLong * loadFactor)/loadFactorDenum).toInt
+ ((size.toLong * _loadFactor)/loadFactorDenum).toInt
- private def resize(newSize: Int) = {
+ private def resize(newSize: Int) {
val oldTable = table
table = new Array(newSize)
var i = oldTable.length - 1
@@ -179,7 +228,7 @@ trait HashTable[A] {
protected def elemEquals(key1: A, key2: A): Boolean = (key1 == key2)
- protected def elemHashCode(key: A) = key.hashCode()
+ protected def elemHashCode(key: A) = if (key == null) 0 else key.##
protected final def improve(hcode: Int) = {
var h: Int = hcode + ~(hcode << 9)
@@ -191,4 +240,19 @@ trait HashTable[A] {
protected final def index(hcode: Int) = improve(hcode) & (table.length - 1)
}
+private[collection] object HashTable {
+ /**
+ * Returns a power of two >= `target`.
+ */
+ private[collection] def powerOfTwo(target: Int): Int = {
+ /* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
+ var c = target - 1;
+ c |= c >>> 1;
+ c |= c >>> 2;
+ c |= c >>> 4;
+ c |= c >>> 8;
+ c |= c >>> 16;
+ c + 1;
+ }
+}
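
The capacity rounding factored out above is a standard bit-smearing trick: propagate the highest set bit into every lower position, then add one. A standalone sketch of the same logic (the library's version is private to the collection package):

    def powerOfTwo(target: Int): Int = {
      var c = target - 1
      c |= c >>> 1; c |= c >>> 2; c |= c >>> 4; c |= c >>> 8; c |= c >>> 16
      c + 1
    }

    powerOfTwo(1)    // 1
    powerOfTwo(17)   // 32
    powerOfTwo(64)   // 64
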
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index dcc6539630..d480c4342b 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -1,39 +1,42 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** <code>History[A, B]</code> objects may subscribe to events of
- * type <code>A</code> published by an object of type <code>B</code>.
+/** `History[Evt, Pub]` objects may subscribe to events of
+ * type `Evt` published by an object of type `Pub`.
* The history subscriber object records all published events
- * up to maximum number of <code>maxHistory</code> events.
+ * up to a maximum number of `maxHistory` events.
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
+ *
+ * @tparam Evt Type of events.
+ * @tparam Pub Type of publishers.
*/
@serializable
@SerialVersionUID(5219213543849892588L)
-class History[A, B] extends AnyRef with Subscriber[A, B] with Iterable[(B, A)]
+class History[Evt, Pub] extends Subscriber[Evt, Pub] with Iterable[(Pub, Evt)]
{
- protected val log: Queue[(B, A)] = new Queue[(B, A)]
+ protected val log: Queue[(Pub, Evt)] = new Queue
val maxHistory: Int = 1000
- /**
- * @param pub ...
- * @param event ...
+ /** Notifies this listener with an event by enqueuing it in the log.
+ *
+ * @param pub the publisher.
+ * @param event the event.
*/
- def notify(pub: B, event: A) {
+ def notify(pub: Pub, event: Evt) {
if (log.length >= maxHistory)
log.dequeue
@@ -41,8 +44,8 @@ class History[A, B] extends AnyRef with Subscriber[A, B] with Iterable[(B, A)]
}
override def size: Int = log.length
- def iterator: Iterator[(B, A)] = log.iterator
- def events: Iterator[A] = log.iterator.map { case (_, e) => e }
+ def iterator: Iterator[(Pub, Evt)] = log.iterator
+ def events: Iterator[Evt] = log.iterator map (_._2)
def clear() { log.clear }
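
A minimal sketch of the renamed type parameters in use; `SomePublisher` is a placeholder object standing in for whatever publisher the history is subscribed to.

    import scala.collection.mutable.History

    object SomePublisher

    val h = new History[String, SomePublisher.type]
    h.notify(SomePublisher, "started")
    h.notify(SomePublisher, "stopped")
    h.size             // 2
    h.events.toList    // List("started", "stopped")
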
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 69da34f5be..24f11e105f 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -1,22 +1,22 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
+import annotation.migration
/** This class can be used as an adaptor to create mutable maps from
- * immutable map implementations. Only method <code>empty</code> has
+ * immutable map implementations. Only method `empty` has
* to be redefined if the immutable map on which this mutable map is
- * originally based is not empty. <code>empty</code> is supposed to
+ * originally based is not empty. `empty` is supposed to
* return the representation of an empty map.
*
* @author Matthias Zenger
@@ -41,19 +41,17 @@ extends Map[A, B]
override def isDefinedAt(key: A) = imap.isDefinedAt(key)
- override def keySet: scala.collection.Set[A] = imap.keySet
+ override def keySet: collection.Set[A] = imap.keySet
override def keysIterator: Iterator[A] = imap.keysIterator
- @deprecated("use `keysIterator' instead")
- override def keys: Iterator[A] = imap.keysIterator
-
- override def valuesIterable: scala.collection.Iterable[B] = imap.valuesIterable
+ @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ override def keys: collection.Iterable[A] = imap.keys
override def valuesIterator: Iterator[B] = imap.valuesIterator
- @deprecated("use `valuesIterator' instead")
- override def values: Iterator[B] = imap.valuesIterator
+ @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ override def values: collection.Iterable[B] = imap.values
def iterator: Iterator[(A, B)] = imap.iterator
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 8cf3e2920f..4560d01d4d 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,9 +13,9 @@ package mutable
/** This class can be used as an adaptor to create mutable sets from
- * immutable set implementations. Only method <code>empty</code> has
+ * immutable set implementations. Only method `empty` has
* to be redefined if the immutable set on which this mutable set is
- * originally based is not empty. <code>empty</code> is supposed to
+ * originally based is not empty. `empty` is supposed to
* return the representation of an empty set.
*
* @author Matthias Zenger
diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala
index 540e57fca3..13788671c2 100644
--- a/src/library/scala/collection/mutable/IndexedSeq.scala
+++ b/src/library/scala/collection/mutable/IndexedSeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,8 +13,10 @@ package mutable
import generic._
-/** A subtrait of <code>collection.IndexedSeq</code> which represents sequences
+/** A subtrait of `collection.IndexedSeq` which represents sequences
* that can be mutated.
+ *
+ * $indexedSeqInfo
*/
trait IndexedSeq[A] extends Seq[A]
with scala.collection.IndexedSeq[A]
@@ -24,6 +25,11 @@ trait IndexedSeq[A] extends Seq[A]
override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
}
+/** $factoryInfo
+ * The current default implementation of a $Coll is an `ArrayBuffer`.
+ * @define coll mutable indexed sequence
+ * @define Coll mutable.IndexedSeq
+ */
object IndexedSeq extends SeqFactory[IndexedSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer[A]
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index 74117e3707..f08353ef3d 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,13 +15,38 @@ import generic._
/** A subtrait of scala.collection.IndexedSeq which represents sequences
* that can be mutated.
*
- * @since 2.8
+ * It declares a method `update` which allows updating an element
+ * at a specific index in the sequence.
+ *
+ * This trait just implements `iterator` in terms of `apply` and `length`.
+ * However, see `IndexedSeqOptimized` for an implementation trait that overrides operations
+ * to make them run faster under the assumption of fast random access with `apply`.
+ *
+ * $indexedSeqInfo
+ *
+ * @tparam A the element type of the $coll
+ * @tparam Repr the type of the actual $coll containing the elements.
+ *
+ * @define Coll IndexedSeq
+ * @define coll mutable indexed sequence
+ * @define indexedSeqInfo
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ * @define willNotTerminateInf
+ * @define mayNotTerminateInf
*/
trait IndexedSeqLike[A, +Repr] extends scala.collection.IndexedSeqLike[A, Repr] { self =>
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
+ /** Replaces element at given index with a new value.
+ *
+ * @param idx the index of the element to replace.
+ * @param elem the new value.
+ * @throws IndexOutOfBoundsException if the index is not valid.
+ */
def update(idx: Int, elem: A)
/** Creates a view of this iterable @see Iterable.View
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
new file mode 100755
index 0000000000..293588af7d
--- /dev/null
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.collection
+package mutable
+import generic._
+
+/** A subtrait of scala.collection.IndexedSeq which represents sequences
+ * that can be mutated.
+ *
+ * @since 2.8
+ */
+trait IndexedSeqOptimized[A, +Repr] extends IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 77b8862421..966a537bf1 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -1,32 +1,35 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import generic._
-import Math.MAX_INT
import TraversableView.NoBuilder
-/** A non-strict view of a mutable IndexedSeq.
- * This is a leaf class which mixes methods returning a plain IndexedSeq view
- * and methods returning a mutable IndexedSeq view.
- * There is no associated `Like' class.
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A non-strict view of a mutable `IndexedSeq`.
+ * $viewinfo
+ * Some of the operations of this class will yield again a mutable indexed sequence,
+ * others will just yield a plain indexed sequence of type `collection.IndexedSeq`.
+ * Because this is a leaf class there is no associated `Like' class.
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ * @tparam A the element type of the view
+ * @tparam Coll the type of the underlying collection containing the elements.
*/
-trait IndexedSeqView[A, +Coll] extends scala.collection.IndexedSeqView[A, Coll] {
+trait IndexedSeqView[A, +Coll] extends IndexedSeq[A]
+ with IndexedSeqOptimized[A, IndexedSeqView[A, Coll]]
+ with scala.collection.SeqView[A, Coll]
+ with scala.collection.SeqViewLike[A, Coll, IndexedSeqView[A, Coll]] {
self =>
def update(idx: Int, elem: A)
@@ -79,7 +82,7 @@ self =>
override def filter(p: A => Boolean): IndexedSeqView[A, Coll] = newFiltered(p)
override def init: IndexedSeqView[A, Coll] = newSliced(0, size - 1).asInstanceOf[IndexedSeqView[A, Coll]]
- override def drop(n: Int): IndexedSeqView[A, Coll] = newSliced(n max 0, MAX_INT).asInstanceOf[IndexedSeqView[A, Coll]]
+ override def drop(n: Int): IndexedSeqView[A, Coll] = newSliced(n max 0, Int.MaxValue).asInstanceOf[IndexedSeqView[A, Coll]]
override def take(n: Int): IndexedSeqView[A, Coll] = newSliced(0, n).asInstanceOf[IndexedSeqView[A, Coll]]
override def slice(from: Int, until: Int): IndexedSeqView[A, Coll] = newSliced(from max 0, until).asInstanceOf[IndexedSeqView[A, Coll]]
override def dropWhile(p: A => Boolean): IndexedSeqView[A, Coll] = newDroppedWhile(p).asInstanceOf[IndexedSeqView[A, Coll]]
@@ -89,9 +92,24 @@ self =>
override def reverse: IndexedSeqView[A, Coll] = newReversed.asInstanceOf[IndexedSeqView[A, Coll]]
}
-/*
- * object IndexedSeqView {
- type Coll = TraversableView[_, C] forSome { type C <: scala.collection.Traversable[_] }
- implicit def canBuildFrom[A]: CanBuildFrom[IndexedSeq[_], A, IndexedSeqView[A], Coll] = new CanBuildFrom[mutable.IndexedSeq[_], A, IndexedSeqView[A], Coll] { : Coll) = new NoBuilder }
+/** An object containing the necessary implicit definitions to make
+ * `SeqView`s work. Its definitions are generally not accessed directly by clients.
+ *
+ * Note that the `canBuildFrom` factories yield `SeqView`s, not `IndexedSeqView`s.
+ * This is intentional, because not all operations yield again a `mutable.IndexedSeqView`.
+ * For instance, `map` just gives a `SeqView`, which reflects the fact that
+ * `map` cannot do its work and maintain a pointer into the original indexed sequence.
+ */
+object IndexedSeqView {
+ type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]}
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] =
+ new CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] {
+ def apply(from: Coll) = new NoBuilder
+ def apply() = new NoBuilder
+ }
+ implicit def arrCanBuildFrom[A]: CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] =
+ new CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] {
+ def apply(from: TraversableView[_, Array[_]]) = new NoBuilder
+ def apply() = new NoBuilder
+ }
}
-*/
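A short sketch of the point made in the comment above, assuming a 2.8-era library: transforming operations such as `map` hand back a plain read-only view, while the view itself stays lazy over the mutable underlying collection.

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2, 3, 4)
    val tail = buf.view drop 1          // a lazy view; no elements are copied
    val doubled = buf.view map (_ * 2)  // a plain SeqView: no way to write back through it
    buf(1) = 20                         // mutate the underlying buffer...
    println(tail.toList)                // ...the change shows through: List(20, 3, 4)
    println(doubled.toList)             // List(2, 40, 6, 8)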
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index 5d20664730..71143f74e3 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,15 +10,8 @@ package mutable
import generic._
-/** <p>
- * A subtrait of <a href="../Iterable.html" target="contentFrame">
- * <code>collection.Iterable</code></a> which represents iterables
- * that can be mutated.
- * </p>
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A base trait for iterable collections that can be mutated.
+ * $iterableInfo
*/
trait Iterable[A] extends Traversable[A]
with scala.collection.Iterable[A]
@@ -27,13 +20,10 @@ trait Iterable[A] extends Traversable[A]
override def companion: GenericCompanion[Iterable] = Iterable
}
-/** <p>
- * A factory object for the trait <a href="Iterable.html"
- * target="contentFrame"><code>Iterable</code></a>.
- * </p>
- *
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is an `ArrayBuffer`.
+ * @define coll mutable iterable collection
+ * @define Coll mutable.Iterable
*/
object Iterable extends TraversableFactory[Iterable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = new GenericCanBuildFrom[A]
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index b70d635cc3..db6ca19f2b 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
@@ -17,13 +16,15 @@ import immutable.{List, Nil}
* be added to this builder with `++=` are not evaluated until `result` is called.
*
* @since 2.8
+ *
+ * @tparam Elem type of the elements for this builder.
+ * @tparam To type of the collection this builder builds.
*/
abstract class LazyBuilder[Elem, +To] extends Builder[Elem, To] {
/** The different segments of elements to be added to the builder, represented as iterators */
- protected var parts = new ListBuffer[scala.collection.Traversable[Elem]]
+ protected var parts = new ListBuffer[TraversableOnce[Elem]]
def +=(x: Elem): this.type = { parts += List(x); this }
- override def ++=(xs: Iterator[Elem]): this.type = { parts += xs.toStream; this }
- override def ++=(xs: scala.collection.Traversable[Elem]): this.type = { parts += xs; this }
+ override def ++=(xs: TraversableOnce[Elem]): this.type = { parts += xs ; this }
def result(): To
def clear() { parts.clear() }
}
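`LazyBuilder` leaves `result` abstract; a minimal hypothetical subclass (the name and the flattening strategy are only for illustration) shows the deferred evaluation described above: whatever is passed to `++=` is only stored, and is traversed when `result()` runs.

    import scala.collection.mutable.LazyBuilder

    class ListLazyBuilder[A] extends LazyBuilder[A, List[A]] {
      // materialize all stored segments only now
      def result(): List[A] = parts.toList.flatMap(_.toList)
    }

    val b = new ListLazyBuilder[Int]
    b += 1
    b ++= Iterator(2, 3)      // the iterator is kept as-is, not consumed here
    val xs = b.result()       // evaluation happens here: List(1, 2, 3)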
diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala
index f23b68c3c4..08a8c8282c 100644
--- a/src/library/scala/collection/mutable/LinearSeq.scala
+++ b/src/library/scala/collection/mutable/LinearSeq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,10 +13,12 @@ package mutable
import generic._
-/** A subtrait of <code>collection.Seq</code> which represents sequences
- * that cannot be mutated.
+/** A subtrait of `collection.LinearSeq` which represents sequences
+ * that can be mutated.
+ * $linearSeqInfo
*
- * @since 2.8
+ * @define Coll LinearSeq
+ * @define coll linear sequence
*/
trait LinearSeq[A] extends Seq[A]
with scala.collection.LinearSeq[A]
@@ -26,6 +27,11 @@ trait LinearSeq[A] extends Seq[A]
override def companion: GenericCompanion[LinearSeq] = LinearSeq
}
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `MutableList`.
+ * @define coll mutable linear sequence
+ * @define Coll mutable.LinearSeq
+ */
object LinearSeq extends SeqFactory[LinearSeq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, LinearSeq[A]] = new MutableList[A]
diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala
index 049cab32c4..667d45b637 100644
--- a/src/library/scala/collection/mutable/LinkedEntry.scala
+++ b/src/library/scala/collection/mutable/LinkedEntry.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/**
- * @since 2.8
+/** Class for the linked hash map entry, used internally.
+ * @since 2.8
*/
@serializable
final class LinkedEntry[A, B](val key: A, var value: B)
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index b0cad79ef4..be6442561c 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,22 +13,38 @@ package mutable
import generic._
-/** This class implements mutable maps using a hashtable.
- *
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.8
- * @since 2.7
+/** $factoryInfo
+ * @define Coll LinkedHashMap
+ * @define coll linked hash map
*/
object LinkedHashMap extends MutableMapFactory[LinkedHashMap] {
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), LinkedHashMap[A, B]] = new MapCanBuildFrom[A, B]
def empty[A, B] = new LinkedHashMap[A, B]
}
-/**
- * @since 2.7
+/** This class implements mutable maps using a hashtable.
+ * The iterator and all traversal methods of this class visit elements in the order they were inserted.
+ *
+ * @tparam A the type of the keys contained in this hash map.
+ * @tparam B the type of the values assigned to keys in this hash map.
+ *
+ * @define Coll LinkedHashMap
+ * @define coll linked hash map
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `LinkedHashMap[A, B]` if the elements contained in the resulting collection are
+ * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[LinkedHashMap, (A, B), LinkedHashMap[A, B]]`
+ * is defined in object `LinkedHashMap`. Otherwise, `That` resolves to the most specific type that doesn't have
+ * to contain pairs of type `(A, B)`, which is `Iterable`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `LinkedHashMap`.
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define orderDependent
+ * @define orderDependentFold
*/
-@serializable
+@serializable @SerialVersionUID(1L)
class LinkedHashMap[A, B] extends Map[A, B]
with MapLike[A, B, LinkedHashMap[A, B]]
with HashTable[A] {
@@ -39,8 +54,8 @@ class LinkedHashMap[A, B] extends Map[A, B]
type Entry = LinkedEntry[A, B]
- protected var firstEntry: Entry = null
- protected var lastEntry: Entry = null
+ @transient protected var firstEntry: Entry = null
+ @transient protected var lastEntry: Entry = null
def get(key: A): Option[B] = {
val e = findEntry(key)
@@ -53,9 +68,7 @@ class LinkedHashMap[A, B] extends Map[A, B]
if (e == null) {
val e = new Entry(key, value)
addEntry(e)
- if (firstEntry == null) firstEntry = e
- else { lastEntry.later = e; e.earlier = lastEntry }
- lastEntry = e
+ updateLinkedEntries(e)
None
} else {
val v = e.value
@@ -64,6 +77,12 @@ class LinkedHashMap[A, B] extends Map[A, B]
}
}
+ private def updateLinkedEntries(e: Entry) {
+ if (firstEntry == null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ }
+
override def remove(key: A): Option[B] = {
val e = removeEntry(key)
if (e eq null) None
@@ -115,4 +134,16 @@ class LinkedHashMap[A, B] extends Map[A, B]
clearTable()
firstEntry = null
}
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, _.value)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ init[B](in, { (key, value) =>
+ val entry = new Entry(key, value)
+ updateLinkedEntries(entry)
+ entry
+ })
+ }
}
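The insertion-order guarantee documented above, in a compact sketch:

    import scala.collection.mutable.LinkedHashMap

    val m = LinkedHashMap[String, Int]()
    m("one") = 1
    m("two") = 2
    m("three") = 3
    m("one") = 10             // updating an existing key keeps its position
    println(m.toList)         // List((one,10), (two,2), (three,3))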
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 081b068723..922ff25276 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -1,23 +1,42 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import generic._
-/** Todo: this has O(n) cost for element removal.
- * Should be rewritten to be more efficient.
- * @since 2.2
+/** This class implements mutable sets using a hashtable.
+ * The iterator and all traversal methods of this class visit elements in the order they were inserted.
+ *
+ * @author Matthias Zenger
+ * @author Martin Odersky
+ * @version 2.0, 31/12/2006
+ * @since 1
+ *
+ * @tparam A the type of the elements contained in this set.
+ *
+ * @define Coll LinkedHashSet
+ * @define coll linked hash set
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `LinkedHashSet[B]` because an implicit of type `CanBuildFrom[LinkedHashSet, B, LinkedHashSet[B]]`
+ * is defined in object `LinkedHashSet`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `LinkedHashSet`.
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define orderDependent
+ * @define orderDependentFold
*/
-@serializable
+@serializable @SerialVersionUID(1L)
class LinkedHashSet[A] extends Set[A]
with GenericSetTemplate[A, LinkedHashSet]
with SetLike[A, LinkedHashSet[A]]
@@ -25,7 +44,7 @@ class LinkedHashSet[A] extends Set[A]
{
override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet
- protected val ordered = new ListBuffer[A]
+ @transient private[this] var ordered = new ListBuffer[A]
override def size = tableSize
@@ -49,13 +68,25 @@ class LinkedHashSet[A] extends Set[A]
clearTable()
}
- override def iterator = ordered.iterator
+ override def iterator: Iterator[A] = ordered.iterator
override def foreach[U](f: A => U) = ordered foreach f
+
+ private def writeObject(s: java.io.ObjectOutputStream) {
+ serializeTo(s)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ ordered = new ListBuffer[A]
+ init(in, ordered += )
+ }
}
-/** Factory object for `LinkedHashSet` class */
-object LinkedHashSet extends SetFactory[LinkedHashSet] {
+/** $factoryInfo
+ * @define Coll LinkedHashSet
+ * @define coll linked hash set
+ */
+object LinkedHashSet extends MutableSetFactory[LinkedHashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = setCanBuildFrom[A]
override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A]
}
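The same insertion-order behaviour for sets, as a sketch:

    import scala.collection.mutable.LinkedHashSet

    val s = LinkedHashSet(3, 1, 2)
    s += 5
    s += 1                    // already present: the original position is kept
    println(s.toList)         // List(3, 1, 2, 5)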
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index abd691ef5c..538e2f1ba5 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,15 +13,31 @@ package mutable
import generic._
-/** This class implements single linked lists where both the head (<code>elem</code>)
- * and the tail (<code>next</code>) are mutable.
+/** This class implements single linked lists where both the head (`elem`)
+ * and the tail (`next`) are mutable.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
+ *
+ * @tparam A the type of the elements contained in this linked list.
+ *
+ * @define Coll LinkedList
+ * @define coll linked list
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `LinkedList[B]` because an implicit of type `CanBuildFrom[LinkedList, B, LinkedList[B]]`
+ * is defined in object `LinkedList`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `LinkedList`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-@serializable
+@serializable @SerialVersionUID(-7308240733518833071L)
class LinkedList[A]() extends LinearSeq[A]
with GenericTraversableTemplate[A, LinkedList]
with LinkedListLike[A, LinkedList[A]] {
@@ -39,6 +54,10 @@ class LinkedList[A]() extends LinearSeq[A]
override def companion: GenericCompanion[LinkedList] = LinkedList
}
+/** $factoryInfo
+ * @define Coll LinkedList
+ * @define coll linked list
+ */
object LinkedList extends SeqFactory[LinkedList] {
override def empty[A]: LinkedList[A] = new LinkedList[A]
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 42f37766b2..6fb516d566 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,13 +15,20 @@ import generic._
import annotation.tailrec
/** This extensible class may be used as a basis for implementing linked
- * list. Type variable <code>A</code> refers to the element type of the
- * list, type variable <code>This</code> is used to model self types of
+ * lists. Type variable `A` refers to the element type of the
+ * list, type variable `This` is used to model self types of
* linked lists.
+ *
* @author Matthias Zenger
* @author Martin Odersky
- * @version 2.8
+ * @version 1.0, 08/07/2003
* @since 2.8
+ *
+ * @tparam A type of the elements contained in the linked list
+ * @tparam This the type of the actual linked list holding the elements
+ *
+ * @define Coll LinkedList
+ * @define coll linked list
*/
trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self =>
@@ -31,7 +37,7 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
override def isEmpty = next eq this
- override def length: Int = if (isEmpty) 0 else next.length
+ override def length: Int = if (isEmpty) 0 else next.length + 1
override def head: A = elem
@@ -55,13 +61,12 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
}
/** Insert linked list `that` at current position of this linked list
- * @pre this linked list is not empty
+ * @note this linked list must not be empty
*/
def insert(that: This): Unit = {
require(nonEmpty, "insert into empty list")
if (that.nonEmpty) {
- that.append(next)
- next = that
+ next = next.append(that)
}
}
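A small sketch of the mutable head (`elem`) and tail (`next`) described above, together with the corrected `length`:

    import scala.collection.mutable.LinkedList

    val xs = LinkedList(1, 2, 3)
    xs.elem = 10              // the head cell is mutable...
    xs.next.elem = 20         // ...and so is every cell reachable via next
    println(xs)               // LinkedList(10, 20, 3)
    println(xs.length)        // 3, i.e. xs.next.length + 1 as per the fix above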
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index ee6e672fb1..415096ed89 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,13 +14,29 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
-/** A Buffer implementation back up by a list. It provides constant time
+/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
+ *
+ * @tparam A the type of this list buffer's elements.
+ *
+ * @define Coll ListBuffer
+ * @define coll list buffer
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `ListBuffer[B]` because an implicit of type `CanBuildFrom[ListBuffer, B, ListBuffer[B]]`
+ * is defined in object `ListBuffer`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `ListBuffer`.
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @SerialVersionUID(3419063961353022661L)
final class ListBuffer[A]
@@ -42,7 +57,9 @@ final class ListBuffer[A]
protected def underlying: immutable.Seq[A] = start
- /** The current length of the buffer
+ /** The current length of the buffer.
+ *
+ * This operation takes constant time.
*/
override def length = len
@@ -52,13 +69,13 @@ final class ListBuffer[A]
if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString())
else super.apply(n)
- /** Replaces element at index <code>n</code> with the new element
- * <code>newelem</code>. Takes time linear in the buffer size. (except the
+ /** Replaces element at index `n` with the new element
+ * `newelem`. Takes time linear in the buffer size (except the
* first element, which is updated in constant time).
*
* @param n the index of the element to replace.
* @param x the new element.
- * @throws Predef.IndexOutOfBoundsException if <code>n</code> is out of bounds.
+ * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
def update(n: Int, x: A) {
try {
@@ -90,6 +107,7 @@ final class ListBuffer[A]
/** Appends a single element to this buffer. This operation takes constant time.
*
* @param x the element to append.
+ * @return this $coll.
*/
def += (x: A): this.type = {
if (exported) copy()
@@ -117,7 +135,7 @@ final class ListBuffer[A]
* time.
*
* @param x the element to prepend.
- * @return this buffer.
+ * @return this $coll.
*/
def +=: (x: A): this.type = {
if (exported) copy()
@@ -128,13 +146,13 @@ final class ListBuffer[A]
this
}
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a new
- * one. Instead, it will insert a new element at index <code>n</code>.
+ /** Inserts new elements at the index `n`. As opposed to method
+ * `update`, this method will not replace an element with a new
+ * one. Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
* @param iter the iterable object providing all elements to insert.
- * @throws Predef.IndexOutOfBoundsException if <code>n</code> is out of bounds.
+ * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
try {
@@ -227,12 +245,12 @@ final class ListBuffer[A]
// Overrides of methods in Buffer
/** Removes the element on a given index position. May take time linear in
- * the buffer size
+ * the buffer size.
*
* @param n the index which refers to the element to delete.
- * @return n the element that was formerly at position <code>n</code>.
- * @pre an element exists at position <code>n</code>
- * @throws Predef.IndexOutOfBoundsException if <code>n</code> is out of bounds.
+ * @return the element that was formerly at position `n`.
+ * @note an element must exist at position `n`.
+ * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
def remove(n: Int): A = {
if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString())
@@ -259,6 +277,7 @@ final class ListBuffer[A]
* buffer size.
*
* @param x the element to remove.
+ * @return this $coll.
*/
override def -= (elem: A): this.type = {
if (exported) copy()
@@ -328,12 +347,11 @@ final class ListBuffer[A]
override def stringPrefix: String = "ListBuffer"
}
-/** Factory object for <code>ListBuffer</code> class.
- *
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * @define Coll ListBuffer
+ * @define coll list buffer
*/
object ListBuffer extends SeqFactory[ListBuffer] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListBuffer[A]] = new GenericCanBuildFrom[A]
- def newBuilder[A]: Builder[A, ListBuffer[A]] = new AddingBuilder(new ListBuffer[A])
+ def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowingBuilder(new ListBuffer[A])
}
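The complexity claims above (constant-time append and prepend, update constant only at index 0), as a sketch; the export/copy behaviour matches the `if (exported) copy()` checks visible in the hunk above.

    import scala.collection.mutable.ListBuffer

    val buf = ListBuffer(2, 3)
    buf += 4                  // constant-time append
    1 +=: buf                 // constant-time prepend
    buf(0) = 0                // updating index 0 is the constant-time case
    val list = buf.toList     // constant time: the buffer is marked as exported
    buf += 5                  // forces a defensive copy, so `list` stays List(0, 2, 3, 4)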
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index a63f3a024c..3aa6f9c753 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,8 +13,26 @@ package mutable
import generic._
-/**
- * @since 2.8
+/** A simple mutable map backed by a list.
+ *
+ * @tparam A the type of the keys contained in this list map.
+ * @tparam B the type of the values assigned to keys in this list map.
+ *
+ * @define Coll mutable.ListMap
+ * @define coll mutable list map
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `ListMap[A, B]` if the elements contained in the resulting collection are
+ * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[ListMap, (A, B), ListMap[A, B]]`
+ * is defined in object `ListMap`. Otherwise, `That` resolves to the most specific type that doesn't have
+ * to contain pairs of type `(A, B)`, which is `Iterable`.
+ * @define $bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `ListMap`.
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define orderDependent
+ * @define orderDependentFold
*/
@serializable
class ListMap[A, B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] {
@@ -40,10 +57,9 @@ class ListMap[A, B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] {
override def size: Int = siz
}
-/** This class implements mutable maps using a list.
- *
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * @define Coll mutable.ListMap
+ * @define coll mutable list map
*/
object ListMap extends MutableMapFactory[ListMap] {
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] = new MapCanBuildFrom[A, B]
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index eae9a88bfd..287069d04b 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,8 +13,11 @@ package mutable
import generic._
-/**
- * @since 1
+/** A base trait for maps that can be mutated.
+ * $mapNote
+ * $mapTags
+ * @since 1.0
+ * @author Matthias Zenger
*/
trait Map[A, B]
extends Iterable[(A, B)]
@@ -36,11 +38,16 @@ trait Map[A, B]
}
*/
}
-/* Factory object for `Map` class
- * Currently this returns a HashMap.
+
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `HashMap`.
+ * @define coll mutable map
+ * @define Coll mutable.Map
*/
object Map extends MutableMapFactory[Map] {
+ /** $canBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
+
def empty[A, B]: Map[A, B] = new HashMap[A, B]
}
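Per the factory comment above, the default `mutable.Map` implementation is a `HashMap`; a one-liner to illustrate:

    import scala.collection.mutable

    val m = mutable.Map(1 -> "one")                   // built via Map.empty / canBuildFrom
    println(m.isInstanceOf[mutable.HashMap[_, _]])    // true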
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index bb53c1c447..4e5b6d1ef7 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
@@ -14,6 +13,10 @@ package mutable
/** The canonical builder for immutable maps, working with the map's `+` method
* to add new elements.
* Collections are built from their `empty` element using this + method.
+ *
+ * @tparam A Type of the keys for the map this builder creates.
+ * @tparam B Type of the values for the map this builder creates.
+ * @tparam Coll The type of the actual collection this builder builds.
* @param empty The empty element of the collection.
*
* @since 2.8
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 9ea83c43c2..11677aca43 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -1,51 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import generic._
+import annotation.migration
-/** <p>
- * A generic template for mutable maps from keys of type <code>A</code> to
- * values of type <code>B</code>.
- * </p>
- * <p>
- * To implement a concrete mutable map, you need to provide implementations
- * of the following methods:
- * </p><pre>
- * <b>def</b> get(key: A): Option[B]
- * <b>def</b> iterator: Iterator[(A, B)]
- * <b>def</b> += (kv: (A, B)): <b>this.type</b>
- * <b>def</b> -= (key: A): <b>this.type</b></pre>
- * <p>
- * If you wish that methods <code>like</code>, <code>take</code>,
- * <code>drop</code>, <code>filter</code> return the same kind of map, you
- * should also override:
- * </p><pre>
- * <b>def</b> empty: This</pre>
- * <p>
- * If you to avoid the unncessary construction of an <code>Option</code>
- * object, you could also override <code>apply</code>, <code>update</code>,
- * and <code>delete</code>.
- * </p>
- * <p>
- * It is also good idea to override methods <code>foreach</code> and
- * <code>size</code> for efficiency.
- * </p>
- *
- * @since 2.8
+/** A template trait for mutable maps.
+ * $mapNote
+ * $mapTags
+ * @since 2.8
*/
trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends MapLikeBase[A, B, This]
+ extends scala.collection.MapLike[A, B, This]
with Builder[(A, B), This]
with Growable[(A, B)]
with Shrinkable[A]
@@ -54,22 +29,22 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
import scala.collection.Traversable
- /** <p>
- * A common implementation of <code>newBuilder</code> for all mutable maps
- * in terms of <code>empty</code>.
- * </p>
- * <p>
- * Overrides <code>MapLike</code> implementation for better efficiency.
- * </p>
+ /** A common implementation of `newBuilder` for all mutable maps
+ * in terms of `empty`.
+ *
+ * Overrides `MapLike` implementation for better efficiency.
*/
override protected[this] def newBuilder: Builder[(A, B), This] = empty
- /** Adds a new mapping from <code>key</code>
- * to <code>value</code> to the map. If the map already contains a
- * mapping for <code>key</code>, it will be overridden.
+ /** Adds a new key/value pair to this map and optionally returns the previously bound value.
+ * If the map already contains a
+ * mapping for the key, it will be overridden by the new value.
*
- * @param key The key to update
- * @param value The new value
+ * @param key the key to update
+ * @param value the new value
+ * @return an option value containing the value associated with the key
+ * before the `put` operation was executed, or `None` if `key`
+ * was not defined in the map before.
*/
def put(key: A, value: B): Option[B] = {
val r = get(key)
@@ -77,89 +52,84 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
r
}
- /** Adds a new mapping from <code>key</code>
- * to <code>value</code> to the map. If the map already contains a
- * mapping for <code>key</code>, it will be overridden.
+ /** Adds a new key/value pair to this map.
+ * If the map already contains a
+ * mapping for the key, it will be overridden by the new value.
*
* @param key The key to update
* @param value The new value
- * @return An option consisting of value associated previously associated with `key` in the map,
- * or None if `key` was not yet defined in the map.
*/
def update(key: A, value: B) { this += ((key, value)) }
- /** Add a new key/value mapping this map.
+ /** Adds a new key/value pair to this map.
+ * If the map already contains a
+ * mapping for the key, it will be overridden by the new value.
* @param kv the key/value pair.
* @return the map itself
*/
def += (kv: (A, B)): this.type
- /** Create a new map consisting of all elements of the current map
- * plus the given mapping from <code>key</code> to <code>value</code>.
+ /** Creates a new map consisting of all key/value pairs of the current map
+ * plus a new pair of a given key and value.
*
- * @param key The key to ad
+ * @param key The key to add
* @param value The new value
- * @return A fresh immutable map
- */
- override def updated[B1 >: B](key: A, value: B1): mutable.Map[A, B1] = this + ((key, value))
-
- /** If given key is already in this map, returns associated value
- * Otherwise, computes value from given expression `op`, stores with key
- * in map and returns that value.
+ * @return A fresh immutable map with the binding from `key` to
+ * `value` added to this map.
*/
- def cached(key: A, op: => B) = get(key) match {
- case Some(v) => v
- case None => val v = op; update(key, v); v
- }
+ override def updated[B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
- /** Add a new key/value mapping and return the map itself.
+ /** Creates a new map containing a new key/value mapping and all the key/value mappings
+ * of this map.
+ *
+ * Mapping `kv` will override existing mappings from this map with the same key.
*
* @param kv the key/value mapping to be added
+ * @return a new map containing mappings of this map and the mapping `kv`.
*/
- @deprecated("This operation will create a new map in the future. To add an element as a side\n"+
- "effect to an existing map and return that map itself, use +=. If you do want\n"+
- "to create a fresh map, you can use `clone() +=' to avoid a @deprecated warning.")
- def + (kv: (A, B)): this.type = { update(kv._1, kv._2); this }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To add an element as a\n"+
+ "side effect to an existing map and return that map itself, use +=."
+ )
+ def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = clone().asInstanceOf[Map[A, B1]] += kv
- /** Adds two or more key/value mappings and return the map itself.
- * with the added elements.
+ /** Creates a new map containing two or more key/value mappings and all the key/value
+ * mappings of this map.
+ *
+ * Specified mappings will override existing mappings from this map with the same keys.
*
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
+ * @return a new map containing mappings of this map and two or more specified mappings.
*/
- @deprecated("This operation will create a new map in the future. To add an element as a side\n"+
- "effect to an existing map and return that map itself, use +=. If you do want to\n"+
- "create a fresh map, you can use `clone() +=` to avoid a @deprecated warning.")
- def +(elem1: (A, B), elem2: (A, B), elems: (A, B)*): this.type =
- this += elem1 += elem2 ++= elems
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To add an element as a\n"+
+ "side effect to an existing map and return that map itself, use +=."
+ )
+ override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1] =
+ clone().asInstanceOf[Map[A, B1]] += elem1 += elem2 ++= elems
- /** Adds a number of elements provided by a traversable object
- * via its <code>iterator</code> method and returns
- * either the collection itself (if it is mutable), or a new collection
- * with the added elements.
+ /** Creates a new map containing the key/value mappings provided by the specified traversable object
+ * and all the key/value mappings of this map.
*
- * @param iter the traversable object.
- */
- @deprecated("This operation will create a new map in the future. To add elements as a side\n"+
- "effect to an existing map and return that map itself, use ++=. If you do want\n"+
- "to create a fresh map, you can use `clone() ++=` to avoid a @deprecated warning.")
- def ++(iter: Traversable[(A, B)]): this.type = { for (elem <- iter) +=(elem); this }
-
- /** Adds a number of elements provided by an iterator
- * via its <code>iterator</code> method and returns
- * the collection itself.
+ * Note that existing mappings from this map with the same key as those in `xs` will be overridden.
*
- * @param iter the iterator
- */
- @deprecated("This operation will create a new map in the future. To add elements as a side\n"+
- "effect to an existing map and return that map itself, use ++=. If you do want\n"+
- "to create a fresh map, you can use `clone() +=` to avoid a @deprecated warning.")
- def ++(iter: Iterator[(A, B)]): this.type = { for (elem <- iter) +=(elem); this }
-
- /** If given key is defined in this map, remove it and return associated value as an Option.
- * If key is not present return None.
+ * @param xs the traversable object.
+ * @return a new map containing mappings of this map and those provided by `xs`.
+ */
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To add the elements as a\n"+
+ "side effect to an existing map and return that map itself, use ++=."
+ )
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
+ clone().asInstanceOf[Map[A, B1]] ++= xs
+
+ /** Removes a key from this map, returning the value associated previously
+ * with that key as an option.
* @param key the key to be removed
+ * @return an option value containing the value associated previously with `key`,
+ * or `None` if `key` was not defined in the map before.
*/
def remove(key: A): Option[B] = {
val r = get(key)
@@ -167,46 +137,57 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
r
}
- /** Delete a key from this map if it is present.
+ /** Removes a key from this map.
* @param key the key to be removed
- * @note same as `delete`.
+ * @return the map itself.
*/
def -= (key: A): this.type
- /** Delete a key from this map if it is present and return the map itself.
+ /** Creates a new map with all the key/value mappings of this map except the key/value mapping
+ * with the specified key.
+ *
* @param key the key to be removed
+ * @return a new map with all the mappings of this map except that with a key `key`.
*/
- @deprecated("This operation will create a new map in the future. To add elements as a side\n"+
- "effect to an existing map and return that map itself, use -=. If you do want\n"+
- "to create a fresh map, you can use `clone() -=` to avoid a @deprecated warning.")
- override def -(key: A): This = { -=(key); repr }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To remove an element as a\n"+
+ "side effect to an existing map and return that map itself, use -=."
+ )
+ override def -(key: A): This = clone() -= key
/** If given key is defined in this map, remove it and return associated value as an Option.
* If key is not present return None.
* @param key the key to be removed
*/
- @deprecated("Use `remove' instead") def removeKey(key: A): Option[B] = remove(key)
-
+ @deprecated("Use `remove' instead") def removeKey(key: A): Option[B] = remove(key)
- /** Removes all elements from the set. After this operation is completed,
- * the set will be empty.
+ /** Removes all bindings from the map. After this operation has completed,
+ * the map will be empty.
*/
- def clear() { for ((k, v) <- this.iterator) -=(k) }
+ def clear() { keysIterator foreach -= }
- /** Check if this map maps <code>key</code> to a value.
- * Return that value if it exists, otherwise put <code>default</code>
- * as that key's value and return it.
- */
- def getOrElseUpdate(key: A, default: => B): B =
+ /** If given key is already in this map, returns associated value.
+ *
+ * Otherwise, computes the value from the given expression `op`, stores it with
+ * the key in the map and returns that value.
+ * @param key the key to test
+ * @param op the computation yielding the value to associate with `key`, if
+ * `key` is previously unbound.
+ * @return the value associated with key (either previously or as a result
+ * of executing the method).
+ */
+ def getOrElseUpdate(key: A, op: => B): B =
get(key) match {
case Some(v) => v
- case None => val d = default; this(key) = d; d
+ case None => val d = op; this(key) = d; d
}
- /** This function transforms all the values of mappings contained
- * in this map with function <code>f</code>.
+ /** Applies a transformation function to all values contained in this map.
+ * The transformation function produces new values from existing keys and
+ * their associated values.
*
- * @param f The transformation to apply
+ * @param f the transformation to apply
+ * @return the map itself.
*/
def transform(f: (A, B) => B): this.type = {
this.iterator foreach {
@@ -215,59 +196,51 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
this
}
- /** Retain only those mappings for which the predicate
- * <code>p</code> returns <code>true</code>.
+ /** Retains only those mappings for which the predicate
+ * `p` returns `true`.
*
* @param p The test predicate
*/
- @deprecated("cannot be type inferred because of retain in Iterable.")
def retain(p: (A, B) => Boolean): this.type = {
- for ((k, v) <- this) if (!p(k, v)) -=(k)
+ for ((k, v) <- this ; if !p(k, v))
+ this -= k
+
this
}
- override def clone(): This =
- empty ++= repr
+ override def clone(): This = empty ++= repr
- /** The result when this map is used as a builder */
+ /** The result when this map is used as a builder
+ * @return the map representation itself.
+ */
def result: This = repr
- /** Removes two or more elements from this collection and returns
- * the collection itself.
+ /** Creates a new map with all the key/value mappings of this map except mappings with keys
+ * equal to any of the two or more specified keys.
*
* @param elem1 the first element to remove.
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
- */
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=' if you intend to create a new collection.")
- override def -(elem1: A, elem2: A, elems: A*): This = {
- this -= elem1 -= elem2 --= elems
- repr
- }
-
- /** Removes a number of elements provided by a Traversable object and returns
- * the collection itself.
- *
- * @param iter the Traversable object.
- */
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: Traversable[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
-
-
- /** Removes a number of elements provided by an iterator and returns
- * the collection itself.
+ * @return a new map containing all the mappings of this map except mappings
+ * with a key equal to `elem1`, `elem2` or any of `elems`.
+ */
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To remove an element as a\n"+
+ "side effect to an existing map and return that map itself, use -=."
+ )
+ override def -(elem1: A, elem2: A, elems: A*): This =
+ clone() -= elem1 -= elem2 --= elems
+
+ /** Creates a new map with all the key/value mappings of this map except mappings with keys
+ * equal to any of those provided by the specified traversable object.
*
- * @param iter the iterator
- */
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: Iterator[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
+ * @param xs the traversable object.
+ * @return a new map with all the key/value mappings of this map except mappings
+ * with a key equal to a key from `xs`.
+ */
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To remove the elements as a\n"+
+ "side effect to an existing map and return that map itself, use --=."
+ )
+ override def --(xs: TraversableOnce[A]): This = clone() --= xs
}
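The 2.8 migration described above in a compact sketch: `+=`/`-=` mutate the receiver, while `+`, `-`, `++` and `--` now clone and return a new map.

    import scala.collection.mutable

    val m = mutable.Map("a" -> 1)
    m += ("b" -> 2)                  // in-place: returns m itself
    val m2 = m + ("c" -> 3)          // clones: m is left untouched
    println(m contains "c")          // false
    println(m2 contains "c")         // true

    // getOrElseUpdate caches a computed value on first access
    val cache = mutable.Map.empty[String, Int]
    val v = cache.getOrElseUpdate("answer", 6 * 7)   // stores and returns 42
    println(cache("answer"))                         // 42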
diff --git a/src/library/scala/collection/mutable/MapLikeBase.scala b/src/library/scala/collection/mutable/MapLikeBase.scala
deleted file mode 100644
index 9afbfe2242..0000000000
--- a/src/library/scala/collection/mutable/MapLikeBase.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.collection
-package mutable
-
-/** The reason for this class is so that we can
- * have both a generic immutable `+` with signature
- *
- * def + [B1 >: B](kv: (A, B1)): Map[A, B1]
- *
- * and a (deprecated) mutable `+` of signature
- *
- * def + (kv: (A, B)): this.type = this += kv
- *
- * The former is required to fulfill the Map contract.
- * The latter is required for backwards compatibility.
- * We can't have both methods in the same class, as that would give a double definition.
- * They are OK in different classes though, and narrowly escape a `same erasure' problem.
- * Once the deprecated + goes away we can do without class MapLikeBase.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- */
-trait MapLikeBase[A, B, +This <: MapLikeBase[A, B, This] with Map[A, B]]
- extends scala.collection.MapLike[A, B, This] with Cloneable[This] {
- def + [B1 >: B] (kv: (A, B1)): mutable.Map[A, B1] = clone().asInstanceOf[mutable.Map[A, B1]] += kv
-}
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index b360b95693..c39d14b52e 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -1,41 +1,39 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** <p>
- * This is a simple wrapper class for <a href="Map.html"
- * target="contentFrame"><code>scala.collection.mutable.Map</code></a>.
- * </p>
- * <p>
- * It is most useful for assembling customized map abstractions
- * dynamically using object composition and forwarding.
- * </p>
+/**
+ * This trait implements a proxy for <a href="Map.html"
+ * target="contentFrame"><code>scala.collection.mutable.Map</code></a>.
+ *
+ * It is most useful for assembling customized map abstractions
+ * dynamically using object composition and forwarding.
*
* @author Matthias Zenger, Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
*/
+trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
+ private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] =
+ new MapProxy[A, B1] { val self = newSelf }
-trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]]
-{
override def repr = this
override def empty: MapProxy[A, B] = new MapProxy[A, B] { val self = MapProxy.this.self.empty }
+ override def updated [B1 >: B](key: A, value: B1) = newProxy(self.updated(key, value))
- override def +(kv: (A, B)) = { self.update(kv._1, kv._2) ; this }
- override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) =
- { self.+(elem1, elem2, elems: _*) ; this }
-
- override def -(key: A) = { self.remove(key); this }
+ override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
+ override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*))
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]) = newProxy(self ++ xs)
+ override def -(key: A) = newProxy(self - key)
override def += (kv: (A, B)) = { self += kv ; this }
override def -= (key: A) = { self -= key ; this }
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index b30c82971b..2f547b2c51 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -1,30 +1,53 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** This class is typically used as a mixin. It turns maps which map <code>A</code>
- * to <code>Set[B]</code> objects into multi maps which map <code>A</code> to
- * <code>B</code> objects.
+/** A trait for mutable maps with multiple values assigned to a key.
*
+ * This class is typically used as a mixin. It turns maps which map `A`
+ * to `Set[B]` objects into multi maps which map `A` to
+ * `B` objects.
+ *
+ * @define coll multimap
+ * @define Coll MultiMap
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
*/
trait MultiMap[A, B] extends Map[A, Set[B]] {
+ /** Creates a new set.
+ *
+ * Classes that use this trait as a mixin can override this method
+ * to have the desired implementation of sets assigned to new keys.
+ * By default this is `HashSet`.
+ *
+ * @return An empty set of values of type `B`.
+ */
protected def makeSet: Set[B] = new HashSet[B]
+ @deprecated("use addBinding instead")
+ def add(key: A, value: B): this.type = addBinding(key, value)
+
+ /** Adds a new binding of `value` to `key`. If `key` already has
+ * values assigned to it, `value` is added to that set; otherwise a
+ * fresh set containing only `value` is created and assigned to
+ * `key`.
+ *
+ * @param key The key to which to bind the new value.
+ * @param value The value to bind to the key.
+ * @return A reference to this multimap.
+ */
def addBinding(key: A, value: B): this.type = {
get(key) match {
case None =>
@@ -37,14 +60,31 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
this
}
+ /** Removes the binding of `value` to `key` if it exists.
+ *
+ * If this was the last value assigned to the specified key, the
+ * set assigned to that key will be removed as well.
+ *
+ * @param key The key of the binding.
+ * @param value The value to remove.
+ * @return A reference to this multimap.
+ */
def removeBinding(key: A, value: B): this.type = {
get(key) match {
case None =>
- case Some(set) => set -= value
+ case Some(set) =>
+ set -= value
+ if (set.isEmpty) this -= key
}
this
}
+ /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`.
+ *
+ * @param key The key for which the predicate is checked.
+ * @param p The predicate which a value assigned to the key must satisfy.
+ * @return `true` if such a binding exists, `false` otherwise.
+ */
def entryExists(key: A, p: B => Boolean): Boolean = get(key) match {
case None => false
case Some(set) => set exists p
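The usual mixin pattern for `MultiMap`, exercising the operations documented above (including the new behaviour of `removeBinding` dropping a key once its set becomes empty):

    import scala.collection.mutable.{HashMap, MultiMap, Set}

    val mm = new HashMap[String, Set[Int]] with MultiMap[String, Int]
    mm.addBinding("even", 2)
    mm.addBinding("even", 4)
    mm.addBinding("odd", 3)
    mm.removeBinding("odd", 3)             // last value for "odd": the key is removed too
    println(mm.get("odd"))                 // None
    println(mm.entryExists("even", _ > 3)) // true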
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index ad9d165678..65c973c9ec 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,12 +14,11 @@ package mutable
import generic._
import immutable.{List, Nil}
-/** <p>
- * This class is used internally to represent mutable lists. It is the
- * basis for the implementation of the classes
- * <code>Stack</code>, and <code>Queue</code>.
- * </p>
- * !!! todo: convert to LinkedListBuffer?
+// !!! todo: convert to LinkedListBuffer?
+/**
+ * This class is used internally to represent mutable lists. It is the
+ * basis for the implementation of the classes
+ * `Stack` and `Queue`.
*
* @author Matthias Zenger
* @author Martin Odersky
@@ -29,7 +27,7 @@ import immutable.{List, Nil}
*/
@serializable @SerialVersionUID(5938451523372603072L)
class MutableList[A] extends LinearSeq[A]
- with LinearSeqLike[A, MutableList[A]]
+ with LinearSeqOptimized[A, MutableList[A]]
with Builder[A, MutableList[A]] {
override protected[this] def newBuilder = new MutableList[A]
@@ -57,6 +55,13 @@ class MutableList[A] extends LinearSeq[A]
tl
}
+ /** Prepends a single element to this list. This operation takes constant
+ * time.
+ * @param elem the element to prepend.
+ * @return this $coll.
+ */
+ def +=: (elem: A): this.type = { prependElem(elem); this }
+
/** Returns the length of this list.
*/
override def length: Int = len
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index e1cd8ace4d..3552e1262d 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,18 +14,17 @@ package mutable
import script._
/** This class is typically used as a mixin. It adds a subscription
- * mechanism to the <code>Buffer</code> class into which this abstract
- * class is mixed in. Class <code>ObservableBuffer</code> publishes
- * events of the type <code>Message</code>.
+ * mechanism to the `Buffer` class into which this abstract
+ * class is mixed in. Class `ObservableBuffer` publishes
+ * events of the type `Message`.
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
*/
-trait ObservableBuffer[A, This <: ObservableBuffer[A, This]]
- extends Buffer[A]
- with Publisher[Message[A] with Undoable, This]
-{ self: This =>
+trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable]
+{
+ type Pub <: ObservableBuffer[A]
abstract override def +=(element: A): this.type = {
super.+=(element)
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index 4de49c34b2..fdc5addd53 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,19 +15,19 @@ import script._
/** This class is typically used as a mixin. It adds a subscription
- * mechanism to the <code>Map</code> class into which this abstract
- * class is mixed in. Class <code>ObservableMap</code> publishes
- * events of the type <code>Message</code>.
+ * mechanism to the `Map` class into which this abstract
+ * class is mixed in. Class `ObservableMap` publishes
+ * events of the type `Message`.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
*/
-trait ObservableMap[A, B, This <: ObservableMap[A, B, This]]
- extends Map[A, B]
- with Publisher[Message[(A, B)] with Undoable, This]
-{ self: This =>
+trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable]
+{
+
+ type Pub <: ObservableMap[A, B]
abstract override def += (kv: (A, B)): this.type = {
val (key, value) = kv
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index bb61e6c363..e71156cdb7 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,18 +14,18 @@ package mutable
import script._
/** This class is typically used as a mixin. It adds a subscription
- * mechanism to the <code>Set</code> class into which this abstract
- * class is mixed in. Class <code>ObservableSet</code> publishes
- * events of the type <code>Message</code>.
+ * mechanism to the `Set` class into which this abstract
+ * class is mixed in. Class `ObservableSet` publishes
+ * events of the type `Message`.
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
*/
-trait ObservableSet[A, This <: ObservableSet[A, This]]
- extends Set[A]
- with Publisher[Message[A] with Undoable, This]
-{ self: This =>
+trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
+{
+
+ type Pub <: ObservableSet[A]
abstract override def +=(elem: A): this.type = {
if (!contains(elem)) {
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 044ae59fd8..b10b88049e 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -1,21 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
+
/**
- * @since 2.7
+ * @define Coll OpenHashMap
+ * @define coll open hash map
+ *
+ * @since 2.7
*/
-object OpenHashMap{
+object OpenHashMap {
def apply[K, V](elems : (K, V)*) = {
val dict = new OpenHashMap[K, V];
elems.foreach({case (x, y) => dict(x) = y});
@@ -24,9 +27,10 @@ object OpenHashMap{
def empty[K, V] = new OpenHashMap[K, V];
- private[mutable] class Entry[Key, Value](val key : Key,
- val hash : Int,
- var value : Option[Value])
+ final private class OpenEntry[Key, Value](val key: Key,
+ val hash: Int,
+ var value: Option[Value])
+ extends HashEntry[Key, OpenEntry[Key, Value]]
private[mutable] def highestOneBit(j : Int) = { // This should really go somewhere central as we're now code sharing by cut and paste. :(
var i = j;
@@ -41,26 +45,41 @@ object OpenHashMap{
private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1;
}
-import OpenHashMap.Entry;
-
-/**
- * A mutable hash map based on an open hashing scheme. The precise scheme is undefined,
- * but it should make a reasonable effort to ensure that an insert with consecutive hash
- * codes is not unneccessarily penalised. In particular, mappings of consecutive integer
- * keys should work without significant performance loss.
+/** A mutable hash map based on an open hashing scheme. The precise scheme is undefined,
+ * but it should make a reasonable effort to ensure that an insert with consecutive hash
+ * codes is not unnecessarily penalised. In particular, mappings of consecutive integer
+ * keys should work without significant performance loss.
*
- * @author David MacIver
- * @since 2.7
+ * @tparam Key type of the keys in this map.
+ * @tparam Value type of the values in this map.
+ * @param initialSize the initial size of the internal hash table.
+ *
+ * @author David MacIver
+ * @since 2.7
+ *
+ * @define Coll OpenHashMap
+ * @define coll open hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutable.Map[Key, Value]{
+class OpenHashMap[Key, Value](initialSize : Int)
+extends Map[Key, Value]
+ with MapLike[Key, Value, OpenHashMap[Key, Value]] {
+
+ import OpenHashMap.OpenEntry
+ type Entry = OpenEntry[Key, Value]
+
+ /**
+ * The default constructor creates a hash map with an initial size of 8.
+ */
def this() = this(8);
- override def empty = OpenHashMap.empty
+ override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value]
private[this] val actualInitialSize = OpenHashMap.nextPowerOfTwo(initialSize);
private var mask = actualInitialSize - 1;;
- private var table : Array[Entry[Key, Value]] = new Array[Entry[Key, Value]](actualInitialSize);
+ private var table : Array[Entry] = new Array[Entry](actualInitialSize);
private var _size = 0;
private var deleted = 0;
@@ -70,8 +89,9 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
override def size = _size;
private[this] def size_=(s : Int) = _size = s;
+ /** Returns a mangled hash code of the provided key. */
protected def hashOf(key : Key) = {
- var h = key.hashCode;
+ var h = key.##
h ^= ((h >>> 20) ^ (h >>> 12));
h ^ (h >>> 7) ^ (h >>> 4);
}
@@ -80,7 +100,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
val oldSize = mask + 1;
val newSize = 4 * oldSize;
val oldTable = table;
- table = new Array[Entry[Key, Value]](newSize);
+ table = new Array[Entry](newSize);
mask = newSize - 1;
oldTable.foreach( entry =>
if (entry != null && entry.value != None) addEntry(entry));
@@ -104,7 +124,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
index;
}
- private[this] def addEntry(entry : Entry[Key, Value]) =
+ private[this] def addEntry(entry : Entry) =
if (entry != null) table(findIndex(entry.key, entry.hash)) = entry;
override def update(key : Key, value : Value) {
@@ -122,7 +142,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
val index = findIndex(key, hash);
val entry = table(index);
if (entry == null) {
- table(index) = new Entry(key, hash, Some(value));
+ table(index) = new OpenEntry(key, hash, Some(value));
modCount += 1;
size += 1;
None
@@ -166,9 +186,10 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
None;
}
- /**
- * An iterator over the elements of this map. Use of this iterator follows the same
- * contract for concurrent modification as the foreach method.
+ /** An iterator over the elements of this map. Use of this iterator follows the same
+ * contract for concurrent modification as the foreach method.
+ *
+ * @return the iterator
*/
def iterator = new Iterator[(Key, Value)]{
var index = 0;
@@ -189,29 +210,27 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
}
}
- @deprecated("use `iterator' instead")
- override def elements: Iterator[(Key, Value)] = iterator
-
- override def clone : OpenHashMap[Key, Value] = {
+ override def clone = {
val it = new OpenHashMap[Key, Value]
foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
it
}
/**
- * Loop over the key, value mappings of this map.
+ * Loop over the key, value mappings of this map.
*
- * The behaviour of modifying the map during an iteration is as follows:
+ * The behaviour of modifying the map during an iteration is as follows:
*
- * <ul>
- * <li>Deleting a mapping is always permitted.</li>
- * <li>Changing the value of mapping which is already present is permitted.</li>
- * <li>Anything else is not permitted. It will usually, but not always, throw an exception.</li>
- * </ul>
+ * <ul>
+ * <li>Deleting a mapping is always permitted.</li>
+ * <li>Changing the value of mapping which is already present is permitted.</li>
+ * <li>Anything else is not permitted. It will usually, but not always, throw an exception.</li>
+ * </ul>
*
- * @param f The function to apply to each key, value mapping.
+ * @tparam U The return type of the specified function `f`, return result of which is ignored.
+ * @param f The function to apply to each key, value mapping.
*/
- override def foreach[U](f : ((Key, Value)) => U){
+ override def foreach[U](f : ((Key, Value)) => U) {
val startModCount = modCount;
foreachUndeletedEntry(entry => {
if (modCount != startModCount) error("Concurrent Modification")
@@ -219,9 +238,10 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
);
}
- private[this] def foreachUndeletedEntry(f : Entry[Key, Value] => Unit){
+ private[this] def foreachUndeletedEntry(f : Entry => Unit){
table.foreach(entry => if (entry != null && entry.value != None) f(entry));
}
+
override def transform(f : (Key, Value) => Value) = {
foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)));
this
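A small sketch of the interface shown above, with the key/value data purely illustrative:

    import scala.collection.mutable.OpenHashMap

    val m = OpenHashMap(1 -> "one", 2 -> "two")   // built via the companion's apply
    m(3) = "three"                                // update inserts a new mapping
    m -= 2                                        // removes the mapping for key 2
    for ((k, v) <- m) println(k + " -> " + v)     // iteration order is unspecified
    println(m.get(4))                             // None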
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index b3955acacf..acdfc03597 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -1,66 +1,112 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-import generic.{ Addable, Growable }
-
+import generic._
+import annotation.migration
/** This class implements priority queues using a heap.
* To prioritize elements of type T there must be an implicit
* Ordering[T] available at creation.
*
- * Martin: This class is utterly broken. It uses a resizable array
- * as a heap, yet pretends to be a sequence via this same resizable array.
- * Needless to say, order of elements is different in the two.
- * So this class needs to be redesigned so that it uses the array only
- * in its implementation, but implements a sequence interface separately.
+ * @tparam A type of the elements in this priority queue.
+ * @param ord implicit ordering used to compare the elements of type `A`.
*
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
+ *
+ * @define Coll PriorityQueue
+ * @define coll priority queue
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-
@serializable @cloneable
class PriorityQueue[A](implicit ord: Ordering[A])
- extends ResizableArray[A]
- with Addable[A, PriorityQueue[A]]
+ extends Seq[A]
+ with SeqLike[A, PriorityQueue[A]]
with Growable[A]
with Cloneable[PriorityQueue[A]]
+ with Builder[A, PriorityQueue[A]]
{
import ord._
- size0 += 1 // we do not use array(0)
- override def length: Int = super.length - 1 // adjust length accordingly
- override def isEmpty: Boolean = size0 < 2
+ private final class ResizableArrayAccess[A] extends ResizableArray[A] {
+ @inline def p_size0 = size0
+ @inline def p_size0_=(s: Int) = size0 = s
+ @inline def p_array = array
+ @inline def p_ensureSize(n: Int) = super.ensureSize(n)
+ @inline def p_swap(a: Int, b: Int) = super.swap(a, b)
+ }
+
+ protected[this] override def newBuilder = new PriorityQueue[A]
+
+ private val resarr = new ResizableArrayAccess[A]
+
+ resarr.p_size0 += 1 // we do not use array(0)
+ override def length: Int = resarr.length - 1 // adjust length accordingly
+ override def size: Int = length
+ override def isEmpty: Boolean = resarr.p_size0 < 2
override def repr = this
// hey foreach, our 0th element doesn't exist
override def foreach[U](f: A => U) {
var i = 1
- while (i < size) {
- f(toA(array(i)))
+ while (i < resarr.p_size0) {
+ f(toA(resarr.p_array(i)))
+ i += 1
+ }
+ }
+
+ def update(idx: Int, elem: A) {
+ if (idx < 0 || idx >= size) throw new IndexOutOfBoundsException("Indices must be nonnegative and less than the size.")
+
+ var i = 0
+ val iter = iterator
+ clear
+ while (iter.hasNext) {
+ val curr = iter.next
+ if (i == idx) this += elem
+ else this += curr
i += 1
}
}
+ def apply(idx: Int) = {
+ if (idx < 0 || idx >= size) throw new IndexOutOfBoundsException("Indices must be nonnegative and less than the size.")
+
+ var left = idx
+ val iter = iterator
+ var curr = iter.next
+ while (left > 0) {
+ curr = iter.next
+ left -= 1
+ }
+ curr
+ }
+
+ def result = clone
+
private def toA(x: AnyRef): A = x.asInstanceOf[A]
protected def fixUp(as: Array[AnyRef], m: Int): Unit = {
var k: Int = m
while (k > 1 && toA(as(k / 2)) < toA(as(k))) {
- swap(k, k / 2)
+ resarr.p_swap(k, k / 2)
k = k / 2
}
}
+
protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Unit = {
var k: Int = m
while (n >= 2 * k) {
@@ -78,18 +124,10 @@ class PriorityQueue[A](implicit ord: Ordering[A])
}
}
- /** Inserts a single element into the priority queue.
- *
- * @param elem the element to insert
- */
- def +=(elem: A): this.type = {
- ensureSize(size0 + 1)
- array(size0) = elem.asInstanceOf[AnyRef]
- fixUp(array, size0)
- size0 += 1
- this
- }
-
+ @deprecated(
+ "Use += instead if you intend to add by side effect to an existing collection.\n"+
+ "Use `clone() +=' if you intend to create a new collection."
+ )
def +(elem: A): PriorityQueue[A] = { this.clone() += elem }
/** Add two or more elements to this set.
@@ -97,20 +135,32 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @param kv2 the second element.
* @param kvs the remaining elements.
*/
- override def +(elem1: A, elem2: A, elems: A*) = { this.clone().+=(elem1, elem2, elems : _*) }
+ @deprecated(
+ "Use ++= instead if you intend to add by side effect to an existing collection.\n"+
+ "Use `clone() ++=' if you intend to create a new collection."
+ )
+ def +(elem1: A, elem2: A, elems: A*) = { this.clone().+=(elem1, elem2, elems : _*) }
- /** Adds all elements provided by an <code>Iterable</code> object
- * into the priority queue.
+ /** Inserts a single element into the priority queue.
*
- * @param iter an iterable object
+ * @param elem the element to insert.
+ * @return this $coll.
*/
- override def ++(elems: scala.collection.Traversable[A]) = { this.clone() ++= elems } // ??? XXX why does this "override nothing" with override?
+ def +=(elem: A): this.type = {
+ resarr.p_ensureSize(resarr.p_size0 + 1)
+ resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+ fixUp(resarr.p_array, resarr.p_size0)
+ resarr.p_size0 += 1
+ this
+ }
- /** Adds all elements provided by an iterator into the priority queue.
+ /** Adds all elements provided by a `TraversableOnce` object
+ * into the priority queue.
*
- * @param it an iterator
+ * @param xs a traversable object.
+ * @return a new priority queue containing elements of both `xs` and `this`.
*/
- override def ++(iter: Iterator[A]) = { this.clone() ++= iter } // ...whereas this doesn't?
+ def ++(xs: TraversableOnce[A]) = { this.clone() ++= xs }
/** Adds all elements to the queue.
*
@@ -125,11 +175,11 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @return the element with the highest priority.
*/
def dequeue(): A =
- if (size0 > 1) {
- size0 = size0 - 1
- swap(1, size0)
- fixDown(array, 1, size0 - 1)
- toA(array(size0))
+ if (resarr.p_size0 > 1) {
+ resarr.p_size0 = resarr.p_size0 - 1
+ resarr.p_swap(1, resarr.p_size0)
+ fixDown(resarr.p_array, 1, resarr.p_size0 - 1)
+ toA(resarr.p_array(resarr.p_size0))
} else
throw new NoSuchElementException("no element to remove from heap")
@@ -138,12 +188,12 @@ class PriorityQueue[A](implicit ord: Ordering[A])
*
* @return the element with the highest priority.
*/
- def max: A = if (size0 > 1) toA(array(1)) else throw new NoSuchElementException("queue is empty")
+ def max: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- def clear(): Unit = { size0 = 1 }
+ def clear(): Unit = { resarr.p_size0 = 1 }
/** Returns an iterator which yields all the elements of the priority
* queue in descending priority order.
@@ -151,9 +201,9 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @return an iterator over all elements sorted in descending order.
*/
override def iterator: Iterator[A] = new Iterator[A] {
- val as: Array[AnyRef] = new Array[AnyRef](size0)
- Array.copy(array, 0, as, 0, size0)
- var i = size0 - 1
+ val as: Array[AnyRef] = new Array[AnyRef](resarr.p_size0)
+ Array.copy(resarr.p_array, 0, as, 0, resarr.p_size0)
+ var i = resarr.p_size0 - 1
def hasNext: Boolean = i > 0
def next(): A = {
val res = toA(as(1))
@@ -164,12 +214,38 @@ class PriorityQueue[A](implicit ord: Ordering[A])
}
}
- /** This is utterly broken: Two priority queues of different length can still be equal!
- * The method should be removed once PriotyQueue inserts correctly into the sequence class hierarchy.
+
+ /** Returns the reverse of this queue. The priority queue that gets
+ * returned will have an inverted ordering - if for some elements
+ * `x` and `y` the original queue's ordering
+ * had `compare` returning an integer ''w'', the new one will return ''-w'',
+ * assuming the original ordering abides by its contract.
+ *
+ * Note that the order of the elements will be reversed unless the
+ * `compare` method returns 0. In this case, such elements
+ * will be subsequent, but their corresponding subinterval may be inappropriately
+ * reversed. However, due to the compare-equals contract, they will also be equal.
+ *
+ * @return A reversed priority queue.
*/
- override def equals(obj: Any): Boolean = obj match {
- case that: PriorityQueue[_] => (this.iterator zip that.iterator) forall { case (x, y) => x == y }
- case _ => false
+ override def reverse = {
+ val revq = new PriorityQueue[A]()(new math.Ordering[A] {
+ def compare(x: A, y: A) = ord.compare(y, x)
+ })
+ for (i <- 1 until resarr.length) revq += resarr(i)
+ revq
+ }
+
+ override def reverseIterator = new Iterator[A] {
+ val arr = new Array[Any](PriorityQueue.this.size)
+ iterator.copyToArray(arr)
+ var i = arr.size - 1
+ def hasNext: Boolean = i >= 0
+ def next(): A = {
+ val curr = arr(i)
+ i -= 1
+ curr.asInstanceOf[A]
+ }
}
/** The hashCode method always yields an error, since it is not
@@ -196,4 +272,18 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @return a priority queue with the same elements.
*/
override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator
+
+ // def printstate {
+ // println("-----------------------")
+ // println("Size: " + resarr.p_size0)
+ // println("Internal array: " + resarr.p_array.toList)
+ // println(toString)
+ // }
}
+
+// !!! TODO - but no SortedSeqFactory (yet?)
+// object PriorityQueue extends SeqFactory[PriorityQueue] {
+// def empty[A](implicit ord: Ordering[A]): PriorityQueue[A] = new PriorityQueue[A](ord)
+// implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, PriorityQueue] =
+// }
+//
\ No newline at end of file
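Now that the queue is a real `Seq` backed by the private ResizableArrayAccess, the basic operations look as follows; a minimal sketch with illustrative element values:

    import scala.collection.mutable.PriorityQueue

    val pq = new PriorityQueue[Int]   // requires an implicit Ordering[Int]
    pq += 5
    pq ++= List(1, 9, 3)
    println(pq.max)                   // 9: the element with the highest priority
    println(pq.dequeue())             // 9 now, then 5, 3, 1 on later calls
    val rev = pq.reverse              // a new queue with the inverted ordering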
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index d8584e607b..f1b25ab18f 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** This class implements priority queues using a heap. The
+/** This class serves as a proxy for priority queues. The
* elements of the queue have to be ordered in terms of the
- * <code>Ordered[T]</code> class.
+ * `Ordered[T]` class.
*
* @author Matthias Zenger
* @version 1.0, 03/05/2004
@@ -22,7 +21,6 @@ package mutable
abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A]
with Proxy
{
-
def self: PriorityQueue[A]
/** Creates a new iterator over all elements contained in this
@@ -48,21 +46,11 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
*/
override def +=(elem: A): this.type = { self += elem; this }
- /** Adds all elements provided by an <code>Iterable</code> object
- * into the priority queue.
- *
- * @param iter an iterable object
- */
- def ++=(iter: scala.collection.Iterable[A]): this.type = {
- self ++= iter
- this
- }
-
/** Adds all elements provided by an iterator into the priority queue.
*
* @param it an iterator
*/
- override def ++=(it: Iterator[A]): this.type = {
+ override def ++=(it: TraversableOnce[A]): this.type = {
self ++= it
this
}
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index 6d1eae7b78..2b89130368 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -1,49 +1,57 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** <code>Publisher[A,This]</code> objects publish events of type <code>A</code>
+/** `Publisher[Evt]` objects publish events of type `Evt`
* to all registered subscribers. When subscribing, a subscriber may specify
* a filter which can be used to constrain the number of events sent to the
* subscriber. Subscribers may suspend their subscription, or reactivate a
- * suspended subscription. Class <code>Publisher</code> is typically used
- * as a mixin. The type variable <code>This</code> models self types.
+ * suspended subscription. Class `Publisher` is typically used
+ * as a mixin. The abstract type `Pub` models the type of the publisher itself.
+ *
+ * @tparam Evt type of the published event.
*
* @author Matthias Zenger
- * @version 1.0, 08/07/2003
+ * @author Martin Odersky
+ * @version 2.8
* @since 1
*/
-trait Publisher[A, This <: Publisher[A, This]] {
- self: This =>
+trait Publisher[Evt] {
+
+ type Pub <: Publisher[Evt]
+ type Sub = Subscriber[Evt, Pub]
+ type Filter = Evt => Boolean
- type SubThis = Subscriber[A, This]
- type Filter = A => Boolean
+ /** The publisher itself of type `Pub'. Implemented by a cast from `this' here.
+ * Needs to be overridden if the actual publisher is different from `this'.
+ */
+ protected val self: Pub = this.asInstanceOf[Pub]
- private val filters = new HashMap[SubThis, Set[Filter]] with MultiMap[SubThis, Filter]
- private val suspended = new HashSet[SubThis]
+ private val filters = new HashMap[Sub, Set[Filter]] with MultiMap[Sub, Filter]
+ private val suspended = new HashSet[Sub]
- def subscribe(sub: SubThis) { subscribe(sub, event => true) }
- def subscribe(sub: SubThis, filter: Filter) { filters(sub) += filter }
- def suspendSubscription(sub: SubThis) { suspended += sub }
- def activateSubscription(sub: SubThis) { suspended -= sub }
- def removeSubscription(sub: SubThis) { filters -= sub }
+ def subscribe(sub: Sub) { subscribe(sub, event => true) }
+ def subscribe(sub: Sub, filter: Filter) { filters.addBinding(sub, filter) }
+ def suspendSubscription(sub: Sub) { suspended += sub }
+ def activateSubscription(sub: Sub) { suspended -= sub }
+ def removeSubscription(sub: Sub) { filters -= sub }
def removeSubscriptions() { filters.clear }
- protected def publish(event: A) {
- filters.keysIterator.foreach(sub =>
- if (filters.entryExists(sub, p => p(event)))
- sub.notify(this, event)
+ protected def publish(event: Evt) {
+ filters.keys.foreach(sub =>
+ if (!suspended.contains(sub) &&
+ filters.entryExists(sub, p => p(event)))
+ sub.notify(self, event)
)
}
@@ -52,11 +60,7 @@ trait Publisher[A, This <: Publisher[A, This]] {
* @return true, iff both publishers contain the same sequence of elements.
*/
override def equals(obj: Any): Boolean = obj match {
- case that: Publisher[_, _] =>
- (this.filters equals that.filters) &&
- (this.suspended equals that.suspended)
- case _ =>
- false
+ case that: Publisher[_] => filters == that.filters && suspended == that.suspended
+ case _ => false
}
-
}
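A hedged sketch of the reworked publish/subscribe contract with the new `Pub`, `Sub` and `Filter` members (the `Ticker` class, its `tick` method and the string events are invented for illustration):

    import scala.collection.mutable.{Publisher, Subscriber}

    class Ticker extends Publisher[String] {
      type Pub = Ticker
      def tick(msg: String) { publish(msg) }   // publish is protected, so expose it
    }

    val ticker = new Ticker
    ticker.subscribe(new Subscriber[String, Ticker] {
      def notify(pub: Ticker, event: String) { println("got: " + event) }
    }, _.startsWith("tick"))                   // only events accepted by the filter are delivered

    ticker.tick("tick 1")                      // delivered
    ticker.tick("ignored")                     // filtered out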
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 57bf6cd6a4..ae6c60d1d0 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,17 +13,23 @@ package mutable
import generic._
-/** <code>Queue</code> objects implement data structures that allow to
+/** `Queue` objects implement data structures that allow one to
* insert and retrieve elements in a first-in-first-out (FIFO) manner.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
+ *
+ * @define Coll mutable.Queue
+ * @define coll mutable queue
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @cloneable
class Queue[A] extends MutableList[A] with Cloneable[Queue[A]] {
-
/** Adds all elements to the queue.
*
* @param elems the elements to add.
@@ -144,3 +149,8 @@ class Queue[A] extends MutableList[A] with Cloneable[Queue[A]] {
*/
def front: A = first0.elem
}
+
+// !!! TODO - integrate
+object Queue {
+ def apply[A](xs: A*): Queue[A] = new Queue[A] ++= xs
+}
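The new companion object gives `Queue` a varargs constructor; a quick sketch (values illustrative):

    import scala.collection.mutable.Queue

    val q = Queue(1, 2, 3)     // built via the new Queue.apply
    q += 4                     // append at the end
    q.enqueue(5, 6)            // enqueue several elements at once
    println(q.dequeue())       // 1: first in, first out
    println(q.front)           // 2: peek without removing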
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index 53876479ca..9d93933695 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -1,20 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** <code>Queue</code> objects implement data structures that allow to
+/** `Queue` objects implement data structures that allow one to
* insert and retrieve elements in a first-in-first-out (FIFO) manner.
*
+ * @tparam A type of the elements in this queue proxy.
+ *
* @author Matthias Zenger
* @version 1.1, 03/05/2004
* @since 1
@@ -45,24 +46,13 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
*/
override def +=(elem: A): this.type = { self += elem; this }
- /** Adds all elements provided by an <code>Iterable</code> object
- * at the end of the queue. The elements are prepended in the order they
- * are given out by the iterator.
- *
- * @param iter an iterable object
- */
- def ++=(iter: scala.collection.Iterable[A]): this.type = {
- self ++= iter
- this
- }
-
/** Adds all elements provided by an iterator
* at the end of the queue. The elements are prepended in the order they
* are given out by the iterator.
*
* @param iter an iterator
*/
- override def ++=(it: Iterator[A]): this.type = {
+ override def ++=(it: TraversableOnce[A]): this.type = {
self ++= it
this
}
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index a8fd82f726..9cc19cd8e2 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -17,6 +16,8 @@ import generic._
/** This class is used internally to implement data structures that
* are based on resizable arrays.
*
+ * @tparam A type of the elements contained in this resizable array.
+ *
* @author Matthias Zenger, Burak Emir
* @author Martin Odersky
* @version 2.8
@@ -24,12 +25,12 @@ import generic._
*/
trait ResizableArray[A] extends IndexedSeq[A]
with GenericTraversableTemplate[A, ResizableArray]
- with IndexedSeqLike[A, ResizableArray[A]] {
+ with IndexedSeqOptimized[A, ResizableArray[A]] {
override def companion: GenericCompanion[ResizableArray] = ResizableArray
protected def initialSize: Int = 16
- protected var array: Array[AnyRef] = new Array[AnyRef](initialSize max 1)
+ protected var array: Array[AnyRef] = new Array[AnyRef](math.max(initialSize, 1))
protected var size0: Int = 0
@@ -90,9 +91,12 @@ trait ResizableArray[A] extends IndexedSeq[A]
var newsize = array.length * 2
while (n > newsize)
newsize = newsize * 2
+ // println("Internal array before, size " + size0 + ": " + array.toList)
val newar: Array[AnyRef] = new Array(newsize)
Array.copy(array, 0, newar, 0, size0)
+ // println("Internal array after, size " + size0 + ": " + array.toList)
array = newar
+ // println("New array after, size " + size0 + ": " + newar.toList)
}
}
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 08b6b56cac..87b4efa5f2 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -1,29 +1,31 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** A revertible history is a <code>History</code> object which supports
- * an undo operation. Type variable <code>A</code> refers to the type
- * of the published events, <code>B</code> denotes the publisher type.
- * Type <code>B</code> is typically a subtype of <code>Publisher</code>.
+/** A revertible history is a `History` object which supports
+ * an undo operation. Type variable `Evt` refers to the type
+ * of the published events, `Pub` denotes the publisher type.
+ * Type `Pub` is typically a subtype of `Publisher`.
+ *
+ * @tparam Evt type of the events
+ * @tparam Pub type of the publisher
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 2.8
*/
@serializable
-class RevertibleHistory[A <: Undoable, B] extends History[A, B] with Undoable {
+class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Undoable {
/** Rollback the full history.
*/
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index e028d09923..eff387353e 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,11 +13,16 @@ package mutable
import generic._
-/** A subtrait of <code>collection.Seq</code> which represents sequences
+
+/** A subtrait of `collection.Seq` which represents sequences
* that can be mutated.
- * The class adds an <code>update</code> method to <code>collection.Seq</code>.
*
- * @since 2.8
+ * $seqInfo
+ *
+ * The class adds an `update` method to `collection.Seq`.
+ *
+ * @define Coll mutable.Seq
+ * @define coll mutable sequence
*/
trait Seq[A] extends Iterable[A]
with scala.collection.Seq[A]
@@ -26,14 +30,19 @@ trait Seq[A] extends Iterable[A]
with SeqLike[A, Seq[A]] {
override def companion: GenericCompanion[Seq] = Seq
+ /** Replaces element at given index with a new value.
+ *
+ * @param n the index of the element to replace.
+ * @param elem the new value.
+ * @throws IndexOutOfBoundsException if the index is not valid.
+ */
def update(idx: Int, elem: A)
}
-/** A factory object for the trait <code>Seq</code>.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is an `ArrayBuffer`.
+ * @define coll mutable sequence
+ * @define Coll mutable.Seq
*/
object Seq extends SeqFactory[Seq] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = new GenericCanBuildFrom[A]
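The `update` method documented above is what makes these sequences mutable in place; a minimal sketch (names and values illustrative):

    import scala.collection.mutable

    val s: mutable.Seq[Int] = mutable.Seq(1, 2, 3)   // default implementation is an ArrayBuffer
    s(1) = 20                                        // sugar for s.update(1, 20)
    println(s)                                       // contents are now 1, 20, 3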
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index 09b3046749..066eecc034 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,18 +13,11 @@ package mutable
import generic._
-/** A generic trait for mutable sets. Concrete set implementations
- * have to provide functionality for the abstract methods in Set:
- *
- * def contains(elem: A): Boolean
- * def iterator: Iterator[A]
- * def += (elem: A): this.type
- * def -= (elem: A): this.type
- *
+/** A base trait for sets that can be mutated.
+ * $setNote
+ * $setTags
+ * @since 1.0
* @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.8
- * @since 1
*/
trait Set[A] extends Iterable[A]
with scala.collection.Set[A]
@@ -34,10 +26,12 @@ trait Set[A] extends Iterable[A]
override def companion: GenericCompanion[Set] = Set
}
-/** The canonical factory methods for <a href="Set.html">mutable sets</a>.
- * Currently this returns a HashSet.
+/** $factoryInfo
+ * The current default implementation of a $Coll is a `HashSet`.
+ * @define coll mutable set
+ * @define Coll mutable.Set
*/
-object Set extends SetFactory[Set] {
+object Set extends MutableSetFactory[Set] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
override def empty[A]: Set[A] = HashSet.empty[A]
}
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index de88266693..6cab092d06 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -1,29 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import generic._
-/** The canonical builder for collections that are addable, i.e. that support
- * an efficient + method which adds an element to the collection.
- * Collections are built from their empty element using this + method.
- * @param empty The empty element of the collection.
+/** The canonical builder for mutable Sets.
*
+ * @tparam A The type of the elements that will be contained in this set.
+ * @tparam Coll The type of the actual collection this set builds.
+ * @param empty The empty element of the collection.
* @since 2.8
*/
-class SetBuilder[A, Coll <: Addable[A, Coll] with scala.collection.Iterable[A] with scala.collection.IterableLike[A, Coll]](empty: Coll)
-extends Builder[A, Coll] {
- protected var elems: Coll = empty
- def +=(x: A): this.type = { elems = elems + x; this }
- def clear() { elems = empty }
- def result: Coll = elems
-}
+class SetBuilder[A, Coll <: Addable[A, Coll] with collection.Iterable[A] with collection.IterableLike[A, Coll]](empty: Coll)
+extends AddingBuilder[A, Coll](empty) { }
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 81747cb704..39ab1ff634 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,28 +13,45 @@ package mutable
import generic._
import script._
+import scala.annotation.migration
-/** <p>
- * A generic template for mutable sets of elements of type <code>A</code>.
+/** A template trait for mutable sets of type `mutable.Set[A]`.
+ * @tparam A the type of the elements of the set
+ * @tparam This the type of the set itself.
+ *
+ * $setnote
+ *
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ *
+ * @define setnote
+ * @note
+ * This trait provides most of the operations of a `mutable.Set` independently of its representation.
+ * It is typically inherited by concrete implementations of sets.
+ *
* To implement a concrete mutable set, you need to provide implementations
* of the following methods:
- * </p><pre>
- * <b>def</b> contains(elem: A): Boolean
- * <b>def</b> iterator: Iterator[A]
- * <b>def</b> += (elem: A): <b>this.type</b>
- * <b>def</b> -= (elem: A): <b>this.type</b></pre>
- * <p>
- * If you wish that methods <code>like</code>, <code>take</code>,
- * <code>drop</code>, <code>filter</code> return the same kind of map,
+ * {{{
+ * def contains(elem: A): Boolean
+ * def iterator: Iterator[A]
+ * def += (elem: A): this.type
+ * def -= (elem: A): this.type
+ * }}}
+ * If you wish that methods like `take`,
+ * `drop`, `filter` return the same kind of set,
* you should also override:
- * </p><pre>
- * <b>def</b> empty: This</pre>
- * <p>
- * It is also good idea to override methods <code>foreach</code> and
- * <code>size</code> for efficiency.
- * </p>
- *
- * @since 2.8
+ * {{{
+ * def empty: This
+ * }}}
+ * It is also a good idea to override methods `foreach` and
+ * `size` for efficiency.
+ * @define addDuplicates
+ * Note that duplicates (elements for which `equals` yields true) will be
+ * removed, but it is not specified whether it will be an element of this
+ * set or a newly added element.
+ * @define coll mutable set
+ * @define Coll mutable.Set
*/
trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
extends scala.collection.SetLike[A, This]
@@ -46,16 +62,19 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
with Cloneable[mutable.Set[A]]
{ self =>
- /** A common implementation of <code>newBuilder</code> for all mutable sets
- * in terms of <code>empty</code>. Overrides <code>SetLike</code>
- * implementation for better efficiency.
+ /** A common implementation of `newBuilder` for all mutable sets
+ * in terms of `empty`. Overrides the implementation in `collection.SetLike`
+ * for better efficiency.
*/
override protected[this] def newBuilder: Builder[A, This] = empty
- /** Adds a new element to the set.
+ @migration(2, 8, "Set.map now returns a Set, so it will discard duplicate values.")
+ override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = super.map(f)(bf)
+
+ /** Adds an element to this $coll.
*
* @param elem the element to be added
- * @return true if the element was not yet present in the set.
+ * @return `true` if the element was not yet present in the set, `false` otherwise.
*/
def add(elem: A): Boolean = {
val r = contains(elem)
@@ -63,10 +82,10 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
r
}
- /** Removes a single element from a set.
+ /** Removes an element from this set.
*
* @param elem The element to be removed.
- * @return true if the element was already present in the set.
+ * @return `true` if the element was previously present in the set, `false` otherwise.
*/
def remove(elem: A): Boolean = {
val r = contains(elem)
@@ -74,29 +93,33 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
r
}
- /** This method allows one to add or remove an element <code>elem</code>
- * from this set depending on the value of parameter <code>included</code>.
+ /** Updates the presence of a single element in this set.
+ *
+ * This method allows one to add or remove an element `elem`
+ * from this set depending on the value of parameter `included`.
* Typically, one would use the following syntax:
- * <pre>set(elem) = true</pre>
+ * {{{
+ * set(elem) = true // adds element
+ * set(elem) = false // removes element
+ * }}}
*
+ * @param elem the element to be added or removed
+ * @param included a flag indicating whether element should be included or excluded.
*/
def update(elem: A, included: Boolean) {
if (included) this += elem else this -= elem
}
- /** Adds a new element to the set.
- *
- * @param elem the element to be added
- */
- def +=(elem: A): this.type
+ // abstract methods from Growable/Shrinkable
- /** Removes a single element from a set.
- * @param elem The element to be removed.
- */
+ /** Adds a single element to the set. */
+ def +=(elem: A): this.type
def -=(elem: A): this.type
- /** Removes all elements from the set for which the predicate <code>p</code>
- * yields the value <code>false</code>.
+ /** Removes all elements from the set that do not satisfy a predicate.
+ * @param p the predicate used to test elements. Only elements for
+ * which `p` returns `true` are retained in the set; all others
+ * are removed.
*/
def retain(p: A => Boolean): Unit = for (elem <- this.toList) if (!p(elem)) this -= elem
@@ -105,109 +128,102 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
*/
def clear() { foreach(-=) }
- override def clone(): mutable.Set[A] = empty ++= repr
+ override def clone(): This = empty ++= repr
+ /** The result when this set is used as a builder
+ * @return the set representation itself.
+ */
def result: This = repr
- /** Adds a single element to this collection and returns
- * the collection itself.
+ /** Creates a new set consisting of all the elements of this set and `elem`.
+ *
+ * $addDuplicates
*
* @param elem the element to add.
+ * @return a new set consisting of elements of this set and `elem`.
*/
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.")
- override def + (elem: A): This = { +=(elem); repr }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To add an element as a\n"+
+ "side effect to an existing set and return that set itself, use +=."
+ )
+ override def + (elem: A): This = clone() += elem
- /** Adds two or more elements to this collection and returns
- * the collection itself.
+ /** Creates a new set consisting of all the elements of this set and two or more
+ * specified elements.
+ *
+ * $addDuplicates
*
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
- */
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.")
- override def + (elem1: A, elem2: A, elems: A*): This = {
- this += elem1 += elem2 ++= elems
- repr
- }
-
- /** Adds a number of elements provided by a traversable object and returns
- * either the collection itself.
+ * @return a new set consisting of all the elements of this set, `elem1`,
+ * `elem2` and those in `elems`.
+ */
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To add the elements as a\n"+
+ "side effect to an existing set and return that set itself, use +=."
+ )
+ override def + (elem1: A, elem2: A, elems: A*): This =
+ clone() += elem1 += elem2 ++= elems
+
+ /** Creates a new set consisting of all the elements of this set and those
+ * provided by the specified traversable object.
*
- * @param iter the iterable object.
- */
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
- override def ++(iter: scala.collection.Traversable[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
-
-
- /** Adds a number of elements provided by an iterator and returns
- * the collection itself.
+ * $addDuplicates
*
- * @param iter the iterator
+ * @param xs the traversable object.
+ * @return a new set consisting of elements of this set and those in `xs`.
*/
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
- override def ++ (iter: Iterator[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To add the elements as a\n"+
+ "side effect to an existing set and return that set itself, use ++=."
+ )
+ override def ++(xs: TraversableOnce[A]): This = clone() ++= xs
- /** Removes a single element from this collection and returns
- * the collection itself.
+ /** Creates a new set consisting of all the elements of this set except `elem`.
*
* @param elem the element to remove.
+ * @return a new set consisting of all the elements of this set except `elem`.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=' if you intend to create a new collection.")
- override def -(elem: A): This = { -=(elem); repr }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To remove the element as a\n"+
+ "side effect to an existing set and return that set itself, use -=."
+ )
+ override def -(elem: A): This = clone() -= elem
- /** Removes two or more elements from this collection and returns
- * the collection itself.
+ /** Creates a new set consisting of all the elements of this set except the two
+ * or more specified elements.
*
* @param elem1 the first element to remove.
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
- */
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=' if you intend to create a new collection.")
- override def -(elem1: A, elem2: A, elems: A*): This = {
- this -= elem1 -= elem2 --= elems
- repr
- }
-
- /** Removes a number of elements provided by a Traversable object and returns
- * the collection itself.
- *
- * @param iter the Traversable object.
- */
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: scala.collection.Traversable[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
-
- /** Removes a number of elements provided by an iterator and returns
- * the collection itself.
+ * @return a new set consisting of all the elements of this set except
+ * `elem1`, `elem2` and `elems`.
+ */
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To remove the elements as a\n"+
+ "side effect to an existing set and return that set itself, use -=."
+ )
+ override def -(elem1: A, elem2: A, elems: A*): This =
+ clone() -= elem1 -= elem2 --= elems
+
+ /** Creates a new set consisting of all the elements of this set except those
+ * provided by the specified traversable object.
*
- * @param iter the iterator
+ * @param xs the traversable object.
+ * @return a new set consisting of all the elements of this set except
+ * elements from `xs`.
*/
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: Iterator[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To remove the elements as a\n"+
+ "side effect to an existing set and return that set itself, use --=."
+ )
+ override def --(xs: TraversableOnce[A]): This = clone() --= xs
/** Send a message to this scriptable object.
*
* @param cmd the message to send.
- * @throws <code>Predef.UnsupportedOperationException</code>
+ * @throws `Predef.UnsupportedOperationException`
* if the message was not understood.
*/
def <<(cmd: Message[A]): Unit = cmd match {
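The migration annotations above capture the key 2.8 change: `+`/`-` on a mutable set now clone first, while `+=`/`-=` mutate in place. A short sketch of the difference (element values illustrative):

    import scala.collection.mutable

    val s = mutable.Set(1, 2)
    val t = s + 3           // as of 2.8 this builds a new set; s is unchanged
    s += 3                  // mutates s in place
    s(4) = true             // update(elem, included): adds 4
    s(1) = false            // removes 1
    s.retain(_ > 2)         // keep only elements satisfying the predicate
    println((s, t))         // e.g. (Set(3, 4), Set(1, 2, 3)); iteration order unspecified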
diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala
index 21778d79bb..b1a6c48ffd 100644
--- a/src/library/scala/collection/mutable/SetProxy.scala
+++ b/src/library/scala/collection/mutable/SetProxy.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
@@ -20,8 +19,7 @@ package mutable
* @version 1.1, 09/05/2004
* @since 1
*/
-trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]]
-{
+trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] {
override def repr = this
override def empty = new SetProxy[A] { val self = SetProxy.this.self.empty }
override def + (elem: A) = { self += elem ; this }
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 6e7205c3fa..c791066398 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,14 +14,23 @@ package mutable
import generic._
import collection.immutable.{List, Nil}
import collection.Iterator
+import annotation.migration
/** A stack implements a data structure which allows to store and retrieve
* objects in a last-in-first-out (LIFO) fashion.
*
+ * @tparam A type of the elements contained in this stack.
+ *
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
+ * @define Coll Stack
+ * @define coll stack
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
@serializable @cloneable
class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with Cloneable[Stack[A]] {
@@ -54,29 +62,21 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
* @param elems the element sequence.
* @return the stack with the new elements on top.
*/
- def push(elem1: A, elem2: A, elems: A*): this.type = this.push(elem1).push(elem2).pushAll(elems)
+ def push(elem1: A, elem2: A, elems: A*): this.type =
+ this.push(elem1).push(elem2).pushAll(elems)
- /** Push all elements provided by the given iterator object onto
- * the stack. The last element returned by the iterator
+ /** Push all elements in the given traversable object onto
+ * the stack. The last element in the traversable object
* will be on top of the new stack.
*
- * @param elems the iterator object.
+ * @param xs the traversable object.
* @return the stack with the new elements on top.
- * @deprecated
*/
- def pushAll(elems: Iterator[A]): this.type = { for (elem <- elems) { push(elem); () }; this }
+ def pushAll(xs: TraversableOnce[A]): this.type = { xs foreach push ; this }
- /** Push all elements provided by the given iterable object onto
- * the stack. The last element returned by the traversable object
- * will be on top of the new stack.
- *
- * @param elems the iterable object.
- * @return the stack with the new elements on top.
- */
- def pushAll(elems: scala.collection.Traversable[A]): this.type = { for (elem <- elems) { push(elem); () }; this }
-
- @deprecated("use pushAll") def ++=(it: Iterator[A]): this.type = pushAll(it)
- @deprecated("use pushAll") def ++=(it: scala.collection.Iterable[A]): this.type = pushAll(it)
+ @deprecated("use pushAll")
+ @migration(2, 8, "Stack ++= now pushes arguments on the stack from left to right.")
+ def ++=(xs: TraversableOnce[A]): this.type = pushAll(xs)
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
@@ -113,17 +113,27 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
*
* @return an iterator over all stack elements.
*/
+ @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
override def iterator: Iterator[A] = elems.iterator
/** Creates a list of all stack elements in LIFO order.
*
* @return the created list.
*/
+ @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
override def toList: List[A] = elems
+ @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
+ override def foreach[U](f: A => U): Unit = super.foreach(f)
+
/** This method clones the stack.
*
* @return a stack with the same elements.
*/
override def clone(): Stack[A] = new Stack[A](elems)
}
+
+// !!! TODO - integrate
+object Stack {
+ def apply[A](xs: A*): Stack[A] = new Stack[A] pushAll xs
+}
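The migration notes above deserve a concrete illustration: `pushAll`/`++=` now push their arguments from left to right, while popping stays LIFO. A sketch (values illustrative):

    import scala.collection.mutable.Stack

    val st = Stack(1, 2)       // built via the new Stack.apply: pushes 1, then 2
    st.pushAll(List(3, 4))     // as of 2.8, pushes 3 then 4; 4 ends up on top
    println(st.top)            // 4
    println(st.pop())          // 4 now, then 3, 2, 1 on later pops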
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index d248ebd961..b0af178382 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -1,20 +1,22 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
+
/** A stack implements a data structure which allows to store and retrieve
* objects in a last-in-first-out (LIFO) fashion.
*
+ * @tparam A type of the elements in this stack proxy.
+ *
* @author Matthias Zenger
* @version 1.0, 10/05/2004
* @since 1
@@ -48,15 +50,7 @@ trait StackProxy[A] extends Stack[A] with Proxy {
this
}
- override def pushAll(elems: Iterator[A]): this.type = {
- self pushAll elems
- this
- }
-
- override def pushAll(elems: scala.collection.Traversable[A]): this.type = {
- self pushAll elems
- this
- }
+ override def pushAll(xs: TraversableOnce[A]): this.type = { self pushAll xs; this }
/** Pushes all elements provided by an <code>Iterable</code> object
* on top of the stack. The elements are pushed in the order they
@@ -64,21 +58,8 @@ trait StackProxy[A] extends Stack[A] with Proxy {
*
* @param iter an iterable object
*/
- @deprecated("use pushAll") override def ++=(iter: scala.collection.Iterable[A]): this.type = {
- self ++= iter
- this
- }
+ @deprecated("use pushAll") override def ++=(xs: TraversableOnce[A]): this.type = { self ++= xs ; this }
- /** Pushes all elements provided by an iterator
- * on top of the stack. The elements are pushed in the order they
- * are given out by the iterator.
- *
- * @param iter an iterator
- */
- @deprecated("use pushAll") override def ++=(it: Iterator[A]): this.type = {
- self ++= it
- this
- }
override def push(elem1: A, elem2: A, elems: A*): this.type = {
self.push(elem1).push(elem2).pushAll(elems)
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 47bac8ad47..e9258c9730 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -1,47 +1,39 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
-
package scala.collection
package mutable
-import generic._
-import compat.Platform.arraycopy
-import scala.reflect.Manifest
+import java.lang.{ StringBuilder => JavaStringBuilder }
+import annotation.migration
-/** <p>
- * A mutable sequence of characters. This class provides an API compatible
- * with <a class="java/lang/StringBuilder" href="" target="_top">
- * <code>java.lang.StringBuilder</code></a>.
- * </p>generic/
+/** A builder for a mutable sequence of characters. This class provides an API
+ * mostly compatible with java.lang.StringBuilder, except where there are conflicts
+ * with the Scala collections API (such as the `reverse` method).
*
* @author Stephane Micheloud
* @author Martin Odersky
* @version 2.8
- * @since 2.8
+ * @since 2.7
*/
@serializable
@SerialVersionUID(0 - 8525408645367278351L)
-final class StringBuilder(initCapacity: Int, private val initValue: String)
+final class StringBuilder(private val underlying: JavaStringBuilder)
extends Builder[Char, String]
- with IndexedSeq[Char] {
-
- require(initCapacity >= 0)
-
- import scala.collection.Seq
+ with java.lang.CharSequence
+ with IndexedSeq[Char]
+ with IndexedSeqOptimized[Char, IndexedSeq[Char]] {
- /** The value is used for character storage. */
- private var array = new Array[Char](initCapacity + initValue.length)
-
- /** The count is the number of characters used. */
- private var count: Int = 0
+ /** Constructs a string builder initialized with String initValue
+ * and with additional Char capacity initCapacity.
+ */
+ def this(initCapacity: Int, initValue: String) =
+ this(new JavaStringBuilder(initValue.length + initCapacity) append initValue)
/** Constructs a string builder with no characters in it and an
* initial capacity of 16 characters.
@@ -49,11 +41,10 @@ final class StringBuilder(initCapacity: Int, private val initValue: String)
def this() = this(16, "")
/** Constructs a string builder with no characters in it and an
- * initial capacity specified by the <code>capacity</code> argument.
+ * initial capacity specified by the capacity argument.
*
* @param capacity the initial capacity.
- * @throws NegativeArraySizeException if the <code>capacity</code>
- * argument is less than <code>0</code>.
+ * @throws NegativeArraySizeException if capacity < 0.
*/
def this(capacity: Int) = this(capacity, "")
@@ -62,206 +53,139 @@ final class StringBuilder(initCapacity: Int, private val initValue: String)
*/
def this(str: String) = this(16, str)
- append(initValue)
-
- def toArray: Array[Char] = array
+ def toArray: Array[Char] = {
+ val arr = new Array[Char](length)
+ underlying.getChars(0, length, arr, 0)
+ arr
+ }
- def length: Int = count
- def length_=(n: Int) { setLength(n) }
+ def length: Int = underlying.length()
+ def length_=(n: Int) { underlying.setLength(n) }
/** Clears the builder contents.
*/
def clear(): Unit = setLength(0)
- /** Sets the length of the character sequence.
+ /** Sets the length of the character sequence. If the current sequence
+ * is shorter than the given length, it is padded with nulls; if it is
+ * longer, it is truncated.
*
- * @param newLength the new length
- * @throws IndexOutOfBoundsException if the <code>n</code> argument is negative.
+ * @param len the new length
+ * @throws IndexOutOfBoundsException if the argument is negative.
*/
- def setLength(n: Int) {
- require(n >= 0, n)
- while (count < n) append('\0')
- count = n
- }
+ def setLength(len: Int) { underlying setLength len }
- /** Returns the current capacity. The capacity is the amount of storage
- * available for newly inserted characters, beyond which an allocation
- * will occur.
+ /** Returns the current capacity, which is the size of the underlying array.
+ * A new array will be allocated if the current capacity is exceeded.
*
- * @return the current capacity
+ * @return the capacity
*/
- def capacity: Int = array.length
+ def capacity: Int = underlying.capacity()
- /** Same as <code>ensureCapacity</code>. */
- @deprecated("use `ensureCapacity' instead. An assignment is misleading because\n"+
+ @deprecated("Use `ensureCapacity' instead. An assignment is misleading because\n"+
"it can never decrease the capacity.")
def capacity_=(n: Int) { ensureCapacity(n) }
- /** <p>
- * Ensures that the capacity is at least equal to the specified minimum.
- * If the current capacity is less than the argument, then a new internal
- * array is allocated with greater capacity. The new capacity is the larger of:
- * </p>
- * <ul>
- * <li>The <code>n</code> argument.
- * <li>Twice the old capacity, plus <code>2</code>.
- * </ul>
- * <p>
- * If the <code>n</code> argument is non-positive, this
- * method takes no action and simply returns.
- * </p>
- *
- * @param n the minimum desired capacity.
- */
- def ensureCapacity(n: Int) {
- if (n > array.length) {
- var newsize = array.length * 2
- while (n > newsize)
- newsize = newsize * 2
- val newar = new Array[Char](newsize)
- arraycopy(array, 0, newar, 0, count)
- array = newar
- }
- }
+ /** Ensure that the capacity is at least the given argument.
+ * If the argument is greater than the current capacity, new
+ * storage will be allocated with size equal to the given
+ * argument or to (2 * capacity + 2), whichever is larger.
+ *
+ * @param newCapacity the minimum desired capacity.
+ */
+ def ensureCapacity(newCapacity: Int): Unit = underlying ensureCapacity newCapacity
- /** <p>
- * Returns the <code>Char</code> value in this sequence at the specified index.
- * The first <code>Char</code> value is at index <code>0</code>, the next at index
- * <code>1</code>, and so on, as in array indexing.
- * </p>
- * <p>
- * The index argument must be greater than or equal to
- * <code>0</code>, and less than the length of this sequence.
- * </p>
- *
- * @param index the index of the desired <code>Char</code> value.
- * @return the <code>Char</code> value at the specified index.
- * @throws IndexOutOfBoundsException if <code>index</code> is
- * negative or greater than or equal to <code>length()</code>.
- */
- def charAt(index: Int): Char = {
- if (index < 0 || index >= count)
- throw new StringIndexOutOfBoundsException(index)
- array(index)
- }
+ /** Returns the Char at the specified index, counting from 0 as in Arrays.
+ *
+ * @param index the index to look up
+ * @return the Char at the given index.
+ * @throws IndexOutOfBoundsException if the index is out of bounds.
+ */
+ def charAt(index: Int): Char = underlying charAt index
- /** Same as <code>charAt</code>. */
- def apply(i: Int): Char = charAt(i)
+ /** Equivalent to charAt.
+ */
+ def apply(index: Int): Char = underlying charAt index
- /** <p>
- * Removes the <code>Char</code> at the specified position in this
- * sequence. This sequence is shortened by one <code>Char</code>.
- * </p>
+ /** Removes the Char at the specified index. The sequence is
+ * shortened by one.
*
- * @param index Index of <code>Char</code> to remove
- * @return This object.
- * @throws StringIndexOutOfBoundsException if the <code>index</code>
- * is negative or greater than or equal to <code>length()</code>.
+ * @param index The index to remove.
+ * @return This StringBuilder.
+ * @throws IndexOutOfBoundsException if the index is out of bounds.
*/
def deleteCharAt(index: Int): StringBuilder = {
- if (index < 0 || index >= count)
- throw new StringIndexOutOfBoundsException(index)
- arraycopy(array, index + 1, array, index, count - index - 1)
- count -= 1
+ underlying deleteCharAt index
this
}
- /** <p>
- * The character at the specified index is set to <code>ch</code>. This
- * sequence is altered to represent a new character sequence that is
- * identical to the old character sequence, except that it contains the
- * character <code>ch</code> at position <code>index</code>.
- * </p>
- * <p>
- * The index argument must be greater than or equal to
- * <code>0</code>, and less than the length of this sequence.
- * </p>
- *
- * @param index the index of the character to modify.
- * @param ch the new character.
- * @throws IndexOutOfBoundsException if <code>index</code> is
- * negative or greater than or equal to <code>length()</code>.
- */
- def setCharAt(index: Int, ch: Char) {
- if (index < 0 || index >= count)
- throw new StringIndexOutOfBoundsException(index)
- array(index) = ch
- }
+ /** Update the sequence at the given index to hold the specified Char.
+ *
+ * @param index the index to modify.
+ * @param ch the new Char.
+ * @throws IndexOutOfBoundsException if the index is out of bounds.
+ */
+ def setCharAt(index: Int, ch: Char): Unit = underlying.setCharAt(index, ch)
- /** Same as <code>setCharAt</code>. */
- def update(i: Int, c: Char) { setCharAt(i, c) }
+ /** Equivalent to setCharAt.
+ */
+ def update(i: Int, c: Char): Unit = setCharAt(i, c)
- /** Returns a new <code>String</code> that contains a subsequence of
- * characters currently contained in this character sequence. The
- * substring begins at the specified index and extends to the end of
- * this sequence.
+ /** Returns a new String made up of a subsequence of this sequence,
+ * beginning at the given index and extending to the end of the sequence.
*
- * @param start The beginning index, inclusive.
- * @return The new string.
- * @throws StringIndexOutOfBoundsException if <code>start</code> is
- * less than zero, or greater than the length of this object.
+ * target.substring(start) is equivalent to target.drop(start)
+ *
+ * @param start The starting index, inclusive.
+ * @return The new String.
+ * @throws IndexOutOfBoundsException if the index is out of bounds.
*/
- def substring(start: Int): String = substring(start, count)
+ def substring(start: Int): String = substring(start, length)
- /** Returns a new <code>String</code> that contains a subsequence of
- * characters currently contained in this sequence. The
- * substring begins at the specified <code>start</code> and
- * extends to the character at index <code>end - 1</code>.
+ /** Returns a new String made up of a subsequence of this sequence,
+ * beginning at the start index (inclusive) and extending to the
+ * end index (exclusive).
+ *
+ * target.substring(start, end) is equivalent to target.slice(start, end).mkString
*
* @param start The beginning index, inclusive.
* @param end The ending index, exclusive.
- * @return The new string.
- * @throws StringIndexOutOfBoundsException if <code>start</code>
- * or <code>end</code> are negative or greater than
- * <code>length()</code>, or <code>start</code> is
- * greater than <code>end</code>.
- */
- def substring(start: Int, end: Int): String = {
- if (start < 0)
- throw new StringIndexOutOfBoundsException(start)
- if (end > count)
- throw new StringIndexOutOfBoundsException(end)
- if (start > end)
- throw new StringIndexOutOfBoundsException(end - start)
- new String(array, start, end - start)
- }
+ * @return The new String.
+ * @throws StringIndexOutOfBoundsException If either index is out of bounds,
+ * or if start > end.
+ */
+ def substring(start: Int, end: Int): String = underlying.substring(start, end)
+ /** For implementing CharSequence.
+ */
def subSequence(start: Int, end: Int): java.lang.CharSequence = substring(start, end)
- /* Appends the string representation of the <code>Any</code> argument.
+ /** Appends the given Char to the end of the sequence.
*/
def +=(x: Char): this.type = { append(x); this }
+ /** !!! This should create a new sequence.
+ */
def +(x: Char): this.type = { +=(x); this }
-
- /** <p>
- * Appends the string representation of the <code>Any</code>
- * argument.
- * </p>
- * <p>
- * The argument is converted to a string as if by the method
- * <code>String.valueOf</code>, and the characters of that
- * string are then appended to this sequence.
- * </p>
+ /** Appends the string representation of the given argument,
+ * which is converted to a String with String.valueOf.
*
* @param x an <code>Any</code> object.
- * @return a reference to this object.
+ * @return this StringBuilder.
*/
- def append(x: Any): StringBuilder =
- append(String.valueOf(x))
+ def append(x: Any): StringBuilder = {
+ underlying append String.valueOf(x)
+ this
+ }
- /** Appends the specified string to this character sequence.
+ /** Appends the given String to this sequence.
*
- * @param s a string.
- * @return a reference to this object.
+ * @param s a String.
+ * @return this StringBuilder.
*/
def append(s: String): StringBuilder = {
- val str = if (s == null) "null" else s
- val len = str.length
- ensureCapacity(count + len)
- str.getChars(0, len, array, count)
- count += len
+ underlying append s
this
}
@@ -270,589 +194,240 @@ final class StringBuilder(initCapacity: Int, private val initValue: String)
* @param sb
* @return
*/
- def append(sb: StringBuilder): StringBuilder =
- if (sb == null)
- append("null")
- else {
- val len = sb.length
- ensureCapacity(count + len)
- arraycopy(sb.toArray, 0, array, count, len)
- count += len
- this
- }
-
- /** <p>
- * Appends the string representation of the <code>Char</code> sequence
- * argument to this sequence.
- * </p>
- * <p>
- * The characters of the sequence argument are appended, in order,
- * to the contents of this sequence. The length of this sequence
- * increases by the length of the argument.
- * </p>
- *
- * @param x the characters to be appended.
- * @return a reference to this object.
- */
- def appendAll(x: Seq[Char]): StringBuilder =
- appendAll(x.toArray, 0, x.length)
-
- @deprecated("use appendAll instead. This method is deprecated because of the\n"+
- "possible confusion with `append(Any)'.")
- def append(x: Seq[Char]): StringBuilder =
- appendAll(x)
+ def append(sb: StringBuilder): StringBuilder = {
+ underlying append sb
+ this
+ }
- /** <p>
- * Appends the string representation of the <code>Char</code> array
- * argument to this sequence.
- * </p>
- * <p>
- * The characters of the array argument are appended, in order, to
- * the contents of this sequence. The length of this sequence
- * increases by the length of the argument.
- * </p>
+ /** Appends all the Chars in the given Seq[Char] to this sequence.
*
- * @param x the characters to be appended.
- * @return a reference to this object.
+ * @param xs the characters to be appended.
+ * @return this StringBuilder.
*/
- def appendAll(x: Array[Char]): StringBuilder =
- appendAll(x, 0, x.length)
+ def appendAll(xs: TraversableOnce[Char]): StringBuilder = appendAll(xs.toArray)
- @deprecated("use appendAll instead. This method is deprecated because\n"+
- "of the possible confusion with `append(Any)'.")
- def append(x: Array[Char]): StringBuilder =
- appendAll(x)
-
- /** <p>
- * Appends the string representation of a subarray of the
- * <code>char</code> array argument to this sequence.
- * </p>
- * <p>
- * Characters of the <code>Char</code> array <code>x</code>, starting at
- * index <code>offset</code>, are appended, in order, to the contents
- * of this sequence. The length of this sequence increases
- * by the value of <code>len</code>.
- * </p>
- *
- * @param x the characters to be appended.
- * @param offset the index of the first <code>Char</code> to append.
- * @param len the number of <code>Char</code>s to append.
- * @return a reference to this object.
- */
- def appendAll(x: Array[Char], offset: Int, len: Int): StringBuilder = {
- ensureCapacity(count + len)
- arraycopy(x, offset, array, count, len)
- count += len
+ /** Appends all the Chars in the given Array[Char] to this sequence.
+ *
+ * @param xs the characters to be appended.
+ * @return a reference to this object.
+ */
+ def appendAll(xs: Array[Char]): StringBuilder = {
+ underlying append xs
this
}
- @deprecated("use appendAll instead. This method is deprecated because\n"+
- "of the possible confusion with `append(Any, Int, Int)'.")
- def append(x: Array[Char], offset: Int, len: Int): StringBuilder =
- appendAll(x, offset, len)
-
- /** <p>
- * Appends the string representation of the <code>Boolean</code>
- * argument to the sequence.
- * </p>
- * <p>
- * The argument is converted to a string as if by the method
- * <code>String.valueOf</code>, and the characters of that
- * string are then appended to this sequence.
- * </p>
- *
- * @param x a <code>Boolean</code>.
- * @return a reference to this object.
- */
- def append(x: Boolean): StringBuilder = append(String.valueOf(x))
- def append(x: Byte): StringBuilder = append(String.valueOf(x))
-
- def append(x: Char): StringBuilder = {
- ensureCapacity(count + 1)
- array(count) = x
- count += 1
+ /** Appends a portion of the given Array[Char] to this sequence.
+ *
+ * @param xs the Array containing Chars to be appended.
+ * @param offset the index of the first Char to append.
+ * @param len the number of Chars to append.
+ * @return this StringBuilder.
+ */
+ def appendAll(xs: Array[Char], offset: Int, len: Int): StringBuilder = {
+ underlying.append(xs, offset, len)
this
}
- def append(x: Short): StringBuilder =
- append(String.valueOf(x))
-
- def append(x: Int): StringBuilder =
- append(String.valueOf(x))
-
- def append(x: Long): StringBuilder =
- append(String.valueOf(x))
-
- def append(x: Float): StringBuilder =
- append(String.valueOf(x))
-
- def append(x: Double): StringBuilder =
- append(String.valueOf(x))
+ /** Append the String representation of the given primitive type
+ * to this sequence. The argument is converted to a String with
+ * String.valueOf.
+ *
+ * @param x a primitive value
+ * @return This StringBuilder.
+ */
+ def append(x: Boolean): StringBuilder = { underlying append x ; this }
+ def append(x: Byte): StringBuilder = { underlying append x ; this }
+ def append(x: Short): StringBuilder = { underlying append x ; this }
+ def append(x: Int): StringBuilder = { underlying append x ; this }
+ def append(x: Long): StringBuilder = { underlying append x ; this }
+ def append(x: Float): StringBuilder = { underlying append x ; this }
+ def append(x: Double): StringBuilder = { underlying append x ; this }
+ def append(x: Char): StringBuilder = { underlying append x ; this }
- /** Removes the characters in a substring of this sequence.
- * The substring begins at the specified <code>start</code> and extends to
- * the character at index <code>end - 1</code> or to the end of the
- * sequence if no such character exists. If
- * <code>start</code> is equal to <code>end</code>, no changes are made.
+ /** Remove a subsequence of Chars from this sequence, starting at the
+ * given start index (inclusive) and extending to the end index (exclusive)
+ * or to the end of the String, whichever comes first.
*
* @param start The beginning index, inclusive.
* @param end The ending index, exclusive.
- * @return This object.
- * @throws StringIndexOutOfBoundsException if <code>start</code>
- * is negative, greater than <code>length()</code>, or
- * greater than <code>end</code>.
+ * @return This StringBuilder.
+ * @throws StringIndexOutOfBoundsException if start < 0 || start > end
*/
def delete(start: Int, end: Int): StringBuilder = {
- if (start < 0 || start > end)
- throw new StringIndexOutOfBoundsException(start)
- val end0 = if (end > count) count else end
- val len = end0 - start
- if (len > 0) {
- arraycopy(array, start + len, array, start, count - end0)
- count -= len
- }
+ underlying.delete(start, end)
this
}
- /** Replaces the characters in a substring of this sequence
- * with characters in the specified <code>String</code>. The substring
- * begins at the specified <code>start</code> and extends to the character
- * at index <code>end - 1</code> or to the end of the sequence if no such
- * character exists. First the characters in the substring are removed and
- * then the specified <code>String</code> is inserted at <code>start</code>.
+ /** Replaces a subsequence of Chars with the given String. The semantics
+ * are as in delete, with the String argument then inserted at index 'start'.
*
* @param start The beginning index, inclusive.
* @param end The ending index, exclusive.
- * @param str String that will replace previous contents.
- * @return This object.
- * @throws StringIndexOutOfBoundsException if <code>start</code>
- * is negative, greater than <code>length()</code>, or
- * greater than <code>end</code>.
- */
- def replace(start: Int, end: Int, str: String) {
- if (start < 0 || start > count || start > end)
- throw new StringIndexOutOfBoundsException(start)
-
- val end0 = if (end > count) count else end
- val len = str.length()
- val newCount = count + len - (end0 - start)
- ensureCapacity(newCount)
-
- arraycopy(array, end, array, start + len, count - end)
- str.getChars(0, len, array, start)
- count = newCount
+ * @param str The String to be inserted at the start index.
+ * @return This StringBuilder.
+ * @throws StringIndexOutOfBoundsException if start < 0, start > length, or start > end
+ */
+ def replace(start: Int, end: Int, str: String): StringBuilder = {
+ underlying.replace(start, end, str)
this
}
- /** Inserts the string representation of a subarray of the <code>str</code>
- * array argument into this sequence. The subarray begins at the specified
- * <code>offset</code> and extends <code>len</code> <code>char</code>s.
- * The characters of the subarray are inserted into this sequence at
- * the position indicated by <code>index</code>. The length of this
- * sequence increases by <code>len</code> <code>Char</code>s.
- *
- * @param index position at which to insert subarray.
- * @param str a <code>Char</code> array.
- * @param offset the index of the first <code>char</code> in subarray to
- * be inserted.
- * @param len the number of <code>Char</code>s in the subarray to
- * be inserted.
- * @return This object
- * @throws StringIndexOutOfBoundsException if <code>index</code>
- * is negative or greater than <code>length()</code>, or
- * <code>offset</code> or <code>len</code> are negative, or
- * <code>(offset+len)</code> is greater than
- * <code>str.length</code>.
+ /** Inserts a subarray of the given Array[Char] at the given index
+ * of this sequence.
+ *
+ * @param index index at which to insert the subarray.
+ * @param str the Array from which Chars will be taken.
+ * @param offset the index of the first Char to insert.
+ * @param len the number of Chars from 'str' to insert.
+ * @return This StringBuilder.
+ *
+ * @throws StringIndexOutOfBoundsException if index < 0, index > length,
+ * offset < 0, len < 0, or (offset + len) > str.length.
*/
def insertAll(index: Int, str: Array[Char], offset: Int, len: Int): StringBuilder = {
- if (index < 0 || index > count)
- throw new StringIndexOutOfBoundsException(index)
- if (offset < 0 || len < 0 || offset > str.length - len)
- throw new StringIndexOutOfBoundsException(
- "offset " + offset + ", len " + len +
- ", str.length " + str.length)
- ensureCapacity(count + len)
- arraycopy(array, index, array, index + len, count - index)
- arraycopy(str, offset, array, index, len)
- count += len
+ underlying.insert(index, str, offset, len)
this
}
- @deprecated("use insertAll instead. This method is deprecated because of the\n"+
- "possible confusion with `insert(Int, Any, Int, Int)'.")
- def insert(index: Int, str: Array[Char], offset: Int, len: Int): StringBuilder =
- insertAll(index, str, offset, len)
+ /** Inserts the String representation (via String.valueOf) of the given
+ * argument into this sequence at the given index.
+ *
+ * @param index the index at which to insert.
+ * @param x a value.
+ * @return this StringBuilder.
+ * @throws StringIndexOutOfBoundsException if the index is out of bounds.
+ */
+ def insert(index: Int, x: Any): StringBuilder = insert(index, String.valueOf(x))
- /** <p>
- * Inserts the string representation of the <code>Any</code>
- * argument into this character sequence.
- * </p>
- * <p>
- * The second argument is converted to a string as if by the method
- * <code>String.valueOf</code>, and the characters of that
- * string are then inserted into this sequence at the indicated
- * offset.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to
- * <code>0</code>, and less than or equal to the length of this
- * sequence.
- * </p>
- *
- * @param offset the offset.
- * @param x an <code>Any</code> value.
- * @return a reference to this object.
- * @throws StringIndexOutOfBoundsException if the offset is invalid.
- */
- def insert(at: Int, x: Any): StringBuilder =
- insert(at, String.valueOf(x))
-
- /** Inserts the string into this character sequence.
- *
- * @param at the offset position.
- * @param x a string.
- * @return a reference to this object.
- * @throws StringIndexOutOfBoundsException if the offset is invalid.
- */
- def insert(at: Int, x: String): StringBuilder = {
- if (at < 0 || at > count)
- throw new StringIndexOutOfBoundsException(at)
- val str = if (x == null) "null" else x
- val len = str.length
- ensureCapacity(count + len)
- arraycopy(array, at, array, at + len, count - at)
- str.getChars(0, len, array, at)
- count += len
+ /** Inserts the String into this character sequence.
+ *
+ * @param index the index at which to insert.
+ * @param x a String.
+ * @return this StringBuilder.
+ * @throws StringIndexOutOfBoundsException if the index is out of bounds.
+ */
+ def insert(index: Int, x: String): StringBuilder = {
+ underlying.insert(index, x)
this
}
- /** Inserts the string representation of the <code>Char</code> sequence
- * argument into this sequence.
+ /** Inserts the given Seq[Char] into this sequence at the given index.
*
- * @param at the offset position.
- * @param x a character sequence.
- * @return a reference to this object.
- * @throws StringIndexOutOfBoundsException if the offset is invalid.
+ * @param index the index at which to insert.
+ * @param xs the Seq[Char].
+ * @return this StringBuilder.
+ * @throws StringIndexOutOfBoundsException if the index is out of bounds.
*/
- def insertAll(at: Int, x: Seq[Char]): StringBuilder =
- insertAll(at, x.toArray)
-
- @deprecated("use insertAll instead. This method is deprecated because of\n"+
- "the possible confusion with `insert(Int, Any)'.")
- def insert(at: Int, x: Seq[Char]): StringBuilder =
- insertAll(at, x)
+ def insertAll(index: Int, xs: TraversableOnce[Char]): StringBuilder = insertAll(index, xs.toArray)
- /** Inserts the string representation of the <code>Char</code> array
- * argument into this sequence.
+ /** Inserts the given Array[Char] into this sequence at the given index.
*
- * @param at the offset position.
- * @param x a character array.
- * @return a reference to this object.
- * @throws StringIndexOutOfBoundsException if the offset is invalid.
- */
- def insertAll(at: Int, x: Array[Char]): StringBuilder = {
- if (at < 0 || at > count)
- throw new StringIndexOutOfBoundsException(at)
- val len = x.length
- ensureCapacity(count + len)
- arraycopy(array, at, array, at + len, count - at)
- arraycopy(x, 0, array, at, len)
- count += len
+ * @param index the index at which to insert.
+ * @param xs the Array[Char].
+ * @return this StringBuilder.
+ * @throws StringIndexOutOfBoundsException if the index is out of bounds.
+ */
+ def insertAll(index: Int, xs: Array[Char]): StringBuilder = {
+ underlying.insert(index, xs)
this
}
+ /** Calls String.valueOf on the given primitive value, and inserts the
+ * String at the given index.
+ *
+ * @param index the offset position.
+ * @param x a primitive value.
+ * @return this StringBuilder.
+ */
+ def insert(index: Int, x: Boolean): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Byte): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Short): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Int): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Long): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Float): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Double): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Char): StringBuilder = insert(index, String.valueOf(x))
+
+ @deprecated("Use appendAll instead. This method is deprecated because of the\n"+
+ "possible confusion with `append(Any)'.")
+ def append(x: Seq[Char]): StringBuilder = appendAll(x)
+
+ @deprecated("use appendAll instead. This method is deprecated because\n"+
+ "of the possible confusion with `append(Any)'.")
+ def append(x: Array[Char]): StringBuilder = appendAll(x)
+
+ @deprecated("use appendAll instead. This method is deprecated because\n"+
+ "of the possible confusion with `append(Any, Int, Int)'.")
+ def append(x: Array[Char], offset: Int, len: Int): StringBuilder = appendAll(x, offset, len)
+
+ @deprecated("use insertAll instead. This method is deprecated because of the\n"+
+ "possible confusion with `insert(Int, Any, Int, Int)'.")
+ def insert(index: Int, str: Array[Char], offset: Int, len: Int): StringBuilder =
+ insertAll(index, str, offset, len)
+
@deprecated("use insertAll instead. This method is deprecated because of\n"+
"the possible confusion with `insert(Int, Any)'.")
- def insert(at: Int, x: Array[Char]): StringBuilder =
- insertAll(at, x)
-
- /** <p>
- * Inserts the string representation of the <code>Boolean</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
- *
- * @param at the offset position.
- * @param x a <code>Boolean</code> value.
- * @return a reference to this object.
- */
- def insert(at: Int, x: Boolean): StringBuilder =
- insert(at, String.valueOf(x))
+ def insert(at: Int, x: Seq[Char]): StringBuilder = insertAll(at, x)
- /** <p>
- * Inserts the string representation of the <code>Byte</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
- *
- * @param at the offset position.
- * @param x a <code>Byte</code> value.
- * @return a reference to this object.
- */
- def insert(at: Int, x: Byte): StringBuilder =
- insert(at, String.valueOf(x))
+ @deprecated("use insertAll instead. This method is deprecated because of\n"+
+ "the possible confusion with `insert(Int, Any)'.")
+ def insert(at: Int, x: Array[Char]): StringBuilder = insertAll(at, x)
- /** <p>
- * Inserts the string representation of the <code>Char</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
+ /** Finds the index of the first occurrence of the specified substring.
*
- * @param at the offset position.
- * @param x a <code>Char</code> value.
- * @return a reference to this object.
+ * @param str the target string to search for
+ * @return the first applicable index where target occurs, or -1 if not found.
*/
- def insert(at: Int, x: Char): StringBuilder = {
- if (at < 0 || at > count)
- throw new StringIndexOutOfBoundsException(at)
- ensureCapacity(count + 1)
- arraycopy(array, at, array, at + 1, count - at)
- array(at) = x
- count += 1
- this
- }
+ def indexOf(str: String): Int = underlying.indexOf(str)
- /** <p>
- * Inserts the string representation of the <code>Short</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
- *
- * @param at the offset position.
- * @param x a <code>Short</code> value.
- * @return a reference to this object.
+ /** Finds the index of the first occurrence of the specified substring.
+ *
+ * @param str the target string to search for
+ * @param fromIndex the smallest index in the source string to consider
+ * @return the first applicable index where target occurs, or -1 if not found.
*/
- def insert(at: Int, x: Short): StringBuilder =
- insert(at, String.valueOf(x))
+ def indexOf(str: String, fromIndex: Int): Int = underlying.indexOf(str, fromIndex)
- /** <p>
- * Inserts the string representation of the <code>Int</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
+ /** Finds the index of the last occurrence of the specified substring.
*
- * @param at the offset position.
- * @param x a <code>Int</code> value.
- * @return a reference to this object.
+ * @param str the target string to search for
+ * @return the last applicable index where target occurs, or -1 if not found.
*/
- def insert(at: Int, x: Int): StringBuilder =
- insert(at, String.valueOf(x))
+ def lastIndexOf(str: String): Int = underlying.lastIndexOf(str)
- /** <p>
- * Inserts the string representation of the <code>Long</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
+ /** Finds the index of the last occurrence of the specified substring.
*
- * @param at the offset position.
- * @param x a <code>Long</code> value.
- * @return a reference to this object.
+ * @param str the target string to search for
+ * @param fromIndex the smallest index in the source string to consider
+ * @return the last applicable index where target occurs, or -1 if not found.
*/
- def insert(at: Int, x: Long): StringBuilder =
- insert(at, String.valueOf(x))
+ def lastIndexOf(str: String, fromIndex: Int): Int = underlying.lastIndexOf(str, fromIndex)
- /** <p>
- * Inserts the string representation of the <code>Float</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
+ /** Creates a new StringBuilder with the reversed contents of this one.
+ * If surrogate pairs are present, they are treated as indivisible units: each
+ * pair will appear in the same order in the updated sequence.
*
- * @param at the offset position.
- * @param x a <code>Float</code> value.
- * @return a reference to this object.
+ * @return the reversed StringBuilder
*/
- def insert(at: Int, x: Float): StringBuilder =
- insert(at, String.valueOf(x))
+ @migration(2, 8, "Since 2.8 reverse returns a new instance. Use 'reverseContents' to update in place.")
+ override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying) reverse)
- /** <p>
- * Inserts the string representation of the <code>Double</code> argument
- * into this sequence.
- * </p>
- * <p>
- * The offset argument must be greater than or equal to 0, and less than
- * or equal to the length of this sequence.
- * </p>
+ override def clone(): StringBuilder = new StringBuilder(new JavaStringBuilder(underlying))
+
+ /** Like reverse, but destructively updates the target StringBuilder.
*
- * @param at the offset position.
- * @param x a <code>Double</code> value.
- * @return a reference to this object.
+ * @return the reversed StringBuilder (same as the target StringBuilder)
*/
- def insert(at: Int, x: Double): StringBuilder =
- insert(at, String.valueOf(x))
-
- /** <p>
- * Returns the index within this string of the first occurrence of the
- * specified substring. The integer returned is the smallest value
- * <i>k</i> such that:
- * </p>
- * <blockquote><pre>
- * this.toString().startsWith(str, <i>k</i>)</pre>
- * </blockquote>
- * <p>
- * is <code>true</code>.
- * </p>
- *
- * @param str any string.
- * @return if the string argument occurs as a substring within this
- * object, then the index of the first character of the first
- * such substring is returned; if it does not occur as a
- * substring, <code>-1</code> is returned.
- * @throws NullPointerException if <code>str</code> is <code>null</code>.
- */
- def indexOf(str: String): Int = indexOf(str, 0)
-
- /** <p>
- * Returns the index within this string of the first occurrence of the
- * specified substring, starting at the specified index. The integer
- * returned is the smallest value <code>k</code> for which:
- * </p><pre>
- * k >= Math.min(fromIndex, str.length()) &&
- * this.toString().startsWith(str, k)</pre>
- * <p>
- * If no such value of <code>k</code> exists, then <code>-1</code>
- * is returned.
- * </p>
- *
- * @param str the substring for which to search.
- * @param fromIndex the index from which to start the search.
- * @return the index within this string of the first occurrence
- * of the specified substring, starting at the specified index.
- */
- def indexOf(str: String, fromIndex: Int): Int = indexOfSeq(str.toIndexedSeq, fromIndex)
-
- /** <p>
- * Returns the index within this string of the rightmost occurrence
- * of the specified substring. The rightmost empty string "" is
- * considered to occur at the index value <code>this.length()</code>.
- * The returned index is the largest value <i>k</i> such that
- * </p>
- * <blockquote><pre>
- * this.toString().startsWith(str, k)</pre>
- * </blockquote>
- * <p>
- * is true.
- * </p>
- *
- * @param str the substring to search for.
- * @return if the string argument occurs one or more times as a substring
- * within this object, then the index of the first character of
- * the last such substring is returned. If it does not occur as
- * a substring, <code>-1</code> is returned.
- * @throws NullPointerException if <code>str</code> is <code>null</code>.
- */
- def lastIndexOf(str: String): Int = lastIndexOf(str, count)
-
- /** <p>
- * Returns the index within this string of the last occurrence of the
- * specified substring. The integer returned is the largest value
- * <code>k</code> such that:
- * </p><pre>val
- * k <= Math.min(fromIndex, str.length()) &&
- * this.toString().startsWith(str, k)</pre>
- * <p>
- * If no such value of <code>k</code> exists, then <code>-1</code>
- * is returned.
- * </p>
- *
- * @param str the substring to search for.
- * @param fromIndex the index to start the search from.
- * @return the index within this sequence of the last occurrence
- * of the specified substring.
- */
- def lastIndexOf(str: String, fromIndex: Int): Int = lastIndexOfSeq(str.toIndexedSeq, fromIndex)
-
- /** <p>
- * Causes this character sequence to be replaced by the reverse of the
- * sequence. If there are any surrogate pairs included in the sequence,
- * these are treated as single characters for the reverse operation.
- * Thus, the order of the high-low surrogates is never reversed.
- * </p>
- * <p>
- * Let <i>n</i> be the character length of this character sequence
- * (not the length in <code>Char</code> values) just prior to
- * execution of the <code>reverse</code> method. Then the
- * character at index <i>k</i> in the new character sequence is
- * equal to the character at index <i>n-k-1</i> in the old
- * character sequence.
- * </p>
- *
- * @return a reference to this object.
- */
- override def reverse(): StringBuilder = {
- var hasSurrogate = false
- val n = count - 1
- var j = (n-1) >> 1
- while (j >= 0) {
- val temp = array(j)
- val temp2 = array(n - j)
- if (!hasSurrogate)
- hasSurrogate =
- (temp >= Character.MIN_HIGH_SURROGATE && temp <= Character.MAX_LOW_SURROGATE) ||
- (temp2 >= Character.MIN_HIGH_SURROGATE && temp2 <= Character.MAX_LOW_SURROGATE)
- array(j) = temp2
- array(n - j) = temp
- j -= 1
- }
- if (hasSurrogate) {
- // Reverse back all valid surrogate pairs
- var i = 0
- while (i < count - 1) {
- val c2 = array(i)
- if (Character.isLowSurrogate(c2)) {
- val c1 = array(i + 1)
- if (Character.isHighSurrogate(c1)) {
- array(i) = c1; i += 1
- array(i) = c2
- }
- }
- i += 1
- }
- }
+ def reverseContents(): StringBuilder = {
+ underlying.reverse()
this
}
- /** Returns a string representing the data in this sequence.
- * A new <code>String</code> object is allocated and initialized to
- * contain the character sequence currently represented by this
- * object. This <code>String</code> is then returned. Subsequent
- * changes to this sequence do not affect the contents of the
- * <code>String</code>.
+ /** Returns a new String representing the data in this sequence.
*
- * @return a string representation of this sequence of characters.
+ * @return the current contents of this sequence as a String
*/
- override def toString: String = new String(array, 0, count)
+ override def toString = underlying.toString
def result(): String = toString
}
-
-
-object StringBuilder
-{
- // method <code>java.util.Arrays.copyOf</code> exists since 1.6
- private def copyOf(src: Array[Char], newLength: Int): Array[Char] = {
- val dest = new Array[Char](newLength)
- arraycopy(src, 0, dest, 0, Math.min(src.length, newLength))
- dest
- }
-}
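A brief sketch (outside the patch) of the rewritten StringBuilder, which now delegates to a java.lang.StringBuilder; it contrasts the non-destructive reverse introduced in 2.8 with reverseContents. The demo name is illustrative.

    import scala.collection.mutable.StringBuilder

    object StringBuilderDemo {
      def main(args: Array[String]): Unit = {
        val sb = new StringBuilder("abc")
        sb append 1 append '!'        // primitive overloads delegate to the underlying builder
        println(sb.toString)          // abc1!

        val rev = sb.reverse          // since 2.8 this returns a new instance
        println(rev.toString)         // !1cba
        println(sb.toString)          // abc1! (unchanged)

        sb.reverseContents()          // destructive variant updates sb in place
        println(sb.toString)          // !1cba
      }
    }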
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index abf92f0840..a89745b107 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -19,9 +18,10 @@ package mutable
* target="contentFrame"><code>Publisher</code></a>.
*
* @author Matthias Zenger
- * @version 1.0, 08/07/2003
+ * @author Martin Odersky
+ * @version 2.8
* @since 1
*/
-trait Subscriber[-A, -B] {
- def notify(pub: B, event: A): Unit
+trait Subscriber[-Evt, -Pub] {
+ def notify(pub: Pub, event: Evt): Unit
}
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index 590757be61..817657af53 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,12 +13,16 @@ package mutable
import script._
-/** This class should be used as a mixin. It synchronizes the <code>Buffer</code>
+/** This class should be used as a mixin. It synchronizes the `Buffer`
* methods of the class into which it is mixed in.
*
+ * @tparam A type of the elements contained in this buffer.
+ *
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
+ * @define Coll SynchronizedBuffer
+ * @define coll synchronized buffer
*/
trait SynchronizedBuffer[A] extends Buffer[A] {
@@ -42,7 +45,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param elem the element to append.
*/
- override def +(elem: A): Buffer[A] = synchronized {
+ override def +(elem: A): Self = synchronized {
super.+(elem)
}
@@ -54,23 +57,23 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
super.+=(elem)
}
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
- * buffer is returned.
+ /** Appends a number of elements provided by a traversable object via
+ * its `foreach` method.
+ * The identity of the buffer is returned.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
*/
- override def ++(iter: Traversable[A]): Buffer[A] = synchronized {
- super.++(iter)
+ override def ++(xs: TraversableOnce[A]): Self = synchronized {
+ super.++(xs)
}
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method.
+ /** Appends a number of elements provided by a traversable object
+ * via its `foreach` method.
*
* @param iter the iterable object.
*/
- override def ++=(iter: Traversable[A]): this.type = synchronized[this.type] {
- super.++=(iter)
+ override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
+ super.++=(xs)
}
/** Appends a sequence of elements to this buffer.
@@ -81,13 +84,13 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
super.++=(elems)
}
- /** Appends a number of elements provided by an iterable object
- * via its <code>iterator</code> method.
+ /** Appends a number of elements provided by a traversable object
+ * via its <code>foreach</code> method.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
*/
- override def appendAll(iter: Traversable[A]): Unit = synchronized {
- super.appendAll(iter)
+ override def appendAll(xs: TraversableOnce[A]): Unit = synchronized {
+ super.appendAll(xs)
}
/** Prepend a single element to this buffer and return
@@ -95,36 +98,32 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param elem the element to append.
*/
- abstract override def +=:(elem: A): Buffer[A] = synchronized {
+ abstract override def +=:(elem: A): this.type = synchronized[this.type] {
super.+=:(elem)
}
- /** Prepends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
+ /** Prepends a number of elements provided by a traversable object
+ * via its <code>foreach</code> method. The identity of the
* buffer is returned.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
*/
- override def ++=:(iter: Traversable[A]): Buffer[A] = synchronized {
- super.++=:(iter)
- }
+ override def ++=:(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=:(xs) }
/** Prepend an element to this list.
*
* @param elem the element to prepend.
*/
- override def prepend(elems: A*): Unit = synchronized {
- super.prependAll(elems)
- }
+ override def prepend(elems: A*): Unit = prependAll(elems)
- /** Prepends a number of elements provided by an iterable object
- * via its <code>iterator</code> method. The identity of the
+ /** Prepends a number of elements provided by a traversable object
+ * via its <code>foreach</code> method. The identity of the
* buffer is returned.
*
- * @param iter the iterable object.
+ * @param xs the traversable object.
*/
- override def prependAll(elems: Traversable[A]): Unit = synchronized {
- super.prependAll(elems)
+ override def prependAll(xs: TraversableOnce[A]): Unit = synchronized {
+ super.prependAll(xs)
}
/** Inserts new elements at the index <code>n</code>. Opposed to method
@@ -143,10 +142,10 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
* one. Instead, it will insert a new element at index <code>n</code>.
*
* @param n the index where a new element will be inserted.
- * @param iter the iterable object providing all elements to insert.
+ * @param xs the traversable object providing all elements to insert.
*/
- abstract override def insertAll(n: Int, iter: Traversable[A]): Unit = synchronized {
- super.insertAll(n, iter)
+ abstract override def insertAll(n: Int, xs: Traversable[A]): Unit = synchronized {
+ super.insertAll(n, xs)
}
/** Replace element at index <code>n</code> with the new element
@@ -181,7 +180,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @return an <code>ArrayBuffer</code> with the same elements.
*/
- override def clone(): Buffer[A] = synchronized {
+ override def clone(): Self = synchronized {
super.clone()
}
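A small sketch of the usual mixin pattern for the trait above; names are illustrative.

    import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}

    object SyncBufferDemo {
      def main(args: Array[String]): Unit = {
        // mixing the trait into a concrete Buffer wraps its operations in synchronized blocks
        val buf = new ArrayBuffer[Int] with SynchronizedBuffer[Int]
        buf += 1
        buf ++= List(2, 3)        // single TraversableOnce overload after this change
        List(4, 5) ++=: buf       // the prepend counterpart, also TraversableOnce now
        println(buf)              // ArrayBuffer(4, 5, 1, 2, 3)
      }
    }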
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 650c939936..bf294e548e 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -1,24 +1,29 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
+import annotation.migration
-/** This class should be used as a mixin. It synchronizes the <code>Map</code>
+/** This class should be used as a mixin. It synchronizes the `Map`
* functions of the class into which it is mixed in.
*
+ * @tparam A type of the keys contained in this map.
+ * @tparam B type of the values associated with keys.
+ *
* @author Matthias Zenger, Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
+ * @define Coll SynchronizedMap
+ * @define coll synchronized map
*/
trait SynchronizedMap[A, B] extends Map[A, B] {
@@ -35,22 +40,23 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def getOrElseUpdate(key: A, default: => B): B = synchronized { super.getOrElseUpdate(key, default) }
override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) }
override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) }
- override def valuesIterable: scala.collection.Iterable[B] = synchronized { super.valuesIterable }
- @deprecated("Use `valuesIterator' instead") override def values: Iterator[B] = synchronized { super.valuesIterator }
+ @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ override def values: collection.Iterable[B] = synchronized { super.values }
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
- override def clone() = synchronized { super.clone() }
+ override def clone(): Self = synchronized { super.clone() }
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
override def apply(key: A): B = synchronized { super.apply(key) }
- override def keySet: scala.collection.Set[A] = synchronized { super.keySet }
- @deprecated("Use `keysIterator' instead") override def keys: Iterator[A] = synchronized { super.keysIterator }
+ override def keySet: collection.Set[A] = synchronized { super.keySet }
+ @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ override def keys: collection.Iterable[A] = synchronized { super.keys }
override def keysIterator: Iterator[A] = synchronized { super.keysIterator }
override def isEmpty: Boolean = synchronized { super.isEmpty }
override def contains(key: A): Boolean = synchronized {super.contains(key) }
override def isDefinedAt(key: A) = synchronized { super.isDefinedAt(key) }
- @deprecated("See Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) }
+ // @deprecated("See Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) }
// can't override -, -- same type!
- // @deprecated override def -(key: A): This = synchronized { super.-(key) }
+ // @deprecated override def -(key: A): Self = synchronized { super.-(key) }
// !!! todo: also add all other methods
}
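A usage sketch of the map mixin, including the 2.8 behaviour flagged by the @migration annotations (keys and values now return Iterables); names are illustrative.

    import scala.collection.mutable.{HashMap, SynchronizedMap}

    object SyncMapDemo {
      def main(args: Array[String]): Unit = {
        val m = new HashMap[String, Int] with SynchronizedMap[String, Int]
        m("one") = 1
        m("two") = 2
        println(m.keys.toList.sorted)   // keys is an Iterable[String] as of 2.8
        println(m.values.sum)           // values is an Iterable[Int] as of 2.8
      }
    }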
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 0a1cb0f0ee..41eb1823ae 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -1,24 +1,27 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** This class implements synchronized priority queues using a heap.
- * The elements of the queue have to be ordered in terms of the
- * <code>Ordered[T]</code> class.
+/** This class implements synchronized priority queues using a binary heap.
+ * The elements of the queue have to be ordered in terms of the `Ordered[T]` class.
+ *
+ * @tparam A type of the elements contained in this synchronized priority queue
+ * @param ord implicit ordering used to compared elements of type `A`
*
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
+ * @define Coll SynchronizedPriorityQueue
+ * @define coll synchronized priority queue
*/
class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] {
@@ -39,25 +42,13 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
this
}
- /** Adds all elements provided by an <code>Iterable</code> object
- * into the priority queue.
- *
- * @param iter an iterable object
- */
- def ++=(iter: scala.collection.Iterable[A]): this.type = {
- synchronized {
- super.++=(iter)
- }
- this
- }
-
- /** Adds all elements provided by an iterator into the priority queue.
+ /** Adds all elements of a traversable object into the priority queue.
*
- * @param it an iterator
+ * @param xs a traversable object
*/
- override def ++=(it: Iterator[A]): this.type = {
+ override def ++=(xs: TraversableOnce[A]): this.type = {
synchronized {
- super.++=(it)
+ super.++=(xs)
}
this
}
@@ -87,7 +78,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
*/
override def clear(): Unit = synchronized { super.clear }
- /** Returns an iterator which yiels all the elements of the priority
+ /** Returns an iterator which yields all the elements of the priority
* queue in descending priority order.
*
* @return an iterator over all elements sorted in descending order.
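A short sketch of the priority queue after the ++= consolidation; the object name is illustrative.

    import scala.collection.mutable.SynchronizedPriorityQueue

    object SyncPQDemo {
      def main(args: Array[String]): Unit = {
        val pq = new SynchronizedPriorityQueue[Int]
        pq ++= List(3, 1, 2)      // one TraversableOnce overload replaces the Iterable/Iterator pair
        println(pq.dequeue())     // 3: elements come out in descending priority order
      }
    }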
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 379139cb4f..95939dd8f2 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -1,25 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** This is a synchronized version of the <code>Queue[T]</code> class. It
+/** This is a synchronized version of the `Queue[T]` class. It
* implements a data structure that allows one to insert and retrieve
* elements in a first-in-first-out (FIFO) manner.
*
+ * @tparam A type of elements contained in this synchronized queue.
+ *
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
+ * @define Coll SynchronizedQueue
+ * @define coll synchronized queue
*/
class SynchronizedQueue[A] extends Queue[A] {
import scala.collection.Traversable
@@ -36,21 +39,13 @@ class SynchronizedQueue[A] extends Queue[A] {
*/
override def +=(elem: A): this.type = synchronized[this.type] { super.+=(elem) }
- /** Adds all elements provided by an <code>Iterable</code> object
- * at the end of the queue. The elements are prepended in the order they
- * are given out by the iterator.
- *
- * @param iter an iterable object
- */
- override def ++=(iter: Traversable[A]): this.type = synchronized[this.type] { super.++=(iter) }
-
- /** Adds all elements provided by an iterator
+ /** Adds all elements provided by a `TraversableOnce` object
 * at the end of the queue. The elements are appended in the order they
* are given out by the iterator.
*
- * @param it an iterator
+ * @param xs a traversable object
*/
- override def ++=(it: Iterator[A]): this.type = synchronized[this.type] { super.++=(it) }
+ override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=(xs) }
/** Adds all elements to the queue.
*
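Likewise for the synchronized queue, a minimal sketch with illustrative names:

    import scala.collection.mutable.SynchronizedQueue

    object SyncQueueDemo {
      def main(args: Array[String]): Unit = {
        val q = new SynchronizedQueue[String]
        q += "a"
        q ++= List("b", "c")      // appended at the end, in traversal order
        println(q.dequeue())      // a: first in, first out
      }
    }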
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index a31d0dfb88..bd5e42a57c 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -1,24 +1,27 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import script._
-/** This class should be used as a mixin. It synchronizes the <code>Set</code>
+/** This class should be used as a mixin. It synchronizes the `Set`
* functions of the class into which it is mixed in.
*
+ * @tparam A type of the elements contained in this synchronized set.
+ *
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
+ * @define Coll SynchronizedSet
+ * @define coll synchronized set
*/
trait SynchronizedSet[A] extends Set[A] {
import scala.collection.Traversable
@@ -39,24 +42,16 @@ trait SynchronizedSet[A] extends Set[A] {
super.+=(elem)
}
- override def ++=(that: Traversable[A]): this.type = synchronized[this.type] {
- super.++=(that)
- }
-
- override def ++=(it: Iterator[A]): this.type = synchronized[this.type] {
- super.++=(it)
+ override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
+ super.++=(xs)
}
abstract override def -=(elem: A): this.type = synchronized[this.type] {
super.-=(elem)
}
- override def --=(that: Traversable[A]): this.type = synchronized[this.type] {
- super.--=(that)
- }
-
- override def --=(it: Iterator[A]): this.type = synchronized[this.type] {
- super.--=(it)
+ override def --=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
+ super.--=(xs)
}
override def update(elem: A, included: Boolean): Unit = synchronized {
@@ -103,7 +98,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.<<(cmd)
}
- override def clone(): Set[A] = synchronized {
+ override def clone(): Self = synchronized {
super.clone()
}
}
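For readers tracking the API change above: the separate `Traversable` and `Iterator` overloads of `++=`/`--=` collapse into a single `TraversableOnce` overload. A minimal usage sketch, assuming the synchronized trait is mixed into a plain `HashSet` (values are illustrative):

{{{
import scala.collection.mutable

// Mix the synchronized wrapper into a concrete set; every bulk addition or
// removal now goes through the single TraversableOnce overload.
val set = new mutable.HashSet[Int] with mutable.SynchronizedSet[Int]

set ++= List(1, 2, 3)       // a Traversable is a TraversableOnce
set ++= Iterator(4, 5, 6)   // ...and so is an Iterator
set --= Seq(2, 4)

println(set)                // e.g. Set(1, 3, 5, 6) -- iteration order is unspecified
}}}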
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index 069843f822..bb8558ec36 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -1,25 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** This is a synchronized version of the <code>Stack[T]</code> class. It
+/** This is a synchronized version of the `Stack[T]` class. It
* implements a data structure which allows to store and retrieve
* objects in a last-in-first-out (LIFO) fashion.
*
+ * @tparam A type of the elements contained in this stack.
+ *
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
+ * @define Coll SynchronizedStack
+ * @define coll synchronized stack
*/
class SynchronizedStack[A] extends Stack[A] {
import scala.collection.Traversable
@@ -39,26 +42,20 @@ class SynchronizedStack[A] extends Stack[A] {
/** Push two or more elements onto the stack. The last element
* of the sequence will be on top of the new stack.
*
- * @param elems the element sequence.
- * @return the stack with the new elements on top.
+ * @param elem1 the first element to push.
+ * @param elem2 the second element to push.
+ * @param elems the element sequence that will be pushed.
+ * @return the stack with the new elements on top.
*/
override def push(elem1: A, elem2: A, elems: A*): this.type = synchronized[this.type] { super.push(elem1, elem2, elems: _*) }
- /** Pushes all elements provided by an <code>Traversable</code> object
- * on top of the stack. The elements are pushed in the order they
- * are given out by the iterator.
- *
- * @param iter an iterable object
- */
- override def pushAll(elems: scala.collection.Traversable[A]): this.type = synchronized[this.type] { super.pushAll(elems) }
-
- /** Pushes all elements provided by an iterator
- * on top of the stack. The elements are pushed in the order they
- * are given out by the iterator.
+ /** Pushes all elements provided by a traversable object
+ * on top of the stack. The elements are pushed in the order the
+ * traversable object is traversed.
*
- * @param elems an iterator
+ * @param xs a traversable object
*/
- override def pushAll(elems: Iterator[A]): this.type = synchronized[this.type] { super.pushAll(elems) }
+ override def pushAll(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.pushAll(xs) }
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index 9e18368111..ea49dd68a7 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -14,12 +13,9 @@ package mutable
import generic._
-/** A subtrait of <code>collection.Traversable</code> which represents
- * traversables that can be mutated.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
+/** A trait for traversable collections that can be mutated.
+ * $traversableInfo
+ * @define mutability mutable
*/
trait Traversable[A] extends scala.collection.Traversable[A]
with GenericTraversableTemplate[A, Traversable]
@@ -28,10 +24,10 @@ trait Traversable[A] extends scala.collection.Traversable[A]
override def companion: GenericCompanion[Traversable] = Traversable
}
-/** A factory object for the trait <code>Traversable</code>.
- *
- * @author Martin Odersky
- * @version 2.8
+/** $factoryInfo
+ * The current default implementation of a $Coll is an `ArrayBuffer`.
+ * @define coll mutable traversable collection
+ * @define Coll mutable.Traversable
*/
object Traversable extends TraversableFactory[Traversable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = new GenericCanBuildFrom[A]
diff --git a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala
index f6cde4dd03..1a7f67acab 100644
--- a/src/library/scala/collection/mutable/Undoable.scala
+++ b/src/library/scala/collection/mutable/Undoable.scala
@@ -1,20 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
-/** Classes that mix in the <code>Undoable</code> class provide an operation
- * <code>undo</code> which can be used to undo the last operation.
+/** Classes that mix in the `Undoable` class provide an operation
+ * `undo` which can be used to undo the last operation.
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 11a12957b6..75bc317f4a 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -1,22 +1,51 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
package mutable
import JavaConversions._
+import generic._
-/**
- * @since 2.8
+
+/** A hash map with references to entries which are weakly reachable.
+ *
+ * @tparam A type of keys contained in this map
+ * @tparam B type of values associated with the keys
+ *
+ * @since 2.8
+ * @define Coll WeakHashMap
+ * @define coll weak hash map
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is always `WeakHashMap[A, B]` if the elements contained in the resulting collection are
+ * pairs of type `(A, B)`. This is because an implicit of type `CanBuildFrom[WeakHashMap, (A, B), WeakHashMap[A, B]]`
+ * is defined in object `WeakHashMap`. Otherwise, `That` resolves to the most specific type that doesn't have
+ * to contain pairs of type `(A, B)`, which is `Iterable`.
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `B`. This is usually the `canBuildFrom` value
+ * defined in object `WeakHashMap`.
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
-class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap) {
+class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap)
+ with JMapWrapperLike[A, B, WeakHashMap[A, B]] {
override def empty = new WeakHashMap[A, B]
}
+
+/** $factoryInfo
+ * @define Coll WeakHashMap
+ * @define coll weak hash map
+ */
+object WeakHashMap extends MutableMapFactory[WeakHashMap] {
+ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), WeakHashMap[A, B]] = new MapCanBuildFrom[A, B]
+ def empty[A, B]: WeakHashMap[A, B] = new WeakHashMap[A, B]
+}
+
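With the new companion object in place, `WeakHashMap` gains the usual factory `apply` and a `CanBuildFrom`, so pair-producing transformers should keep the weak-map type. A small sketch (keys and values are illustrative):

{{{
import scala.collection.mutable.WeakHashMap

val cache = WeakHashMap("a" -> 1, "b" -> 2)            // built via MutableMapFactory
val doubled = cache map { case (k, v) => (k, v * 2) }  // result stays a WeakHashMap
                                                       // thanks to canBuildFrom

// Keys are held by weak references: once a key is no longer strongly
// reachable elsewhere, its entry may vanish after a garbage collection.
}}}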
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index a0f8109a9b..6aea49ec9d 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -16,11 +15,19 @@ import scala.reflect.ClassManifest
import scala.collection.generic._
/**
- * <p>A class representing <code>Array[T]</code></p>
+ * A class representing `Array[T]`.
+ *
+ * @tparam T type of the elements in this wrapped array.
*
* @author Martin Odersky, Stephane Micheloud
* @version 1.0
* @since 2.8
+ * @define Coll WrappedArray
+ * @define coll wrapped array
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
*/
abstract class WrappedArray[T] extends IndexedSeq[T] with ArrayLike[T, WrappedArray[T]] {
@@ -41,16 +48,27 @@ abstract class WrappedArray[T] extends IndexedSeq[T] with ArrayLike[T, WrappedAr
/** The underlying array */
def array: Array[T]
+
+ override def toArray[U >: T : ClassManifest]: Array[U] =
+ if (implicitly[ClassManifest[U]].erasure eq array.getClass.getComponentType)
+ array.asInstanceOf[Array[U]]
+ else
+ super.toArray[U]
+
override def stringPrefix = "WrappedArray"
+ /** Clones this object, including the underlying Array. */
+ override def clone: WrappedArray[T] = WrappedArray make array.clone()
+
/** Creates new builder for this collection ==> move to subclasses
*/
override protected[this] def newBuilder: Builder[T, WrappedArray[T]] =
new WrappedArrayBuilder[T](elemManifest)
}
+/** A companion object used to create instances of `WrappedArray`.
+ */
object WrappedArray {
-
def make[T](x: AnyRef): WrappedArray[T] = x match {
case x: Array[AnyRef] => wrapRefArray[AnyRef](x).asInstanceOf[WrappedArray[T]]
case x: Array[Int] => wrapIntArray(x).asInstanceOf[WrappedArray[T]]
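A short sketch of the additions to `WrappedArray` above: `toArray` may hand back the underlying array unchanged when the requested component type matches, and `clone` copies the underlying array as well (values are illustrative):

{{{
import scala.collection.mutable.WrappedArray

val wrapped: WrappedArray[Int] = Array(1, 2, 3)   // via Predef.wrapIntArray
val copy = wrapped.clone                          // clones the underlying Array too
val back: Array[Int] = wrapped.toArray            // same component type: no copy needed

copy(0) = 99
println(wrapped(0))   // still 1 -- the clone does not share the original array
}}}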
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index 5b54f43b38..625f2d831f 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
@@ -15,7 +14,10 @@ package mutable
import generic._
import scala.reflect.ClassManifest
-/** A builder class for arrays
+/** A builder class for arrays.
+ *
+ * @tparam A type of elements that can be added to this builder.
+ * @param manifest class manifest for objects of type `A`.
*
* @since 2.8
*/
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index d5161a0f1a..19d65b73e2 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -1,10 +1,79 @@
package scala
+/**
+ * Contains the base traits and objects needed to use and extend Scala's collection library.
+ *
+ * == Guide ==
+ *
+ * A detailed guide for the collections library is available
+ * at [[http://www.scala-lang.org/docu/files/collections-api]].
+ *
+ * == Using Collections ==
+ *
+ * It is convenient to treat all collections as either
+ * a [[scala.collection.Traversable]] or [[scala.collection.Iterable]], as
+ * these traits define the vast majority of operations
+ * on a collection.
+ *
+ * Collections can, of course, be treated as specifically as needed, and
+ * the library is designed to ensure that
+ * the methods that transform collections will return a collection of the same
+ * type: {{{
+ * scala> val array = Array(1,2,3,4,5,6)
+ * array: Array[Int] = Array(1, 2, 3, 4, 5, 6)
+ *
+ * scala> array map { _.toString }
+ * res0: Array[java.lang.String] = Array(1, 2, 3, 4, 5, 6)
+ *
+ * scala> val list = List(1,2,3,4,5,6)
+ * list: List[Int] = List(1, 2, 3, 4, 5, 6)
+ *
+ * scala> list map { _.toString }
+ * res1: List[java.lang.String] = List(1, 2, 3, 4, 5, 6)
+ *
+ * }}}
+ *
+ * == Creating Collections ==
+ *
+ * The most common way to create a collection is to use the companion objects as factories.
+ * Of these, the three most common
+ * are [[scala.collection.immutable.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
+ * companion objects are all available
+ * as type aliases in either the [[scala]] package or in `scala.Predef`, and can be used
+ * like so:
+ * {{{
+ * scala> val seq = Seq(1,2,3,4,1)
+ * seq: Seq[Int] = List(1, 2, 3, 4, 1)
+ *
+ * scala> val set = Set(1,2,3,4,1)
+ * set: scala.collection.immutable.Set[Int] = Set(1, 2, 3, 4)
+ *
+ * scala> val map = Map(1 -> "one",2 -> "two", 3 -> "three",2 -> "too")
+ * map: scala.collection.immutable.Map[Int,java.lang.String] = Map((1,one), (2,too), (3,three))
+ * }}}
+ *
+ * It is also typical to prefer the [[scala.collection.immutable]] collections over those
+ * in [[scala.collection.mutable]]; the types aliased in the [[scala]] package and
+ * the `scala.Predef` object are the immutable versions.
+ *
+ * Also note that the collections library was carefully designed to include several implementations of
+ * each of the three basic collection types. These implementations have specific performance
+ * characteristics which are described
+ * in [[http://www.scala-lang.org/docu/files/collections-api the guide]].
+ *
+ * === Converting between Java and Scala Collections ===
+ *
+ * The `JavaConversions` object provides implicit defs that will allow mostly seamless integration
+ * between Java Collections-based APIs and the Scala collections library.
+ *
+ */
package object collection {
import scala.collection.generic.CanBuildFrom // can't refer to CanBuild here
+ /** Provides a CanBuildFrom instance that builds a specific target collection (`To') irrespective of the original collection (`From').
+ */
def breakOut[From, T, To](implicit b : CanBuildFrom[Nothing, T, To]) =
- new CanBuildFrom[From, T, To] {
+ new CanBuildFrom[From, T, To] { // TODO: could we just return b instead?
def apply(from: From) = b.apply() ; def apply() = b.apply()
}
-} \ No newline at end of file
+}
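For readers new to `breakOut`, a hedged sketch of the call site it enables (the collections chosen are illustrative): by passing it where the `CanBuildFrom` is expected, a transformation builds directly into a different target type instead of the source's own.

{{{
import scala.collection.breakOut

val words = List("one", "two", "three")

// Without breakOut, map over a List yields a List[(Int, String)]; with it,
// the builder for the expected result type (here a Map) is used directly.
val byLength: Map[Int, String] = words.map(w => (w.length, w))(breakOut)

println(byLength)   // Map(3 -> two, 5 -> three) -- "one" and "two" share length 3
}}}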
diff --git a/src/library/scala/collection/readme-if-you-want-to-add-something.txt b/src/library/scala/collection/readme-if-you-want-to-add-something.txt
new file mode 100755
index 0000000000..6700cb7b68
--- /dev/null
+++ b/src/library/scala/collection/readme-if-you-want-to-add-something.txt
@@ -0,0 +1,50 @@
+Conventions for Collection Implementors
+
+Martin Odersky
+19 Mar 2010
+
+This note describes some conventions which must be followed to keep
+the collection libraries consistent.
+
+We distinguish in the following between two kinds of methods
+
+ - ``Accessors'' access some of the elements of a collection, but return a result which
+ is unrelated to the collection.
+ Examples of accessors are: head, foldLeft, indexWhere, toSeq.
+
+ - ``Transformers'' access elements of a collection and produce a new collection of related
+ type as a result. The relation might either be direct (same type as receiver)
+ or indirect, linked by a CanBuildFrom implicit.
+ Examples of transformers are: filter, map, groupBy, zip.
+
+1. Proxies
+
+Every collection type has a Proxy class that forwards all operations to
+an underlying collection. Proxy methods are all implemented in classes
+with names ending in `ProxyLike'. If you add a new method to a collection
+class you need to add the same method to the corresponding ProxyLike class.
+
+2. Forwarders
+
+Classes Traversable, Iterable, and Seq also have forwarders, which
+forward all collection-specific accessor operations to an underlying
+collection. These are defined as classes with names ending
+in `Forwarder' in package collection.generic. If you add a new
+accessor method to a Seq or one of its collection superclasses, you
+need to add the same method to the corresponding forwarder class.
+
+3. Views
+
+Classes Traversable, Iterable, Seq, IndexedSeq, and mutable.IndexedSeq
+support views. Their operations are all defined in classes with names
+ending in `ViewLike'. If you add a new transformer method to one of
+the above collection classes, you need to add the same method to the
+corresponding view class. Failure to do so will cause the
+corresponding method to fail at runtime with an exception like
+UnsupportedOperationException("coll.newBuilder"). If there is no good
+way to implement the operation in question lazily, there's a fallback
+using the newForced method. See the definition of sorted in trait
+SeqViewLike as an example.
+
+
+
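As a rough illustration of the accessor/transformer distinction drawn above (the collection and operations are arbitrary):

{{{
val xs = Seq(3, 1, 2)

// Accessors: inspect elements, result type unrelated to the collection.
val first = xs.head                     // Int
val sum   = xs.foldLeft(0)(_ + _)       // Int

// Transformers: produce a related collection, possibly via a CanBuildFrom.
val small = xs filter (_ < 3)           // Seq[Int]
val pairs = xs zip Seq("a", "b", "c")   // Seq[(Int, String)]
}}}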
diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala
index 8214462d82..55ee613877 100644
--- a/src/library/scala/collection/script/Location.scala
+++ b/src/library/scala/collection/script/Location.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 4e9db2fc41..bf813a4d38 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala
index 4bdd88aa4c..ccee3c43a9 100644
--- a/src/library/scala/collection/script/Scriptable.scala
+++ b/src/library/scala/collection/script/Scriptable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.collection
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index 6bae0bccb7..8187360d6f 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.compat
@@ -18,13 +17,23 @@ object Platform {
type StackOverflowError = java.lang.StackOverflowError
type ConcurrentModificationException = java.util.ConcurrentModificationException
- /**
- * @param src ..
- * @param srcPos ..
- * @param dest ..
- * @param destPos ..
- * @param length ..
- */
+ /** Copies `length` elements of array `src` starting at position `srcPos` to the
+ * array `dest` starting at position `destPos`. If `src eq dest`, the copying will
+ * behave as if the elements copied from `src` were first copied to a temporary
+ * array before being copied back into the array at the destination positions.
+ * @param src A non-null array as source for the copy.
+ * @param srcPos The starting index in the source array.
+ * @param dest A non-null array as destination for the copy.
+ * @param destPos The starting index in the destination array.
+ * @param length The number of elements to be copied.
+ * @throws java.lang.NullPointerException If either `src` or `dest` is `null`.
+ * @throws java.lang.ArrayStoreException If either `src` or `dest` is not
+ * an array; or if the element type of `src` is not
+ * compatible with that of `dest`.
+ * @throws java.lang.IndexOutOfBoundsException If either `srcPos` or `destPos` is
+ * outside the bounds of their respective arrays; or if `length`
+ * is negative; or if there are fewer than `length` elements available
+ * after `srcPos` or `destPos` in `src` and `dest` respectively. */
@inline
def arraycopy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) {
System.arraycopy(src, srcPos, dest, destPos, length)
@@ -47,8 +56,13 @@ object Platform {
@inline
def getClassForName(name: String): Class[_] = java.lang.Class.forName(name)
- val EOL = System.getProperty("line.separator", "\n")
+ val EOL = util.Properties.lineSeparator
+ /** The current time in milliseconds. The time is counted since 1 January 1970
+ * UTC.
+ *
+ * Note that the operating system timer used to obtain this value may be less
+ * precise than a millisecond. */
@inline
def currentTime: Long = System.currentTimeMillis()
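A small sketch of the documented `arraycopy` contract, including the overlapping-range case (array contents are illustrative):

{{{
import scala.compat.Platform

val src = Array(1, 2, 3, 4, 5)
val dst = new Array[Int](5)

Platform.arraycopy(src, 0, dst, 0, 5)   // straight copy: dst is now Array(1, 2, 3, 4, 5)

// Overlapping copy within one array behaves as if a temporary buffer were used.
Platform.arraycopy(src, 0, src, 1, 4)   // src becomes Array(1, 1, 2, 3, 4)
}}}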
diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala
index 1218b48763..865d0187f6 100644
--- a/src/library/scala/concurrent/Channel.scala
+++ b/src/library/scala/concurrent/Channel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index a5d8219a02..fbf799a6a6 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -1,17 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
-import annotation.experimental
-import ops._
+import ops.future
/** A <code>DelayedLazyVal</code> is a wrapper for lengthy
* computations which have a valid partially computed result.
@@ -27,11 +25,16 @@ import ops._
* @author Paul Phillips
* @version 2.8
*/
-@experimental
class DelayedLazyVal[T](f: () => T, body: => Unit) {
- @volatile private[this] var isDone = false
+ @volatile private[this] var _isDone = false
private[this] lazy val complete = f()
+ /** Whether the computation is complete.
+ *
+ * @return true if the computation is complete.
+ */
+ def isDone = _isDone
+
/** The current result of f(), or the final result if complete.
*
* @return the current value
@@ -40,6 +43,6 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) {
future {
body
- isDone = true
+ _isDone = true
}
}
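A hedged usage sketch of `DelayedLazyVal`: the second argument runs in a future, `apply()` samples the partial result, and `isDone` reports completion. The background work here is a stand-in:

{{{
import scala.concurrent.DelayedLazyVal

object DelayedDemo {
  @volatile private var count = 0

  def main(args: Array[String]) {
    // f samples the current state; body performs the long work on another thread.
    val progress = new DelayedLazyVal(() => count, {
      var i = 0
      while (i < 1000000) { i += 1; count = i }
    })

    println("partial: " + progress())              // whatever has been computed so far
    while (!progress.isDone) Thread.sleep(10)
    println("final:   " + progress())              // 1000000 once the work has finished
  }
}
}}}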
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index e91b777b9a..c6cff19c28 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 63c736ab5e..678d6fe307 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/MailBox.scala b/src/library/scala/concurrent/MailBox.scala
index 9e124235ef..7d2b2761d5 100644
--- a/src/library/scala/concurrent/MailBox.scala
+++ b/src/library/scala/concurrent/MailBox.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
index 3715e8a159..68781a893c 100644
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ b/src/library/scala/concurrent/ManagedBlocker.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/SyncChannel.scala b/src/library/scala/concurrent/SyncChannel.scala
index ba03680e61..1c025bcfc3 100644
--- a/src/library/scala/concurrent/SyncChannel.scala
+++ b/src/library/scala/concurrent/SyncChannel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index d64c2fa51c..dce770ce43 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
@@ -14,7 +13,7 @@ package scala.concurrent
/** The class <code>SyncVar</code> ...
*
- * @author Martin Odersky, Stepan Koltsov
+ * @author Martin Odersky
* @version 1.0, 10/03/2003
*/
class SyncVar[A] {
@@ -63,17 +62,6 @@ class SyncVar[A] {
notifyAll()
}
- @deprecated("Will be removed in 2.8. SyncVar should not allow exception by design.")
- def setWithCatch(x: => A) = synchronized {
- try {
- this set x
- } catch {
- case e =>
- this setException e
- throw e
- }
- }
-
def put(x: A) = synchronized {
while (isDefined) wait()
set(x)
diff --git a/src/library/scala/concurrent/TIMEOUT.scala b/src/library/scala/concurrent/TIMEOUT.scala
index 74c16792ae..cd2d861305 100644
--- a/src/library/scala/concurrent/TIMEOUT.scala
+++ b/src/library/scala/concurrent/TIMEOUT.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index 58ccc7a386..b8791cf249 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
@@ -25,9 +24,4 @@ trait TaskRunner {
def shutdown(): Unit
- /** If expression computed successfully return it in <code>Right</code>,
- * otherwise return exception in <code>Left</code>.
- */
- protected def tryCatch[A](body: => A): Either[Exception, A] =
- ops tryCatchEx body
}
diff --git a/src/library/scala/concurrent/TaskRunners.scala b/src/library/scala/concurrent/TaskRunners.scala
index aa8afa0fd1..4936240f2f 100644
--- a/src/library/scala/concurrent/TaskRunners.scala
+++ b/src/library/scala/concurrent/TaskRunners.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index 925a5ebee1..74f376e87e 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
index f6661fdc83..2eb05840f4 100644
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ b/src/library/scala/concurrent/ThreadRunner.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
@@ -25,6 +24,14 @@ class ThreadRunner extends FutureTaskRunner {
implicit def functionAsTask[S](fun: () => S): Task[S] = fun
implicit def futureAsFunction[S](x: Future[S]): () => S = x
+ /* If the expression is computed successfully, return it in `Right`;
+ * otherwise return the exception in `Left`.
+ */
+ private def tryCatch[A](body: => A): Either[Exception, A] =
+ try Right(body) catch {
+ case ex: Exception => Left(ex)
+ }
+
def execute[S](task: Task[S]) {
val runnable = new Runnable {
def run() { tryCatch(task()) }
@@ -38,7 +45,7 @@ class ThreadRunner extends FutureTaskRunner {
def run() { result set tryCatch(task()) }
}
(new Thread(runnable)).start()
- () => ops getOrThrow result.get
+ () => result.get.fold[S](throw _, identity _)
}
def managedBlock(blocker: ManagedBlocker) {
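The `Either`-based error handling introduced above is easy to reproduce in user code; a minimal sketch with an invented failing computation:

{{{
// Capture an exception as Left, a successful value as Right.
def tryCatch[A](body: => A): Either[Exception, A] =
  try Right(body) catch { case ex: Exception => Left(ex) }

def getOrThrow[A](x: Either[Exception, A]): A =
  x.fold[A](throw _, identity _)        // rethrow the failure or yield the result

val ok  = tryCatch { 21 * 2 }           // Right(42)
val bad = tryCatch { "oops".toInt }     // Left(java.lang.NumberFormatException: ...)

println(getOrThrow(ok))                 // 42
}}}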
diff --git a/src/library/scala/concurrent/jolib.scala b/src/library/scala/concurrent/jolib.scala
deleted file mode 100644
index c2cf5d2186..0000000000
--- a/src/library/scala/concurrent/jolib.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.concurrent
-
-import ops._
-
-/**
- * Library for using join-calculus concurrent primitives in Scala.
- *
- * @author Vincent Cremet
- * @version 1.0, 17/10/2003
- */
-@deprecated("Will be removed.")
-object jolib {
-
- type Pattern = List[Signal]
-
- type Rule = PartialFunction[List[Any], Unit]
-
- /////////////////// JOIN DEFINITION /////////////////////////
-
- class Join {
-
- private var ruls: List[(Pattern, Rule)] = null
-
- def canMatch(p: Pattern) =
- p forall { s => !s.queue.isEmpty }
-
- def values(p: Pattern): List[Any] =
- p map { s => s.queue.dequeue: Any }
-
- def rules(rs: (Pattern, Rule)*) =
- ruls = rs.asInstanceOf[List[(Pattern, Rule)]]
-
- def tryMatch =
- ruls find { case (p, _) => canMatch(p) } match {
- case None => () => ()
- case Some((p, r)) => {
- val args = values(p)
- () => spawn(r(args))
- }
- }
-
- }
-
- /////////////////// SIGNALS /////////////////////////
-
- abstract class Signal(join: Join) {
- type C
- val queue = new collection.mutable.Queue[C]
- def tryReduction(x: C) {
- val continuation = join synchronized {
- queue.enqueue(x)
- join.tryMatch
- }
- continuation()
- }
- }
-
- abstract class Asynchr(join: Join) extends Signal(join) {
- def apply(x: C): Unit = tryReduction(x)
- }
-
- abstract class Synchr[A](join: Join) extends Signal(join) {
- type C <: SyncVar[A]
- def apply(x: C): A = {
- tryReduction(x)
- x.get
- }
- }
-
-}
-
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
index 3606869350..72e292d88c 100644
--- a/src/library/scala/concurrent/ops.scala
+++ b/src/library/scala/concurrent/ops.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
@@ -23,21 +22,13 @@ object ops
val defaultRunner: FutureTaskRunner = TaskRunners.threadRunner
/**
- * If expression computed successfully return it in <code>Right</code>,
- * otherwise return exception in <code>Left</code>.
+ * If the expression is computed successfully, return it in `Right`;
+ * otherwise return the exception in `Left`.
*/
- //TODO: make private
- def tryCatch[A](body: => A): Either[Throwable, A] =
+ private def tryCatch[A](body: => A): Either[Throwable, A] =
allCatch[A] either body
- //TODO: make private
- def tryCatchEx[A](body: => A): Either[Exception, A] =
- try Right(body) catch {
- case ex: Exception => Left(ex)
- }
-
- //TODO: make private
- def getOrThrow[T <: Throwable, A](x: Either[T, A]): A =
+ private def getOrThrow[T <: Throwable, A](x: Either[T, A]): A =
x.fold[A](throw _, identity _)
/** Evaluates an expression asynchronously.
diff --git a/src/library/scala/concurrent/pilib.scala b/src/library/scala/concurrent/pilib.scala
index 246f7e2c54..dab9db98ff 100644
--- a/src/library/scala/concurrent/pilib.scala
+++ b/src/library/scala/concurrent/pilib.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.concurrent
diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala
index 1a176d1bc2..50a6601d00 100644
--- a/src/library/scala/deprecated.scala
+++ b/src/library/scala/deprecated.scala
@@ -1,22 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
+import annotation.target._
+
/**
* An annotation that designates the definition to which it is applied as deprecated.
* Access to the member then generates a deprecated warning.
*
* @since 2.3
*/
+@getter @setter @beanGetter @beanSetter
class deprecated(message: String) extends StaticAnnotation {
def this() = this("")
}
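A brief hedged example of why the meta-annotation targets matter: applied to a `var`, the deprecation should now also be carried by the synthesized accessors, so callers reading or writing the field get the warning (the class and field names are invented).

{{{
class Account {
  @deprecated("use balanceInCents instead")
  var balance: Double = 0.0        // getter and setter now carry the deprecation

  var balanceInCents: Long = 0L
}

val a = new Account
a.balance = 1.0                    // should emit a deprecation warning here
}}}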
diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala
index 8ac8662ac3..1e91be2b17 100644
--- a/src/library/scala/inline.scala
+++ b/src/library/scala/inline.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index ff1861d9b4..f0230d3724 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.io
@@ -19,10 +18,10 @@ import Source.DefaultBufSize
*
* @author Burak Emir, Paul Phillips
*/
-class BufferedSource(inputStream: InputStream)(implicit codec: Codec = Codec.default) extends Source
-{
+class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val codec: Codec) extends Source {
+ def this(inputStream: InputStream)(implicit codec: Codec) = this(inputStream, DefaultBufSize)(codec)
def reader() = new InputStreamReader(inputStream, codec.decoder)
- def bufferedReader() = new BufferedReader(reader(), DefaultBufSize)
+ def bufferedReader() = new BufferedReader(reader(), bufferSize)
override val iter = {
val reader = bufferedReader()
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
index 7e5628e207..3a51f2e0b8 100644
--- a/src/library/scala/io/BytePickle.scala
+++ b/src/library/scala/io/BytePickle.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.io
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index d8f00a4e10..e001e732c2 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.io
@@ -26,8 +25,7 @@ import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodi
/** A class for character encoding/decoding preferences.
*
*/
-class Codec(val charSet: Charset)
-{
+class Codec(val charSet: Charset) {
type Configure[T] = (T => T, Boolean)
type Handler = CharacterCodingException => Int
@@ -71,11 +69,26 @@ class Codec(val charSet: Charset)
})
}
-object Codec {
+trait LowPriorityCodecImplicits {
+ self: Codec.type =>
+
+ /** The Codec of Last Resort. */
+ implicit def fallbackSystemCodec: Codec = defaultCharsetCodec
+}
+
+object Codec extends LowPriorityCodecImplicits {
final val ISO8859 = Charset forName "ISO-8859-1"
final val UTF8 = Charset forName "UTF-8"
- def default = apply(Charset.defaultCharset)
+ /** Optimistically these two possible defaults will be the same thing.
+ * In practice this is not necessarily true, and in fact Sun classifies
+ * the fact that you can influence anything at all via -Dfile.encoding
+ * as an accident, with any anomalies considered "not a bug".
+ */
+ def defaultCharsetCodec = apply(Charset.defaultCharset)
+ def fileEncodingCodec = apply(util.Properties.encodingString)
+ def default = defaultCharsetCodec
+
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
def apply(charSet: Charset): Codec = new Codec(charSet)
def apply(decoder: CharsetDecoder): Codec = {
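A small sketch of the codec factories above (the encoding names are ordinary charset names):

{{{
import java.nio.charset.Charset
import scala.io.Codec

val byName    = Codec("UTF-8")                       // from a charset name
val byCharset = Codec(Charset forName "ISO-8859-1")  // from a Charset

// The two possible defaults need not agree: one follows Charset.defaultCharset,
// the other the file.encoding system property.
println(Codec.defaultCharsetCodec.charSet)
println(Codec.fileEncodingCodec.charSet)
}}}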
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 4eec0e39d6..b0aaf228f2 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.io
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index e7bf43d00d..cb7403e255 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.io
@@ -27,111 +26,150 @@ object Source {
*/
def stdin = fromInputStream(System.in)
- /** Creates a <code>Source</code> from an Iterable.
+ /** Creates a Source from an Iterable.
*
* @param iterable the Iterable
- * @return the <code>Source</code> instance.
+ * @return the Source
*/
def fromIterable(iterable: Iterable[Char]): Source = new Source {
val iter = iterable.iterator
} withReset(() => fromIterable(iterable))
- /** Creates a <code>Source</code> instance from a single character.
- *
- * @param c ...
- * @return the create <code>Source</code> instance.
+ /** Creates a Source instance from a single character.
*/
def fromChar(c: Char): Source = fromIterable(Array(c))
/** creates Source from array of characters, with empty description.
- *
- * @param chars ...
- * @return ...
*/
def fromChars(chars: Array[Char]): Source = fromIterable(chars)
- /** creates Source from string, with empty description.
- *
- * @param s ...
- * @return ...
+ /** creates Source from a String, with no description.
*/
def fromString(s: String): Source = fromIterable(s)
- /** Create a <code>Source</code> from array of bytes, decoding
- * the bytes according to codec.
- *
- * @param bytes ...
- * @param enc ...
- * @return the created <code>Source</code> instance.
- */
- def fromBytes(bytes: Array[Byte])(implicit codec: Codec = Codec.default): Source =
- fromString(new String(bytes, codec.name))
-
- /** Create a <code>Source</code> from array of bytes, assuming
- * one byte per character (ISO-8859-1 encoding.)
+ /** creates Source from file with given name, setting its description to
+ * filename.
*/
- def fromRawBytes(bytes: Array[Byte]): Source = fromString(new String(bytes, Codec.ISO8859.name))
+ def fromFile(name: String)(implicit codec: Codec): BufferedSource =
+ fromFile(new JFile(name))(codec)
- /** creates Source from file with given name, setting
+ /** creates Source from file with given name, using given encoding, setting
* its description to filename.
*/
- def fromPath(name: String)(implicit codec: Codec = Codec.default): Source = fromFile(new JFile(name))
+ def fromFile(name: String, enc: String): BufferedSource =
+ fromFile(name)(Codec(enc))
/** creates <code>Source</code> from file with given file: URI
*/
- def fromURI(uri: URI)(implicit codec: Codec = Codec.default): Source = fromFile(new JFile(uri))
+ def fromFile(uri: URI)(implicit codec: Codec): BufferedSource =
+ fromFile(new JFile(uri))(codec)
- /** same as fromInputStream(url.openStream())(codec)
+ /** creates Source from file with given file: URI
*/
- def fromURL(url: URL)(implicit codec: Codec = Codec.default): Source =
- fromInputStream(url.openStream())(codec)
+ def fromFile(uri: URI, enc: String): BufferedSource =
+ fromFile(uri)(Codec(enc))
+
+ /** creates Source from file, using default character encoding, setting its
+ * description to filename.
+ */
+ def fromFile(file: JFile)(implicit codec: Codec): BufferedSource =
+ fromFile(file, Source.DefaultBufSize)(codec)
+
+ /** same as fromFile(file, enc, Source.DefaultBufSize)
+ */
+ def fromFile(file: JFile, enc: String): BufferedSource =
+ fromFile(file)(Codec(enc))
+
+ def fromFile(file: JFile, enc: String, bufferSize: Int): BufferedSource =
+ fromFile(file, bufferSize)(Codec(enc))
/** Creates Source from <code>file</code>, using given character encoding,
* setting its description to filename. Input is buffered in a buffer of
* size <code>bufferSize</code>.
*/
- def fromFile(file: JFile, bufferSize: Int = DefaultBufSize)(implicit codec: Codec = Codec.default): Source = {
+ def fromFile(file: JFile, bufferSize: Int)(implicit codec: Codec): BufferedSource = {
val inputStream = new FileInputStream(file)
- fromInputStream(
+ createBufferedSource(
inputStream,
bufferSize,
() => fromFile(file, bufferSize)(codec),
() => inputStream.close()
- ) withDescription ("file:" + file.getAbsolutePath)
+ )(codec) withDescription ("file:" + file.getAbsolutePath)
}
- /** Reads data from <code>inputStream</code> with a buffered reader,
- * using encoding in implicit parameter <code>codec</code>.
+ /** Create a <code>Source</code> from array of bytes, decoding
+ * the bytes according to codec.
+ *
+ * @param bytes ...
+ * @param enc ...
+ * @return the created <code>Source</code> instance.
+ */
+ def fromBytes(bytes: Array[Byte])(implicit codec: Codec): Source =
+ fromString(new String(bytes, codec.name))
+
+ def fromBytes(bytes: Array[Byte], enc: String): Source =
+ fromBytes(bytes)(Codec(enc))
+
+ /** Create a <code>Source</code> from array of bytes, assuming
+ * one byte per character (ISO-8859-1 encoding.)
+ */
+ def fromRawBytes(bytes: Array[Byte]): Source =
+ fromString(new String(bytes, Codec.ISO8859.name))
+
+ /** creates <code>Source</code> from file with given file: URI
+ */
+ def fromURI(uri: URI)(implicit codec: Codec): BufferedSource =
+ fromFile(new JFile(uri))(codec)
+
+ /** same as fromURL(new URL(s))(Codec(enc))
+ */
+ def fromURL(s: String, enc: String): BufferedSource =
+ fromURL(s)(Codec(enc))
+
+ /** same as fromURL(new URL(s))
+ */
+ def fromURL(s: String)(implicit codec: Codec): BufferedSource =
+ fromURL(new URL(s))(codec)
+
+ /** same as fromInputStream(url.openStream())(Codec(enc))
+ */
+ def fromURL(url: URL, enc: String): BufferedSource =
+ fromURL(url)(Codec(enc))
+
+ /** same as fromInputStream(url.openStream())(codec)
+ */
+ def fromURL(url: URL)(implicit codec: Codec): BufferedSource =
+ fromInputStream(url.openStream())(codec)
+
+ /** Reads data from inputStream with a buffered reader, using the encoding
+ * in implicit parameter codec.
*
* @param inputStream the input stream from which to read
* @param bufferSize buffer size (defaults to Source.DefaultBufSize)
* @param reset a () => Source which resets the stream (if unset, reset() will throw an Exception)
+ * @param close a () => Unit method which closes the stream (if unset, close() will do nothing)
* @param codec (implicit) a scala.io.Codec specifying behavior (defaults to Codec.default)
* @return the buffered source
*/
- def fromInputStream(
+ def createBufferedSource(
inputStream: InputStream,
bufferSize: Int = DefaultBufSize,
reset: () => Source = null,
close: () => Unit = null
- )(implicit codec: Codec = Codec.default): Source =
- {
+ )(implicit codec: Codec): BufferedSource = {
// workaround for default arguments being unable to refer to other parameters
- val resetFn = if (reset == null) () => fromInputStream(inputStream, bufferSize, reset, close) else reset
- new BufferedSource(inputStream)(codec) .
- withReset (resetFn) .
- withClose (close)
+ val resetFn = if (reset == null) () => createBufferedSource(inputStream, bufferSize, reset, close)(codec) else reset
+
+ new BufferedSource(inputStream, bufferSize)(codec) withReset resetFn withClose close
}
-}
-// Coming Soon?
-//
-// abstract class Source2[T] extends Iterable[T] { }
-//
-// abstract class ByteSource() extends Source2[Byte] { }
-//
-// abstract class CharSource(implicit codec: Codec = Codec.default) extends Source2[Char] { }
+ def fromInputStream(is: InputStream, enc: String): BufferedSource =
+ fromInputStream(is)(Codec(enc))
+
+ def fromInputStream(is: InputStream)(implicit codec: Codec): BufferedSource =
+ createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec)
+}
/** The class <code>Source</code> implements an iterable representation
* of source data. Calling method <code>reset</code> returns an identical,
@@ -140,8 +178,7 @@ object Source {
* @author Burak Emir
* @version 1.0
*/
-abstract class Source extends Iterator[Char]
-{
+abstract class Source extends Iterator[Char] {
/** the actual iterator */
protected val iter: Iterator[Char]
@@ -153,42 +190,36 @@ abstract class Source extends Iterator[Char]
var nerrors = 0
var nwarnings = 0
- /** convenience method, returns given line (not including newline)
+ /** Convenience method, returns given line (not including newline)
* from Source.
*
* @param line the line index, first line is 1
- * @return the character string of the specified line.
+ * @return the specified line.
*
*/
- def getLine(line: Int): String = getLines() drop (line - 1) next
+ @deprecated("Use a collections method such as getLines().toIndexedSeq for random access.")
+ def getLine(line: Int): String = lineNum(line)
+ private def lineNum(line: Int): String = getLines() drop (line - 1) next
- class LineIterator(separator: String) extends Iterator[String] {
- require(separator.length == 1 || separator.length == 2, "Line separator may be 1 or 2 characters only.")
- lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered
- // For two character newline sequences like \r\n, we peek at
- // the iterator head after seeing \r, and drop the \n if present.
- val isNewline: Char => Boolean = {
- val firstCh = separator(0)
- if (separator.length == 1) (_ == firstCh)
- else (ch: Char) => (ch == firstCh) && iter.hasNext && {
- val res = iter.head == separator(1)
- if (res) { iter.next } // drop the second character
- res
- }
- }
+ class LineIterator() extends Iterator[String] {
private[this] val sb = new StringBuilder
- private def getc() =
- if (!iter.hasNext) false
+ lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered
+ def isNewline(ch: Char) = ch == '\r' || ch == '\n'
+ def getc() = iter.hasNext && {
+ val ch = iter.next
+ if (ch == '\n') false
+ else if (ch == '\r') {
+ if (iter.hasNext && iter.head == '\n')
+ iter.next
+
+ false
+ }
else {
- val ch = iter.next
- if (isNewline(ch)) false
- else {
- sb append ch
- true
- }
+ sb append ch
+ true
}
-
+ }
def hasNext = iter.hasNext
def next = {
sb.clear
@@ -197,12 +228,11 @@ abstract class Source extends Iterator[Char]
}
}
- /** returns an iterator who returns lines (NOT including newline character(s)).
- * If no separator is given, the platform-specific value "line.separator" is used.
- * a line ends in \r, \n, or \r\n.
+ /** Returns an iterator which returns lines (NOT including newline character(s)).
+ * It will treat any of \r\n, \r, or \n as a line separator (longest match); if
+ * you need more refined behavior you can subclass Source#LineIterator directly.
*/
- def getLines(separator: String = compat.Platform.EOL): Iterator[String] =
- new LineIterator(separator)
+ def getLines(): Iterator[String] = new LineIterator()
/** Returns <code>true</code> if this source has more characters.
*/
@@ -269,10 +299,10 @@ abstract class Source extends Iterator[Char]
* @param out PrintStream to use
*/
def report(pos: Int, msg: String, out: PrintStream) {
- val line = Position line pos
- val col = Position column pos
+ val line = Position line pos
+ val col = Position column pos
- out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, getLine(line), spaces(col - 1))
+ out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, lineNum(line), spaces(col - 1))
}
/**
@@ -313,8 +343,9 @@ abstract class Source extends Iterator[Char]
}
/** The close() method closes the underlying resource. */
- def close: Unit =
+ def close() {
if (closeFunction != null) closeFunction()
+ }
/** The reset() method creates a fresh copy of this Source. */
def reset(): Source =
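A usage sketch of the reorganised `Source` factories and the simplified `getLines()` (paths and URLs are placeholders):

{{{
import scala.io.{ Codec, Source }

// File variants: implicit codec, explicit encoding name, or explicit buffer size.
val a = Source.fromFile("notes.txt")                    // picks up the implicit/fallback codec
val b = Source.fromFile("notes.txt", "UTF-8")           // encoding given as a String
val c = Source.fromFile(new java.io.File("notes.txt"), "UTF-8", 4096)

// URL and raw-byte variants.
val d = Source.fromURL("http://www.scala-lang.org/", "UTF-8")
val e = Source.fromRawBytes(Array[Byte](104, 105))      // "hi", one byte per character

for (line <- a.getLines()) println(line)                // lines without \r, \n or \r\n
a.close()
}}}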
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
index 4fac5bca47..41563a9d44 100644
--- a/src/library/scala/io/UTF8Codec.scala
+++ b/src/library/scala/io/UTF8Codec.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.io
@@ -17,7 +16,7 @@ package scala.io
object UTF8Codec
{
final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
- final val UNI_REPLACEMENT_BYTES = encode(UNI_REPLACEMENT_CHAR)
+ final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
// Note, from http://unicode.org/faq/utf_bom.html#utf8-5
//
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 677dfa7e17..e1d44862e4 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
@@ -14,38 +13,31 @@ import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
import scala.collection.immutable.NumericRange
-/** Conversions which present a consistent conversion interface
- * across all the numeric types.
- */
-trait ScalaNumericConversions extends jl.Number {
- def toChar = intValue.toChar
- def toByte = byteValue
- def toShort = shortValue
- def toInt = intValue
- def toLong = longValue
- def toFloat = floatValue
- def toDouble = doubleValue
-}
+import annotation.migration
/**
* @author Stephane Micheloud
* @version 1.0
* @since 2.7
*/
-object BigDecimal
-{
+object BigDecimal {
+ private val minCached = -512
+ private val maxCached = 512
+
+ val defaultMathContext = MathContext.UNLIMITED
+
+ val MinLong = new BigDecimal(BigDec valueOf Long.MinValue, defaultMathContext)
+ val MaxLong = new BigDecimal(BigDec valueOf Long.MaxValue, defaultMathContext)
+
+ /** Cache only for defaultMathContext using BigDecimals in a small range. */
+ private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
+
@serializable
object RoundingMode extends Enumeration(java.math.RoundingMode.values map (_.toString) : _*) {
type RoundingMode = Value
val UP, DOWN, CEILING, FLOOR, HALF_UP, HALF_DOWN, HALF_EVEN, UNNECESSARY = Value
}
- private val minCached = -512
- private val maxCached = 512
- private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
-
- val defaultMathContext = MathContext.UNLIMITED
-
/** Constructs a <code>BigDecimal</code> using the java BigDecimal static
* valueOf constructor.
*
@@ -63,12 +55,13 @@ object BigDecimal
*/
def apply(i: Int): BigDecimal = apply(i, defaultMathContext)
def apply(i: Int, mc: MathContext): BigDecimal =
- if (minCached <= i && i <= maxCached) {
+ if (mc == defaultMathContext && minCached <= i && i <= maxCached) {
val offset = i - minCached
var n = cache(offset)
if (n eq null) { n = new BigDecimal(BigDec.valueOf(i), mc); cache(offset) = n }
n
- } else new BigDecimal(BigDec.valueOf(i), mc)
+ }
+ else new BigDecimal(BigDec.valueOf(i), mc)
/** Constructs a <code>BigDecimal</code> whose value is equal to that of the
* specified long value.
@@ -113,7 +106,7 @@ object BigDecimal
*/
def apply(x: Array[Char]): BigDecimal = apply(x, defaultMathContext)
def apply(x: Array[Char], mc: MathContext): BigDecimal =
- new BigDecimal(new BigDec(x.toString, mc), mc)
+ new BigDecimal(new BigDec(x.mkString, mc), mc)
/** Translates the decimal String representation of a <code>BigDecimal</code>
* into a <code>BigDecimal</code>.
@@ -164,7 +157,7 @@ object BigDecimal
class BigDecimal(
val bigDecimal: BigDec,
val mc: MathContext)
-extends jl.Number with ScalaNumericConversions
+extends ScalaNumber with ScalaNumericConversions
{
def this(bigDecimal: BigDec) = this(bigDecimal, BigDecimal.defaultMathContext)
import BigDecimal.RoundingMode._
@@ -178,17 +171,22 @@ extends jl.Number with ScalaNumericConversions
* which deems 2 == 2.00, whereas in java these are unequal
* with unequal hashCodes.
*/
- override def hashCode(): Int = doubleValue.hashCode()
+ override def hashCode(): Int =
+ if (isWhole) unifiedPrimitiveHashcode
+ else doubleValue.##
/** Compares this BigDecimal with the specified value for equality.
- * Will only claim equality with scala.BigDecimal and java.math.BigDecimal.
*/
override def equals (that: Any): Boolean = that match {
- case that: BigDecimal => this equals that
- case that: BigDec => this equals BigDecimal(that)
- case _ => false
+ case that: BigDecimal => this equals that
+ case that: BigInt => this.toBigIntExact exists (that equals _)
+ case _: Float | _: Double => unifiedPrimitiveEquals(that)
+ case x => isWhole && this <= BigDecimal.MaxLong && this >= BigDecimal.MinLong && unifiedPrimitiveEquals(x)
}
+ protected[math] def isWhole = (this remainder 1) == BigDecimal(0)
+ def underlying = bigDecimal
+
/** Compares this BigDecimal with the specified BigDecimal for equality.
*/
def equals (that: BigDecimal): Boolean = compare(that) == 0
@@ -254,6 +252,10 @@ extends jl.Number with ScalaNumericConversions
*/
def remainder (that: BigDecimal): BigDecimal = this.bigDecimal.remainder(that.bigDecimal, mc)
+ /** Remainder after dividing this by that.
+ */
+ def % (that: BigDecimal): BigDecimal = this.remainder(that)
+
/** Returns a BigDecimal whose value is this ** n.
*/
def pow (n: Int): BigDecimal = this.bigDecimal.pow(n, mc)
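An illustrative sketch (not part of the patch) of the behaviour these BigDecimal changes produce: caching is restricted to the default MathContext, equality is unified across scales and with BigInt, and % is a new alias for remainder:

    import java.math.MathContext
    val a = BigDecimal(1)                         // small value, default context: served from the cache
    val b = BigDecimal(1, new MathContext(10))    // non-default context: bypasses the cache
    println(a == b)                               // true: values compare equal regardless of context
    println(a == BigDecimal("1.000"))             // true: equality ignores scale
    println(a == BigInt(1))                       // true: cross-type equality via toBigIntExact
    println(BigDecimal(7) % BigDecimal(3))        // 1: the new % alias for remainder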
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 5415a29489..a21057c400 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -1,18 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
import java.math.BigInteger
-import java.{ lang => jl }
/**
* @author Martin Odersky
@@ -25,6 +23,9 @@ object BigInt {
private val maxCached = 1024
private val cache = new Array[BigInt](maxCached - minCached + 1)
+ val MinLong = BigInt(Long.MinValue)
+ val MaxLong = BigInt(Long.MaxValue)
+
/** Constructs a <code>BigInt</code> whose value is equal to that of the
* specified integer value.
*
@@ -100,7 +101,7 @@ object BigInt {
*/
implicit def int2bigInt(i: Int): BigInt = apply(i)
- /** Implicit copnversion from long to BigInt
+ /** Implicit conversion from long to BigInt
*/
implicit def long2bigInt(l: Long): BigInt = apply(l)
}
@@ -110,19 +111,24 @@ object BigInt {
* @version 1.0, 15/07/2003
*/
@serializable
-class BigInt(val bigInteger: BigInteger) extends jl.Number with ScalaNumericConversions
+class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions
{
/** Returns the hash code for this BigInt. */
- override def hashCode(): Int = this.bigInteger.hashCode()
+ override def hashCode(): Int =
+ if (this >= BigInt.MinLong && this <= BigInt.MaxLong) unifiedPrimitiveHashcode
+ else bigInteger.##
/** Compares this BigInt with the specified value for equality.
*/
override def equals(that: Any): Boolean = that match {
- case that: BigInt => this equals that
- case that: BigInteger => this equals new BigInt(that)
- case _ => false
+ case that: BigInt => this equals that
+ case that: BigDecimal => that.toBigIntExact exists (this equals _)
+ case x => (this <= BigInt.MaxLong && this >= BigInt.MinLong) && unifiedPrimitiveEquals(x)
}
+ protected[math] def isWhole = true
+ def underlying = bigInteger
+
/** Compares this BigInt with the specified BigInt for equality.
*/
def equals (that: BigInt): Boolean = compare(that) == 0
@@ -333,9 +339,9 @@ class BigInt(val bigInteger: BigInteger) extends jl.Number with ScalaNumericConv
def floatValue = this.bigInteger.floatValue
/** Converts this BigInt to a <tt>double</tt>.
- * if this BigInt has too great a magnitude to represent as a float,
- * it will be converted to <code>Float.NEGATIVE_INFINITY</code> or
- * <code>Float.POSITIVE_INFINITY</code> as appropriate.
+ * if this BigInt has too great a magnitude to represent as a double,
+ * it will be converted to <code>Double.NEGATIVE_INFINITY</code> or
+ * <code>Double.POSITIVE_INFINITY</code> as appropriate.
*/
def doubleValue = this.bigInteger.doubleValue
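A sketch (not part of the patch) of the unified equality and hashing for BigInt values inside and outside the Long range:

    val small = BigInt(42)
    println(small == 42)                     // true: primitive equality inside the Long range
    println(small.hashCode == 42.hashCode)   // true: hash codes agree as well
    println(BigInt(5) == BigDecimal(5))      // true: cross-type equality via toBigIntExact

    val huge = BigInt("9" * 30)              // far outside the Long range
    println(huge == huge.toLong)             // false: no primitive equality is claimed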
diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala
index 3cdee42ee7..a45b51af49 100644
--- a/src/library/scala/math/Equiv.scala
+++ b/src/library/scala/math/Equiv.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index 4c7c09fe73..2205aded7f 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index cdace45fb0..2914c9005b 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index 4ecacc975b..673461132f 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
@@ -75,6 +74,21 @@ object Numeric {
}
implicit object ByteIsIntegral extends ByteIsIntegral with Ordering.ByteOrdering
+ trait CharIsIntegral extends Integral[Char] {
+ def plus(x: Char, y: Char): Char = (x + y).toChar
+ def minus(x: Char, y: Char): Char = (x - y).toChar
+ def times(x: Char, y: Char): Char = (x * y).toChar
+ def quot(x: Char, y: Char): Char = (x / y).toChar
+ def rem(x: Char, y: Char): Char = (x % y).toChar
+ def negate(x: Char): Char = (-x).toChar
+ def fromInt(x: Int): Char = x.toChar
+ def toInt(x: Char): Int = x.toInt
+ def toLong(x: Char): Long = x.toLong
+ def toFloat(x: Char): Float = x.toFloat
+ def toDouble(x: Char): Double = x.toDouble
+ }
+ implicit object CharIsIntegral extends CharIsIntegral with Ordering.CharOrdering
+
trait LongIsIntegral extends Integral[Long] {
def plus(x: Long, y: Long): Long = x + y
def minus(x: Long, y: Long): Long = x - y
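With the new CharIsIntegral instance, generic numeric code also works over Char. A minimal sketch (not part of the patch), assuming the implicit is resolved from the Numeric companion object:

    import scala.math.Numeric

    val charNum = implicitly[Numeric[Char]]    // resolves to Numeric.CharIsIntegral
    println(charNum.plus('a', '\u0001'))       // 'b'
    println(List('a', 'b', 'c').sum.toInt)     // 294 = 97 + 98 + 99, summed via Numeric[Char]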
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index bd84b1a6a2..30246c7b16 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 30e13d9f0e..4527e18338 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
@@ -27,11 +26,11 @@ import java.util.Comparator
* <li>reflexive: <code>compare(x, x) == 0</code>, for any <code>x</code> of
* type <code>T</code>.</li>
* <li>symmetry: <code>compare(x, y) == z</code> and <code>compare(y, x) == w</code>
- * then <code>Math.signum(z) == -Math.signum(w)</code>, for any <code>x</code> and <code>y</code> of
+ * then <code>math.signum(z) == -math.signum(w)</code>, for any <code>x</code> and <code>y</code> of
* type <code>T</code> and <code>z</code> and <code>w</code> of type <code>Int</code>.</li>
* <li>transitive: if <code>compare(x, y) == z</code> and <code>compare(y, w) == v</code>
- * and <code>Math.signum(z) &gt;= 0</code> and <code>Math.signum(v) &gt;= 0</code> then
- * <code>compare(x, w) == u</code> and <code>Math.signum(z + v) == Math.signum(u)</code>,
+ * and <code>math.signum(z) &gt;= 0</code> and <code>math.signum(v) &gt;= 0</code> then
+ * <code>compare(x, w) == u</code> and <code>math.signum(z + v) == math.signum(u)</code>,
* for any <code>x</code>, <code>y</code>,
* and <code>w</code> of type <code>T</code> and <code>z</code>, <code>v</code>, and <code>u</code>
* of type <code>Int</code>.</li>
@@ -126,8 +125,15 @@ object Ordering extends LowPriorityOrderingImplicits {
def fromLessThan[T](cmp: (T, T) => Boolean): Ordering[T] = new Ordering[T] {
def compare(x: T, y: T) = if (cmp(x, y)) -1 else if (cmp(y, x)) 1 else 0
+ // overrides to avoid multiple comparisons
+ override def lt(x: T, y: T): Boolean = cmp(x, y)
+ override def gt(x: T, y: T): Boolean = cmp(y, x)
+ override def gteq(x: T, y: T): Boolean = !cmp(x, y)
+ override def lteq(x: T, y: T): Boolean = !cmp(y, x)
}
+ def by[T, S: Ordering](f: T => S): Ordering[T] = fromLessThan((x, y) => implicitly[Ordering[S]].lt(f(x), f(y)))
+
trait UnitOrdering extends Ordering[Unit] {
def compare(x: Unit, y: Unit) = 0
}
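A sketch (not part of the patch) of the two additions: fromLessThan now overrides the derived comparison methods, and by builds an Ordering from a key function:

    import scala.math.Ordering

    case class Person(name: String, age: Int)
    val byAge: Ordering[Person] = Ordering.by((p: Person) => p.age)

    val people = List(Person("b", 30), Person("a", 20))
    println(people.sorted(byAge).map(_.name))             // List(a, b)
    println(byAge.lt(Person("a", 20), Person("b", 30)))   // true, via the single-comparison lt override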
diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala
index 0f3f667cd5..e40a3d29ad 100644
--- a/src/library/scala/math/PartialOrdering.scala
+++ b/src/library/scala/math/PartialOrdering.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala
index a09d8326f4..2e863ec088 100644
--- a/src/library/scala/math/PartiallyOrdered.scala
+++ b/src/library/scala/math/PartiallyOrdered.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.math
diff --git a/src/library/scala/math/ScalaNumber.java b/src/library/scala/math/ScalaNumber.java
new file mode 100644
index 0000000000..4aa920496c
--- /dev/null
+++ b/src/library/scala/math/ScalaNumber.java
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.math;
+
+/** A marker class for Number types introduced by Scala
+ * @author Martin Odersky, Paul Phillips
+ * @version 2.8
+ * @since 2.8
+ */
+public abstract class ScalaNumber extends java.lang.Number {
+ protected abstract boolean isWhole();
+ public abstract Object underlying();
+}
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
new file mode 100644
index 0000000000..34698ea39f
--- /dev/null
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -0,0 +1,60 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.math
+
+import java.{ lang => jl }
+
+/** Conversions which present a consistent conversion interface
+ * across all the numeric types.
+ */
+trait ScalaNumericConversions extends ScalaNumber {
+ def toChar = intValue.toChar
+ def toByte = byteValue
+ def toShort = shortValue
+ def toInt = intValue
+ def toLong = longValue
+ def toFloat = floatValue
+ def toDouble = doubleValue
+
+ def isValidByte = isWhole && (toByte == toInt)
+ def isValidShort = isWhole && (toShort == toInt)
+ def isValidInt = isWhole && (toInt == toLong)
+ def isValidChar = isWhole && (toInt >= Char.MinValue && toInt <= Char.MaxValue)
+
+ protected def unifiedPrimitiveHashcode() = {
+ val lv = toLong
+ if (lv >= Int.MinValue && lv <= Int.MaxValue) lv.toInt
+ else lv.##
+ }
+
+ /** Should only be called after all known non-primitive
+ * types have been excluded. This method won't dispatch
+ * anywhere else after checking against the primitives
+ * to avoid infinite recursion between equals and this on
+ * unknown "Number" variants.
+ *
+ * Additionally, this should only be called if the numeric
+ * type is happy to be converted to Long, Float, and Double.
+ * If for instance a BigInt much larger than the Long range is
+ * sent here, it will claim equality with whatever Long is left
+ * in its lower 64 bits. Or a BigDecimal with more precision
+ * than Double can hold: same thing. There's no way given the
+ * interface available here to prevent this error.
+ */
+ protected def unifiedPrimitiveEquals(x: Any) = x match {
+ case x: Char => isValidChar && (toInt == x.toInt)
+ case x: Byte => isValidByte && (toByte == x)
+ case x: Short => isValidShort && (toShort == x)
+ case x: Int => isValidInt && (toInt == x)
+ case x: Long => toLong == x
+ case x: Float => toFloat == x
+ case x: Double => toDouble == x
+ case _ => false
+ }
+}
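A sketch (not part of the patch) of the validity checks in use; they back the unified equality relied on by BigInt and BigDecimal:

    println(BigInt(300).isValidShort)      // true
    println(BigInt(300).isValidByte)       // false: 300 overflows a Byte
    println(BigInt(65).isValidChar)        // true: within the Char range
    println(BigDecimal("2.5").isValidInt)  // false: not a whole number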
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
new file mode 100644
index 0000000000..3c62537f64
--- /dev/null
+++ b/src/library/scala/math/package.scala
@@ -0,0 +1,30 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** The package object <code>scala.math</code> contains methods for performing basic numeric
+ * operations such as the elementary exponential, logarithm, square root, and
+ * trigonometric functions.
+ */
+
+package object math extends MathCommon {
+ // These are new in 2.8, so they don't belong in the deprecated scala.Math.
+
+ def log10(x: Double): Double = java.lang.Math.log10(x)
+ def cbrt(x: Double): Double = java.lang.Math.cbrt(x)
+
+ def ulp(x: Double): Double = java.lang.Math.ulp(x)
+ def ulp(x: Float): Float = java.lang.Math.ulp(x)
+ def sinh(x: Double): Double = java.lang.Math.sinh(x)
+ def cosh(x: Double): Double = java.lang.Math.cosh(x)
+ def tanh(x: Double):Double = java.lang.Math.tanh(x)
+ def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y)
+ def expm1(x: Double): Double = java.lang.Math.expm1(x)
+ def log1p(x: Double): Double = java.lang.Math.log1p(x)
+}
\ No newline at end of file
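A sketch (not part of the patch) exercising the new forwarders; each delegates directly to java.lang.Math:

    import scala.math._

    println(log10(1000.0))      // 3.0
    println(cbrt(27.0))         // 3.0
    println(hypot(3.0, 4.0))    // 5.0
    println(expm1(0.0))         // 0.0
    println(ulp(1.0f))          // smallest representable step above 1.0f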
diff --git a/src/library/scala/mobile/Code.scala b/src/library/scala/mobile/Code.scala
index cd898e8a3e..fb43de27ab 100644
--- a/src/library/scala/mobile/Code.scala
+++ b/src/library/scala/mobile/Code.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.mobile
diff --git a/src/library/scala/mobile/Location.scala b/src/library/scala/mobile/Location.scala
index 8b389ecf0c..4b5f13111d 100644
--- a/src/library/scala/mobile/Location.scala
+++ b/src/library/scala/mobile/Location.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.mobile
diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala
index db52f25d76..8f5f3e8319 100644
--- a/src/library/scala/native.scala
+++ b/src/library/scala/native.scala
@@ -1,26 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
-/**
- * Marker for native methods.
- * <p>
- * <code>@native def f(x: Int, y: List[Long]): String = ..</code>
- * </p>
- * <p>
- * Method body is not generated if method is marked with <code>@native</code>,
- * but it is type checked when present.
- * </p>
- *
- * @since 2.6
- */
+/** Marker for native methods.
+ *
+ * {{{
+ * @native def f(x: Int, y: List[Long]): String = ...
+ * }}}
+ *
+ * Method body is not generated if method is marked with `@native`,
+ * but it is type checked when present.
+ *
+ * @since 2.6 */
class native extends StaticAnnotation {}
diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala
index af942394a9..18270e7bc5 100644
--- a/src/library/scala/noinline.scala
+++ b/src/library/scala/noinline.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index e45e5cebe7..9531572321 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -1,15 +1,34 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+/**
+ * Core Scala types. They are always available without an explicit import.
+ */
package object scala {
+ type Throwable = java.lang.Throwable
+ type Exception = java.lang.Exception
+ type Error = java.lang.Error
+
+ type RuntimeException = java.lang.RuntimeException
+ type NullPointerException = java.lang.NullPointerException
+ type ClassCastException = java.lang.ClassCastException
+ type IndexOutOfBoundsException = java.lang.IndexOutOfBoundsException
+ type ArrayIndexOutOfBoundsException = java.lang.ArrayIndexOutOfBoundsException
+ type StringIndexOutOfBoundsException = java.lang.StringIndexOutOfBoundsException
+ type UnsupportedOperationException = java.lang.UnsupportedOperationException
+ type IllegalArgumentException = java.lang.IllegalArgumentException
+ type NoSuchElementException = java.util.NoSuchElementException
+ type NumberFormatException = java.lang.NumberFormatException
+ type AbstractMethodError = java.lang.AbstractMethodError
+
+ type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
type Traversable[+A] = scala.collection.Traversable[A]
val Traversable = scala.collection.Traversable
@@ -44,11 +63,15 @@ package object scala {
val Vector = scala.collection.immutable.Vector
type StringBuilder = scala.collection.mutable.StringBuilder
- val StringBuilder = scala.collection.mutable.StringBuilder
type Range = scala.collection.immutable.Range
val Range = scala.collection.immutable.Range
+ // Migrated from Predef
+
+ val $scope = scala.xml.TopScope
+ def currentThread = java.lang.Thread.currentThread()
+
// Numeric types which were moved into scala.math.*
type BigDecimal = scala.math.BigDecimal
@@ -73,6 +96,56 @@ package object scala {
type PartialOrdering[T] = scala.math.PartialOrdering[T]
type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T]
+ @deprecated("Use Tuple1(x) to create a 1-tuple.")
+ def Tuple[A1](x1: A1) = Tuple1(x1)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2](x1: A1, x2: A2) = Tuple2(x1, x2)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3](x1: A1, x2: A2, x3: A3) = Tuple3(x1, x2, x3)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4](x1: A1, x2: A2, x3: A3, x4: A4) = Tuple4(x1, x2, x3, x4)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5) = Tuple5(x1, x2, x3, x4, x5)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6) = Tuple6(x1, x2, x3, x4, x5, x6)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7) = Tuple7(x1, x2, x3, x4, x5, x6, x7)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8) = Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9) = Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10) = Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11) = Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12) = Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13) = Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14) = Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15) = Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16) = Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17) = Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18) = Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19) = Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20) = Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21) = Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21, x22: A22) = Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)
+
+ @deprecated("use <code>java.lang.Integer</code> instead")
+ type Integer = java.lang.Integer
+ @deprecated("use <code>java.lang.Character</code> instead")
+ type Character = java.lang.Character
+
@deprecated("use Iterable instead") type Collection[+A] = Iterable[A]
@deprecated("use Iterable instead") val Collection = Iterable
diff --git a/src/library/scala/ref/PhantomReference.scala b/src/library/scala/ref/PhantomReference.scala
index d070f8f28e..0ae2bc229c 100644
--- a/src/library/scala/ref/PhantomReference.scala
+++ b/src/library/scala/ref/PhantomReference.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.ref
@@ -15,7 +14,11 @@ package scala.ref
*/
class PhantomReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] {
val underlying: java.lang.ref.PhantomReference[_ <: T] =
- new java.lang.ref.PhantomReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]])
- queue.register(this)
+ new PhantomReferenceWithWrapper[T](value, queue, this)
}
+/**
+ * @author Philipp Haller
+ */
+private class PhantomReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: PhantomReference[T])
+ extends java.lang.ref.PhantomReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T]
diff --git a/src/library/scala/ref/Reference.scala b/src/library/scala/ref/Reference.scala
index 4fa69f77b3..64a0dc9ab0 100644
--- a/src/library/scala/ref/Reference.scala
+++ b/src/library/scala/ref/Reference.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.ref
@@ -15,12 +14,10 @@ package scala.ref
* @author Sean McDirmid
*/
trait Reference[+T <: AnyRef] extends Function0[T] {
- @deprecated("Use .get.isDefined instead")
- def isValid: Boolean
/** return the underlying value */
def apply(): T
/** return <code>Some</code> underlying if it hasn't been collected, otherwise <code>None</code> */
- def get : Option[T]
+ def get: Option[T]
override def toString = get.map(_.toString).getOrElse("<deleted>")
def clear(): Unit
def enqueue(): Boolean
diff --git a/src/library/scala/ref/ReferenceQueue.scala b/src/library/scala/ref/ReferenceQueue.scala
index c393e9aa09..d019005b70 100644
--- a/src/library/scala/ref/ReferenceQueue.scala
+++ b/src/library/scala/ref/ReferenceQueue.scala
@@ -1,39 +1,31 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.ref
-import scala.collection.mutable.HashMap
-
/**
- * @author Sean McDirmid, Philipp Haller
+ * @author Sean McDirmid
+ * @author Philipp Haller
*/
class ReferenceQueue[+T <: AnyRef] {
+
private[ref] val underlying: java.lang.ref.ReferenceQueue[_ <: T] = new java.lang.ref.ReferenceQueue[T]
override def toString = underlying.toString
- protected def Wrapper(jref: java.lang.ref.Reference[_]) = jref match {
- case null => None
- case ref =>
- val refWrapper = wrappers(ref)
- wrappers -= ref
- Some(refWrapper.asInstanceOf[Reference[T]])
- }
+ protected def Wrapper(jref: java.lang.ref.Reference[_]): Option[Reference[T]] =
+ jref match {
+ case null => None
+ case ref => Some(ref.asInstanceOf[ReferenceWithWrapper[T]].wrapper)
+ }
def poll: Option[Reference[T]] = Wrapper(underlying.poll)
def remove: Option[Reference[T]] = Wrapper(underlying.remove)
def remove(timeout: Long): Option[Reference[T]] = Wrapper(underlying.remove(timeout))
- protected val wrappers = new HashMap[java.lang.ref.Reference[_],
- ReferenceWrapper[_ <: AnyRef]]
- def register(ref: ReferenceWrapper[_ <: AnyRef]) {
- wrappers += ((ref.underlying, ref))
- }
}
diff --git a/src/library/scala/ref/ReferenceWrapper.scala b/src/library/scala/ref/ReferenceWrapper.scala
index a562e56c3a..d112dcdd9d 100644
--- a/src/library/scala/ref/ReferenceWrapper.scala
+++ b/src/library/scala/ref/ReferenceWrapper.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.ref
@@ -15,7 +14,6 @@ package scala.ref
*/
trait ReferenceWrapper[+T <: AnyRef] extends Reference[T] with Proxy {
val underlying: java.lang.ref.Reference[_ <: T]
- @deprecated("Use .get.isDefined instead") def isValid = underlying.get != null
override def get = {
val ret = underlying.get
if (ret eq null) None else Some(ret)
@@ -31,3 +29,10 @@ trait ReferenceWrapper[+T <: AnyRef] extends Reference[T] with Proxy {
def self = underlying
}
+
+/**
+ * @author Philipp Haller
+ */
+private trait ReferenceWithWrapper[T <: AnyRef] {
+ val wrapper: ReferenceWrapper[T]
+}
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index e1807247ed..a9beb5cc16 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.ref
@@ -16,8 +15,11 @@ package scala.ref
class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] {
def this(value : T) = this(value, null);
val underlying: java.lang.ref.SoftReference[_ <: T] =
- if (queue == null) new java.lang.ref.SoftReference[T](value);
- else new java.lang.ref.SoftReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]])
- if (queue != null)
- queue.register(this)
+ new SoftReferenceWithWrapper[T](value, queue, this)
}
+
+/**
+ * @author Philipp Haller
+ */
+private class SoftReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: SoftReference[T])
+ extends java.lang.ref.SoftReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T]
diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala
index 767916cd14..6fb03a314b 100644
--- a/src/library/scala/ref/WeakReference.scala
+++ b/src/library/scala/ref/WeakReference.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.ref
@@ -16,8 +15,11 @@ package scala.ref
class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] {
def this(value: T) = this(value, null)
val underlying: java.lang.ref.WeakReference[_ <: T] =
- if (queue == null) new java.lang.ref.WeakReference[T](value)
- else new java.lang.ref.WeakReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]])
- if (queue != null)
- queue.register(this)
+ new WeakReferenceWithWrapper[T](value, queue, this)
}
+
+/**
+ * @author Philipp Haller
+ */
+private class WeakReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: WeakReference[T])
+ extends java.lang.ref.WeakReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T]
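A sketch (not part of the patch) of the reworked scala.ref plumbing in use. The wrapper is now recovered from the enqueued reference itself (via ReferenceWithWrapper) instead of the old HashMap registry, so no register call is needed. GC timing is nondeterministic, so the poll may legitimately return None:

    import scala.ref.{ WeakReference, ReferenceQueue }

    val queue = new ReferenceQueue[AnyRef]
    var payload: AnyRef = new Object
    val ref = new WeakReference(payload, queue)

    payload = null       // drop the only strong reference
    System.gc()          // request collection; not guaranteed to run
    Thread.sleep(100)
    println(queue.poll)  // Some(...) once the referent was collected and enqueued, otherwise None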
diff --git a/src/library/scala/reflect/BeanDescription.scala b/src/library/scala/reflect/BeanDescription.scala
index 07d16cab4b..74ce8f2d37 100644
--- a/src/library/scala/reflect/BeanDescription.scala
+++ b/src/library/scala/reflect/BeanDescription.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/BeanDisplayName.scala b/src/library/scala/reflect/BeanDisplayName.scala
index 24e5f8b89b..414905ae93 100644
--- a/src/library/scala/reflect/BeanDisplayName.scala
+++ b/src/library/scala/reflect/BeanDisplayName.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/BeanInfo.scala b/src/library/scala/reflect/BeanInfo.scala
index a2249c818b..5153263f6f 100644
--- a/src/library/scala/reflect/BeanInfo.scala
+++ b/src/library/scala/reflect/BeanInfo.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/BeanInfoSkip.scala b/src/library/scala/reflect/BeanInfoSkip.scala
index f7717a8825..f480f6c7b5 100644
--- a/src/library/scala/reflect/BeanInfoSkip.scala
+++ b/src/library/scala/reflect/BeanInfoSkip.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/BeanProperty.scala b/src/library/scala/reflect/BeanProperty.scala
index 50c095486f..d784774882 100644
--- a/src/library/scala/reflect/BeanProperty.scala
+++ b/src/library/scala/reflect/BeanProperty.scala
@@ -1,16 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
+import annotation.target._
+
/** <p>
* When attached to a field, this annotation adds a setter and a getter
* method following the Java Bean convention. For example:
@@ -29,4 +30,5 @@ package scala.reflect
* <code>scala.reflect.BooleanBeanProperty</code> annotation instead.
* </p>
*/
+@field
class BeanProperty extends StaticAnnotation
diff --git a/src/library/scala/reflect/BooleanBeanProperty.scala b/src/library/scala/reflect/BooleanBeanProperty.scala
index 8bcd045a23..f90f11f7cb 100644
--- a/src/library/scala/reflect/BooleanBeanProperty.scala
+++ b/src/library/scala/reflect/BooleanBeanProperty.scala
@@ -1,16 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
+import annotation.target._
+
/** <p>
* This annotation has the same functionality as
* <code>scala.reflect.BeanProperty</code>, but the generated
@@ -18,4 +19,5 @@ package scala.reflect
* of <code>getFieldName</code>.
* </p>
*/
+@field
class BooleanBeanProperty extends StaticAnnotation
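A sketch (not part of the patch) of both annotations in use; with the new @field target they attach to the underlying field, including for constructor parameters:

    import scala.reflect.{ BeanProperty, BooleanBeanProperty }

    class Account(@BeanProperty var owner: String,
                  @BooleanBeanProperty var active: Boolean)

    val acct = new Account("alice", true)
    acct.setOwner("bob")       // generated per the Java Bean convention
    println(acct.getOwner)     // bob
    println(acct.isActive)     // true: the Boolean variant generates isActive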
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index 05205029c0..0337ed6aed 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
@@ -27,7 +26,7 @@ import scala.collection.mutable.{WrappedArray, ArrayBuilder}
* </p>
*/
@serializable
-trait ClassManifest[T] extends OptManifest[T] {
+trait ClassManifest[T] extends OptManifest[T] with Equals {
/** A class representing the type U to which T would be erased. Note
* that there is no subtyping relationship between T and U. */
@@ -52,8 +51,18 @@ trait ClassManifest[T] extends OptManifest[T] {
case _ => false
}
}
- (this.erasure == that.erasure || subtype(this.erasure, that.erasure)) &&
- subargs(this.typeArguments, that.typeArguments)
+
+ import Manifest.{ AnyVal, Nothing, Null }
+
+ that match {
+ // All types which conform to AnyVal will override <:<.
+ case _: AnyValManifest[_] => false
+ // Anything which conforms to a bottom type will override <:<.
+ case AnyVal | Nothing | Null => false
+ case _ =>
+ (this.erasure == that.erasure || subtype(this.erasure, that.erasure)) &&
+ subargs(this.typeArguments, that.typeArguments)
+ }
}
/** Tests whether the type represented by this manifest is a supertype
@@ -63,14 +72,20 @@ trait ClassManifest[T] extends OptManifest[T] {
def >:>(that: ClassManifest[_]): Boolean =
that <:< this
+ def canEqual(other: Any) = other match {
+ case _: ClassManifest[_] => true
+ case _ => false
+ }
+
/** Tests whether the type represented by this manifest is equal to the
* type represented by `that' manifest. BE AWARE: the current
* implementation is an approximation, as the test is done on the
* erasure of the type. */
override def equals(that: Any): Boolean = that match {
- case m: ClassManifest[_] => this.erasure == m.erasure
+ case m: ClassManifest[_] if m canEqual this => this.erasure == m.erasure
case _ => false
}
+ override def hashCode = this.erasure.##
protected def arrayClass[T](tp: Predef.Class[_]): Predef.Class[Array[T]] =
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[Predef.Class[Array[T]]]
@@ -153,7 +168,7 @@ object ClassManifest {
case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]]
}
- def singleType[T](value: Any): Manifest[T] = Manifest.singleType(value)
+ def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value)
/** ClassManifest for the class type `clazz', where `clazz' is
* a top-level or static class.
@@ -196,17 +211,22 @@ object ClassManifest {
/** ClassManifest for the abstract type `prefix # name'. `upperBound' is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection. */
- def abstractType[T](prefix: OptManifest[_], name: String, upperBound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
+ def abstractType[T](prefix: OptManifest[_], name: String, clazz: Predef.Class[_], args: OptManifest[_]*): ClassManifest[T] =
new (ClassManifest[T] @serializable) {
- def erasure = upperBound.erasure
+ def erasure = clazz
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
- /** ClassManifest for the intersection type `parents_0 with ... with parents_n'. */
- def intersectionType[T](parents: ClassManifest[_]*): ClassManifest[T] =
+ /** ClassManifest for the abstract type `prefix # name'. `upperBound' is not
+ * strictly necessary as it could be obtained by reflection. It was
+ * added so that erasure can be calculated without reflection.
+ * todo: remove after next bootstrap
+ */
+ def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
new (ClassManifest[T] @serializable) {
- def erasure = parents.head.erasure
- override def toString = parents.mkString(" with ")
+ def erasure = upperbound.erasure
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
}
}
diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala
index a61d86babb..36537ba447 100644
--- a/src/library/scala/reflect/Code.scala
+++ b/src/library/scala/reflect/Code.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
/** This type is required by the compiler and <b>should not be used in client code</b>. */
-class Code[Type](val tree: Tree)
+class Code[T](val tree: Tree)
/** This type is required by the compiler and <b>should not be used in client code</b>. */
object Code {
diff --git a/src/library/scala/reflect/Invocation.scala b/src/library/scala/reflect/Invocation.scala
deleted file mode 100644
index 795c74fef4..0000000000
--- a/src/library/scala/reflect/Invocation.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.reflect
-
-import scala.annotation.experimental
-import scala.util.control.Exception.catching
-import java.lang.{ Class => JClass }
-import java.lang.reflect.{ Method => JMethod }
-import scala.{ Symbol => ScalaSymbol }
-
-/** <p>
- * A more convenient syntax for reflective invocation.<br/>
- * Example usage:
- * </p><pre>
- * <b>class</b> Obj { <b>private def</b> foo(x: Int, y: String): Long = x + y.length }</pre>
- * <p>
- * You can call it reflectively one of two ways:
- * </p><pre>
- * <b>import</b> scala.reflect.Invocation._
- * (<b>new</b> Obj) o 'foo(5, "abc") // the 'o' method returns Any
- * <b>val</b> x: Long = (<b>new</b> Obj) oo 'foo(5, "abc") // the 'oo' method casts to expected type.</pre>
- * <p>
- * If you call the <code>oo</code> method and do not give the type inferencer
- * enough help, it will most likely infer <code>Nothing</code>, which will
- * result in a <code>ClassCastException</code>.
- * </p>
- *
- * @author Paul Phillips
- */
-@experimental
-object Invocation
-{
- /** <p>
- * In order to encapsulate anything to do with reflection, we must
- * overcome an issue with the boxing of primitives. If we declare a
- * method which takes arguments of type <code>Any</code>, by the time the
- * method parameters can be examined, the primitives have already been boxed.
- * The reflective call will then fail because <code>classOf[java.lang.Integer]</code>
- * is not the same thing as <code>classOf[scala.Int].</code>
- * </p>
- * <p>
- * Any useful workaround will require examining the arguments before
- * the method is called. The approach here is to define two implicits,
- * one for <code>AnyRef</code>'s and one for <code>AnyVal</code>'s, and
- * box them in a container which preserves their original class identity.
- * </p>
- */
- trait PrimitivePreserver[T] {
- val value: T
- val clazz: JClass[_]
- }
- case class PreservedAnyVal[T <: AnyVal](value: T) extends PrimitivePreserver[T] {
- val clazz = getAnyValClass(value)
- }
- case class PreservedAnyRef[T <: AnyRef](value: T) extends PrimitivePreserver[T] {
- val clazz = value.getClass
- }
- implicit def makePreservedAnyRef[T <: AnyRef](x: T) = PreservedAnyRef(x)
- implicit def makePreservedAnyVal[T <: AnyVal](x: T) = PreservedAnyVal(x)
-
- /** We also require an implicit on scala.Symbol so they appear to contain
- * an apply method, which packages the method arguments. The type parameter
- * is the method's expected result type.
- */
- class SymbolWithArguments(val sym: ScalaSymbol, val args: PrimitivePreserver[_]*) {
- def getArgs = args map (_.value.asInstanceOf[AnyRef])
- def getArgTypes = args.toList map (_.clazz)
- def argsMatch(m: JMethod) =
- List.map2(m.getParameterTypes.toList, getArgTypes)(_ isAssignableFrom _) forall (_ == true)
-
- // only called if getMethod() fails - searches private methods too.
- def getDeclaredMethodsOn(x: AnyRef) =
- (x.getClass.getDeclaredMethods filter (_.getName == sym.name) find argsMatch) match {
- case Some(m) => m setAccessible true ; m
- case None => throw new NoSuchMethodException(sym.name)
- }
-
- def getMethodOn(x: AnyRef) =
- catching(classOf[NoSuchMethodException]) .
- opt (x.getClass.getMethod(sym.name, getArgTypes: _*)) .
- getOrElse (getDeclaredMethodsOn(x))
-
- }
- class RichSymbol(sym: ScalaSymbol) {
- def apply(args: PrimitivePreserver[_]*): SymbolWithArguments =
- new SymbolWithArguments(sym, args: _*)
- }
- implicit def makeRichSymbol(sym: ScalaSymbol): RichSymbol = new RichSymbol(sym)
-
- /** An implicit on AnyRef provides it with the 'o' method, which is supposed
- * to look like a giant '.' and present the feel of method invocation.
- */
- class ReflectionOperators[T <: AnyRef](self: T) {
- val clazz = self.getClass.asInstanceOf[JClass[T]]
-
- /** Issue call without touching result - returns Any.
- */
- def o(sym: ScalaSymbol): Any = oo(new SymbolWithArguments(sym))
- def o(symApp: SymbolWithArguments): Any = oo(symApp)
-
- /** Issue call expecting return type R - casts result to R.
- */
- def oo[R](sym: ScalaSymbol): R = oo[R](new SymbolWithArguments(sym))
- def oo[R](symApp: SymbolWithArguments): R = {
- def method = symApp getMethodOn self
- method.invoke(self, symApp.getArgs: _*).asInstanceOf[R]
- }
- }
- implicit def makeReflectionOperators[T <: AnyRef](x: T): ReflectionOperators[T] =
- new ReflectionOperators(x)
-
- /** Obtain the class object for an <code>AnyVal</code>.
- */
- def getAnyValClass(x: AnyVal): JClass[_] = x match {
- case _: Byte => classOf[Byte]
- case _: Short => classOf[Short]
- case _: Int => classOf[Int]
- case _: Long => classOf[Long]
- case _: Float => classOf[Float]
- case _: Double => classOf[Double]
- case _: Char => classOf[Char]
- case _: Boolean => classOf[Boolean]
- case _: Unit => classOf[Unit]
- }
-}
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 2b8cf03e7b..6faa99c4c8 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
@@ -27,11 +26,33 @@ import scala.collection.immutable.{List, Nil}
* </p>
*/
@serializable
-trait Manifest[T] extends ClassManifest[T] {
+trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = List()
override def arrayManifest: Manifest[Array[T]] =
Manifest.classType[Array[T]](arrayClass[T](erasure))
+
+ override def canEqual(that: Any): Boolean = that match {
+ case _: Manifest[_] => true
+ case _ => false
+ }
+ override def equals(that: Any): Boolean = that match {
+ case m: Manifest[_] if m canEqual this => (this <:< m) && (m <:< this)
+ case _ => false
+ }
+ override def hashCode = this.erasure.##
+}
+
+@serializable
+trait AnyValManifest[T] extends Manifest[T] with Equals {
+ import Manifest.{ Any, AnyVal }
+ override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) || (that eq AnyVal)
+ override def canEqual(other: Any) = other match {
+ case _: AnyValManifest[_] => true
+ case _ => false
+ }
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
}
/** <ps>
@@ -45,112 +66,135 @@ trait Manifest[T] extends ClassManifest[T] {
* </p>
*/
object Manifest {
-
- val Byte = new (Manifest[Byte] @serializable) {
+ val Byte: AnyValManifest[Byte] = new (AnyValManifest[Byte] @serializable) {
def erasure = java.lang.Byte.TYPE
override def toString = "Byte"
override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
+ private def readResolve(): Any = Manifest.Byte
}
- val Short = new (Manifest[Short] @serializable) {
+ val Short: AnyValManifest[Short] = new (AnyValManifest[Short] @serializable) {
def erasure = java.lang.Short.TYPE
override def toString = "Short"
override def newArray(len: Int): Array[Short] = new Array[Short](len)
override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
+ private def readResolve(): Any = Manifest.Short
}
- val Char = new (Manifest[Char] @serializable) {
+ val Char: AnyValManifest[Char] = new (AnyValManifest[Char] @serializable) {
def erasure = java.lang.Character.TYPE
override def toString = "Char"
override def newArray(len: Int): Array[Char] = new Array[Char](len)
override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
+ private def readResolve(): Any = Manifest.Char
}
- val Int = new (Manifest[Int] @serializable) {
+ val Int: AnyValManifest[Int] = new (AnyValManifest[Int] @serializable) {
def erasure = java.lang.Integer.TYPE
override def toString = "Int"
override def newArray(len: Int): Array[Int] = new Array[Int](len)
override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
+ private def readResolve(): Any = Manifest.Int
}
- val Long = new (Manifest[Long] @serializable) {
+ val Long: AnyValManifest[Long] = new (AnyValManifest[Long] @serializable) {
def erasure = java.lang.Long.TYPE
override def toString = "Long"
override def newArray(len: Int): Array[Long] = new Array[Long](len)
override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
+ private def readResolve(): Any = Manifest.Long
}
- val Float = new (Manifest[Float] @serializable) {
+ val Float: AnyValManifest[Float] = new (AnyValManifest[Float] @serializable) {
def erasure = java.lang.Float.TYPE
override def toString = "Float"
override def newArray(len: Int): Array[Float] = new Array[Float](len)
override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
+ private def readResolve(): Any = Manifest.Float
}
- val Double = new (Manifest[Double] @serializable) {
+ val Double: AnyValManifest[Double] = new (AnyValManifest[Double] @serializable) {
def erasure = java.lang.Double.TYPE
override def toString = "Double"
override def newArray(len: Int): Array[Double] = new Array[Double](len)
override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
+ private def readResolve(): Any = Manifest.Double
}
- val Boolean = new (Manifest[Boolean] @serializable) {
+ val Boolean: AnyValManifest[Boolean] = new (AnyValManifest[Boolean] @serializable) {
def erasure = java.lang.Boolean.TYPE
override def toString = "Boolean"
override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
+ private def readResolve(): Any = Manifest.Boolean
}
- val Unit = new (Manifest[Unit] @serializable) {
+ val Unit: AnyValManifest[Unit] = new (AnyValManifest[Unit] @serializable) {
def erasure = java.lang.Void.TYPE
override def toString = "Unit"
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
+ private def readResolve(): Any = Manifest.Unit
}
val Any: Manifest[Any] = new ClassTypeManifest[Any](None, classOf[java.lang.Object], List()) {
override def toString = "Any"
- // todo: re-implement <:<
+ override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
+ private def readResolve(): Any = Manifest.Any
}
val Object: Manifest[Object] = new ClassTypeManifest[Object](None, classOf[java.lang.Object], List()) {
override def toString = "Object"
- // todo: re-implement <:<
+ override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
+ private def readResolve(): Any = Manifest.Object
}
val AnyVal: Manifest[AnyVal] = new ClassTypeManifest[AnyVal](None, classOf[java.lang.Object], List()) {
override def toString = "AnyVal"
- // todo: re-implement <:<
+ override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
+ private def readResolve(): Any = Manifest.AnyVal
}
val Null: Manifest[Null] = new ClassTypeManifest[Null](None, classOf[java.lang.Object], List()) {
override def toString = "Null"
- // todo: re-implement <:<
+ override def <:<(that: ClassManifest[_]): Boolean =
+ (that ne null) && (that ne Nothing) && !(that <:< AnyVal)
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
+ private def readResolve(): Any = Manifest.Null
}
val Nothing: Manifest[Nothing] = new ClassTypeManifest[Nothing](None, classOf[java.lang.Object], List()) {
override def toString = "Nothing"
- // todo: re-implement <:<
+ override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
+ private def readResolve(): Any = Manifest.Nothing
}
/** Manifest for the singleton type `value.type'. */
- def singleType[T](value: Any): Manifest[T] =
+ def singleType[T <: AnyRef](value: AnyRef): Manifest[T] =
new (Manifest[T] @serializable) {
- lazy val erasure =
- value match {
- case anyRefValue: AnyRef => anyRefValue.getClass
- case anyValue => error("There is no singleton type for AnyVal values")
- }
+ /** Note - this was doing a type match on value to exclude AnyVal, which does not work.
+ * Pattern matching _: AnyRef matches everything because of boxing.
+ */
+ lazy val erasure = value.getClass
override lazy val toString = value.toString + ".type"
}
@@ -193,13 +237,24 @@ object Manifest {
/** Manifest for the abstract type `prefix # name'. `upperBound' is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection. */
- def abstractType[T](prefix: Manifest[_], name: String, upperBound: Manifest[_], args: Manifest[_]*): Manifest[T] =
+ def abstractType[T](prefix: Manifest[_], name: String, clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new (Manifest[T] @serializable) {
- def erasure = upperBound.erasure
+ def erasure = clazz
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
+ /** Manifest for the unknown type `_ >: L <: U' in an existential.
+ */
+ def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
+ new (Manifest[T] @serializable) {
+ def erasure = upperBound.erasure
+ override def toString =
+ "_" +
+ (if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
+ (if (upperBound eq Nothing) "" else " <: "+upperBound)
+ }
+
/** Manifest for the intersection type `parents_0 with ... with parents_n'. */
def intersectionType[T](parents: Manifest[_]*): Manifest[T] =
new (Manifest[T] @serializable) {
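A quick, hedged usage sketch for the revised Manifest factories shown above. It is not part of the patch; the demo object name is made up, it assumes a 2.8-era scala-library built from these sources, and the printed values are indicative only.

// Hypothetical usage sketch for the revised Manifest factories (not part of this commit).
object ManifestFactoriesDemo {
  import scala.reflect.Manifest

  def main(args: Array[String]): Unit = {
    // singleType now takes an AnyRef, so the broken AnyVal type-match is gone.
    val s: String = "abc"
    val single = Manifest.singleType[String](s)
    println(single)            // abc.type
    println(single.erasure)    // class java.lang.String

    // wildcardType builds a manifest for `_ >: L <: U`; here `_ <: String`.
    val wild = Manifest.wildcardType[Any](Manifest.Nothing, Manifest.classType(classOf[String]))
    println(wild)              // _ <: java.lang.String
  }
}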
diff --git a/src/library/scala/util/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index cce503e329..31c789e33f 100644..100755
--- a/src/library/scala/util/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -1,15 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala.util
+package scala.reflect
/**
* @author Martin Odersky
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
index 6dbb9f9a92..d0497d4244 100644
--- a/src/library/scala/reflect/NoManifest.scala
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
index 47034286c7..f3ccef5af0 100644
--- a/src/library/scala/reflect/OptManifest.scala
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/Print.scala b/src/library/scala/reflect/Print.scala
index 6df7fb032a..adf3860f88 100644
--- a/src/library/scala/reflect/Print.scala
+++ b/src/library/scala/reflect/Print.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
@@ -103,9 +102,8 @@ object Print extends Function1[Any, String] {
case reflect.MethodType(formals, resultType) =>
formals.map(Print).mkString("(", ", ", ")") + " => " + Print(resultType)
case reflect.PolyType(typeParams, typeBounds, resultType) =>
- (List.map2(typeParams, typeBounds)
- ((tp, tb) => "[" + Print(tb._1) + " :> " + Print(tp) + " :> " + Print(tb._2) + "]")).
- mkString("[", ", ", "]") + " -> " + Print(resultType)
+ val z = (typeParams, typeBounds).zip map { case (tp, tb) => "[" + Print(tb._1) + " :> " + Print(tp) + " :> " + Print(tb._2) + "]" }
+ z.mkString("[", ", ", "]") + " -> " + Print(resultType)
case _ =>
"???"
}
diff --git a/src/library/scala/reflect/RichClass.scala b/src/library/scala/reflect/RichClass.scala
deleted file mode 100644
index 7d690f360d..0000000000
--- a/src/library/scala/reflect/RichClass.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.reflect
-
-import scala.annotation.experimental
-import scala.util.control.Exception._
-import scala.util.ScalaClassLoader._
-import java.lang.{ Class => JClass }
-import java.lang.reflect. { Constructor => JConstructor }
-
-object RichClass
-{
- // We can't put this in Predef at the moment because everything referenced
- // from Predef has to be buildable at the first bootstraping phase.
- implicit def classWrapper[T](x: JClass[T]): RichClass[T] = new RichClass(x)
-}
-
-@experimental
-final class RichClass[T](val self: JClass[T]) extends Proxy
-{
- // The getConstructors and getDeclaredConstructors methods on java.lang.Class[T]
- // return "raw type" Constructors rather than Constructor[T]s as one would want.
- // The "why" from http://java.sun.com/javase/6/docs/api/java/lang/Class.html is:
- //
- // Note that while this method returns an array of Constructor<T> objects (that is an array
- // of constructors from this class), the return type of this method is Constructor<?>[] and
- // not Constructor<T>[] as might be expected. This less informative return type is necessary
- // since after being returned from this method, the array could be modified to hold Constructor
- // objects for different classes, which would violate the type guarantees of Constructor<T>[]
- //
- // Since this reasoning is invalid in scala due to its abandonment of Array covariance,
- // these methods exist to correct the return types.
- //
- // In addition, at this writing because of ticket #1560 the compiler crashes on the
- // untyped constructors but not on these.
-
- def getConstructorsTyped(): Array[JConstructor[T]] =
- self.getConstructors() map (_.asInstanceOf[JConstructor[T]])
-
- def getDeclaredConstructorsTyped(): Array[JConstructor[T]] =
- self.getDeclaredConstructors() map (_.asInstanceOf[JConstructor[T]])
-
- private lazy val classLoader = self.getClassLoader match {
- case null => getSystemLoader
- case x => x
- }
- private val exceptions = List(
- classOf[ClassNotFoundException],
- classOf[NoSuchMethodException],
- classOf[SecurityException],
- classOf[NullPointerException],
- classOf[ClassCastException]
- )
-
- // Experimental!
- // scala> classOf[String].reflectiveCall[Array[String]]("ababab", "split")("b")
- // res0: Array[String] = Array(a, a, a)
-
- /** A class representing a reflective method call. It is a function object
- * and will make the call with whatever args are given via apply, or it will
- * throw an exception at that point if there was an error in creation.
- */
- class ReflectiveCall[+U](obj: T, name: String) {
- def methodForArgs(args: AnyRef*) = self.getMethod(name, args map (_.getClass) : _*)
- def isErroneous = false
- def apply(args: Any*): U = {
- val ps = args map (_.asInstanceOf[AnyRef])
- val m = methodForArgs(ps: _*)
- m.invoke(obj, ps: _*).asInstanceOf[U]
- }
- }
-
- class FailedReflectiveCall[+U](ex: Throwable) extends ReflectiveCall[U](null.asInstanceOf[T], null) {
- override def isErroneous = true
- override def apply(args: Any*) = throw ex
- }
-
- def reflectiveCall[U](obj: T, method: String): ReflectiveCall[U] = {
- (catching(exceptions: _*) either (new ReflectiveCall[U](obj, method))) match {
- case Left(x) => new FailedReflectiveCall[U](x)
- case Right(x) => x
- }
- }
-}
-
diff --git a/src/library/scala/reflect/ScalaBeanInfo.scala b/src/library/scala/reflect/ScalaBeanInfo.scala
index 1d5001b420..0ac47b6253 100644
--- a/src/library/scala/reflect/ScalaBeanInfo.scala
+++ b/src/library/scala/reflect/ScalaBeanInfo.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/ScalaLongSignature.java b/src/library/scala/reflect/ScalaLongSignature.java
new file mode 100644
index 0000000000..1ffd6d2520
--- /dev/null
+++ b/src/library/scala/reflect/ScalaLongSignature.java
@@ -0,0 +1,13 @@
+package scala.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/** */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ScalaLongSignature {
+ public String[] bytes();
+}
diff --git a/src/library/scala/reflect/ScalaSignature.java b/src/library/scala/reflect/ScalaSignature.java
new file mode 100644
index 0000000000..d1cdbc0589
--- /dev/null
+++ b/src/library/scala/reflect/ScalaSignature.java
@@ -0,0 +1,13 @@
+package scala.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/** */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ScalaSignature {
+ public String bytes();
+}
diff --git a/src/library/scala/reflect/Symbol.scala b/src/library/scala/reflect/Symbol.scala
index d9aa4a428d..74960f3d1a 100644
--- a/src/library/scala/reflect/Symbol.scala
+++ b/src/library/scala/reflect/Symbol.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/Tree.scala b/src/library/scala/reflect/Tree.scala
index 38adc40eaf..ff7fc73c8a 100644
--- a/src/library/scala/reflect/Tree.scala
+++ b/src/library/scala/reflect/Tree.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/Type.scala b/src/library/scala/reflect/Type.scala
index 815824a864..029bb3966e 100644
--- a/src/library/scala/reflect/Type.scala
+++ b/src/library/scala/reflect/Type.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.reflect
diff --git a/src/library/scala/reflect/generic/AnnotationInfos.scala b/src/library/scala/reflect/generic/AnnotationInfos.scala
new file mode 100755
index 0000000000..6239ca189c
--- /dev/null
+++ b/src/library/scala/reflect/generic/AnnotationInfos.scala
@@ -0,0 +1,42 @@
+package scala.reflect
+package generic
+
+trait AnnotationInfos { self: Universe =>
+
+ type AnnotationInfo <: AnyRef
+ val AnnotationInfo: AnnotationInfoExtractor
+
+ abstract class AnnotationInfoExtractor {
+ def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo
+ def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])]
+ }
+
+ type ClassfileAnnotArg <: AnyRef
+ implicit def classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] // need a precise manifest to pass to UnPickle's toArray call
+
+ type LiteralAnnotArg <: ClassfileAnnotArg
+ val LiteralAnnotArg: LiteralAnnotArgExtractor
+
+ type ArrayAnnotArg <: ClassfileAnnotArg
+ val ArrayAnnotArg: ArrayAnnotArgExtractor
+
+ type NestedAnnotArg <: ClassfileAnnotArg
+ val NestedAnnotArg: NestedAnnotArgExtractor
+
+ abstract class LiteralAnnotArgExtractor {
+ def apply(const: Constant): LiteralAnnotArg
+ def unapply(arg: LiteralAnnotArg): Option[Constant]
+ }
+
+ abstract class ArrayAnnotArgExtractor {
+ def apply(const: Array[ClassfileAnnotArg]): ArrayAnnotArg
+ def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]]
+ }
+
+ abstract class NestedAnnotArgExtractor {
+ def apply(anninfo: AnnotationInfo): NestedAnnotArg
+ def unapply(arg: NestedAnnotArg): Option[AnnotationInfo]
+ }
+}
+
+
diff --git a/src/library/scala/reflect/generic/ByteCodecs.scala b/src/library/scala/reflect/generic/ByteCodecs.scala
new file mode 100644
index 0000000000..46146e9e4b
--- /dev/null
+++ b/src/library/scala/reflect/generic/ByteCodecs.scala
@@ -0,0 +1,224 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.reflect.generic
+
+object ByteCodecs {
+
+ def avoidZero(src: Array[Byte]): Array[Byte] = {
+ var i = 0
+ val srclen = src.length
+ var count = 0
+ while (i < srclen) {
+ if (src(i) == 0x7f) count += 1
+ i += 1
+ }
+ val dst = new Array[Byte](srclen + count)
+ i = 0
+ var j = 0
+ while (i < srclen) {
+ val in = src(i)
+ if (in == 0x7f) {
+ dst(j) = (0xc0).toByte
+ dst(j + 1) = (0x80).toByte
+ j += 2
+ } else {
+ dst(j) = (in + 1).toByte
+ j += 1
+ }
+ i += 1
+ }
+ dst
+ }
+
+ def regenerateZero(src: Array[Byte]): Int = {
+ var i = 0
+ val srclen = src.length
+ var j = 0
+ while (i < srclen) {
+ val in: Int = src(i) & 0xff
+ if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) {
+ src(j) = 0x7f
+ i += 2
+ } else {
+ src(j) = (in - 1).toByte
+ i += 1
+ }
+ j += 1
+ }
+ j
+ }
+
+ def encode8to7(src: Array[Byte]): Array[Byte] = {
+ val srclen = src.length
+ val dstlen = (srclen * 8 + 6) / 7
+ val dst = new Array[Byte](dstlen)
+ var i = 0
+ var j = 0
+ while (i + 6 < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte
+ var out: Int = in >>> 7
+ in = src(i + 1) & 0xff
+ dst(j + 1) = (out | (in << 1) & 0x7f).toByte
+ out = in >>> 6
+ in = src(i + 2) & 0xff
+ dst(j + 2) = (out | (in << 2) & 0x7f).toByte
+ out = in >>> 5
+ in = src(i + 3) & 0xff
+ dst(j + 3) = (out | (in << 3) & 0x7f).toByte
+ out = in >>> 4
+ in = src(i + 4) & 0xff
+ dst(j + 4) = (out | (in << 4) & 0x7f).toByte
+ out = in >>> 3
+ in = src(i + 5) & 0xff
+ dst(j + 5) = (out | (in << 5) & 0x7f).toByte
+ out = in >>> 2
+ in = src(i + 6) & 0xff
+ dst(j + 6) = (out | (in << 6) & 0x7f).toByte
+ out = in >>> 1
+ dst(j + 7) = out.toByte
+ i += 7
+ j += 8
+ }
+ if (i < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte; j += 1
+ var out: Int = in >>> 7
+ if (i + 1 < srclen) {
+ in = src(i + 1) & 0xff
+ dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1
+ out = in >>> 6
+ if (i + 2 < srclen) {
+ in = src(i + 2) & 0xff
+ dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1
+ out = in >>> 5
+ if (i + 3 < srclen) {
+ in = src(i + 3) & 0xff
+ dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1
+ out = in >>> 4
+ if (i + 4 < srclen) {
+ in = src(i + 4) & 0xff
+ dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1
+ out = in >>> 3
+ if (i + 5 < srclen) {
+ in = src(i + 5) & 0xff
+ dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1
+ out = in >>> 2
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) dst(j) = out.toByte
+ }
+ dst
+ }
+
+ @deprecated("use 2-argument version instead")
+ def decode7to8(src: Array[Byte], srclen: Int, dstlen: Int) { decode7to8(src, srclen) }
+
+ def decode7to8(src: Array[Byte], srclen: Int): Int = {
+ var i = 0
+ var j = 0
+ val dstlen = (srclen * 7 + 7) / 8
+ while (i + 7 < srclen) {
+ var out: Int = src(i)
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte
+ out = in >>> 1
+ in = src(i + 2)
+ src(j + 1) = (out | (in & 0x03) << 6).toByte
+ out = in >>> 2
+ in = src(i + 3)
+ src(j + 2) = (out | (in & 0x07) << 5).toByte
+ out = in >>> 3
+ in = src(i + 4)
+ src(j + 3) = (out | (in & 0x0f) << 4).toByte
+ out = in >>> 4
+ in = src(i + 5)
+ src(j + 4) = (out | (in & 0x1f) << 3).toByte
+ out = in >>> 5
+ in = src(i + 6)
+ src(j + 5) = (out | (in & 0x3f) << 2).toByte
+ out = in >>> 6
+ in = src(i + 7)
+ src(j + 6) = (out | in << 1).toByte
+ i += 8
+ j += 7
+ }
+ if (i < srclen) {
+ var out: Int = src(i)
+ if (i + 1 < srclen) {
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte; j += 1
+ out = in >>> 1
+ if (i + 2 < srclen) {
+ in = src(i + 2)
+ src(j) = (out | (in & 0x03) << 6).toByte; j += 1
+ out = in >>> 2
+ if (i + 3 < srclen) {
+ in = src(i + 3)
+ src(j) = (out | (in & 0x07) << 5).toByte; j += 1
+ out = in >>> 3
+ if (i + 4 < srclen) {
+ in = src(i + 4)
+ src(j) = (out | (in & 0x0f) << 4).toByte; j += 1
+ out = in >>> 4
+ if (i + 5 < srclen) {
+ in = src(i + 5)
+ src(j) = (out | (in & 0x1f) << 3).toByte; j += 1
+ out = in >>> 5
+ if (i + 6 < srclen) {
+ in = src(i + 6)
+ src(j) = (out | (in & 0x3f) << 2).toByte; j += 1
+ out = in >>> 6
+ }
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) src(j) = out.toByte
+ }
+ dstlen
+ }
+
+ def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
+
+ @deprecated("use 1-argument version instead")
+ def decode(xs: Array[Byte], dstlen: Int) { decode(xs) }
+
+ /**
+ * Destructively decodes array xs and returns the length of the decoded array.
+ *
+ * The result is sometimes one greater than the length of the decoded data. Example:
+ *
+ * scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * enc: Array[Byte] = Array(2, 5, 13, 1)
+ *
+ * scala> reflect.generic.ByteCodecs.decode(enc)
+ * res43: Int = 4
+ *
+ * scala> enc
+ * res44: Array[Byte] = Array(1, 2, 3, 0)
+ *
+ * However, this does not always happen.
+ */
+ def decode(xs: Array[Byte]): Int = {
+ val len = regenerateZero(xs)
+ decode7to8(xs, len)
+ }
+}
+
+
+
+
+
+
+
+
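A minimal round-trip check for the codec above. This is a standalone sketch, not part of the commit; it assumes scala.reflect.generic.ByteCodecs built from these sources is on the classpath, and the demo object name is hypothetical.

// Encodes a byte array, decodes it in place, and truncates to the known length,
// since decode may report one byte more than the real payload (see doc comment above).
object ByteCodecsDemo {
  import scala.reflect.generic.ByteCodecs

  def main(args: Array[String]): Unit = {
    val original = Array[Byte](1, 2, 3, 0, 127, -1)
    val encoded  = ByteCodecs.encode(original)   // 8-to-7 bit packing, then zero-avoidance
    val buf      = encoded.clone                 // decode is destructive, so work on a copy
    val len      = ByteCodecs.decode(buf)
    val decoded  = buf.take(original.length)
    assert(decoded.sameElements(original), "round trip failed")
    println("decoded length reported: " + len)
  }
}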
diff --git a/src/compiler/scala/tools/nsc/symtab/Constants.scala b/src/library/scala/reflect/generic/Constants.scala
index dfe7147270..bf963a1aae 100644..100755
--- a/src/compiler/scala/tools/nsc/symtab/Constants.scala
+++ b/src/library/scala/reflect/generic/Constants.scala
@@ -1,19 +1,15 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
-
-package scala.tools.nsc
-package symtab
+package scala.reflect
+package generic
import java.lang.Integer.toOctalString
+import PickleFormat._
-import classfile.PickleFormat._
-
-trait Constants {
- self: SymbolTable =>
+trait Constants { self: Universe =>
import definitions._
@@ -33,8 +29,6 @@ trait Constants {
// For supporting java enumerations inside java annotations (see ClassfileParser)
final val EnumTag = LITERALenum - LITERAL
- def isNumeric(tag: Int) = ByteTag <= tag && tag <= DoubleTag
-
case class Constant(value: Any) {
val tag: Int =
@@ -48,11 +42,13 @@ trait Constants {
else if (value.isInstanceOf[Float]) FloatTag
else if (value.isInstanceOf[Double]) DoubleTag
else if (value.isInstanceOf[String]) StringTag
- else if (value.isInstanceOf[Type]) ClassTag
- else if (value.isInstanceOf[Symbol]) EnumTag
+ else if (value.isInstanceOf[AbsType]) ClassTag
+ else if (value.isInstanceOf[AbsSymbol]) EnumTag
else if (value == null) NullTag
else throw new Error("bad constant value: " + value)
+ def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag
+
def tpe: Type = tag match {
case UnitTag => UnitClass.tpe
case BooleanTag => BooleanClass.tpe
@@ -65,8 +61,13 @@ trait Constants {
case DoubleTag => DoubleClass.tpe
case StringTag => StringClass.tpe
case NullTag => NullClass.tpe
- case ClassTag => Predef_classOfType(value.asInstanceOf[Type])
- case EnumTag => symbolValue.owner.linkedClassOfClass.tpe
+ case ClassTag => ClassType(value.asInstanceOf[Type])
+ case EnumTag =>
+ // given (in java): "class A { enum E { VAL1 } }"
+ // - symbolValue: the symbol of the actual enumeration value (VAL1)
+ // - .owner: the ModuleClassSymbol of the enumeration (object E)
+ // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
+ symbolValue.owner.linkedClassOfClass.tpe
}
/** We need the equals method to take account of tags as well as values.
@@ -75,7 +76,15 @@ trait Constants {
* @return ...
*/
override def equals(other: Any): Boolean = other match {
- case that: Constant => this.tag == that.tag && this.value == that.value
+ case that: Constant =>
+ this.tag == that.tag &&
+ (this.value == that.value || this.isNaN && that.isNaN)
+ case _ => false
+ }
+
+ def isNaN = value match {
+ case f: Float => f.isNaN
+ case d: Double => d.isNaN
case _ => false
}
@@ -220,7 +229,7 @@ trait Constants {
def symbolValue: Symbol = value.asInstanceOf[Symbol]
- override def hashCode(): Int =
- if (value == null) 0 else value.hashCode() * 41 + 17
+ override def hashCode: Int =
+ if (value == null) 0 else value.## * 41 + 17
}
}
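The NaN clause added to equals above matters because Scala's == on boxed numerics follows primitive semantics, under which NaN never equals itself. The following plain-Scala illustration mirrors the patched equals logic without using the compiler's Constant class; the Const name is hypothetical.

// Why value equality alone is not enough for NaN-valued constants.
object NaNEqualityDemo {
  case class Const(value: Any) {
    def isNaN = value match {
      case f: Float  => f.isNaN
      case d: Double => d.isNaN
      case _         => false
    }
    override def equals(other: Any): Boolean = other match {
      case that: Const => this.value == that.value || (this.isNaN && that.isNaN)
      case _           => false
    }
  }
  def main(args: Array[String]): Unit = {
    println(Double.NaN == Double.NaN)               // false: primitive NaN is never equal to itself
    println(Const(Double.NaN) == Const(Double.NaN)) // true: the isNaN clause catches it
  }
}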
diff --git a/src/library/scala/reflect/generic/Flags.scala b/src/library/scala/reflect/generic/Flags.scala
new file mode 100755
index 0000000000..c8ef529bc1
--- /dev/null
+++ b/src/library/scala/reflect/generic/Flags.scala
@@ -0,0 +1,199 @@
+package scala.reflect
+package generic
+
+object Flags extends Flags
+
+class Flags {
+
+ // modifiers
+ final val IMPLICIT = 0x00000200
+ final val FINAL = 0x00000020
+ final val PRIVATE = 0x00000004
+ final val PROTECTED = 0x00000001
+
+ final val SEALED = 0x00000400
+ final val OVERRIDE = 0x00000002
+ final val CASE = 0x00000800
+ final val ABSTRACT = 0x00000008 // abstract class, or used in conjunction
+ // with abstract override.
+ // Note difference to DEFERRED!
+
+ final val DEFERRED = 0x00000010 // was `abstract' for members | trait is virtual
+ final val METHOD = 0x00000040 // a method
+ final val MODULE = 0x00000100 // symbol is module or class implementing a module
+ final val INTERFACE = 0x00000080 // symbol is an interface (i.e. a trait which defines only abstract methods)
+
+ final val MUTABLE = 0x00001000 // symbol is a mutable variable.
+ final val PARAM = 0x00002000 // symbol is a (value or type) parameter to a method
+ final val PACKAGE = 0x00004000 // symbol is a java package
+ // available: 0x00008000
+
+ final val COVARIANT = 0x00010000 // symbol is a covariant type variable
+ final val CAPTURED = 0x00010000 // variable is accessed from nested function.
+ // Set by LambdaLift
+ final val BYNAMEPARAM = 0x00010000 // parameter is by name
+ final val CONTRAVARIANT = 0x00020000 // symbol is a contravariant type variable
+ final val LABEL = 0x00020000 // method symbol is a label. Set by TailCall
+ final val INCONSTRUCTOR = 0x00020000 // class symbol is defined in this/superclass
+ // constructor.
+ final val ABSOVERRIDE = 0x00040000 // combination of abstract & override
+ final val LOCAL = 0x00080000 // symbol is local to current class (i.e. private[this] or protected[this]
+ // pre: PRIVATE or PROTECTED are also set
+ final val JAVA = 0x00100000 // symbol was defined by a Java class
+ final val SYNTHETIC = 0x00200000 // symbol is compiler-generated
+ final val STABLE = 0x00400000 // functions that are assumed to be stable
+ // (typically, access methods for valdefs)
+ // or classes that do not contain abstract types.
+ final val STATIC = 0x00800000 // static field, method or class
+
+ final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor)
+ final val TRAIT = 0x02000000 // symbol is a trait
+ final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
+ final val BRIDGE = 0x04000000 // function is a bridge method. Set by Erasure
+ final val ACCESSOR = 0x08000000 // a value or variable accessor (getter or setter)
+
+ final val SUPERACCESSOR = 0x10000000 // a super accessor
+ final val PARAMACCESSOR = 0x20000000 // for field definitions generated for primary constructor
+ // parameters (no matter if it's a 'val' parameter or not)
+ // for parameters of a primary constructor ('val' or not)
+ // for the accessor methods generated for 'val' or 'var' parameters
+ final val MODULEVAR = 0x40000000 // for variables: is the variable caching a module value
+ final val SYNTHETICMETH = 0x40000000 // for methods: synthetic method, but without SYNTHETIC flag
+ final val MONOMORPHIC = 0x40000000 // for type symbols: does not have type parameters
+ final val LAZY = 0x80000000L // symbol is a lazy val. can't have MUTABLE unless transformed by typer
+
+ final val IS_ERROR = 0x100000000L // symbol is an error symbol
+ final val OVERLOADED = 0x200000000L // symbol is overloaded
+ final val LIFTED = 0x400000000L // class has been lifted out to package level
+ // local value has been lifted out to class level
+ // todo: make LIFTED = latePRIVATE?
+ final val MIXEDIN = 0x800000000L // term member has been mixed in
+ final val EXISTENTIAL = 0x800000000L // type is an existential parameter or skolem
+
+ final val EXPANDEDNAME = 0x1000000000L // name has been expanded with class suffix
+ final val IMPLCLASS = 0x2000000000L // symbol is an implementation class
+ final val PRESUPER = 0x2000000000L // value is evaluated before super call
+ final val TRANS_FLAG = 0x4000000000L // transient flag guaranteed to be reset
+ // after each phase.
+
+ final val LOCKED = 0x8000000000L // temporary flag to catch cyclic dependencies
+ final val SPECIALIZED = 0x10000000000L// symbol is a generated specialized member
+ final val DEFAULTINIT = 0x20000000000L// symbol is initialized to the default value (used by -Xcheckinit)
+ final val VBRIDGE = 0x40000000000L// symbol is a varargs bridge
+
+ // pickling and unpickling of flags
+
+ // The flags from 0x001 to 0x800 are different in the raw flags
+ // and in the pickled format.
+
+ private final val IMPLICIT_PKL = 0x00000001
+ private final val FINAL_PKL = 0x00000002
+ private final val PRIVATE_PKL = 0x00000004
+ private final val PROTECTED_PKL = 0x00000008
+
+ private final val SEALED_PKL = 0x00000010
+ private final val OVERRIDE_PKL = 0x00000020
+ private final val CASE_PKL = 0x00000040
+ private final val ABSTRACT_PKL = 0x00000080
+
+ private final val DEFERRED_PKL = 0x00000100
+ private final val METHOD_PKL = 0x00000200
+ private final val MODULE_PKL = 0x00000400
+ private final val INTERFACE_PKL = 0x00000800
+
+ private final val PKL_MASK = 0x00000FFF
+
+ final val PickledFlags: Long = 0xFFFFFFFFL
+
+ private val r2p = {
+ def rawFlagsToPickledAux(flags:Int) = {
+ var pflags=0
+ if ((flags & IMPLICIT )!=0) pflags|=IMPLICIT_PKL
+ if ((flags & FINAL )!=0) pflags|=FINAL_PKL
+ if ((flags & PRIVATE )!=0) pflags|=PRIVATE_PKL
+ if ((flags & PROTECTED)!=0) pflags|=PROTECTED_PKL
+ if ((flags & SEALED )!=0) pflags|=SEALED_PKL
+ if ((flags & OVERRIDE )!=0) pflags|=OVERRIDE_PKL
+ if ((flags & CASE )!=0) pflags|=CASE_PKL
+ if ((flags & ABSTRACT )!=0) pflags|=ABSTRACT_PKL
+ if ((flags & DEFERRED )!=0) pflags|=DEFERRED_PKL
+ if ((flags & METHOD )!=0) pflags|=METHOD_PKL
+ if ((flags & MODULE )!=0) pflags|=MODULE_PKL
+ if ((flags & INTERFACE)!=0) pflags|=INTERFACE_PKL
+ pflags
+ }
+ val v=new Array[Int](PKL_MASK+1)
+ var i=0
+ while (i<=PKL_MASK) {
+ v(i)=rawFlagsToPickledAux(i)
+ i+=1
+ }
+ v
+ }
+
+ private val p2r = {
+ def pickledToRawFlagsAux(pflags:Int) = {
+ var flags=0
+ if ((pflags & IMPLICIT_PKL )!=0) flags|=IMPLICIT
+ if ((pflags & FINAL_PKL )!=0) flags|=FINAL
+ if ((pflags & PRIVATE_PKL )!=0) flags|=PRIVATE
+ if ((pflags & PROTECTED_PKL)!=0) flags|=PROTECTED
+ if ((pflags & SEALED_PKL )!=0) flags|=SEALED
+ if ((pflags & OVERRIDE_PKL )!=0) flags|=OVERRIDE
+ if ((pflags & CASE_PKL )!=0) flags|=CASE
+ if ((pflags & ABSTRACT_PKL )!=0) flags|=ABSTRACT
+ if ((pflags & DEFERRED_PKL )!=0) flags|=DEFERRED
+ if ((pflags & METHOD_PKL )!=0) flags|=METHOD
+ if ((pflags & MODULE_PKL )!=0) flags|=MODULE
+ if ((pflags & INTERFACE_PKL)!=0) flags|=INTERFACE
+ flags
+ }
+ val v=new Array[Int](PKL_MASK+1)
+ var i=0
+ while (i<=PKL_MASK) {
+ v(i)=pickledToRawFlagsAux(i)
+ i+=1
+ }
+ v
+ }
+
+ def rawFlagsToPickled(flags:Long):Long =
+ (flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
+
+ def pickledToRawFlags(pflags:Long):Long =
+ (pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
+
+ // List of the raw flags, in pickled order
+ protected val pickledListOrder = {
+ def findBit(m:Long):Int = {
+ var mask=m
+ var i=0
+ while (i <= 62) {
+ if ((mask&1) == 1L) return i
+ mask >>= 1
+ i += 1
+ }
+ throw new AssertionError()
+ }
+ val v=new Array[Long](63)
+ v(findBit(IMPLICIT_PKL ))=IMPLICIT
+ v(findBit(FINAL_PKL ))=FINAL
+ v(findBit(PRIVATE_PKL ))=PRIVATE
+ v(findBit(PROTECTED_PKL))=PROTECTED
+ v(findBit(SEALED_PKL ))=SEALED
+ v(findBit(OVERRIDE_PKL ))=OVERRIDE
+ v(findBit(CASE_PKL ))=CASE
+ v(findBit(ABSTRACT_PKL ))=ABSTRACT
+ v(findBit(DEFERRED_PKL ))=DEFERRED
+ v(findBit(METHOD_PKL ))=METHOD
+ v(findBit(MODULE_PKL ))=MODULE
+ v(findBit(INTERFACE_PKL))=INTERFACE
+ var i=findBit(PKL_MASK+1)
+ while (i <= 62) {
+ v(i)=1L << i
+ i += 1
+ }
+ v.toList
+ }
+
+}
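A small sanity check of the raw/pickled flag translation tables defined above. Standalone sketch, not part of the commit; it assumes this generic.Flags is available, and the demo object name is made up.

// Round-trips a mix of remapped (low 12 bits) and pass-through flag bits.
object FlagsPicklingDemo {
  import scala.reflect.generic.Flags._

  def main(args: Array[String]): Unit = {
    val raw: Long = FINAL | CASE | METHOD | LAZY
    val pickled   = rawFlagsToPickled(raw)
    println("raw     = 0x" + java.lang.Long.toHexString(raw))
    println("pickled = 0x" + java.lang.Long.toHexString(pickled))
    assert(pickledToRawFlags(pickled) == raw, "translation should be invertible")
  }
}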
diff --git a/src/library/scala/reflect/generic/Names.scala b/src/library/scala/reflect/generic/Names.scala
new file mode 100755
index 0000000000..1b31726e3a
--- /dev/null
+++ b/src/library/scala/reflect/generic/Names.scala
@@ -0,0 +1,21 @@
+package scala.reflect
+package generic
+
+trait Names {
+
+ type Name >: Null <: AnyRef
+
+ def newTermName(cs: Array[Char], offset: Int, len: Int): Name
+ def newTermName(cs: Array[Byte], offset: Int, len: Int): Name
+ def newTermName(s: String): Name
+
+ def mkTermName(name: Name): Name
+
+ def newTypeName(cs: Array[Char], offset: Int, len: Int): Name
+ def newTypeName(cs: Array[Byte], offset: Int, len: Int): Name
+ def newTypeName(s: String): Name
+
+ def mkTypeName(name: Name): Name
+}
+
+
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala b/src/library/scala/reflect/generic/PickleBuffer.scala
index 9b4f6a6a42..14190016b8 100644..100755
--- a/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala
+++ b/src/library/scala/reflect/generic/PickleBuffer.scala
@@ -1,12 +1,10 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
-package scala.tools.nsc
-package symtab
-package classfile
+package scala.reflect
+package generic
/** Variable length byte arrays, with methods for basic pickling and unpickling.
*
@@ -129,6 +127,29 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
x << leading >> leading
}
+ /** Returns the buffer as a sequence of (Int, Array[Byte]) representing
+ * (tag, data) of the individual entries. Saves and restores buffer state.
+ */
+
+ def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = {
+ val saved = readIndex
+ readIndex = 0
+ readNat() ; readNat() // discarding version
+ val result = new Array[(Int, Array[Byte])](readNat())
+
+ result.indices foreach { index =>
+ val tag = readNat()
+ val len = readNat()
+ val bytes = data.slice(readIndex, len + readIndex)
+ readIndex += len
+
+ result(index) = tag -> bytes
+ }
+
+ readIndex = saved
+ result.toIndexedSeq
+ }
+
/** Perform operation <code>op</code> until the condition
* <code>readIndex == end</code> is satisfied.
* Concatenate results into a list.
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/PickleFormat.scala b/src/library/scala/reflect/generic/PickleFormat.scala
index aed501d3ab..d1e884f513 100644..100755
--- a/src/compiler/scala/tools/nsc/symtab/classfile/PickleFormat.scala
+++ b/src/library/scala/reflect/generic/PickleFormat.scala
@@ -1,12 +1,5 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
- * @author Martin Odersky
- */
-// $Id$
-
-package scala.tools.nsc
-package symtab
-package classfile
+package scala.reflect
+package generic
/** This object provides constants for pickling attributes.
*
@@ -28,21 +21,21 @@ object PickleFormat {
* | 5 ALIASsym len_Nat SymbolInfo
* | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
* | 7 MODULEsym len_Nat SymbolInfo
- * | 8 VALsym len_Nat [defaultGetter_Ref] SymbolInfo [alias_Ref]
+ * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
* | 9 EXTref len_Nat name_Ref [owner_Ref]
* | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
* | 11 NOtpe len_Nat
* | 12 NOPREFIXtpe len_Nat
* | 13 THIStpe len_Nat sym_Ref
* | 14 SINGLEtpe len_Nat type_Ref sym_Ref
- * | 15 CONSTANTtpe len_Nat type_Ref constant_Ref
+ * | 15 CONSTANTtpe len_Nat constant_Ref
* | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
* | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
* | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
* | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
* | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
* | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
- * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref}
+ * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
* | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
* | 24 LITERALunit len_Nat
* | 25 LITERALboolean len_Nat value_Long
@@ -59,7 +52,7 @@ object PickleFormat {
* | 36 LITERALenum len_Nat sym_Ref
* | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
* | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
- * | 42 ANNOTATEDtpe len_Nat [sym_Ref] tpe_Ref {annotinfo_Ref}
+ * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
* | 43 ANNOTINFO len_Nat AnnotInfoBody
* | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
* | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
diff --git a/src/library/scala/reflect/generic/Scopes.scala b/src/library/scala/reflect/generic/Scopes.scala
new file mode 100755
index 0000000000..9f8a8ecd19
--- /dev/null
+++ b/src/library/scala/reflect/generic/Scopes.scala
@@ -0,0 +1,15 @@
+package scala.reflect
+package generic
+
+trait Scopes { self: Universe =>
+
+ abstract class AbsScope extends Iterable[Symbol] {
+ def enter(sym: Symbol): Symbol
+ }
+
+ type Scope <: AbsScope
+
+ def newScope(): Scope
+}
+
+
diff --git a/src/library/scala/reflect/generic/StandardDefinitions.scala b/src/library/scala/reflect/generic/StandardDefinitions.scala
new file mode 100755
index 0000000000..c754fca808
--- /dev/null
+++ b/src/library/scala/reflect/generic/StandardDefinitions.scala
@@ -0,0 +1,66 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package generic
+
+trait StandardDefinitions { self: Universe =>
+
+ val definitions: AbsDefinitions
+
+ abstract class AbsDefinitions {
+
+ // outer packages and their classes
+ def RootPackage: Symbol
+ def RootClass: Symbol
+ def EmptyPackage: Symbol
+ def EmptyPackageClass: Symbol
+
+ def ScalaPackage: Symbol
+ def ScalaPackageClass: Symbol
+
+ // top types
+ def AnyClass : Symbol
+ def AnyValClass: Symbol
+ def AnyRefClass: Symbol
+ def ObjectClass: Symbol
+
+ // bottom types
+ def NullClass : Symbol
+ def NothingClass: Symbol
+
+ // the scala value classes
+ def UnitClass : Symbol
+ def ByteClass : Symbol
+ def ShortClass : Symbol
+ def CharClass : Symbol
+ def IntClass : Symbol
+ def LongClass : Symbol
+ def FloatClass : Symbol
+ def DoubleClass : Symbol
+ def BooleanClass: Symbol
+
+ // fundamental reference classes
+ def SymbolClass : Symbol
+ def StringClass : Symbol
+ def ClassClass : Symbol
+
+ // fundamental modules
+ def PredefModule: Symbol
+
+ // fundamental type constructions
+ def ClassType(arg: Type): Type
+
+ /** The string representation used by the given type in the VM.
+ */
+ def signature(tp: Type): String
+
+ /** Is symbol one of the value classes? */
+ def isValueClass(sym: Symbol): Boolean
+
+ /** Is symbol one of the numeric value classes? */
+ def isNumericValueClass(sym: Symbol): Boolean
+ }
+}
diff --git a/src/library/scala/reflect/generic/StdNames.scala b/src/library/scala/reflect/generic/StdNames.scala
new file mode 100755
index 0000000000..03cee8c909
--- /dev/null
+++ b/src/library/scala/reflect/generic/StdNames.scala
@@ -0,0 +1,30 @@
+package scala.reflect
+package generic
+
+trait StdNames { self: Universe =>
+
+ val nme: StandardNames
+
+ class StandardNames {
+ val EXPAND_SEPARATOR_STRING = "$$"
+ val LOCAL_SUFFIX_STRING = " "
+
+ val ANON_CLASS_NAME = newTermName("$anon")
+ val ANON_FUN_NAME = newTermName("$anonfun")
+ val EMPTY_PACKAGE_NAME = newTermName("<empty>")
+ val IMPORT = newTermName("<import>")
+ val REFINE_CLASS_NAME = newTermName("<refinement>")
+ val ROOT = newTermName("<root>")
+ val ROOTPKG = newTermName("_root_")
+ val EMPTY = newTermName("")
+ val MODULE_SUFFIX = newTermName("$module")
+
+ /** The expanded name of `name` relative to the class `base`, with the given `separator`.
+ */
+ def expandedName(name: Name, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): Name =
+ newTermName(base.fullName('$') + separator + name)
+
+ def moduleVarName(name: Name): Name =
+ newTermName(name.toString() + MODULE_SUFFIX)
+ }
+}
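A string-level illustration of what expandedName above produces. The helper below only mirrors base.fullName('$') + "$$" + name without needing a Universe; the object, method, and example names are hypothetical.

// Not the real API: a plain-string mirror of StandardNames.expandedName,
// assuming the owner's fullName('$') is its dotted path with '.' replaced by '$'.
object ExpandedNameSketch {
  val EXPAND_SEPARATOR_STRING = "$$"

  def expanded(ownerPath: String, member: String): String =
    ownerPath.replace('.', '$') + EXPAND_SEPARATOR_STRING + member

  def main(args: Array[String]): Unit = {
    // e.g. a private member `x` of class a.B expands to a$B$$x
    println(expanded("a.B", "x"))   // a$B$$x
  }
}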
diff --git a/src/library/scala/reflect/generic/Symbols.scala b/src/library/scala/reflect/generic/Symbols.scala
new file mode 100755
index 0000000000..f1226c7e19
--- /dev/null
+++ b/src/library/scala/reflect/generic/Symbols.scala
@@ -0,0 +1,195 @@
+package scala.reflect
+package generic
+
+import Flags._
+
+trait Symbols { self: Universe =>
+
+ type Symbol >: Null <: AbsSymbol
+
+ abstract class AbsSymbol { this: Symbol =>
+
+ /** The owner of this symbol.
+ */
+ def owner: Symbol
+
+ /** The flags of this symbol */
+ def flags: Long
+
+ /** The name of the symbol as a member of the `Name` type.
+ */
+ def name: Name
+
+ /** The name of the symbol before decoding, e.g. `$eq$eq` instead of `==`.
+ */
+ def encodedName: String
+
+ /** The decoded name of the symbol, e.g. `==` instead of `$eq$eq`.
+ */
+ def decodedName: String = stripLocalSuffix(NameTransformer.decode(encodedName))
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by `separator` characters.
+ * Never translates expansions of operators back to operator symbol.
+ * Never adds id.
+ */
+ final def fullName(separator: Char): String = stripLocalSuffix {
+ if (isRoot || isRootPackage || this == NoSymbol) this.toString
+ else if (owner.isEffectiveRoot) encodedName
+ else owner.enclClass.fullName(separator) + separator + encodedName
+ }
+
+ private def stripLocalSuffix(s: String) = s stripSuffix nme.LOCAL_SUFFIX_STRING
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by periods.
+ */
+ final def fullName: String = fullName('.')
+
+ /** Does symbol have ANY flag in `mask` set? */
+ final def hasFlag(mask: Long): Boolean = (flags & mask) != 0L
+
+ /** Does symbol have ALL the flags in `mask` set? */
+ final def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
+
+ /** Set when symbol has a modifier of the form private[X], NoSymbol otherwise.
+ */
+ def privateWithin: Symbol
+
+ /** The raw info of the type
+ */
+ def rawInfo: Type
+
+ /** The type of the symbol
+ */
+ def tpe: Type = info
+
+ /** The info of the symbol. This is like tpe, except for class symbols where the `info`
+ * describes the contents of the class whereas the `tpe` is a reference to the class.
+ */
+ def info: Type = {
+ val tp = rawInfo
+ tp.complete(this)
+ tp
+ }
+
+ /** If this symbol is a class or trait, its self type, otherwise the type of the symbol itself.
+ */
+ def typeOfThis: Type
+
+ def owner_=(sym: Symbol) { throw new UnsupportedOperationException("owner_= inapplicable for " + this) }
+ def flags_=(flags: Long) { throw new UnsupportedOperationException("flags_= inapplicable for " + this) }
+ def info_=(tp: Type) { throw new UnsupportedOperationException("info_= inapplicable for " + this) }
+ def typeOfThis_=(tp: Type) { throw new UnsupportedOperationException("typeOfThis_= inapplicable for " + this) }
+ def privateWithin_=(sym: Symbol) { throw new UnsupportedOperationException("privateWithin_= inapplicable for " + this) }
+ def sourceModule_=(sym: Symbol) { throw new UnsupportedOperationException("sourceModule_= inapplicable for " + this) }
+ def addChild(sym: Symbol) { throw new UnsupportedOperationException("addChild inapplicable for " + this) }
+ def addAnnotation(annot: AnnotationInfo) { throw new UnsupportedOperationException("addAnnotation inapplicable for " + this) }
+
+ /** For a module class its linked class, for a plain class
+ * the module class of its linked module.
+ * For instance
+ * object Foo
+ * class Foo
+ *
+ * Then object Foo has a `moduleClass' (invisible to the user; the backend calls it Foo$).
+ * linkedClassOfClass goes from class Foo$ to class Foo, and back.
+ */
+ def linkedClassOfClass: Symbol
+
+ /** The module corresponding to this module class (note that this
+ * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass
+ */
+ def sourceModule: Symbol = NoSymbol
+
+ /** If symbol is an object definition, its implied associated class,
+ * otherwise NoSymbol
+ */
+ def moduleClass: Symbol
+
+// flags and kind tests
+
+ def isTerm = false // to be overridden
+ def isType = false // to be overridden
+ def isClass = false // to be overridden
+ def isAliasType = false // to be overridden
+ def isAbstractType = false // to be overridden
+ private[scala] def isSkolem = false // to be overridden
+
+ def isTrait: Boolean = isClass && hasFlag(TRAIT) // refined later for virtual classes.
+ final def hasDefault = isParameter && hasFlag(DEFAULTPARAM)
+ final def isAbstractClass = isClass && hasFlag(ABSTRACT)
+ // XXX This is unlikely to be correct: it's not looking for the ABSOVERRIDE flag?
+ final def isAbstractOverride = isTerm && hasFlag(ABSTRACT) && hasFlag(OVERRIDE)
+ final def isBridge = hasFlag(BRIDGE)
+ final def isCase = hasFlag(CASE)
+ final def isCaseAccessor = hasFlag(CASEACCESSOR)
+ final def isContravariant = isType && hasFlag(CONTRAVARIANT)
+ final def isCovariant = isType && hasFlag(COVARIANT)
+ final def isDeferred = hasFlag(DEFERRED) && !isClass
+ final def isEarlyInitialized: Boolean = isTerm && hasFlag(PRESUPER)
+ final def isExistentiallyBound = isType && hasFlag(EXISTENTIAL)
+ final def isFinal = hasFlag(FINAL)
+ final def isGetterOrSetter = hasFlag(ACCESSOR)
+ final def isImplClass = isClass && hasFlag(IMPLCLASS) // Is this symbol an implementation class for a mixin?
+ final def isImplicit = hasFlag(IMPLICIT)
+ final def isInterface = hasFlag(INTERFACE)
+ final def isJavaDefined = hasFlag(JAVA)
+ final def isLazy = hasFlag(LAZY)
+ final def isMethod = isTerm && hasFlag(METHOD)
+ final def isModule = isTerm && hasFlag(MODULE)
+ final def isModuleClass = isClass && hasFlag(MODULE)
+ final def isMutable = hasFlag(MUTABLE)
+ final def isOverloaded = hasFlag(OVERLOADED)
+ final def isOverride = hasFlag(OVERRIDE)
+ final def isParamAccessor = hasFlag(PARAMACCESSOR)
+ final def isParameter = hasFlag(PARAM)
+ final def isRefinementClass = isClass && name == mkTypeName(nme.REFINE_CLASS_NAME)
+ final def isSealed = isClass && (hasFlag(SEALED) || definitions.isValueClass(this))
+ final def isSourceMethod = isTerm && (flags & (METHOD | STABLE)) == METHOD // exclude all accessors!!!
+ final def isSuperAccessor = hasFlag(SUPERACCESSOR)
+ final def isSynthetic = hasFlag(SYNTHETIC)
+ final def isTypeParameter = isType && isParameter && !isSkolem
+
+ /** Access tests */
+ final def isPrivate = hasFlag(PRIVATE)
+ final def isPrivateLocal = hasFlag(PRIVATE) && hasFlag(LOCAL)
+ final def isProtected = hasFlag(PROTECTED)
+ final def isProtectedLocal = hasFlag(PROTECTED) && hasFlag(LOCAL)
+ final def isPublic = !hasFlag(PRIVATE | PROTECTED) && privateWithin == NoSymbol
+
+ /** Package tests */
+ final def isEmptyPackage = isPackage && name == nme.EMPTY_PACKAGE_NAME
+ final def isEmptyPackageClass = isPackageClass && name == mkTypeName(nme.EMPTY_PACKAGE_NAME)
+ final def isPackage = isModule && hasFlag(PACKAGE)
+ final def isPackageClass = isClass && hasFlag(PACKAGE)
+ final def isRoot = isPackageClass && owner == NoSymbol
+ final def isRootPackage = isPackage && owner == NoSymbol
+
+ /** Is this symbol an effective root for the fullName string?
+ */
+ def isEffectiveRoot = isRoot || isEmptyPackageClass
+
+ // creators
+
+ def newAbstractType(name: Name, pos: Position = NoPosition): Symbol
+ def newAliasType(name: Name, pos: Position = NoPosition): Symbol
+ def newClass(name: Name, pos: Position = NoPosition): Symbol
+ def newMethod(name: Name, pos: Position = NoPosition): Symbol
+ def newModule(name: Name, clazz: Symbol, pos: Position = NoPosition): Symbol
+ def newModuleClass(name: Name, pos: Position = NoPosition): Symbol
+ def newValue(name: Name, pos: Position = NoPosition): Symbol
+
+ // access to related symbols
+
+ /** The next enclosing class */
+ def enclClass: Symbol = if (isClass) this else owner.enclClass
+
+ /** The next enclosing method */
+ def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod
+ }
+
+ val NoSymbol: Symbol
+}
+
+
diff --git a/src/library/scala/reflect/generic/Trees.scala b/src/library/scala/reflect/generic/Trees.scala
new file mode 100755
index 0000000000..c880a335de
--- /dev/null
+++ b/src/library/scala/reflect/generic/Trees.scala
@@ -0,0 +1,739 @@
+package scala.reflect
+package generic
+
+import java.io.{PrintWriter, StringWriter}
+import Flags._
+
+trait Trees { self: Universe =>
+
+ abstract class AbsTreePrinter(out: PrintWriter) {
+ def print(tree: Tree)
+ def flush()
+ }
+
+ def newTreePrinter(out: PrintWriter): AbsTreePrinter
+
+ private[scala] var nodeCount = 0
+
+ /** @param privateWithin the qualifier for a private (a type name)
+ * or nme.EMPTY.toTypeName, if none is given.
+ * @param annotations the annotations for the definition.
+ * <strong>Note:</strong> the typechecker drops these annotations,
+ * use the AnnotationInfo's (Symbol.annotations) in later phases.
+ */
+ case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) {
+ def isAbstract = hasFlag(ABSTRACT )
+ def isAccessor = hasFlag(ACCESSOR )
+ def isArgument = hasFlag(PARAM )
+ def isCase = hasFlag(CASE )
+ def isContravariant = hasFlag(CONTRAVARIANT) // marked with `-'
+ def isCovariant = hasFlag(COVARIANT ) // marked with `+'
+ def isDeferred = hasFlag(DEFERRED )
+ def isFinal = hasFlag(FINAL )
+ def isImplicit = hasFlag(IMPLICIT )
+ def isLazy = hasFlag(LAZY )
+ def isOverride = hasFlag(OVERRIDE )
+ def isPrivate = hasFlag(PRIVATE )
+ def isProtected = hasFlag(PROTECTED)
+ def isPublic = !isPrivate && !isProtected
+ def isSealed = hasFlag(SEALED )
+ def isSynthetic = hasFlag(SYNTHETIC)
+ def isTrait = hasFlag(TRAIT )
+ def isVariable = hasFlag(MUTABLE )
+
+ def hasFlag(flag: Long) = (flag & flags) != 0L
+ def & (flag: Long): Modifiers = {
+ val flags1 = flags & flag
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations, positions)
+ }
+ def &~ (flag: Long): Modifiers = {
+ val flags1 = flags & (~flag)
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations, positions)
+ }
+ def | (flag: Long): Modifiers = {
+ val flags1 = flags | flag
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations, positions)
+ }
+ def withAnnotations(annots: List[Tree]) =
+ if (annots.isEmpty) this
+ else copy(annotations = annotations ::: annots)
+ def withPosition(flag: Long, position: Position) =
+ copy(positions = positions + (flag -> position))
+ }
+
+ def Modifiers(flags: Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List(), Map.empty)
+ def Modifiers(flags: Long): Modifiers = Modifiers(flags, mkTypeName(nme.EMPTY))
+
+ lazy val NoMods = Modifiers(0)
+
+ abstract class Tree extends Product {
+ val id = nodeCount
+ nodeCount += 1
+
+ private[this] var rawpos: Position = NoPosition
+
+ def pos = rawpos
+ def pos_=(pos: Position) = rawpos = pos
+ def setPos(pos: Position): this.type = { rawpos = pos; this }
+
+ private[this] var rawtpe: Type = _
+
+ def tpe = rawtpe
+ def tpe_=(t: Type) = rawtpe = t
+
+ /** Set tpe to the given type `tp` and return this.
+ */
+ def setType(tp: Type): this.type = { rawtpe = tp; this }
+
+ /** Like `setType`, but if this is a previously empty TypeTree
+ * that fact is remembered so that resetType will snap back.
+ */
+ def defineType(tp: Type): this.type = setType(tp)
+
+ def symbol: Symbol = null
+ def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
+ def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
+
+ def hasSymbol = false
+ def isDef = false
+ def isEmpty = false
+
+ /** The direct child trees of this tree
+ * EmptyTrees are always omitted. Lists are collapsed.
+ */
+ def children: List[Tree] = {
+ def subtrees(x: Any): List[Tree] = x match {
+ case EmptyTree => List()
+ case t: Tree => List(t)
+ case xs: List[_] => xs flatMap subtrees
+ case _ => List()
+ }
+ productIterator.toList flatMap subtrees
+ }
+
+ /** In compiler: Make a copy of this tree, keeping all attributes,
+ * except that all positions are focussed (so nothing
+ * in this tree will be found when searching by position).
+ * If not in compiler may also return tree unchanged.
+ */
+ private[scala] def duplicate: this.type =
+ duplicateTree(this).asInstanceOf[this.type]
+
+ private[scala] def copyAttrs(tree: Tree): this.type = {
+ pos = tree.pos
+ tpe = tree.tpe
+ if (hasSymbol) symbol = tree.symbol
+ this
+ }
+
+ override def toString(): String = {
+ val buffer = new StringWriter()
+ val printer = newTreePrinter(new PrintWriter(buffer))
+ printer.print(this)
+ printer.flush()
+ buffer.toString
+ }
+
+ override def hashCode(): Int = super.hashCode()
+
+ override def equals(that: Any): Boolean = that match {
+ case t: Tree => this eq t
+ case _ => false
+ }
+ }
+
+ private[scala] def duplicateTree(tree: Tree): Tree = tree
+
+ trait SymTree extends Tree {
+ override def hasSymbol = true
+ override var symbol: Symbol = NoSymbol
+ }
+
+ trait RefTree extends SymTree {
+ def name: Name
+ }
+
+ abstract class DefTree extends SymTree {
+ def name: Name
+ override def isDef = true
+ }
+
+ trait TermTree extends Tree
+
+ /** A tree for a type. Note that not all type trees implement
+ * this trait; in particular, Idents are an exception. */
+ trait TypTree extends Tree
+
+// ----- tree node alternatives --------------------------------------
+
+ /** The empty tree */
+ case object EmptyTree extends TermTree {
+ super.tpe_=(NoType)
+ override def tpe_=(t: Type) =
+ if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
+ override def isEmpty = true
+ }
+
+ abstract class MemberDef extends DefTree {
+ def mods: Modifiers
+ def keyword: String = this match {
+ case TypeDef(_, _, _, _) => "type"
+ case ClassDef(mods, _, _, _) => if (mods.isTrait) "trait" else "class"
+ case DefDef(_, _, _, _, _, _) => "def"
+ case ModuleDef(_, _, _) => "object"
+ case PackageDef(_, _) => "package"
+ case ValDef(mods, _, _, _) => if (mods.isVariable) "var" else "val"
+ case _ => ""
+ }
+ final def hasFlag(mask: Long): Boolean = (mods.flags & mask) != 0L
+ }
+
+ /** Package clause
+ */
+ case class PackageDef(pid: RefTree, stats: List[Tree])
+ extends MemberDef {
+ def name = pid.name
+ def mods = NoMods
+ }
+
+ abstract class ImplDef extends MemberDef {
+ def impl: Template
+ }
+
+ /** Class definition */
+ case class ClassDef(mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template)
+ extends ImplDef
+
+ /** Singleton object definition
+ */
+ case class ModuleDef(mods: Modifiers, name: Name, impl: Template)
+ extends ImplDef
+
+ abstract class ValOrDefDef extends MemberDef {
+ def tpt: Tree
+ def rhs: Tree
+ }
+
+ /** Value definition
+ */
+ case class ValDef(mods: Modifiers, name: Name, tpt: Tree, rhs: Tree) extends ValOrDefDef
+
+ /** Method definition
+ */
+ case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
+ vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef
+
+ /** Abstract type, type parameter, or type alias */
+ case class TypeDef(mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree)
+ extends MemberDef
+
+ /** <p>
+ * Labelled expression - the symbols in the array (must be Idents!)
+ * are those the label takes as argument
+ * </p>
+ * <p>
+ * The symbol that is given to the labeldef should have a MethodType
+ * (as if it were a nested function)
+ * </p>
+ * <p>
+ * Jumps are apply nodes attributed with label symbol, the arguments
+ * will get assigned to the idents.
+ * </p>
+ * <p>
+ * Note: on 2005-06-09 Martin, Iuli, Burak agreed to have forward
+ * jumps within a Block.
+ * </p>
+ */
+ case class LabelDef(name: Name, params: List[Ident], rhs: Tree)
+ extends DefTree with TermTree
+
+
+ /** Import selector
+ *
+ * Representation of an imported name, its optional rename, and their optional positions.
+ *
+ * @param name the imported name
+ * @param namePos its position or -1 if undefined
+ * @param rename the name the import is renamed to (== name if no renaming)
+ * @param renamePos the position of the rename or -1 if undefined
+ */
+ case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
+
+ /** Import clause
+ *
+ * @param expr
+ * @param selectors
+ */
+ case class Import(expr: Tree, selectors: List[ImportSelector])
+ extends SymTree
+ // The symbol of an Import is an import symbol @see Symbol.newImport
+ // It's used primarily as a marker to check that the import has been typechecked.
+
+ /** Instantiation template of a class or trait
+ *
+ * @param parents
+ * @param body
+ */
+ case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
+ extends SymTree {
+ // the symbol of a template is a local dummy. @see Symbol.newLocalDummy
+ // the owner of the local dummy is the enclosing trait or class.
+ // the local dummy is itself the owner of any local blocks
+ // For example:
+ //
+ // class C {
+ // def foo // owner is C
+ // {
+ // def bar // owner is local dummy
+ // }
+ // System.err.println("TEMPLATE: " + parents)
+ }
+
+ /** Block of expressions (semicolon separated expressions) */
+ case class Block(stats: List[Tree], expr: Tree)
+ extends TermTree
+
+ /** Case clause in a pattern match, eliminated by TransMatch
+ * (except for occurrences in switch statements)
+ */
+ case class CaseDef(pat: Tree, guard: Tree, body: Tree)
+ extends Tree
+
+ /** Alternatives of patterns, eliminated by TransMatch, except for
+ * occurrences in encoded Switch stmt (=remaining Match(CaseDef(...))
+ */
+ case class Alternative(trees: List[Tree])
+ extends TermTree
+
+ /** Repetition of pattern, eliminated by TransMatch */
+ case class Star(elem: Tree)
+ extends TermTree
+
+ /** Bind of a variable to a rhs pattern, eliminated by TransMatch
+ *
+ * @param name
+ * @param body
+ */
+ case class Bind(name: Name, body: Tree)
+ extends DefTree
+
+ case class UnApply(fun: Tree, args: List[Tree])
+ extends TermTree
+
+ /** Array of expressions, needs to be translated in backend,
+ */
+ case class ArrayValue(elemtpt: Tree, elems: List[Tree])
+ extends TermTree
+
+ /** Anonymous function, eliminated by analyzer */
+ case class Function(vparams: List[ValDef], body: Tree)
+ extends TermTree with SymTree
+ // The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
+ // It is the owner of the function's parameters.
+
+ /** Assignment */
+ case class Assign(lhs: Tree, rhs: Tree)
+ extends TermTree
+
+ /** Conditional expression */
+ case class If(cond: Tree, thenp: Tree, elsep: Tree)
+ extends TermTree
+
+ /** <p>
+ * Pattern matching expression (before <code>TransMatch</code>)
+ * Switch statements (after TransMatch)
+ * </p>
+ * <p>
+ * After <code>TransMatch</code>, cases will satisfy the following
+ * constraints:
+ * </p>
+ * <ul>
+ * <li>all guards are EmptyTree,</li>
+ * <li>all patterns will be either <code>Literal(Constant(x:Int))</code>
+ * or <code>Alternative(lit|...|lit)</code></li>
+ * <li>except for an "otherwise" branch, which has pattern
+ * <code>Ident(nme.WILDCARD)</code></li>
+ * </ul>
+ */
+ case class Match(selector: Tree, cases: List[CaseDef])
+ extends TermTree
+
+ /** Return expression */
+ case class Return(expr: Tree)
+ extends TermTree with SymTree
+ // The symbol of a Return node is the enclosing method.
+
+ case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
+ extends TermTree
+
+ /** Throw expression */
+ case class Throw(expr: Tree)
+ extends TermTree
+
+ /** Object instantiation
+ * One should always use the factory method below to build a user-level new.
+ *
+ * @param tpt a class type
+ */
+ case class New(tpt: Tree) extends TermTree
+
+ /** Type annotation, eliminated by explicit outer */
+ case class Typed(expr: Tree, tpt: Tree)
+ extends TermTree
+
+ // Martin to Sean: Should GenericApply/TypeApply/Apply not be SymTree's? After all,
+ // ApplyDynamic is a SymTree.
+ abstract class GenericApply extends TermTree {
+ val fun: Tree
+ val args: List[Tree]
+ }
+
+ /** Type application */
+ case class TypeApply(fun: Tree, args: List[Tree])
+ extends GenericApply {
+ override def symbol: Symbol = fun.symbol
+ override def symbol_=(sym: Symbol) { fun.symbol = sym }
+ }
+
+ /** Value application */
+ case class Apply(fun: Tree, args: List[Tree])
+ extends GenericApply {
+ override def symbol: Symbol = fun.symbol
+ override def symbol_=(sym: Symbol) { fun.symbol = sym }
+ }
+
+ /** Dynamic value application.
+ * In a dynamic application q.f(as)
+ * - q is stored in qual
+ * - as is stored in args
+ * - f is stored as the node's symbol field.
+ */
+ case class ApplyDynamic(qual: Tree, args: List[Tree])
+ extends TermTree with SymTree
+ // The symbol of an ApplyDynamic is the function symbol of `qual', or NoSymbol, if there is none.
+
+ /** Super reference */
+ case class Super(qual: Name, mix: Name)
+ extends TermTree with SymTree
+ // The symbol of a Super is the class _from_ which the super reference is made.
+ // For instance in C.super(...), it would be C.
+
+ /** Self reference */
+ case class This(qual: Name)
+ extends TermTree with SymTree
+ // The symbol of a This is the class to which the this refers.
+ // For instance in C.this, it would be C.
+
+ /** Designator <qualifier> . <name> */
+ case class Select(qualifier: Tree, name: Name)
+ extends RefTree
+
+ /** Identifier <name> */
+ case class Ident(name: Name)
+ extends RefTree
+
+ class BackQuotedIdent(name: Name) extends Ident(name)
+
+ /** Literal */
+ case class Literal(value: Constant)
+ extends TermTree {
+ assert(value ne null)
+ }
+
+ def Literal(value: Any): Literal =
+ Literal(Constant(value))
+
+ type TypeTree <: AbsTypeTree
+ val TypeTree: TypeTreeExtractor
+
+ abstract class TypeTreeExtractor {
+ def apply(): TypeTree
+ def unapply(tree: TypeTree): Boolean
+ }
+
+ class Traverser {
+ protected var currentOwner: Symbol = definitions.RootClass
+ def traverse(tree: Tree): Unit = tree match {
+ case EmptyTree =>
+ ;
+ case PackageDef(pid, stats) =>
+ traverse(pid)
+ atOwner(tree.symbol.moduleClass) {
+ traverseTrees(stats)
+ }
+ case ClassDef(mods, name, tparams, impl) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
+ }
+ case ModuleDef(mods, name, impl) =>
+ atOwner(tree.symbol.moduleClass) {
+ traverseTrees(mods.annotations); traverse(impl)
+ }
+ case ValDef(mods, name, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
+ }
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
+ }
+ case TypeDef(mods, name, tparams, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
+ }
+ case LabelDef(name, params, rhs) =>
+ traverseTrees(params); traverse(rhs)
+ case Import(expr, selectors) =>
+ traverse(expr)
+ case Annotated(annot, arg) =>
+ traverse(annot); traverse(arg)
+ case Template(parents, self, body) =>
+ traverseTrees(parents)
+ if (!self.isEmpty) traverse(self)
+ traverseStats(body, tree.symbol)
+ case Block(stats, expr) =>
+ traverseTrees(stats); traverse(expr)
+ case CaseDef(pat, guard, body) =>
+ traverse(pat); traverse(guard); traverse(body)
+ case Alternative(trees) =>
+ traverseTrees(trees)
+ case Star(elem) =>
+ traverse(elem)
+ case Bind(name, body) =>
+ traverse(body)
+ case UnApply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case ArrayValue(elemtpt, trees) =>
+ traverse(elemtpt); traverseTrees(trees)
+ case Function(vparams, body) =>
+ atOwner(tree.symbol) {
+ traverseTrees(vparams); traverse(body)
+ }
+ case Assign(lhs, rhs) =>
+ traverse(lhs); traverse(rhs)
+ case If(cond, thenp, elsep) =>
+ traverse(cond); traverse(thenp); traverse(elsep)
+ case Match(selector, cases) =>
+ traverse(selector); traverseTrees(cases)
+ case Return(expr) =>
+ traverse(expr)
+ case Try(block, catches, finalizer) =>
+ traverse(block); traverseTrees(catches); traverse(finalizer)
+ case Throw(expr) =>
+ traverse(expr)
+ case New(tpt) =>
+ traverse(tpt)
+ case Typed(expr, tpt) =>
+ traverse(expr); traverse(tpt)
+ case TypeApply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case Apply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case ApplyDynamic(qual, args) =>
+ traverse(qual); traverseTrees(args)
+ case Super(_, _) =>
+ ;
+ case This(_) =>
+ ;
+ case Select(qualifier, selector) =>
+ traverse(qualifier)
+ case Ident(_) =>
+ ;
+ case Literal(_) =>
+ ;
+ case TypeTree() =>
+ ;
+ case SingletonTypeTree(ref) =>
+ traverse(ref)
+ case SelectFromTypeTree(qualifier, selector) =>
+ traverse(qualifier)
+ case CompoundTypeTree(templ) =>
+ traverse(templ)
+ case AppliedTypeTree(tpt, args) =>
+ traverse(tpt); traverseTrees(args)
+ case TypeBoundsTree(lo, hi) =>
+ traverse(lo); traverse(hi)
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ traverse(tpt); traverseTrees(whereClauses)
+ case SelectFromArray(qualifier, selector, erasure) =>
+ traverse(qualifier)
+ }
+
+ def traverseTrees(trees: List[Tree]) {
+ trees foreach traverse
+ }
+ def traverseTreess(treess: List[List[Tree]]) {
+ treess foreach traverseTrees
+ }
+ def traverseStats(stats: List[Tree], exprOwner: Symbol) {
+ stats foreach (stat =>
+ if (exprOwner != currentOwner) atOwner(exprOwner)(traverse(stat))
+ else traverse(stat)
+ )
+ }
+
+ def atOwner(owner: Symbol)(traverse: => Unit) {
+ val prevOwner = currentOwner
+ currentOwner = owner
+ traverse
+ currentOwner = prevOwner
+ }
+ }
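+
+ // Editorial sketch, not part of the original patch: a minimal Traverser
+ // subclass that collects the names of all Ident nodes in a tree, showing how
+ // `traverse` is meant to be overridden and chained through super.traverse.
+ class IdentCollector extends Traverser {
+ val idents = new scala.collection.mutable.ListBuffer[Name]
+ override def traverse(tree: Tree) {
+ tree match {
+ case Ident(name) => idents += name
+ case _ =>
+ }
+ super.traverse(tree)
+ }
+ }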
+
+ /** A synthetic term holding an arbitrary type. Not to be confused
+ * with TypTree, the trait for trees that are only used for type trees.
+ * TypeTrees are inserted in several places, but most notably in
+ * <code>RefCheck</code>, where the arbitrary type trees are all replaced by
+ * TypeTrees. */
+ abstract class AbsTypeTree extends TypTree {
+ override def symbol = if (tpe == null) null else tpe.typeSymbol
+ override def isEmpty = (tpe eq null) || tpe == NoType
+ }
+
+ /** A tree that has an annotation attached to it. Only used for annotated types and
+ * annotation ascriptions; annotations on definitions are stored in the Modifiers.
+ * Eliminated by the typechecker (typedAnnotated); the annotations are then stored in
+ * an AnnotatedType.
+ */
+ case class Annotated(annot: Tree, arg: Tree) extends Tree
+
+ /** Singleton type, eliminated by RefCheck */
+ case class SingletonTypeTree(ref: Tree)
+ extends TypTree
+
+ /** Type selection <qualifier> # <name>, eliminated by RefCheck */
+ case class SelectFromTypeTree(qualifier: Tree, name: Name)
+ extends TypTree with RefTree
+
+ /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
+ case class CompoundTypeTree(templ: Template)
+ extends TypTree
+
+ /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
+ case class AppliedTypeTree(tpt: Tree, args: List[Tree])
+ extends TypTree {
+ override def symbol: Symbol = tpt.symbol
+ override def symbol_=(sym: Symbol) { tpt.symbol = sym }
+ }
+
+ case class TypeBoundsTree(lo: Tree, hi: Tree)
+ extends TypTree
+
+ case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
+ extends TypTree
+
+ /** Array selection <qualifier> . <name>, only used during erasure */
+ case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
+ extends TermTree with RefTree { }
+
+/* A standard pattern match
+ case EmptyTree =>
+ case PackageDef(pid, stats) =>
+ // package pid { stats }
+ case ClassDef(mods, name, tparams, impl) =>
+ // mods class name [tparams] impl where impl = extends parents { defs }
+ case ModuleDef(mods, name, impl) => (eliminated by refcheck)
+ // mods object name impl where impl = extends parents { defs }
+ case ValDef(mods, name, tpt, rhs) =>
+ // mods val name: tpt = rhs
+ // note missing type information is expressed by tpt = TypeTree()
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ // mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs
+ // note missing type information is expressed by tpt = TypeTree()
+ case TypeDef(mods, name, tparams, rhs) => (eliminated by erasure)
+ // mods type name[tparams] = rhs
+ // mods type name[tparams] >: lo <: hi, where lo, hi are in a TypeBoundsTree,
+ // and DEFERRED is set in mods
+ case LabelDef(name, params, rhs) =>
+ // used for tailcalls and the like
+ // while/do are desugared to label defs as follows:
+ // while (cond) body ==> LabelDef($L, List(), if (cond) { body; $L() } else ())
+ // do body while (cond) ==> LabelDef($L, List(), body; if (cond) $L() else ())
+ case Import(expr, selectors) => (eliminated by typecheck)
+ // import expr.{selectors}
+ // Selectors are a list of pairs of names (from, to).
+ // The last (and maybe only name) may be a nme.WILDCARD
+ // for instance
+ // import qual.{x, y => z, _} would be represented as
+ // Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
+ case Template(parents, self, body) =>
+ // extends parents { self => body }
+ // if self is missing it is represented as emptyValDef
+ case Block(stats, expr) =>
+ // { stats; expr }
+ case CaseDef(pat, guard, body) => (eliminated by transmatch/explicitouter)
+ // case pat if guard => body
+ case Alternative(trees) => (eliminated by transmatch/explicitouter)
+ // pat1 | ... | patn
+ case Star(elem) => (eliminated by transmatch/explicitouter)
+ // pat*
+ case Bind(name, body) => (eliminated by transmatch/explicitouter)
+ // name @ pat
+ case UnApply(fun, args) => (introduced by typer, eliminated by transmatch/explicitouter)
+ // used for unapply's
+ case ArrayValue(elemtpt, trees) => (introduced by uncurry)
+ // used to pass arguments to vararg arguments
+ // for instance, printf("%s%d", foo, 42) is translated after uncurry to:
+ // Apply(
+ // Ident("printf"),
+ // Literal("%s%d"),
+ // ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
+ case Function(vparams, body) => (eliminated by lambdaLift)
+ // vparams => body where vparams:List[ValDef]
+ case Assign(lhs, rhs) =>
+ // lhs = rhs
+ case If(cond, thenp, elsep) =>
+ // if (cond) thenp else elsep
+ case Match(selector, cases) =>
+ // selector match { cases }
+ case Return(expr) =>
+ // return expr
+ case Try(block, catches, finalizer) =>
+ // try block catch { catches } finally finalizer where catches: List[CaseDef]
+ case Throw(expr) =>
+ // throw expr
+ case New(tpt) =>
+ // new tpt always in the context: (new tpt).<init>[targs](args)
+ case Typed(expr, tpt) => (eliminated by erasure)
+ // expr: tpt
+ case TypeApply(fun, args) =>
+ // fun[args]
+ case Apply(fun, args) =>
+ // fun(args)
+ // for instance fun[targs](args) is expressed as Apply(TypeApply(fun, targs), args)
+ case ApplyDynamic(qual, args) => (introduced by erasure, eliminated by cleanup)
+ // fun(args)
+ case Super(qual, mix) =>
+ // qual.super[mix]; if qual and/or mix is empty, they are nme.EMPTY.toTypeName
+ case This(qual) =>
+ // qual.this
+ case Select(qualifier, selector) =>
+ // qualifier.selector
+ case Ident(name) =>
+ // name
+ // note: type checker converts idents that refer to enclosing fields or methods
+ // to selects; name ==> this.name
+ case Literal(value) =>
+ // value
+ case TypeTree() => (introduced by refcheck)
+ // a type that's not written out, but given in the tpe attribute
+ case Annotated(annot, arg) => (eliminated by typer)
+ // arg @annot for types, arg: @annot for exprs
+ case SingletonTypeTree(ref) => (eliminated by uncurry)
+ // ref.type
+ case SelectFromTypeTree(qualifier, selector) => (eliminated by uncurry)
+ // qualifier # selector, a path-dependent type p.T is expressed as p.type # T
+ case CompoundTypeTree(templ: Template) => (eliminated by uncurry)
+ // parent1 with ... with parentN { refinement }
+ case AppliedTypeTree(tpt, args) => (eliminated by uncurry)
+ // tpt[args]
+ case TypeBoundsTree(lo, hi) => (eliminated by uncurry)
+ // >: lo <: hi
+ case ExistentialTypeTree(tpt, whereClauses) => (eliminated by uncurry)
+ // tpt forSome { whereClauses }
+
+*/
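+
+ // Editorial sketch, not part of the original patch: as noted in the overview
+ // above, `fun[targs](args)` is expressed by nesting TypeApply inside Apply.
+ // `mkTypedApply` is a hypothetical helper, not compiler API.
+ def mkTypedApply(fun: Tree, targs: List[Tree], args: List[Tree]): Apply =
+ if (targs.isEmpty) Apply(fun, args)
+ else Apply(TypeApply(fun, targs), args)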
+}
diff --git a/src/library/scala/reflect/generic/Types.scala b/src/library/scala/reflect/generic/Types.scala
new file mode 100755
index 0000000000..17e19715d7
--- /dev/null
+++ b/src/library/scala/reflect/generic/Types.scala
@@ -0,0 +1,156 @@
+package scala.reflect
+package generic
+
+trait Types { self: Universe =>
+
+ abstract class AbsType {
+ def typeSymbol: Symbol
+ def decl(name: Name): Symbol
+
+ /** Is this type completed (i.e. not a lazy type)?
+ */
+ def isComplete: Boolean = true
+
+ /** If this is a lazy type, assign a new type to `sym'. */
+ def complete(sym: Symbol) {}
+
+ /** Convert this type to a string, avoiding infinite recursion by cutting off
+ * after `maxTostringRecursions` recursion levels. Uses `safeToString`
+ * to produce a string on each level.
+ */
+ override def toString: String =
+ if (tostringRecursions >= maxTostringRecursions)
+ "..."
+ else
+ try {
+ tostringRecursions += 1
+ safeToString
+ } finally {
+ tostringRecursions -= 1
+ }
+
+ /** Method to be implemented in subclasses.
+ * Converts this type to a string; toString uses it to render each recursion level.
+ */
+ def safeToString: String = super.toString
+ }
+
+ type Type >: Null <: AbsType
+
+ val NoType: Type
+ val NoPrefix: Type
+
+ type ThisType <: Type
+ val ThisType: ThisTypeExtractor
+
+ type TypeRef <: Type
+ val TypeRef: TypeRefExtractor
+
+ type SingleType <: Type
+ val SingleType: SingleTypeExtractor
+
+ type SuperType <: Type
+ val SuperType: SuperTypeExtractor
+
+ type TypeBounds <: Type
+ val TypeBounds: TypeBoundsExtractor
+
+ type CompoundType <: Type
+
+ type RefinedType <: CompoundType
+ val RefinedType: RefinedTypeExtractor
+
+ type ClassInfoType <: CompoundType
+ val ClassInfoType: ClassInfoTypeExtractor
+
+ type ConstantType <: Type
+ val ConstantType: ConstantTypeExtractor
+
+ type MethodType <: Type
+ val MethodType: MethodTypeExtractor
+
+ type PolyType <: Type
+ val PolyType: PolyTypeExtractor
+
+ type ExistentialType <: Type
+ val ExistentialType: ExistentialTypeExtractor
+
+ type AnnotatedType <: Type
+ val AnnotatedType: AnnotatedTypeExtractor
+
+ type LazyType <: Type with AbsLazyType
+
+ trait AbsLazyType extends AbsType {
+ override def isComplete: Boolean = false
+ override def complete(sym: Symbol)
+ override def safeToString = "<?>"
+ }
+
+ abstract class ThisTypeExtractor {
+ def apply(sym: Symbol): Type
+ def unapply(tpe: ThisType): Option[Symbol]
+ }
+
+ abstract class SingleTypeExtractor {
+ def apply(pre: Type, sym: Symbol): Type
+ def unapply(tpe: SingleType): Option[(Type, Symbol)]
+ }
+
+ abstract class SuperTypeExtractor {
+ def apply(thistpe: Type, supertpe: Type): Type
+ def unapply(tpe: SuperType): Option[(Type, Type)]
+ }
+
+ abstract class TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type
+ def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
+ }
+
+ abstract class TypeBoundsExtractor {
+ def apply(lo: Type, hi: Type): TypeBounds
+ def unapply(tpe: TypeBounds): Option[(Type, Type)]
+ }
+
+ abstract class RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope): RefinedType
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
+ def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
+ }
+
+ abstract class ClassInfoTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): ClassInfoType
+ def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
+ }
+
+ abstract class ConstantTypeExtractor {
+ def apply(value: Constant): ConstantType
+ def unapply(tpe: ConstantType): Option[Constant]
+ }
+
+ abstract class MethodTypeExtractor {
+ def apply(params: List[Symbol], resultType: Type): MethodType
+ def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
+ }
+
+ abstract class PolyTypeExtractor {
+ def apply(typeParams: List[Symbol], resultType: Type): PolyType
+ def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
+ }
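+
+ // Editorial sketch, not part of the original patch: the extractors double as
+ // constructors in a concrete universe; for instance, a polymorphic method
+ // type [typeParams](params)resultType can be assembled like this.
+ // `mkGenericMethodType` is a hypothetical helper.
+ def mkGenericMethodType(typeParams: List[Symbol], params: List[Symbol], resultType: Type): Type =
+ if (typeParams.isEmpty) MethodType(params, resultType)
+ else PolyType(typeParams, MethodType(params, resultType))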
+
+ abstract class ExistentialTypeExtractor {
+ def apply(quantified: List[Symbol], underlying: Type): ExistentialType
+ def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
+ }
+
+ abstract class AnnotatedTypeExtractor {
+ def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType
+ def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
+ }
+
+ /** The maximum number of recursions allowed in toString
+ */
+ final val maxTostringRecursions = 50
+
+ private var tostringRecursions = 0
+}
+
diff --git a/src/library/scala/reflect/generic/UnPickler.scala b/src/library/scala/reflect/generic/UnPickler.scala
new file mode 100755
index 0000000000..b312114d20
--- /dev/null
+++ b/src/library/scala/reflect/generic/UnPickler.scala
@@ -0,0 +1,796 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package generic
+
+import java.io.IOException
+import java.lang.{Float, Double}
+
+import Flags._
+import PickleFormat._
+import collection.mutable.{HashMap, ListBuffer}
+import annotation.switch
+
+/** @author Martin Odersky
+ * @version 1.0
+ */
+abstract class UnPickler {
+
+ val global: Universe
+ import global._
+
+ /** Unpickle symbol table information descending from a class and/or module root
+ * from an array of bytes.
+ * @param bytes the byte array from which we unpickle
+ * @param offset the offset at which unpickling starts
+ * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable
+ * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
+ * @param filename the filename associated with the byte array, only used for error messages
+ */
+ def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
+ try {
+ scan(bytes, offset, classRoot, moduleRoot, filename)
+ } catch {
+ case ex: IOException =>
+ throw ex
+ case ex: Throwable =>
+ /*if (settings.debug.value)*/ ex.printStackTrace()
+ throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
+ }
+ }
+
+ /** To be implemented in subclasses. Like `unpickle` but without the catch-all error handling.
+ */
+ def scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String)
+
+ abstract class Scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(bytes, offset, -1) {
+ //println("unpickle " + classRoot + " and " + moduleRoot)//debug
+
+ protected def debug = false
+
+ checkVersion()
+
+ /** A map from entry numbers to array offsets */
+ private val index = createIndex
+
+ /** A map from entry numbers to symbols, types, or annotations */
+ private val entries = new Array[AnyRef](index.length)
+
+ /** A map from symbols to their associated `decls' scopes */
+ private val symScopes = new HashMap[Symbol, Scope]
+
+ //println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
+
+ def run() {
+ for (i <- 0 until index.length) {
+ if (isSymbolEntry(i))
+ at(i, readSymbol)
+ else if (isSymbolAnnotationEntry(i))
+ at(i, {() => readSymbolAnnotation(); null})
+ else if (isChildrenEntry(i))
+ at(i, {() => readChildren(); null})
+ }
+ }
+
+ private def checkVersion() {
+ val major = readNat()
+ val minor = readNat()
+ if (major != MajorVersion || minor > MinorVersion)
+ throw new IOException("Scala signature " + classRoot.decodedName +
+ " has wrong version\n expected: " +
+ MajorVersion + "." + MinorVersion +
+ "\n found: " + major + "." + minor +
+ " in "+filename)
+ }
+
+ /** The `decls' scope associated with the given symbol */
+ protected def symScope(sym: Symbol) = symScopes.get(sym) match {
+ case None => val s = newScope; symScopes(sym) = s; s
+ case Some(s) => s
+ }
+
+ /** Does entry represent an (internal) symbol? */
+ protected def isSymbolEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ (firstSymTag <= tag && tag <= lastSymTag &&
+ (tag != CLASSsym || !isRefinementSymbolEntry(i)))
+ }
+
+ /** Does entry represent an (internal or external) symbol? */
+ protected def isSymbolRef(i: Int): Boolean = {
+ val tag = bytes(index(i))
+ (firstSymTag <= tag && tag <= lastExtSymTag)
+ }
+
+ /** Does entry represent a name? */
+ protected def isNameEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == TERMname || tag == TYPEname
+ }
+
+ /** Does entry represent a symbol annotation? */
+ protected def isSymbolAnnotationEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == SYMANNOT
+ }
+
+ /** Does the entry represent children of a symbol? */
+ protected def isChildrenEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == CHILDREN
+ }
+
+ /** Does entry represent a refinement symbol?
+ * pre: Entry is a class symbol
+ */
+ protected def isRefinementSymbolEntry(i: Int): Boolean = {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ val tag = readByte().toInt
+ assert(tag == CLASSsym)
+
+ readNat(); // read length
+ val result = readNameRef() == mkTypeName(nme.REFINE_CLASS_NAME)
+ readIndex = savedIndex
+ result
+ }
+
+ /** If entry at <code>i</code> is undefined, define it by performing
+ * operation <code>op</code> with <code>readIndex</code> at the start of
+ * the i'th entry. Restore <code>readIndex</code> afterwards.
+ */
+ protected def at[T <: AnyRef](i: Int, op: () => T): T = {
+ var r = entries(i)
+ if (r eq null) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ r = op()
+ assert(entries(i) eq null, entries(i))
+ entries(i) = r
+ readIndex = savedIndex
+ }
+ r.asInstanceOf[T]
+ }
+
+ /** Read a name */
+ protected def readName(): Name = {
+ val tag = readByte()
+ val len = readNat()
+ tag match {
+ case TERMname => newTermName(bytes, readIndex, len)
+ case TYPEname => newTypeName(bytes, readIndex, len)
+ case _ => errorBadSignature("bad name tag: " + tag)
+ }
+ }
+
+ /** Reflection access to lazyAccessor for binary compatibility
+ * between 2.8.1 and 2.8.2.
+ */
+ private def binaryCompatLazyAccessor(x: AnyRef): Symbol =
+ try x.getClass.getMethod("lazyAccessor").invoke(x).asInstanceOf[Symbol]
+ catch { case _: Exception => null }
+
+ /** Read a symbol */
+ protected def readSymbol(): Symbol = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ var sym: Symbol = NoSymbol
+ tag match {
+ case EXTref | EXTMODCLASSref =>
+ val name = readNameRef()
+ val owner = if (readIndex == end) definitions.RootClass else readSymbolRef()
+ def fromName(name: Name) =
+ if (mkTermName(name) == nme.ROOT) definitions.RootClass
+ else if (name == nme.ROOTPKG) definitions.RootPackage
+ else if (tag == EXTref) owner.info.decl(name)
+ else owner.info.decl(name).moduleClass
+ sym = fromName(name)
+ // If sym not found try with expanded name.
+ // This can happen if references to private symbols are
+ // read from outside; for instance when checking the children of a class
+ // (see t1722)
+ if (sym == NoSymbol) sym = fromName(nme.expandedName(name, owner))
+
+ // If the owner is overloaded (i.e. a method), it's not possible to select the
+ // right member => return NoSymbol. This can only happen when unpickling a tree.
+ // the "case Apply" in readTree() takes care of selecting the correct alternative
+ // after parsing the arguments.
+ if (sym == NoSymbol && !owner.isOverloaded) {
+ // Possibly a nested object symbol
+ tag match {
+ case EXTMODCLASSref =>
+ val moduleVar = owner.info.decl(nme.moduleVarName(name))
+ if (moduleVar.hasFlag(LAZY)) {
+ val lazyAcc = binaryCompatLazyAccessor(moduleVar)
+ if (lazyAcc != null)
+ sym = binaryCompatLazyAccessor(lazyAcc)
+ }
+ case _ =>
+ }
+
+ if (sym == NoSymbol)
+ errorMissingRequirement(name, owner)
+ }
+
+ case NONEsym =>
+ sym = NoSymbol
+
+ case _ => // symbols that were pickled with Pickler.writeSymInfo
+ var defaultGetter: Symbol = NoSymbol // @deprecated, to be removed for 2.8 final
+ var nameref = readNat()
+ if (tag == VALsym && isSymbolRef(nameref)) { // @deprecated, to be removed for 2.8 final
+ defaultGetter = at(nameref, readSymbol)
+ nameref = readNat()
+ }
+ val name = at(nameref, readName)
+ val owner = readSymbolRef()
+ val flags = pickledToRawFlags(readLongNat())
+ var privateWithin: Symbol = NoSymbol
+ var inforef = readNat()
+ if (isSymbolRef(inforef)) {
+ privateWithin = at(inforef, readSymbol)
+ inforef = readNat()
+ }
+ tag match {
+ case TYPEsym =>
+ sym = owner.newAbstractType(name)
+ case ALIASsym =>
+ sym = owner.newAliasType(name)
+ case CLASSsym =>
+ sym =
+ if (name == classRoot.name && owner == classRoot.owner)
+ (if ((flags & MODULE) != 0L) moduleRoot.moduleClass
+ else classRoot)
+ else
+ if ((flags & MODULE) != 0L) owner.newModuleClass(name)
+ else owner.newClass(name)
+ if (readIndex != end) sym.typeOfThis = newLazyTypeRef(readNat())
+ case MODULEsym =>
+ val clazz = at(inforef, readType).typeSymbol
+ sym =
+ if (name == moduleRoot.name && owner == moduleRoot.owner) moduleRoot
+ else {
+ val m = owner.newModule(name, clazz)
+ clazz.sourceModule = m
+ m
+ }
+ case VALsym =>
+ sym = if (name == moduleRoot.name && owner == moduleRoot.owner) { assert(false); NoSymbol }
+ else if ((flags & METHOD) != 0) owner.newMethod(name)
+ else owner.newValue(name)
+ case _ =>
+ noSuchSymbolTag(tag, end, name, owner)
+ }
+ sym.flags = flags & PickledFlags
+ sym.privateWithin = privateWithin
+ if (readIndex != end) assert(sym hasFlag (SUPERACCESSOR | PARAMACCESSOR), sym)
+ if (sym hasFlag SUPERACCESSOR) assert(readIndex != end)
+ sym.info =
+ if (readIndex != end) newLazyTypeRefAndAlias(inforef, readNat())
+ else newLazyTypeRef(inforef)
+ if (sym.owner.isClass && sym != classRoot && sym != moduleRoot &&
+ !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistentiallyBound)
+ symScope(sym.owner) enter sym
+ }
+ sym
+ }
+
+ def noSuchSymbolTag(tag: Int, end: Int, name: Name, owner: Symbol) =
+ errorBadSignature("bad symbol tag: " + tag)
+
+ /** Read a type */
+ protected def readType(): Type = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ (tag: @switch) match {
+ case NOtpe =>
+ NoType
+ case NOPREFIXtpe =>
+ NoPrefix
+ case THIStpe =>
+ ThisType(readSymbolRef())
+ case SINGLEtpe =>
+ SingleType(readTypeRef(), readSymbolRef()) // !!! was singleType
+ case SUPERtpe =>
+ val thistpe = readTypeRef()
+ val supertpe = readTypeRef()
+ SuperType(thistpe, supertpe)
+ case CONSTANTtpe =>
+ ConstantType(readConstantRef())
+ case TYPEREFtpe =>
+ val pre = readTypeRef()
+ val sym = readSymbolRef()
+ var args = until(end, readTypeRef)
+ TypeRef(pre, sym, args)
+ case TYPEBOUNDStpe =>
+ TypeBounds(readTypeRef(), readTypeRef())
+ case REFINEDtpe =>
+ val clazz = readSymbolRef()
+ RefinedType(until(end, readTypeRef), symScope(clazz), clazz)
+ case CLASSINFOtpe =>
+ val clazz = readSymbolRef()
+ ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
+ case METHODtpe | IMPLICITMETHODtpe =>
+ val restpe = readTypeRef()
+ val params = until(end, readSymbolRef)
+ // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
+ // This can only happen for trees; "case Apply" in readTree() takes care of selecting the correct
+ // alternative after parsing the arguments.
+ if (params.contains(NoSymbol) || restpe == NoType) NoType
+ else MethodType(params, restpe)
+ case POLYtpe =>
+ val restpe = readTypeRef()
+ val typeParams = until(end, readSymbolRef)
+ PolyType(typeParams, restpe)
+ case EXISTENTIALtpe =>
+ val restpe = readTypeRef()
+ ExistentialType(until(end, readSymbolRef), restpe)
+ case ANNOTATEDtpe =>
+ var typeRef = readNat()
+ val selfsym = if (isSymbolRef(typeRef)) {
+ val s = at(typeRef, readSymbol)
+ typeRef = readNat()
+ s
+ } else NoSymbol // selfsym can go.
+ val tp = at(typeRef, readType)
+ val annots = until(end, readAnnotationRef)
+ if (selfsym == NoSymbol) AnnotatedType(annots, tp, selfsym)
+ else tp
+ case _ =>
+ noSuchTypeTag(tag, end)
+ }
+ }
+
+ def noSuchTypeTag(tag: Int, end: Int): Type =
+ errorBadSignature("bad type tag: " + tag)
+
+ /** Read a constant */
+ protected def readConstant(): Constant = {
+ val tag = readByte().toInt
+ val len = readNat()
+ (tag: @switch) match {
+ case LITERALunit => Constant(())
+ case LITERALboolean => Constant(readLong(len) != 0L)
+ case LITERALbyte => Constant(readLong(len).toByte)
+ case LITERALshort => Constant(readLong(len).toShort)
+ case LITERALchar => Constant(readLong(len).toChar)
+ case LITERALint => Constant(readLong(len).toInt)
+ case LITERALlong => Constant(readLong(len))
+ case LITERALfloat => Constant(Float.intBitsToFloat(readLong(len).toInt))
+ case LITERALdouble => Constant(Double.longBitsToDouble(readLong(len)))
+ case LITERALstring => Constant(readNameRef().toString())
+ case LITERALnull => Constant(null)
+ case LITERALclass => Constant(readTypeRef())
+ case LITERALenum => Constant(readSymbolRef())
+ case _ => noSuchConstantTag(tag, len)
+ }
+ }
+
+ def noSuchConstantTag(tag: Int, len: Int): Constant =
+ errorBadSignature("bad constant tag: " + tag)
+
+ /** Read children and store them into the corresponding symbol.
+ */
+ protected def readChildren() {
+ val tag = readByte()
+ assert(tag == CHILDREN)
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ while (readIndex != end) target addChild readSymbolRef()
+ }
+
+ /** Read an annotation argument, which is pickled either
+ * as a Constant or a Tree.
+ */
+ protected def readAnnotArg(i: Int): Tree = {
+ if (bytes(index(i)) == TREE) {
+ at(i, readTree)
+ } else {
+ val const = at(i, readConstant)
+ global.Literal(const).setType(const.tpe)
+ }
+ }
+
+ /** Read a ClassfileAnnotArg (argument to a classfile annotation)
+ */
+ protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
+ case ANNOTINFO =>
+ NestedAnnotArg(at(i, readAnnotation))
+ case ANNOTARGARRAY =>
+ at(i, () => {
+ readByte() // skip the `annotargarray` tag
+ val end = readNat() + readIndex
+ ArrayAnnotArg(until(end, () => readClassfileAnnotArg(readNat())).toArray(classfileAnnotArgManifest))
+ })
+ case _ =>
+ LiteralAnnotArg(at(i, readConstant))
+ }
+
+ /** Read an AnnotationInfo. Not to be called directly, use
+ * readAnnotation or readSymbolAnnotation
+ */
+ protected def readAnnotationInfo(end: Int): AnnotationInfo = {
+ val atp = readTypeRef()
+ val args = new ListBuffer[Tree]
+ val assocs = new ListBuffer[(Name, ClassfileAnnotArg)]
+ while (readIndex != end) {
+ val argref = readNat()
+ if (isNameEntry(argref)) {
+ val name = at(argref, readName)
+ val arg = readClassfileAnnotArg(readNat())
+ assocs += ((name, arg))
+ }
+ else
+ args += readAnnotArg(argref)
+ }
+ AnnotationInfo(atp, args.toList, assocs.toList)
+ }
+
+ /** Read an annotation and, as a side effect, store it into
+ * the symbol it targets. Called at top-level, for all
+ * (symbol, annotInfo) entries. */
+ protected def readSymbolAnnotation() {
+ val tag = readByte()
+ if (tag != SYMANNOT)
+ errorBadSignature("symbol annotation expected ("+ tag +")")
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ target.addAnnotation(readAnnotationInfo(end))
+ }
+
+ /** Read an annotation and return it. Used when unpickling
+ * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */
+ protected def readAnnotation(): AnnotationInfo = {
+ val tag = readByte()
+ if (tag != ANNOTINFO)
+ errorBadSignature("annotation expected (" + tag + ")")
+ val end = readNat() + readIndex
+ readAnnotationInfo(end)
+ }
+
+ /* Read an abstract syntax tree */
+ protected def readTree(): Tree = {
+ val outerTag = readByte()
+ if (outerTag != TREE)
+ errorBadSignature("tree expected (" + outerTag + ")")
+ val end = readNat() + readIndex
+ val tag = readByte()
+ val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
+
+ // Set by the three functions to follow. If symbol is non-null
+ // after the new tree 't' has been created, t has its Symbol
+ // set to symbol; and it always has its Type set to tpe.
+ var symbol: Symbol = null
+ var mods: Modifiers = null
+ var name: Name = null
+
+ /** Read a Symbol, Modifiers, and a Name */
+ def setSymModsName() {
+ symbol = readSymbolRef()
+ mods = readModifiersRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol and a Name */
+ def setSymName() {
+ symbol = readSymbolRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol */
+ def setSym() {
+ symbol = readSymbolRef()
+ }
+
+ val t = tag match {
+ case EMPTYtree =>
+ EmptyTree
+
+ case PACKAGEtree =>
+ setSym()
+ // val discardedSymbol = readSymbolRef() // XXX is symbol intentionally not set?
+ val pid = readTreeRef().asInstanceOf[RefTree]
+ val stats = until(end, readTreeRef)
+ PackageDef(pid, stats)
+
+ case CLASStree =>
+ setSymModsName()
+ val impl = readTemplateRef()
+ val tparams = until(end, readTypeDefRef)
+ ClassDef(mods, name, tparams, impl)
+
+ case MODULEtree =>
+ setSymModsName()
+ ModuleDef(mods, name, readTemplateRef())
+
+ case VALDEFtree =>
+ setSymModsName()
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+ ValDef(mods, name, tpt, rhs)
+
+ case DEFDEFtree =>
+ setSymModsName()
+ val tparams = times(readNat(), readTypeDefRef)
+ val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+
+ DefDef(mods, name, tparams, vparamss, tpt, rhs)
+
+ case TYPEDEFtree =>
+ setSymModsName()
+ val rhs = readTreeRef()
+ val tparams = until(end, readTypeDefRef)
+ TypeDef(mods, name, tparams, rhs)
+
+ case LABELtree =>
+ setSymName()
+ val rhs = readTreeRef()
+ val params = until(end, readIdentRef)
+ LabelDef(name, params, rhs)
+
+ case IMPORTtree =>
+ setSym()
+ val expr = readTreeRef()
+ val selectors = until(end, () => {
+ val from = readNameRef()
+ val to = readNameRef()
+ ImportSelector(from, -1, to, -1)
+ })
+
+ Import(expr, selectors)
+
+ case TEMPLATEtree =>
+ setSym()
+ val parents = times(readNat(), readTreeRef)
+ val self = readValDefRef()
+ val body = until(end, readTreeRef)
+
+ Template(parents, self, body)
+
+ case BLOCKtree =>
+ val expr = readTreeRef()
+ val stats = until(end, readTreeRef)
+ Block(stats, expr)
+
+ case CASEtree =>
+ val pat = readTreeRef()
+ val guard = readTreeRef()
+ val body = readTreeRef()
+ CaseDef(pat, guard, body)
+
+ case ALTERNATIVEtree =>
+ Alternative(until(end, readTreeRef))
+
+ case STARtree =>
+ Star(readTreeRef())
+
+ case BINDtree =>
+ setSymName()
+ Bind(name, readTreeRef())
+
+ case UNAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ UnApply(fun, args)
+
+ case ARRAYVALUEtree =>
+ val elemtpt = readTreeRef()
+ val trees = until(end, readTreeRef)
+ ArrayValue(elemtpt, trees)
+
+ case FUNCTIONtree =>
+ setSym()
+ val body = readTreeRef()
+ val vparams = until(end, readValDefRef)
+ Function(vparams, body)
+
+ case ASSIGNtree =>
+ val lhs = readTreeRef()
+ val rhs = readTreeRef()
+ Assign(lhs, rhs)
+
+ case IFtree =>
+ val cond = readTreeRef()
+ val thenp = readTreeRef()
+ val elsep = readTreeRef()
+ If(cond, thenp, elsep)
+
+ case MATCHtree =>
+ val selector = readTreeRef()
+ val cases = until(end, readCaseDefRef)
+ Match(selector, cases)
+
+ case RETURNtree =>
+ setSym()
+ Return(readTreeRef())
+
+ case TREtree =>
+ val block = readTreeRef()
+ val finalizer = readTreeRef()
+ val catches = until(end, readCaseDefRef)
+ Try(block, catches, finalizer)
+
+ case THROWtree =>
+ Throw(readTreeRef())
+
+ case NEWtree =>
+ New(readTreeRef())
+
+ case TYPEDtree =>
+ val expr = readTreeRef()
+ val tpt = readTreeRef()
+ Typed(expr, tpt)
+
+ case TYPEAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ TypeApply(fun, args)
+
+ case APPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ if (fun.symbol.isOverloaded) {
+ fun.setType(fun.symbol.info)
+ inferMethodAlternative(fun, args map (_.tpe), tpe)
+ }
+ Apply(fun, args)
+
+ case APPLYDYNAMICtree =>
+ setSym()
+ val qual = readTreeRef()
+ val args = until(end, readTreeRef)
+ ApplyDynamic(qual, args)
+
+ case SUPERtree =>
+ setSym()
+ val qual = readNameRef()
+ val mix = readNameRef()
+ Super(qual, mix)
+
+ case THIStree =>
+ setSym()
+ This(readNameRef())
+
+ case SELECTtree =>
+ setSym()
+ val qualifier = readTreeRef()
+ val selector = readNameRef()
+ Select(qualifier, selector)
+
+ case IDENTtree =>
+ setSymName()
+ Ident(name)
+
+ case LITERALtree =>
+ global.Literal(readConstantRef())
+
+ case TYPEtree =>
+ TypeTree()
+
+ case ANNOTATEDtree =>
+ val annot = readTreeRef()
+ val arg = readTreeRef()
+ Annotated(annot, arg)
+
+ case SINGLETONTYPEtree =>
+ SingletonTypeTree(readTreeRef())
+
+ case SELECTFROMTYPEtree =>
+ val qualifier = readTreeRef()
+ val selector = readNameRef()
+ SelectFromTypeTree(qualifier, selector)
+
+ case COMPOUNDTYPEtree =>
+ CompoundTypeTree(readTemplateRef())
+
+ case APPLIEDTYPEtree =>
+ val tpt = readTreeRef()
+ val args = until(end, readTreeRef)
+ AppliedTypeTree(tpt, args)
+
+ case TYPEBOUNDStree =>
+ val lo = readTreeRef()
+ val hi = readTreeRef()
+ TypeBoundsTree(lo, hi)
+
+ case EXISTENTIALTYPEtree =>
+ val tpt = readTreeRef()
+ val whereClauses = until(end, readTreeRef)
+ ExistentialTypeTree(tpt, whereClauses)
+
+ case _ =>
+ noSuchTreeTag(tag, end)
+ }
+
+ if (symbol == null) t setType tpe
+ else t setSymbol symbol setType tpe
+ }
+
+ def noSuchTreeTag(tag: Int, end: Int) =
+ errorBadSignature("unknown tree type (" + tag + ")")
+
+ def readModifiers(): Modifiers = {
+ val tag = readNat()
+ if (tag != MODIFIERS)
+ errorBadSignature("expected a modifiers tag (" + tag + ")")
+ val end = readNat() + readIndex
+ val pflagsHi = readNat()
+ val pflagsLo = readNat()
+ val pflags = (pflagsHi.toLong << 32) + pflagsLo
+ val flags = pickledToRawFlags(pflags)
+ val privateWithin = readNameRef()
+ Modifiers(flags, privateWithin, Nil, Map.empty)
+ }
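+
+ // Editorial sketch, not part of the original patch: the pickled flag word is
+ // stored as two 32-bit naturals, high word first; this hypothetical helper
+ // just restates the recombination performed in readModifiers above.
+ private def joinPickledFlags(hi: Int, lo: Int): Long = (hi.toLong << 32) + lo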
+
+ /* Read a reference to a pickled item */
+ protected def readNameRef(): Name = at(readNat(), readName)
+ protected def readSymbolRef(): Symbol = at(readNat(), readSymbol)
+ protected def readTypeRef(): Type = at(readNat(), readType)
+ protected def readConstantRef(): Constant = at(readNat(), readConstant)
+ protected def readAnnotationRef(): AnnotationInfo =
+ at(readNat(), readAnnotation)
+ protected def readModifiersRef(): Modifiers =
+ at(readNat(), readModifiers)
+ protected def readTreeRef(): Tree =
+ at(readNat(), readTree)
+
+ protected def readTemplateRef(): Template =
+ readTreeRef() match {
+ case templ:Template => templ
+ case other =>
+ errorBadSignature("expected a template (" + other + ")")
+ }
+ protected def readCaseDefRef(): CaseDef =
+ readTreeRef() match {
+ case tree:CaseDef => tree
+ case other =>
+ errorBadSignature("expected a case def (" + other + ")")
+ }
+ protected def readValDefRef(): ValDef =
+ readTreeRef() match {
+ case tree:ValDef => tree
+ case other =>
+ errorBadSignature("expected a ValDef (" + other + ")")
+ }
+ protected def readIdentRef(): Ident =
+ readTreeRef() match {
+ case tree:Ident => tree
+ case other =>
+ errorBadSignature("expected an Ident (" + other + ")")
+ }
+ protected def readTypeDefRef(): TypeDef =
+ readTreeRef() match {
+ case tree:TypeDef => tree
+ case other =>
+ errorBadSignature("expected an TypeDef (" + other + ")")
+ }
+
+ protected def errorBadSignature(msg: String) =
+ throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
+
+ protected def errorMissingRequirement(msg: String): Nothing =
+ if (debug) errorBadSignature(msg)
+ else throw new IOException("class file needed by "+classRoot.name+" is missing.\n"+msg)
+
+ protected def errorMissingRequirement(name: Name, owner: Symbol): Nothing =
+ errorMissingRequirement("reference " + NameTransformer.decode(name.toString) + " of " + owner.tpe + " refers to nonexisting symbol.")
+
+ /** pre: `fun` points to a symbol with an overloaded type.
+ * Selects the overloaded alternative of `fun` which best matches given
+ * argument types `argtpes` and result type `restpe`. Stores this alternative as
+ * the symbol of `fun`.
+ */
+ def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type)
+
+ /** Create a lazy type which when completed returns type at index `i`. */
+ def newLazyTypeRef(i: Int): LazyType
+
+ /** Create a lazy type which when completed returns type at index `i` and sets alias
+ * of completed symbol to symbol at index `j`
+ */
+ def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType
+ }
+}
diff --git a/src/library/scala/reflect/generic/Universe.scala b/src/library/scala/reflect/generic/Universe.scala
new file mode 100755
index 0000000000..101295ae79
--- /dev/null
+++ b/src/library/scala/reflect/generic/Universe.scala
@@ -0,0 +1,16 @@
+package scala.reflect
+package generic
+
+abstract class Universe extends Symbols
+ with Types
+ with Constants
+ with Scopes
+ with Names
+ with StdNames
+ with Trees
+ with AnnotationInfos
+ with StandardDefinitions {
+ type Position
+ val NoPosition: Position
+}
+
diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala
index 612b6661e5..bcbc597338 100644
--- a/src/library/scala/remote.scala
+++ b/src/library/scala/remote.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/runtime/NonLocalReturnException.scala b/src/library/scala/runtime/AbstractFunction0.scala
index 46e5771dd1..386091b818 100644
--- a/src/library/scala/runtime/NonLocalReturnException.scala
+++ b/src/library/scala/runtime/AbstractFunction0.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,9 +8,12 @@
// $Id$
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala.runtime
-import scala.util.control.ControlException
-class NonLocalReturnException[T](val key: AnyRef, val value: T) extends RuntimeException with ControlException
+
+abstract class AbstractFunction0[@specialized +R] extends Function0[R] {
+
+}
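+
+// Editorial sketch, not part of the original patch: the AbstractFunctionN
+// classes let function values extend a class instead of mixing in the
+// FunctionN trait, which keeps the bytecode of compiler-generated anonymous
+// functions smaller. `ZeroFunction` is a hypothetical hand-written example.
+private[runtime] object ZeroFunction extends AbstractFunction0[Int] {
+ def apply() = 0
+}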
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
new file mode 100644
index 0000000000..be125d1566
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction10.scala b/src/library/scala/runtime/AbstractFunction10.scala
new file mode 100644
index 0000000000..5e44dc7513
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction10.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends Function10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R] {
+
+}
diff --git a/src/library/scala/runtime/ExceptionHandling.java b/src/library/scala/runtime/AbstractFunction11.scala
index 73a6647ed2..c7e7d2de28 100644
--- a/src/library/scala/runtime/ExceptionHandling.java
+++ b/src/library/scala/runtime/AbstractFunction11.scala
@@ -1,26 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
-package scala.runtime;
+package scala.runtime
-public abstract class ExceptionHandling {
- public static Throwable tryCatch(Runnable runnable) {
- try {
- runnable.run();
- return null;
- } catch (Throwable exception) {
- return exception;
- }
- }
+abstract class AbstractFunction11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] extends Function11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R] {
}
diff --git a/src/library/scala/runtime/AbstractFunction12.scala b/src/library/scala/runtime/AbstractFunction12.scala
new file mode 100644
index 0000000000..6ca4602d00
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction12.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, +R] extends Function12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction13.scala b/src/library/scala/runtime/AbstractFunction13.scala
new file mode 100644
index 0000000000..f5f4d6f89f
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction13.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, +R] extends Function13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction14.scala b/src/library/scala/runtime/AbstractFunction14.scala
new file mode 100644
index 0000000000..686f0e42b8
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction14.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, +R] extends Function14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction15.scala b/src/library/scala/runtime/AbstractFunction15.scala
new file mode 100644
index 0000000000..62b70ba7f9
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction15.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, +R] extends Function15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction16.scala b/src/library/scala/runtime/AbstractFunction16.scala
new file mode 100644
index 0000000000..6bd555f2c5
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction16.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, +R] extends Function16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction17.scala b/src/library/scala/runtime/AbstractFunction17.scala
new file mode 100644
index 0000000000..d679dd5a82
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction17.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, +R] extends Function17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction18.scala b/src/library/scala/runtime/AbstractFunction18.scala
new file mode 100644
index 0000000000..04d05157a4
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction18.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, +R] extends Function18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction19.scala b/src/library/scala/runtime/AbstractFunction19.scala
new file mode 100644
index 0000000000..0f0b4f847a
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction19.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, +R] extends Function19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R] {
+
+}
diff --git a/src/library/scala/runtime/BoxedIntArray.scala b/src/library/scala/runtime/AbstractFunction2.scala
index fac3b24b77..75dfc5e0a9 100644
--- a/src/library/scala/runtime/BoxedIntArray.scala
+++ b/src/library/scala/runtime/AbstractFunction2.scala
@@ -1,28 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala.runtime
-import scala.reflect.ClassManifest
-@serializable
-final class BoxedIntArray(val value: Array[Int]) extends BoxedArray[Int] {
- def elemManifest = ClassManifest.Int
- def length: Int = value.length
+abstract class AbstractFunction2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function2[T1, T2, R] {
- def apply(index: Int): Int = value(index)
-
- def update(index: Int, elem: Int) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
}
diff --git a/src/library/scala/runtime/AbstractFunction20.scala b/src/library/scala/runtime/AbstractFunction20.scala
new file mode 100644
index 0000000000..6df9fff32f
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction20.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, +R] extends Function20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R] {
+
+}
diff --git a/src/library/scala/runtime/BoxedLongArray.scala b/src/library/scala/runtime/AbstractFunction21.scala
index cbf9d68498..44a81e0ba9 100644
--- a/src/library/scala/runtime/BoxedLongArray.scala
+++ b/src/library/scala/runtime/AbstractFunction21.scala
@@ -1,28 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala.runtime
-import scala.reflect.ClassManifest
-@serializable
-final class BoxedLongArray(val value: Array[Long]) extends BoxedArray[Long] {
- def elemManifest = ClassManifest.Long
- def length: Int = value.length
+abstract class AbstractFunction21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, +R] extends Function21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R] {
- def apply(index: Int): Long = value(index)
-
- def update(index: Int, elem: Long) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
}
diff --git a/src/library/scala/runtime/BoxedCharArray.scala b/src/library/scala/runtime/AbstractFunction22.scala
index 8924ed52f5..5fc2d331ea 100644
--- a/src/library/scala/runtime/BoxedCharArray.scala
+++ b/src/library/scala/runtime/AbstractFunction22.scala
@@ -1,28 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala.runtime
-import scala.reflect.ClassManifest
-@serializable
-final class BoxedCharArray(val value: Array[Char]) extends BoxedArray[Char] {
- def elemManifest = ClassManifest.Char
- def length: Int = value.length
+abstract class AbstractFunction22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, +R] extends Function22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22, R] {
- def apply(index: Int): Char = value(index)
-
- def update(index: Int, elem: Char) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
}
diff --git a/src/library/scala/runtime/StreamCons.scala b/src/library/scala/runtime/AbstractFunction3.scala
index 96b236f8cf..77473dda6f 100644
--- a/src/library/scala/runtime/StreamCons.scala
+++ b/src/library/scala/runtime/AbstractFunction3.scala
@@ -1,17 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala.runtime
-final class StreamCons[T](xs: => Stream[T]) {
- def lazy_:: (x: T): Stream[T] = Stream.cons(x, xs)
- def lazy_::: (ys: Stream[T]): Stream[T] = ys append xs
+
+
+abstract class AbstractFunction3[-T1, -T2, -T3, +R] extends Function3[T1, T2, T3, R] {
+
}
diff --git a/src/library/scala/concurrent/AsyncInvokable.scala b/src/library/scala/runtime/AbstractFunction4.scala
index be831bfdf5..6aaabe22d4 100644
--- a/src/library/scala/concurrent/AsyncInvokable.scala
+++ b/src/library/scala/runtime/AbstractFunction4.scala
@@ -1,24 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
-package scala.concurrent
+package scala.runtime
-/** The <code>AsyncInvokable</code> trait...
- *
- * @author Philipp Haller
- */
-trait AsyncInvokable[-T, +R] {
- type Future[+S] <: () => S
- def !!(task: T): Future[R]
+abstract class AbstractFunction4[-T1, -T2, -T3, -T4, +R] extends Function4[T1, T2, T3, T4, R] {
}
diff --git a/src/library/scala/runtime/AbstractFunction5.scala b/src/library/scala/runtime/AbstractFunction5.scala
new file mode 100644
index 0000000000..aae3ff44a2
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction5.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction5[-T1, -T2, -T3, -T4, -T5, +R] extends Function5[T1, T2, T3, T4, T5, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction6.scala b/src/library/scala/runtime/AbstractFunction6.scala
new file mode 100644
index 0000000000..1bef135b27
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction6.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends Function6[T1, T2, T3, T4, T5, T6, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction7.scala b/src/library/scala/runtime/AbstractFunction7.scala
new file mode 100644
index 0000000000..97ee37d0c2
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction7.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends Function7[T1, T2, T3, T4, T5, T6, T7, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction8.scala b/src/library/scala/runtime/AbstractFunction8.scala
new file mode 100644
index 0000000000..0a50727c6d
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction8.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends Function8[T1, T2, T3, T4, T5, T6, T7, T8, R] {
+
+}
diff --git a/src/library/scala/runtime/AbstractFunction9.scala b/src/library/scala/runtime/AbstractFunction9.scala
new file mode 100644
index 0000000000..3685004fe1
--- /dev/null
+++ b/src/library/scala/runtime/AbstractFunction9.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
+
+package scala.runtime
+
+
+
+abstract class AbstractFunction9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends Function9[T1, T2, T3, T4, T5, T6, T7, T8, T9, R] {
+
+}
diff --git a/src/library/scala/runtime/AnyValCompanion.scala b/src/library/scala/runtime/AnyValCompanion.scala
new file mode 100644
index 0000000000..0fba1cfd60
--- /dev/null
+++ b/src/library/scala/runtime/AnyValCompanion.scala
@@ -0,0 +1,86 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime
+
+/** A common supertype for companion classes of primitive types.
+ *
+ * A common trait for /companion/ objects of primitive types comes in handy
+ * when parameterizing code on types. For instance, the specialized
+ * annotation is passed a sequence of types on which to specialize:
+ * {{{
+ * class Tuple1[@specialized(Unit, Int, Double) T]
+ * }}}
+ *
+ */
+sealed trait AnyValCompanion
+
+/** An object representing 'object scala.Unit'. It should never be used
+ * directly.
+ */
+object Unit extends AnyValCompanion {
+ override def toString = "object scala.Unit"
+}
+
+/** An object representing 'object scala.Boolean'. It should never be used
+ * directly.
+ */
+object Boolean extends AnyValCompanion {
+ override def toString = "object scala.Boolean"
+}
+
+/** An object representing 'object scala.Byte'. It should never be used
+ * directly.
+ */
+object Byte extends AnyValCompanion {
+ override def toString = "object scala.Byte"
+}
+
+/** An object representing 'object scala.Short'. It should never be used
+ * directly.
+ */
+object Short extends AnyValCompanion {
+ override def toString = "object scala.Short"
+}
+
+/** An object representing 'object scala.Char'. It should never be used
+ * directly.
+ */
+object Char extends AnyValCompanion {
+ override def toString = "object scala.Char"
+}
+
+/** An object representing 'object scala.Int'. It should never be used
+ * directly.
+ */
+object Int extends AnyValCompanion {
+ override def toString = "object scala.Int"
+}
+
+/** An object representing 'object scala.Long'. It should never be used
+ * directly.
+ */
+object Long extends AnyValCompanion {
+ override def toString = "object scala.Long"
+}
+
+/** An object representing 'object scala.Float'. It should never be used
+ * directly.
+ */
+object Float extends AnyValCompanion {
+ override def toString = "object scala.Float"
+}
+
+/** An object representing 'object scala.Double'. It should never be used
+ * directly.
+ */
+object Double extends AnyValCompanion {
+ override def toString = "object scala.Double"
+}
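
A purely illustrative sketch (not part of the patch) of what the shared supertype buys: code can accept any of the primitive companion stand-ins above through AnyValCompanion, relying only on the definitions in this file and referring to the objects by the fully qualified scala.runtime paths declared here.

object AnyValCompanionDemo {
  import scala.runtime.AnyValCompanion

  // Hypothetical helper: the sealed supertype lets the companion stand-ins
  // be passed around uniformly; toString is the only member they expose here.
  def nameOf(c: AnyValCompanion): String = c.toString

  def main(args: Array[String]): Unit = {
    println(nameOf(scala.runtime.Int))    // "object scala.Int"
    println(nameOf(scala.runtime.Double)) // "object scala.Double"
  }
}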
diff --git a/src/library/scala/runtime/ArrayRuntime.java b/src/library/scala/runtime/ArrayRuntime.java
new file mode 100644
index 0000000000..b382fdf8f6
--- /dev/null
+++ b/src/library/scala/runtime/ArrayRuntime.java
@@ -0,0 +1,16 @@
+package scala.runtime;
+
+/**
+ * Methods on Java arrays
+ */
+class ArrayRuntime {
+ static boolean[] cloneArray(boolean[] array) { return array.clone(); }
+ static byte[] cloneArray(byte[] array) { return array.clone(); }
+ static short[] cloneArray(short[] array) { return array.clone(); }
+ static char[] cloneArray(char[] array) { return array.clone(); }
+ static int[] cloneArray(int[] array) { return array.clone(); }
+ static long[] cloneArray(long[] array) { return array.clone(); }
+ static float[] cloneArray(float[] array) { return array.clone(); }
+ static double[] cloneArray(double[] array) { return array.clone(); }
+ static Object[] cloneArray(Object[] array) { return array.clone(); }
+}
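
ArrayRuntime is package-private, so user code never calls it directly; it simply gives the runtime one clone overload per primitive array type (plus Object[]) so that cloning preserves the exact array class. A hedged illustration of the observable behaviour from Scala, using only the standard Array API:

object ArrayCloneDemo {
  def main(args: Array[String]): Unit = {
    val xs = Array(1, 2, 3)   // an int[] at runtime
    val ys = xs.clone()       // still an int[]; independent storage
    ys(0) = 42
    assert(xs(0) == 1 && ys(0) == 42)
    println(ys.getClass.getSimpleName)  // "int[]"
  }
}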
diff --git a/src/library/scala/runtime/BooleanRef.java b/src/library/scala/runtime/BooleanRef.java
index 88926dab88..bf43b470b4 100644
--- a/src/library/scala/runtime/BooleanRef.java
+++ b/src/library/scala/runtime/BooleanRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala
index 0d9d21498b..46515a7997 100644
--- a/src/library/scala/runtime/Boxed.scala
+++ b/src/library/scala/runtime/Boxed.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/BoxedAnyArray.scala b/src/library/scala/runtime/BoxedAnyArray.scala
deleted file mode 100644
index e0f6bd5be2..0000000000
--- a/src/library/scala/runtime/BoxedAnyArray.scala
+++ /dev/null
@@ -1,224 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-import compat.Platform
-
-/**
- * Arrays created by <code>new Array[T](length)</code> where <code>T</code>
- * is a type variable.
- *
- * @author Martin Odersky
- */
-@serializable
-final class BoxedAnyArray[A](val length: Int) extends BoxedArray[A] {
-
- def elemManifest: ClassManifest[A] = null
-
- private var boxed = new Array[AnyRef](length)
-// private val hash = boxed.hashCode()
- private var unboxed: AnyRef = null
- private var elemClass: Class[_] = null
-
- def apply(index: Int): A = synchronized {
- if (unboxed eq null)
- boxed(index)
- else if (elemClass eq classOf[Int])
- Int.box(unboxed.asInstanceOf[Array[Int]](index))
- else if (elemClass eq classOf[Double])
- Double.box(unboxed.asInstanceOf[Array[Double]](index))
- else if (elemClass eq classOf[Float])
- Float.box(unboxed.asInstanceOf[Array[Float]](index))
- else if (elemClass eq classOf[Long])
- Long.box(unboxed.asInstanceOf[Array[Long]](index))
- else if (elemClass eq classOf[Char])
- Char.box(unboxed.asInstanceOf[Array[Char]](index))
- else if (elemClass eq classOf[Byte])
- Byte.box(unboxed.asInstanceOf[Array[Byte]](index))
- else if (elemClass eq classOf[Short])
- Short.box(unboxed.asInstanceOf[Array[Short]](index))
- else if (elemClass eq classOf[Boolean])
- Boolean.box(unboxed.asInstanceOf[Array[Boolean]](index))
- else
- unboxed.asInstanceOf[Array[AnyRef]](index)
- }.asInstanceOf[A]
-
- def update(index: Int, _elem: A): Unit = synchronized {
- val elem = _elem.asInstanceOf[AnyRef]
- if (unboxed eq null)
- boxed(index) = elem
- else if (elemClass eq classOf[Int])
- unboxed.asInstanceOf[Array[Int]](index) = Int.unbox(elem)
- else if (elemClass eq classOf[Double])
- unboxed.asInstanceOf[Array[Double]](index) = Double.unbox(elem)
- else if (elemClass eq classOf[Float])
- unboxed.asInstanceOf[Array[Float]](index) = Float.unbox(elem)
- else if (elemClass eq classOf[Long])
- unboxed.asInstanceOf[Array[Long]](index) = Long.unbox(elem)
- else if (elemClass eq classOf[Char])
- unboxed.asInstanceOf[Array[Char]](index) = Char.unbox(elem)
- else if (elemClass eq classOf[Byte])
- unboxed.asInstanceOf[Array[Byte]](index) = Byte.unbox(elem)
- else if (elemClass eq classOf[Short])
- unboxed.asInstanceOf[Array[Short]](index) = Short.unbox(elem)
- else if (elemClass eq classOf[Boolean])
- unboxed.asInstanceOf[Array[Boolean]](index) = Boolean.unbox(elem)
- else
- unboxed.asInstanceOf[Array[AnyRef]](index) = elem
- }
-
- def unbox(elemClass: Class[_]): AnyRef = synchronized {
- if (unboxed eq null) {
- this.elemClass = elemClass;
- if (elemClass eq classOf[Int]) {
- val newvalue = new Array[Int](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Int.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue
- } else if (elemClass eq classOf[Double]) {
- val newvalue = new Array[Double](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Double.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Float]) {
- val newvalue = new Array[Float](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Float.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Long]) {
- val newvalue = new Array[Long](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Long.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Char]) {
- val newvalue = new Array[Char](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Char.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue
- } else if (elemClass eq classOf[Byte]) {
- val newvalue = new Array[Byte](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Byte.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Short]) {
- val newvalue = new Array[Short](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Short.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Boolean]) {
- val newvalue = new Array[Boolean](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Boolean.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass == classOf[AnyRef]) {
- unboxed = boxed
- } else {
- unboxed = Platform.createArray(elemClass, length)
- if (elemClass.isArray) {
- var i = 0
- while (i < length) {
- boxed(i) match {
- case ba: BoxedArray[_] => boxed(i) = ba.unbox(elemClass.getComponentType())
- case _ =>
- }
- i += 1
- }
- }
- Platform.arraycopy(boxed, 0, unboxed, 0, length)
- }
- boxed = null
- }
- unboxed
- }
-
- def value: AnyRef = {
- if (unboxed eq null) throw new NotDefinedError("BoxedAnyArray.value")
- unboxed
- }
-
- private def adapt(other: AnyRef): AnyRef =
- if (this.unboxed eq null)
- other match {
- case that: BoxedAnyArray[_] =>
- if (that.unboxed eq null) {
- that.boxed
- } else {
- if (ScalaRunTime.isValueClass(that.elemClass)) unbox(that.elemClass);
- that.unboxed
- }
- case that: BoxedArray[_] =>
- adapt(that.value)
- case that: Array[Int] =>
- unbox(classOf[Int]); that
- case that: Array[Double] =>
- unbox(classOf[Double]); that
- case that: Array[Float] =>
- unbox(classOf[Float]); that
- case that: Array[Long] =>
- unbox(classOf[Long]); that
- case that: Array[Char] =>
- unbox(classOf[Char]); that
- case that: Array[Short] =>
- unbox(classOf[Short]); that
- case that: Array[Byte] =>
- unbox(classOf[Byte]); that
- case that: Array[Boolean] =>
- unbox(classOf[Boolean]); that
- case _ =>
- other
- }
- else
- other match {
- case that: BoxedAnyArray[_] =>
- if (that.unboxed ne null) that.unboxed
- else if (ScalaRunTime.isValueClass(this.elemClass)) that.unbox(this.elemClass)
- else that.boxed
- case that: BoxedArray[_] =>
- adapt(that.value)
- case _ =>
- other
- }
-
- override def copyFrom(src: AnyRef, from: Int, to: Int, len: Int) {
- val src1 = adapt(src)
- Array.copy(src1, from, if (unboxed ne null) unboxed else boxed, to, len)
- }
-
- override def copyTo(from: Int, dest: AnyRef, to: Int, len: Int) {
- var dest1 = adapt(dest)
- Array.copy(if (unboxed ne null) unboxed else boxed, from, dest1, to, len)
- }
-}
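
With BoxedAnyArray removed, a generic new Array[T](length) no longer goes through a lazily-unboxed wrapper; in the 2.8 scheme the element type is supplied via a ClassManifest (the same scala.reflect.ClassManifest imported by the deleted code) so a native Java array is allocated up front. A hedged sketch of that replacement pattern, not taken from this patch:

import scala.reflect.ClassManifest

object GenericArrayDemo {
  // Hypothetical helper: the context bound asks the compiler for a
  // ClassManifest[T], which lets new Array[T](n) allocate the correct
  // primitive or reference array directly.
  def makeArray[T: ClassManifest](n: Int): Array[T] = new Array[T](n)

  def main(args: Array[String]): Unit = {
    val ints = makeArray[Int](3)         // an int[] at runtime, no boxing wrapper
    val strs = makeArray[String](2)      // a String[] at runtime
    println(ints.getClass.getSimpleName) // "int[]"
    println(strs.getClass.getSimpleName) // "String[]"
  }
}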
diff --git a/src/library/scala/runtime/BoxedArray.scala b/src/library/scala/runtime/BoxedArray.scala
deleted file mode 100644
index 0a8659ffe6..0000000000
--- a/src/library/scala/runtime/BoxedArray.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-
-import scala.reflect.ClassManifest
-import collection.mutable._
-import collection.Seq
-
-/**
- * <p>A class representing <code>Array[T]</code></p>
- *
- * @author Martin Odersky, Stephane Micheloud
- * @version 1.0
- */
-abstract class BoxedArray[A] extends IndexedSeq[A] with IndexedSeqLike[A, BoxedArray[A]] with Boxed { self =>
-
- val ex = new Error("trying to create a BoxedArray")
- ex.printStackTrace()
- throw ex
-
- /** The manifest of the element type */
- def elemManifest: ClassManifest[A]
-
- /** The length of the array */
- def length: Int
-
- /** The element at given index */
- def apply(index: Int): A
-
- /** Update element at given index */
- def update(index: Int, elem: A): Unit
-
- /** Creates new builder for this collection ==> move to subclasses
- */
- override protected[this] def newBuilder: Builder[A, BoxedArray[A]] =
- genericBuilder[A]
-
- // !!! todo: remove
- override def genericBuilder[B]: Builder[B, BoxedArray[B]] = new ArrayBuffer[B].mapResult {
- _.toArray(null).asInstanceOf[BoxedArray[B]]
- }
-
- /** Convert to Java array.
- * @param elemTag Either one of the tags ".N" where N is the name of a primitive type
- * (@see ScalaRunTime), or a full class name.
- */
- def unbox(elemClass: Class[_]): AnyRef
-
- /** The underlying array value
- */
- def value: AnyRef
-
- def copyFrom(src: AnyRef, from: Int, to: Int, len: Int): Unit =
- Array.copy(src, from, value, to, len)
-
- def copyTo(from: Int, dest: AnyRef, to: Int, len: Int): Unit = {
- Array.copy(value, from, dest, to, len)
- }
-
- override def toArray[B >: A](implicit m: ClassManifest[B]): Array[B] = {
- if ((elemManifest ne null) && (elemManifest.erasure eq m.erasure)) this.asInstanceOf[Array[B]]
- else super.toArray[B]
- }
-
-/*
- override def equals(other: Any) =
- (value eq other) ||
-
- other.isInstanceOf[BoxedArray[_]] && (value == other.asInstanceOf[BoxedArray[_]].value)
-
- override def hashCode(): Int = value.hashCode()
-*/
- /** Fills the given array <code>xs</code> with the elements of
- * this sequence starting at position <code>start</code>.
- *
- * @param xs the array to fill.
- * @param start starting index.
- * @pre the array must be large enough to hold all elements.
- */
- override def copyToArray[B](xs: Array[B], start: Int, len: Int): Unit =
- copyTo(0, xs, start, len)
-
- /** Creates a possible nested IndexedSeq which consists of all the elements
- * of this array. If the elements are arrays themselves, the `deep' transformation
- * is applied recursively to them. The stringPrefix of the IndexedSeq is
- * "Array", hence the IndexedSeq prints like an array with all its
- * elements shown, and the same recursively for any subarrays.
- *
- * Example: Array(Array(1, 2), Array(3, 4)).deep.toString
- * prints: Array(Array(1, 2), Array(3, 4))
- */
- def deep: collection.IndexedSeq[Any] = new collection.IndexedSeq[Any] {
- def length = self.length
- def apply(idx: Int): Any = self.apply(idx) match {
- case elem: AnyRef if ScalaRunTime.isArray(elem) => ScalaRunTime.boxArray(elem).deep
- case elem => elem
- }
- override def stringPrefix = "Array"
- }
-
- @deprecated("use deep.toString instead")
- final def deepToString() = deepMkString(stringPrefix + "(", ", ", ")")
-
- @deprecated("use deep.mkString instead")
- final def deepMkString(start: String, sep: String, end: String): String = {
- def _deepToString(x: Any) = x match {
- case a: AnyRef if ScalaRunTime.isArray(a) =>
- ScalaRunTime.boxArray(a).deepMkString(start, sep, end)
- case _ =>
- ScalaRunTime.stringOf(x)
- }
- val buf = new StringBuilder()
- buf.append(start)
- val iter = this.iterator
- if (iter.hasNext) buf.append(_deepToString(iter.next))
- while (iter.hasNext) {
- buf.append(sep); buf.append(_deepToString(iter.next))
- }
- buf.append(end)
- buf.toString
- }
-
- @deprecated("use deep.mkString instead")
- final def deepMkString(sep: String): String = this.deepMkString("", sep, "")
-
- @deprecated("use array1.deep.equals(array2.deep) instead")
- final def deepEquals(that: Any): Boolean = {
- def _deepEquals(x1: Any, x2: Any) = (x1, x2) match {
- case (a1: BoxedArray[_], a2: BoxedArray[_]) =>
- _sameElements(a1, a2)
- case (a1: AnyRef, a2: AnyRef)
- if ScalaRunTime.isArray(a1) && ScalaRunTime.isArray(a2) =>
- _sameElements(ScalaRunTime.boxArray(a1), ScalaRunTime.boxArray(a2))
- case _ =>
- x1.equals(x2)
- }
- def _sameElements(a1: BoxedArray[_], a2: BoxedArray[_]): Boolean = {
- val it1 = a1.iterator
- val it2 = a2.iterator
- var res = true
- while (res && it1.hasNext && it2.hasNext)
- res = _deepEquals(it1.next, it2.next)
- !it1.hasNext && !it2.hasNext && res
- }
- that match {
- case a: BoxedArray[_] =>
- _sameElements(this, a)
- case a: AnyRef if ScalaRunTime.isArray(a) =>
- _sameElements(this, ScalaRunTime.boxArray(a))
- case _ =>
- false
- }
- }
-
- override final def stringPrefix: String = "Array"
-}
diff --git a/src/library/scala/runtime/BoxedBooleanArray.scala b/src/library/scala/runtime/BoxedBooleanArray.scala
deleted file mode 100644
index ff781004d4..0000000000
--- a/src/library/scala/runtime/BoxedBooleanArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedBooleanArray(val value: Array[Boolean]) extends BoxedArray[Boolean] {
-
- def elemManifest = ClassManifest.Boolean
-
- def length: Int = value.length
-
- def apply(index: Int): Boolean = value(index)
-
- def update(index: Int, elem: Boolean) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedByteArray.scala b/src/library/scala/runtime/BoxedByteArray.scala
deleted file mode 100644
index 9f81dc2a86..0000000000
--- a/src/library/scala/runtime/BoxedByteArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedByteArray(val value: Array[Byte]) extends BoxedArray[Byte] {
-
- def elemManifest = ClassManifest.Byte
-
- def length: Int = value.length
-
- def apply(index: Int): Byte = value(index)
-
- def update(index: Int, elem: Byte) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedDoubleArray.scala b/src/library/scala/runtime/BoxedDoubleArray.scala
deleted file mode 100644
index 2a9a657915..0000000000
--- a/src/library/scala/runtime/BoxedDoubleArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedDoubleArray(val value: Array[Double]) extends BoxedArray[Double] {
-
- def elemManifest = ClassManifest.Double
-
- def length: Int = value.length
-
- def apply(index: Int): Double = value(index)
-
- def update(index: Int, elem: Double) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedFloatArray.scala b/src/library/scala/runtime/BoxedFloatArray.scala
deleted file mode 100644
index 038356150e..0000000000
--- a/src/library/scala/runtime/BoxedFloatArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedFloatArray(val value: Array[Float]) extends BoxedArray[Float] {
-
- def elemManifest = ClassManifest.Float
-
- def length: Int = value.length
-
- def apply(index: Int): Float = value(index)
-
- def update(index: Int, elem: Float) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedObjectArray.scala b/src/library/scala/runtime/BoxedObjectArray.scala
deleted file mode 100644
index 47ad9a3b48..0000000000
--- a/src/library/scala/runtime/BoxedObjectArray.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedObjectArray[A <: AnyRef](val value: Array[AnyRef], val elemManifest: ClassManifest[A]) extends BoxedArray[A] {
-
-// @deprecated("creating array w/o manifest")
- def this(value: Array[AnyRef]) = this(value, null) // !!! todo: remove
-
- def length: Int = value.length
-
- def apply(index: Int): A = value(index).asInstanceOf[A]
-
- def update(index: Int, elem: A) {
- value(index) = elem
- }
-
- def unbox(elemClass: Class[_]): AnyRef = value
-
-/*
- override def equals(other: Any): Boolean =
- (value eq other.asInstanceOf[AnyRef]) ||
- other.isInstanceOf[BoxedObjectArray[_]] && (value eq other.asInstanceOf[BoxedObjectArray[_]].value)
-
- override def hashCode(): Int = (value.asInstanceOf[AnyRef]).hashCode()
-*/
-
-}
-
diff --git a/src/library/scala/runtime/BoxedShortArray.scala b/src/library/scala/runtime/BoxedShortArray.scala
deleted file mode 100644
index 4da6b0c1c9..0000000000
--- a/src/library/scala/runtime/BoxedShortArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedShortArray(val value: Array[Short]) extends BoxedArray[Short] {
-
- def elemManifest = ClassManifest.Short
-
- def length: Int = value.length
-
- def apply(index: Int): Short = value(index)
-
- def update(index: Int, elem: Short) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedUnit.java b/src/library/scala/runtime/BoxedUnit.java
index 3789a9e0ec..2f1e56059f 100644
--- a/src/library/scala/runtime/BoxedUnit.java
+++ b/src/library/scala/runtime/BoxedUnit.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
diff --git a/src/library/scala/runtime/BoxedUnitArray.scala b/src/library/scala/runtime/BoxedUnitArray.scala
deleted file mode 100644
index 351237c4b2..0000000000
--- a/src/library/scala/runtime/BoxedUnitArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedUnitArray(val value: Array[Unit]) extends BoxedArray[Unit] {
-
- def elemManifest = ClassManifest.Unit
-
- def length: Int = value.length
-
- def apply(index: Int): Unit = value(index)
-
- def update(index: Int, elem: Unit) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index b4252ed647..b6539d087c 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -1,17 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
import java.io.*;
+import scala.math.ScalaNumber;
/** An object (static class) that defines methods used for creating,
* reverting, and calculating with, boxed values. There are four classes
@@ -27,18 +27,18 @@ import java.io.*;
* @author Martin Odersky
* @contributor Stepan Koltsov
* @version 2.0 */
-public class BoxesRunTime
+public final class BoxesRunTime
{
private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
private static int typeCode(Object a) {
- if (a instanceof Integer) return INT;
- if (a instanceof Byte) return BYTE;
- if (a instanceof Character) return CHAR;
- if (a instanceof Long) return LONG;
- if (a instanceof Double) return DOUBLE;
- if (a instanceof Short) return SHORT;
- if (a instanceof Float) return FLOAT;
+ if (a instanceof java.lang.Integer) return INT;
+ if (a instanceof java.lang.Byte) return BYTE;
+ if (a instanceof java.lang.Character) return CHAR;
+ if (a instanceof java.lang.Long) return LONG;
+ if (a instanceof java.lang.Double) return DOUBLE;
+ if (a instanceof java.lang.Short) return SHORT;
+ if (a instanceof java.lang.Float) return FLOAT;
return OTHER;
}
@@ -48,134 +48,163 @@ public class BoxesRunTime
/* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING */
- public static Boolean boxToBoolean(boolean b) {
- return Boolean.valueOf(b);
+ public static java.lang.Boolean boxToBoolean(boolean b) {
+ return java.lang.Boolean.valueOf(b);
}
- public static Character boxToCharacter(char c) {
- // !!! Temporarily working around the "impossible" (?) fact that
- // c can have a negative value here. In any revision since r17461 try:
- // def foo = new (Short => Char) { def apply(x: Short) = x.toChar }
- // foo(-100)
- // and the -100 will get to Character, which will duly crash.
- // The bug was masked before because the Characters were created
- // with "new Character(c)", but now the static method uses the argument
- // as an index into a cache array, which can't be negative.
- //
- // It appears to be Short-specific; I can't get anything similar
- // out of Byte or Int.
- Character ret;
-
- // straightforward workarounds like bitmasking do not seem to
- // work here; is java optimizing out "impossible" tests/ops? I
- // don't know, but this is the safe way:
- try {
- ret = Character.valueOf(c);
- }
- catch (ArrayIndexOutOfBoundsException e) {
- ret = new Character(c);
- }
-
- return ret;
+ public static java.lang.Character boxToCharacter(char c) {
+ return java.lang.Character.valueOf(c);
}
- public static Byte boxToByte(byte b) {
- return Byte.valueOf(b);
+ public static java.lang.Byte boxToByte(byte b) {
+ return java.lang.Byte.valueOf(b);
}
- public static Short boxToShort(short s) {
- return Short.valueOf(s);
+ public static java.lang.Short boxToShort(short s) {
+ return java.lang.Short.valueOf(s);
}
- public static Integer boxToInteger(int i) {
- return Integer.valueOf(i);
+ public static java.lang.Integer boxToInteger(int i) {
+ return java.lang.Integer.valueOf(i);
}
- public static Long boxToLong(long l) {
- return Long.valueOf(l);
+ public static java.lang.Long boxToLong(long l) {
+ return java.lang.Long.valueOf(l);
}
- public static Float boxToFloat(float f) {
- return Float.valueOf(f);
+ public static java.lang.Float boxToFloat(float f) {
+ return java.lang.Float.valueOf(f);
}
- public static Double boxToDouble(double d) {
- return Double.valueOf(d);
+ public static java.lang.Double boxToDouble(double d) {
+ return java.lang.Double.valueOf(d);
}
/* UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING */
public static boolean unboxToBoolean(Object b) {
- return b == null ? false : ((Boolean)b).booleanValue();
+ return b == null ? false : ((java.lang.Boolean)b).booleanValue();
}
public static char unboxToChar(Object c) {
- return c == null ? 0 : ((Character)c).charValue();
+ return c == null ? 0 : ((java.lang.Character)c).charValue();
}
public static byte unboxToByte(Object b) {
- return b == null ? 0 : ((Byte)b).byteValue();
+ return b == null ? 0 : ((java.lang.Byte)b).byteValue();
}
public static short unboxToShort(Object s) {
- return s == null ? 0 : ((Short)s).shortValue();
+ return s == null ? 0 : ((java.lang.Short)s).shortValue();
}
public static int unboxToInt(Object i) {
- return i == null ? 0 : ((Integer)i).intValue();
+ return i == null ? 0 : ((java.lang.Integer)i).intValue();
}
public static long unboxToLong(Object l) {
- return l == null ? 0 : ((Long)l).longValue();
+ return l == null ? 0 : ((java.lang.Long)l).longValue();
}
public static float unboxToFloat(Object f) {
- return f == null ? 0.0f : ((Float)f).floatValue();
+ return f == null ? 0.0f : ((java.lang.Float)f).floatValue();
}
public static double unboxToDouble(Object d) {
- return d == null ? 0.0d : ((Double)d).doubleValue();
+ return d == null ? 0.0d : ((java.lang.Double)d).doubleValue();
}
/* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON */
- // That's the method we should use from now on.
+ private static int eqTypeCode(Number a) {
+ if ((a instanceof java.lang.Integer) || (a instanceof java.lang.Byte)) return INT;
+ if (a instanceof java.lang.Long) return LONG;
+ if (a instanceof java.lang.Double) return DOUBLE;
+ if (a instanceof java.lang.Short) return INT;
+ if (a instanceof java.lang.Float) return FLOAT;
+ return OTHER;
+ }
+
public static boolean equals(Object x, Object y) {
- if (x instanceof Number) {
- Number xn = (Number)x;
- if (y instanceof Number) {
- Number yn = (Number)y;
- if ((xn instanceof Double) || (yn instanceof Double))
- return xn.doubleValue() == yn.doubleValue();
- if ((xn instanceof Float) || (yn instanceof Float))
- return xn.floatValue() == yn.floatValue();
- if ((xn instanceof Long) || (yn instanceof Long))
- return xn.longValue() == yn.longValue();
- return xn.intValue() == yn.intValue();
- }
- if (y instanceof Character)
- return equalsNumChar(xn, (Character)y);
- } else if (x instanceof Character) {
- Character xc = (Character)x;
- if (y instanceof Character)
- return (xc.charValue() == ((Character)y).charValue());
- if (y instanceof Number)
- return equalsNumChar((Number)y, xc);
- } else if (x == null) {
+ if (x == y) return true;
+ return equals2(x, y);
+ }
+
+ /** Since all applicable logic has to be present in the equals method of a ScalaNumber
+ * in any case, we dispatch to it as soon as we spot one on either side.
+ */
+ public static boolean equals2(Object x, Object y) {
+ if (x instanceof java.lang.Number)
+ return equalsNumObject((java.lang.Number)x, y);
+ if (x instanceof java.lang.Character)
+ return equalsCharObject((java.lang.Character)x, y);
+ if (x == null)
return y == null;
- }
+
return x.equals(y);
}
- private static boolean equalsNumChar(Number x, Character y) {
- char ch = y.charValue();
- if (x instanceof Double)
- return x.doubleValue() == ch;
- if (x instanceof Float)
- return x.floatValue() == ch;
- if (x instanceof Long)
- return x.longValue() == ch;
- return x.intValue() == ch;
+ public static boolean equalsNumObject(java.lang.Number xn, Object y) {
+ if (y instanceof java.lang.Number)
+ return equalsNumNum(xn, (java.lang.Number)y);
+ if (y instanceof java.lang.Character)
+ return equalsNumChar(xn, (java.lang.Character)y);
+ if (xn == null)
+ return y == null;
+
+ return xn.equals(y);
+ }
+
+ public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) {
+ int xcode = eqTypeCode(xn);
+ int ycode = eqTypeCode(yn);
+ switch (ycode > xcode ? ycode : xcode) {
+ case INT:
+ return xn.intValue() == yn.intValue();
+ case LONG:
+ return xn.longValue() == yn.longValue();
+ case FLOAT:
+ return xn.floatValue() == yn.floatValue();
+ case DOUBLE:
+ return xn.doubleValue() == yn.doubleValue();
+ default:
+ if ((yn instanceof ScalaNumber) && !(xn instanceof ScalaNumber))
+ return yn.equals(xn);
+ }
+ if (xn == null)
+ return yn == null;
+
+ return xn.equals(yn);
+ }
+
+ public static boolean equalsCharObject(java.lang.Character xc, Object y) {
+ if (y instanceof java.lang.Character)
+ return xc.charValue() == ((java.lang.Character)y).charValue();
+ if (y instanceof java.lang.Number)
+ return equalsNumChar((java.lang.Number)y, xc);
+ if (xc == null)
+ return y == null;
+
+ return xc.equals(y);
+ }
+
+ private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ char ch = yc.charValue();
+ switch (eqTypeCode(xn)) {
+ case INT:
+ return xn.intValue() == ch;
+ case LONG:
+ return xn.longValue() == ch;
+ case FLOAT:
+ return xn.floatValue() == ch;
+ case DOUBLE:
+ return xn.doubleValue() == ch;
+ default:
+ if (xn == null)
+ return yc == null;
+
+ return xn.equals(yc);
+ }
}
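
The reworked dispatch above implements what came to be called cooperative equality: boxed numeric values (and characters) of different widths compare by numeric value, and a ScalaNumber on either side gets the final say. A brief illustrative sketch of the behaviour as seen from Scala, not part of the patch itself:

object CooperativeEqualityDemo {
  def main(args: Array[String]): Unit = {
    val a: Any = 1      // boxes to java.lang.Integer
    val b: Any = 1L     // boxes to java.lang.Long
    val c: Any = 'a'    // boxes to java.lang.Character
    assert(a == b)      // compared as longs via the numeric dispatch above
    assert(c == 97)     // char vs number: 'a' has code point 97
    assert(a != 1.5)    // different numeric values remain unequal
  }
}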
/** Hashcode algorithm is driven by the requirements imposed
@@ -205,33 +234,33 @@ public class BoxesRunTime
* Note: Among primitives, Float.NaN != Float.NaN, but the boxed
 * versions are equal. This still needs reconciliation.
*/
- public static int hashFromLong(Long n) {
- int iv = n.intValue();
- if (iv == n.longValue()) return iv;
- else return n.hashCode();
- }
- public static int hashFromDouble(Double n) {
- int iv = n.intValue();
- double dv = n.doubleValue();
- if (iv == dv) return iv;
-
- long lv = n.longValue();
- if (lv == dv) return Long.valueOf(lv).hashCode();
- else return n.hashCode();
- }
- public static int hashFromFloat(Float n) {
- int iv = n.intValue();
- float fv = n.floatValue();
- if (iv == fv) return iv;
-
- long lv = n.longValue();
- if (lv == fv) return Long.valueOf(lv).hashCode();
- else return n.hashCode();
- }
- public static int hashFromNumber(Number n) {
- if (n instanceof Long) return hashFromLong((Long)n);
- else if (n instanceof Double) return hashFromDouble((Double)n);
- else if (n instanceof Float) return hashFromFloat((Float)n);
+ public static int hashFromLong(java.lang.Long n) {
+ int iv = n.intValue();
+ if (iv == n.longValue()) return iv;
+ else return n.hashCode();
+ }
+ public static int hashFromDouble(java.lang.Double n) {
+ int iv = n.intValue();
+ double dv = n.doubleValue();
+ if (iv == dv) return iv;
+
+ long lv = n.longValue();
+ if (lv == dv) return java.lang.Long.valueOf(lv).hashCode();
+ else return n.hashCode();
+ }
+ public static int hashFromFloat(java.lang.Float n) {
+ int iv = n.intValue();
+ float fv = n.floatValue();
+ if (iv == fv) return iv;
+
+ long lv = n.longValue();
+ if (lv == fv) return java.lang.Long.valueOf(lv).hashCode();
+ else return n.hashCode();
+ }
+ public static int hashFromNumber(java.lang.Number n) {
+ if (n instanceof java.lang.Long) return hashFromLong((java.lang.Long)n);
+ else if (n instanceof java.lang.Double) return hashFromDouble((java.lang.Double)n);
+ else if (n instanceof java.lang.Float) return hashFromFloat((java.lang.Float)n);
else return n.hashCode();
}
public static int hashFromObject(Object a) {
@@ -247,23 +276,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 + val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 + val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToFloat(val1 + val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToDouble(val1 + val2);
}
throw new NoSuchMethodException();
@@ -275,23 +304,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 - val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 - val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToFloat(val1 - val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToDouble(val1 - val2);
}
throw new NoSuchMethodException();
@@ -303,23 +332,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 * val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 * val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToFloat(val1 * val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToDouble(val1 * val2);
}
throw new NoSuchMethodException();
@@ -331,23 +360,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 / val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 / val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToFloat(val1 / val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToDouble(val1 / val2);
}
throw new NoSuchMethodException();
@@ -359,23 +388,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 % val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 % val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToFloat(val1 % val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToDouble(val1 % val2);
}
throw new NoSuchMethodException();
@@ -386,24 +415,24 @@ public class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 >> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToInteger(val1 >> val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToLong(val1 >> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 >> val2);
}
}
@@ -415,24 +444,24 @@ public class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 << val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToInteger(val1 << val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToLong(val1 << val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 << val2);
}
}
@@ -444,24 +473,24 @@ public class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 >>> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToInteger(val1 >>> val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToLong(val1 >>> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 >>> val2);
}
}
@@ -472,19 +501,19 @@ public class BoxesRunTime
public static Object negate(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).intValue();
+ int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
return boxToInteger(-val);
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).longValue();
+ long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
return boxToLong(-val);
}
if (code <= FLOAT) {
- float val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).floatValue();
+ float val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).floatValue();
return boxToFloat(-val);
}
if (code <= DOUBLE) {
- double val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).doubleValue();
+ double val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).doubleValue();
return boxToDouble(-val);
}
throw new NoSuchMethodException();
@@ -494,19 +523,19 @@ public class BoxesRunTime
public static Object positive(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).intValue();
+ int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
return boxToInteger(+val);
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).longValue();
+ long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
return boxToLong(+val);
}
if (code <= FLOAT) {
- float val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).floatValue();
+ float val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).floatValue();
return boxToFloat(+val);
}
if (code <= DOUBLE) {
- double val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).doubleValue();
+ double val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).doubleValue();
return boxToDouble(+val);
}
throw new NoSuchMethodException();
@@ -518,19 +547,19 @@ public class BoxesRunTime
if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
throw new NoSuchMethodException();
}
- return boxToBoolean(((Boolean) arg1).booleanValue() & ((Boolean) arg2).booleanValue());
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 & val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 & val2);
}
throw new NoSuchMethodException();
@@ -542,19 +571,19 @@ public class BoxesRunTime
if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
throw new NoSuchMethodException();
}
- return boxToBoolean(((Boolean) arg1).booleanValue() | ((Boolean) arg2).booleanValue());
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() | ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 | val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 | val2);
}
throw new NoSuchMethodException();
@@ -566,19 +595,19 @@ public class BoxesRunTime
if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
throw new NoSuchMethodException();
}
- return boxToBoolean(((Boolean) arg1).booleanValue() ^ ((Boolean) arg2).booleanValue());
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() ^ ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToInteger(val1 ^ val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToLong(val1 ^ val2);
}
throw new NoSuchMethodException();
@@ -587,7 +616,7 @@ public class BoxesRunTime
/** arg1 && arg2 */
public static Object takeConditionalAnd(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) {
- return boxToBoolean(((Boolean) arg1).booleanValue() && ((Boolean) arg2).booleanValue());
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() && ((java.lang.Boolean) arg2).booleanValue());
}
throw new NoSuchMethodException();
}
@@ -595,7 +624,7 @@ public class BoxesRunTime
/** arg1 || arg2 */
public static Object takeConditionalOr(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) {
- return boxToBoolean(((Boolean) arg1).booleanValue() || ((Boolean) arg2).booleanValue());
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() || ((java.lang.Boolean) arg2).booleanValue());
}
throw new NoSuchMethodException();
}
@@ -604,11 +633,11 @@ public class BoxesRunTime
public static Object complement(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).intValue();
+ int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
return boxToInteger(~val);
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((Character) arg).charValue() : ((Number) arg).longValue();
+ long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
return boxToLong(~val);
}
throw new NoSuchMethodException();
@@ -617,7 +646,7 @@ public class BoxesRunTime
/** !arg */
public static Object takeNot(Object arg) throws NoSuchMethodException {
if (arg instanceof Boolean) {
- return boxToBoolean(!((Boolean) arg).booleanValue());
+ return boxToBoolean(!((java.lang.Boolean) arg).booleanValue());
}
throw new NoSuchMethodException();
}
@@ -635,23 +664,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToBoolean(val1 < val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToBoolean(val1 < val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToBoolean(val1 < val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToBoolean(val1 < val2);
}
throw new NoSuchMethodException();
@@ -662,23 +691,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToBoolean(val1 <= val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToBoolean(val1 <= val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToBoolean(val1 <= val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToBoolean(val1 <= val2);
}
throw new NoSuchMethodException();
@@ -689,23 +718,23 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToBoolean(val1 >= val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToBoolean(val1 >= val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToBoolean(val1 >= val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToBoolean(val1 >= val2);
}
throw new NoSuchMethodException();
@@ -716,109 +745,109 @@ public class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).intValue();
+ int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
return boxToBoolean(val1 > val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).longValue();
+ long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
return boxToBoolean(val1 > val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).floatValue();
+ float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
+ float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
return boxToBoolean(val1 > val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((Character) arg1).charValue() : ((Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((Character) arg2).charValue() : ((Number) arg2).doubleValue();
+ double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
+ double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
return boxToBoolean(val1 > val2);
}
throw new NoSuchMethodException();
}
/** arg.toChar */
- public static Character toCharacter(Object arg) throws NoSuchMethodException {
- if (arg instanceof Integer) return boxToCharacter((char)unboxToInt(arg));
- if (arg instanceof Short) return boxToCharacter((char)unboxToShort(arg));
- if (arg instanceof Character) return (Character)arg;
- if (arg instanceof Long) return boxToCharacter((char)unboxToLong(arg));
- if (arg instanceof Byte) return boxToCharacter((char)unboxToByte(arg));
- if (arg instanceof Float) return boxToCharacter((char)unboxToFloat(arg));
- if (arg instanceof Double) return boxToCharacter((char)unboxToDouble(arg));
+ public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg));
+ if (arg instanceof java.lang.Short) return boxToCharacter((char)unboxToShort(arg));
+ if (arg instanceof java.lang.Character) return (java.lang.Character)arg;
+ if (arg instanceof java.lang.Long) return boxToCharacter((char)unboxToLong(arg));
+ if (arg instanceof java.lang.Byte) return boxToCharacter((char)unboxToByte(arg));
+ if (arg instanceof java.lang.Float) return boxToCharacter((char)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return boxToCharacter((char)unboxToDouble(arg));
throw new NoSuchMethodException();
}
/** arg.toByte */
- public static Byte toByte(Object arg) throws NoSuchMethodException {
- if (arg instanceof Integer) return boxToByte((byte)unboxToInt(arg));
- if (arg instanceof Character) return boxToByte((byte)unboxToChar(arg));
- if (arg instanceof Byte) return (Byte)arg;
- if (arg instanceof Long) return boxToByte((byte)unboxToLong(arg));
- if (arg instanceof Short) return boxToByte((byte)unboxToShort(arg));
- if (arg instanceof Float) return boxToByte((byte)unboxToFloat(arg));
- if (arg instanceof Double) return boxToByte((byte)unboxToDouble(arg));
+ public static java.lang.Byte toByte(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToByte((byte)unboxToInt(arg));
+ if (arg instanceof java.lang.Character) return boxToByte((byte)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return (java.lang.Byte)arg;
+ if (arg instanceof java.lang.Long) return boxToByte((byte)unboxToLong(arg));
+ if (arg instanceof java.lang.Short) return boxToByte((byte)unboxToShort(arg));
+ if (arg instanceof java.lang.Float) return boxToByte((byte)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return boxToByte((byte)unboxToDouble(arg));
throw new NoSuchMethodException();
}
/** arg.toShort */
- public static Short toShort(Object arg) throws NoSuchMethodException {
- if (arg instanceof Integer) return boxToShort((short)unboxToInt(arg));
- if (arg instanceof Long) return boxToShort((short)unboxToLong(arg));
- if (arg instanceof Character) return boxToShort((short)unboxToChar(arg));
- if (arg instanceof Byte) return boxToShort((short)unboxToByte(arg));
- if (arg instanceof Short) return (Short)arg;
- if (arg instanceof Float) return boxToShort((short)unboxToFloat(arg));
- if (arg instanceof Double) return boxToShort((short)unboxToDouble(arg));
+ public static java.lang.Short toShort(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToShort((short)unboxToInt(arg));
+ if (arg instanceof java.lang.Long) return boxToShort((short)unboxToLong(arg));
+ if (arg instanceof java.lang.Character) return boxToShort((short)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToShort((short)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return (java.lang.Short)arg;
+ if (arg instanceof java.lang.Float) return boxToShort((short)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return boxToShort((short)unboxToDouble(arg));
throw new NoSuchMethodException();
}
/** arg.toInt */
- public static Integer toInteger(Object arg) throws NoSuchMethodException {
- if (arg instanceof Integer) return (Integer)arg;
- if (arg instanceof Long) return boxToInteger((int)unboxToLong(arg));
- if (arg instanceof Double) return boxToInteger((int)unboxToDouble(arg));
- if (arg instanceof Float) return boxToInteger((int)unboxToFloat(arg));
- if (arg instanceof Character) return boxToInteger((int)unboxToChar(arg));
- if (arg instanceof Byte) return boxToInteger((int)unboxToByte(arg));
- if (arg instanceof Short) return boxToInteger((int)unboxToShort(arg));
+ public static java.lang.Integer toInteger(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return (java.lang.Integer)arg;
+ if (arg instanceof java.lang.Long) return boxToInteger((int)unboxToLong(arg));
+ if (arg instanceof java.lang.Double) return boxToInteger((int)unboxToDouble(arg));
+ if (arg instanceof java.lang.Float) return boxToInteger((int)unboxToFloat(arg));
+ if (arg instanceof java.lang.Character) return boxToInteger((int)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToInteger((int)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToInteger((int)unboxToShort(arg));
throw new NoSuchMethodException();
}
/** arg.toLong */
- public static Long toLong(Object arg) throws NoSuchMethodException {
- if (arg instanceof Integer) return boxToLong((long)unboxToInt(arg));
- if (arg instanceof Double) return boxToLong((long)unboxToDouble(arg));
- if (arg instanceof Float) return boxToLong((long)unboxToFloat(arg));
- if (arg instanceof Long) return (Long)arg;
- if (arg instanceof Character) return boxToLong((long)unboxToChar(arg));
- if (arg instanceof Byte) return boxToLong((long)unboxToByte(arg));
- if (arg instanceof Short) return boxToLong((long)unboxToShort(arg));
+ public static java.lang.Long toLong(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToLong((long)unboxToInt(arg));
+ if (arg instanceof java.lang.Double) return boxToLong((long)unboxToDouble(arg));
+ if (arg instanceof java.lang.Float) return boxToLong((long)unboxToFloat(arg));
+ if (arg instanceof java.lang.Long) return (java.lang.Long)arg;
+ if (arg instanceof java.lang.Character) return boxToLong((long)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToLong((long)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToLong((long)unboxToShort(arg));
throw new NoSuchMethodException();
}
/** arg.toFloat */
- public static Float toFloat(Object arg) throws NoSuchMethodException {
- if (arg instanceof Integer) return boxToFloat((float)unboxToInt(arg));
- if (arg instanceof Long) return boxToFloat((float)unboxToLong(arg));
- if (arg instanceof Float) return (Float)arg;
- if (arg instanceof Double) return boxToFloat((float)unboxToDouble(arg));
- if (arg instanceof Character) return boxToFloat((float)unboxToChar(arg));
- if (arg instanceof Byte) return boxToFloat((float)unboxToByte(arg));
- if (arg instanceof Short) return boxToFloat((float)unboxToShort(arg));
+ public static java.lang.Float toFloat(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToFloat((float)unboxToInt(arg));
+ if (arg instanceof java.lang.Long) return boxToFloat((float)unboxToLong(arg));
+ if (arg instanceof java.lang.Float) return (java.lang.Float)arg;
+ if (arg instanceof java.lang.Double) return boxToFloat((float)unboxToDouble(arg));
+ if (arg instanceof java.lang.Character) return boxToFloat((float)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToFloat((float)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToFloat((float)unboxToShort(arg));
throw new NoSuchMethodException();
}
/** arg.toDouble */
- public static Double toDouble(Object arg) throws NoSuchMethodException {
- if (arg instanceof Integer) return boxToDouble((double)unboxToInt(arg));
- if (arg instanceof Float) return boxToDouble((double)unboxToFloat(arg));
- if (arg instanceof Double) return (Double)arg;
- if (arg instanceof Long) return boxToDouble((double)unboxToLong(arg));
- if (arg instanceof Character) return boxToDouble((double)unboxToChar(arg));
- if (arg instanceof Byte) return boxToDouble((double)unboxToByte(arg));
- if (arg instanceof Short) return boxToDouble((double)unboxToShort(arg));
+ public static java.lang.Double toDouble(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToDouble((double)unboxToInt(arg));
+ if (arg instanceof java.lang.Float) return boxToDouble((double)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return (java.lang.Double)arg;
+ if (arg instanceof java.lang.Long) return boxToDouble((double)unboxToLong(arg));
+ if (arg instanceof java.lang.Character) return boxToDouble((double)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToDouble((double)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToDouble((double)unboxToShort(arg));
throw new NoSuchMethodException();
}
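The arithmetic, bitwise and comparison helpers above all follow one promotion scheme: typeCode assigns each boxed operand a code (CHAR and the integral types up to INT, then LONG, FLOAT, DOUBLE), the larger code of the two operands wins, and both values are unboxed at that width before the primitive operation runs. The following condensed Scala sketch is illustrative only; the constant values, names and reduced operand handling are assumptions, not the BoxesRunTime code itself:

    import java.{ lang => jl }

    object PromotionSketch {
      // Illustrative ordering; BoxesRunTime defines its own type codes.
      private val CHAR = 0; private val INT = 1; private val LONG = 2
      private val FLOAT = 3; private val DOUBLE = 4

      private def typeCode(x: Any): Int = x match {
        case _: jl.Character                          => CHAR
        case _: jl.Byte | _: jl.Short | _: jl.Integer => INT
        case _: jl.Long                               => LONG
        case _: jl.Float                              => FLOAT
        case _: jl.Double                             => DOUBLE
        case _ => throw new NoSuchMethodException     // non-numeric operand
      }
      private def longValue(x: Any): Long = x match {
        case c: jl.Character => c.charValue.toLong
        case n: jl.Number    => n.longValue
      }
      private def doubleValue(x: Any): Double = x match {
        case c: jl.Character => c.charValue.toDouble
        case n: jl.Number    => n.doubleValue
      }

      /** `a + b` under the same widening rules as the helpers above. */
      def add(a: Any, b: Any): Any = {
        val maxcode = typeCode(a) max typeCode(b)
        if (maxcode <= INT)        (longValue(a) + longValue(b)).toInt
        else if (maxcode <= LONG)   longValue(a) + longValue(b)
        else if (maxcode <= FLOAT) (doubleValue(a) + doubleValue(b)).toFloat
        else                        doubleValue(a) + doubleValue(b)
      }
    }

Characters always go through charValue because java.lang.Character is not a java.lang.Number, which is exactly why every branch above special-cases code == CHAR.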
diff --git a/src/library/scala/runtime/ByteRef.java b/src/library/scala/runtime/ByteRef.java
index 06cfbadcd0..fc0b4c7442 100644
--- a/src/library/scala/runtime/ByteRef.java
+++ b/src/library/scala/runtime/ByteRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
@@ -17,5 +16,5 @@ public class ByteRef implements java.io.Serializable {
public byte elem;
public ByteRef(byte elem) { this.elem = elem; }
- public String toString() { return Byte.toString(elem); }
+ public String toString() { return java.lang.Byte.toString(elem); }
}
diff --git a/src/library/scala/runtime/CharRef.java b/src/library/scala/runtime/CharRef.java
index 0b287d1ec6..48d712b2b9 100644
--- a/src/library/scala/runtime/CharRef.java
+++ b/src/library/scala/runtime/CharRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
@@ -17,5 +16,5 @@ public class CharRef implements java.io.Serializable {
public char elem;
public CharRef(char elem) { this.elem = elem; }
- public String toString() { return Character.toString(elem); }
+ public String toString() { return java.lang.Character.toString(elem); }
}
diff --git a/src/library/scala/runtime/DoubleRef.java b/src/library/scala/runtime/DoubleRef.java
index bcc967d1d4..d112841a72 100644
--- a/src/library/scala/runtime/DoubleRef.java
+++ b/src/library/scala/runtime/DoubleRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
@@ -17,5 +16,5 @@ public class DoubleRef implements java.io.Serializable {
public double elem;
public DoubleRef(double elem) { this.elem = elem; }
- public String toString() { return Double.toString(elem); }
+ public String toString() { return java.lang.Double.toString(elem); }
}
diff --git a/src/library/scala/runtime/FloatRef.java b/src/library/scala/runtime/FloatRef.java
index 24ad536f42..1fec0e5bfc 100644
--- a/src/library/scala/runtime/FloatRef.java
+++ b/src/library/scala/runtime/FloatRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
@@ -17,5 +16,5 @@ public class FloatRef implements java.io.Serializable {
public float elem;
public FloatRef(float elem) { this.elem = elem; }
- public String toString() { return Float.toString(elem); }
+ public String toString() { return java.lang.Float.toString(elem); }
}
diff --git a/src/library/scala/runtime/IntRef.java b/src/library/scala/runtime/IntRef.java
index cc4122eedf..647cffed7a 100644
--- a/src/library/scala/runtime/IntRef.java
+++ b/src/library/scala/runtime/IntRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
@@ -17,5 +16,5 @@ public class IntRef implements java.io.Serializable {
public int elem;
public IntRef(int elem) { this.elem = elem; }
- public String toString() { return Integer.toString(elem); }
+ public String toString() { return java.lang.Integer.toString(elem); }
}
diff --git a/src/library/scala/runtime/LongRef.java b/src/library/scala/runtime/LongRef.java
index 6862d1d993..708fc12ebe 100644
--- a/src/library/scala/runtime/LongRef.java
+++ b/src/library/scala/runtime/LongRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
@@ -17,5 +16,5 @@ public class LongRef implements java.io.Serializable {
public long elem;
public LongRef(long elem) { this.elem = elem; }
- public String toString() { return Long.toString(elem); }
+ public String toString() { return java.lang.Long.toString(elem); }
}
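The *Ref classes touched above (ByteRef, CharRef, DoubleRef, FloatRef, IntRef, LongRef) are single-field mutable cells; the compiler uses them to box local vars that are captured and mutated inside closures. A hand-written illustration of that lowering follows; the real compiler output differs in detail, this is only a sketch:

    import scala.runtime.IntRef

    // What you write: a captured, mutated local `var`.
    def countEvens(xs: List[Int]): Int = {
      var n = 0
      xs.foreach(x => if (x % 2 == 0) n += 1)
      n
    }

    // Roughly what it lowers to: the var becomes a heap-allocated IntRef cell
    // shared by the method body and the closure.
    def countEvensLowered(xs: List[Int]): Int = {
      val n = new IntRef(0)
      xs.foreach(x => if (x % 2 == 0) n.elem += 1)
      n.elem
    }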
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index b4c31f9938..a98ae3ae03 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
@@ -14,6 +13,8 @@ package scala.runtime
import java.lang.reflect.{ Method => JMethod }
import java.lang.{ Class => JClass }
+import scala.annotation.tailrec
+
/** An element of a polymorphic object cache.
* This class is refered to by the CleanUp phase. Each PolyMethodCache chain
* must only relate to one method as PolyMethodCache does not identify
@@ -59,16 +60,24 @@ final class PolyMethodCache(
private[this] val complexity: Int
) extends MethodCache {
- def find(forReceiver: JClass[_]): JMethod =
- if (forReceiver eq receiver)
- return method
- else
- return next.find(forReceiver) // tail call is optimised, confirm with -Ylog:tailcalls
+ /** To achieve tail recursion this must be a separate method
+ * from find, because the type of next is not PolyMethodCache.
+ */
+ @tailrec private def findInternal(forReceiver: JClass[_]): JMethod =
+ if (forReceiver eq receiver) method
+ else next match {
+ case x: PolyMethodCache => x findInternal forReceiver
+ case _ => next find forReceiver
+ }
+
+ def find(forReceiver: JClass[_]): JMethod = findInternal(forReceiver)
+
+ // TODO: come up with a more realistic number
+ final private val MaxComplexity = 160
def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache =
- if (complexity < 160) // TODO: come up with a more realistic number
- return new PolyMethodCache(this, forReceiver, forMethod, complexity + 1)
+ if (complexity < MaxComplexity)
+ new PolyMethodCache(this, forReceiver, forMethod, complexity + 1)
else
- return new MegaMethodCache(forMethod.getName, forMethod.getParameterTypes)
-
+ new MegaMethodCache(forMethod.getName, forMethod.getParameterTypes)
}
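The PolyMethodCache change above is a small but reusable trick: find cannot be @tailrec because the recursive step goes through next, whose static type (MethodCache) may dispatch to a different implementation; moving the loop into a private helper that narrows next with a match makes the self-call a genuine tail call. A stripped-down sketch of the same shape, with hypothetical names:

    import scala.annotation.tailrec

    sealed trait Cache { def find(key: String): Option[Int] }

    final class MegaCache(value: Int) extends Cache {
      def find(key: String): Option[Int] = Some(value)   // toy terminal case
    }

    final class PolyCache(key: String, value: Int, next: Cache) extends Cache {
      // Tail-recursive only because the recursive call is on a value already
      // narrowed to PolyCache; a call through `next: Cache` would not qualify.
      @tailrec private def findInternal(k: String): Option[Int] =
        if (k == key) Some(value)
        else next match {
          case p: PolyCache => p.findInternal(k)
          case other        => other.find(k)
        }

      def find(k: String): Option[Int] = findInternal(k)
    }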
diff --git a/src/library/scala/unsealed.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index 458d971555..f8843e9c64 100644
--- a/src/library/scala/unsealed.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -1,18 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala
+package scala.runtime
-/**
- * @since 2.4
- */
-@deprecated("use `unchecked' instead")
-class unsealed extends Annotation
+import scala.util.control.ControlThrowable
+
+class NonLocalReturnControl[T](val key: AnyRef, val value: T) extends ControlThrowable
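NonLocalReturnControl, which replaces the deprecated @unsealed annotation file here, is a ControlThrowable carrying a key and a value: it is the exception thrown to implement a return that crosses a closure boundary, with the enclosing method catching its own key and unwrapping the value. A hand-rolled approximation of that protocol, for illustration only (the compiler-generated form differs):

    import scala.runtime.NonLocalReturnControl

    // What an early `return Some(x)` from inside the foreach closure amounts to.
    def firstNegative(xs: List[Int]): Option[Int] = {
      val key = new AnyRef
      try {
        xs.foreach { x =>
          if (x < 0) throw new NonLocalReturnControl(key, Some(x))
        }
        None
      } catch {
        case e: NonLocalReturnControl[_] if e.key eq key =>
          e.value.asInstanceOf[Option[Int]]
      }
    }

Because ControlThrowable mixes in NoStackTrace, the throw skips stack-trace filling and stays cheap enough to use for ordinary control flow.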
diff --git a/src/library/scala/runtime/Nothing$.scala b/src/library/scala/runtime/Nothing$.scala
index 4809e0f644..fd66996518 100644
--- a/src/library/scala/runtime/Nothing$.scala
+++ b/src/library/scala/runtime/Nothing$.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala
index 62eee09c42..25662e1810 100644
--- a/src/library/scala/runtime/Null$.scala
+++ b/src/library/scala/runtime/Null$.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/ObjectRef.java b/src/library/scala/runtime/ObjectRef.java
index 04f4a9fc59..a1567b6e48 100644
--- a/src/library/scala/runtime/ObjectRef.java
+++ b/src/library/scala/runtime/ObjectRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala
index 323b828e31..bfff5dae94 100644
--- a/src/library/scala/runtime/RichBoolean.scala
+++ b/src/library/scala/runtime/RichBoolean.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala
index bf09ad4599..52ec045217 100644
--- a/src/library/scala/runtime/RichByte.scala
+++ b/src/library/scala/runtime/RichByte.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index 99c664b7a1..b8324fa221 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
import java.lang.Character
-import collection.{IndexedSeq, IndexedSeqView}
+import collection.immutable.NumericRange
/** <p>
* For example, in the following code
@@ -48,6 +47,7 @@ final class RichChar(x: Char) extends Proxy with Ordered[Char] {
def isSpaceChar: Boolean = Character.isSpaceChar(x)
def isHighSurrogate: Boolean = Character.isHighSurrogate(x)
def isLowSurrogate: Boolean = Character.isLowSurrogate(x)
+ def isSurrogate: Boolean = isHighSurrogate || isLowSurrogate
def isUnicodeIdentifierStart: Boolean = Character.isUnicodeIdentifierStart(x)
def isUnicodeIdentifierPart: Boolean = Character.isUnicodeIdentifierPart(x)
def isIdentifierIgnorable: Boolean = Character.isIdentifierIgnorable(x)
@@ -82,22 +82,14 @@ final class RichChar(x: Char) extends Proxy with Ordered[Char] {
@deprecated("Use ch.isUpper instead")
def isUpperCase: Boolean = isUpper
- /** Create a <code>[Char]</code> over the characters from 'x' to 'y' - 1
+ /** Create a <code>[Char]</code> over the characters from 'x' to 'limit' - 1
*/
- def until(limit: Char): IndexedSeqView[Char, IndexedSeq[Char]] =
- if (limit <= x) IndexedSeq.empty.view
- else
- new IndexedSeqView[Char, IndexedSeq[Char]] {
- protected def underlying = IndexedSeq.empty[Char]
- def length = limit - x
- def apply(i: Int): Char = {
- require(i >= 0 && i < length)
- (x + i).toChar
- }
- }
-
- /** Create a <code>IndexedSeqView[Char]</code> over the characters from 'x' to 'y'
+ def until(limit: Char): NumericRange[Char] =
+ new NumericRange.Exclusive(x, limit, 1.toChar)
+
+ /** Create a <code>IndexedSeqView[Char]</code> over the characters from 'x' to 'limit'
*/
- def to(y: Char): IndexedSeqView[Char, IndexedSeq[Char]] = until((y + 1).toChar)
+ def to(limit: Char): NumericRange[Char] =
+ new NumericRange.Inclusive(x, limit, 1.toChar)
}
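With the change above, until and to on Char produce ordinary NumericRange values instead of a hand-rolled IndexedSeqView, so character ranges pick up the usual range operations. A quick usage sketch:

    def charRangeDemo(): Unit = {
      val lower = 'a' to 'e'          // NumericRange.Inclusive('a', 'e', step 1.toChar)
      assert(lower.mkString == "abcde")

      val digits = '0' until '9'      // exclusive of '9'
      assert(digits.contains('8') && !digits.contains('9'))
    }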
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index d30cd899a2..64976acb12 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
@@ -19,13 +18,13 @@ final class RichDouble(x: Double) extends Proxy with Ordered[Double] {
def compare(y: Double): Int = java.lang.Double.compare(x, y)
- def min(y: Double): Double = Math.min(x, y)
- def max(y: Double): Double = Math.max(x, y)
- def abs: Double = Math.abs(x)
+ def min(y: Double): Double = math.min(x, y)
+ def max(y: Double): Double = math.max(x, y)
+ def abs: Double = math.abs(x)
- def round: Long = Math.round(x)
- def ceil: Double = Math.ceil(x)
- def floor: Double = Math.floor(x)
+ def round: Long = math.round(x)
+ def ceil: Double = math.ceil(x)
+ def floor: Double = math.floor(x)
/** See <code>BigDecimal.until</code>. */
def until(end: Double): Range.Partial[Double, NumericRange[Double]] =
@@ -49,7 +48,7 @@ final class RichDouble(x: Double) extends Proxy with Ordered[Double] {
* @param x an angle, in degrees
* @return the measurement of the angle <code>x</code> in radians.
*/
- def toRadians: Double = Math.toRadians(x)
+ def toRadians: Double = math.toRadians(x)
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees
@@ -57,7 +56,7 @@ final class RichDouble(x: Double) extends Proxy with Ordered[Double] {
* @param x angle, in radians
* @return the measurement of the angle <code>x</code> in degrees.
*/
- def toDegrees: Double = Math.toDegrees(x)
+ def toDegrees: Double = math.toDegrees(x)
// isNaN is provided by the implicit conversion to java.lang.Double
// def isNaN: Boolean = java.lang.Double.isNaN(x)
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index 27b3c719c6..a806a3cf5e 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
@@ -14,14 +13,5 @@ package scala.runtime
import compat.Platform.EOL
final class RichException(exc: Throwable) {
-
- def getStackTraceString: String = {
- val s = new StringBuilder()
- for (trElem <- exc.getStackTrace()) {
- s.append(trElem.toString())
- s.append(EOL)
- }
- s.toString()
- }
-
+ def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
}
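The getStackTraceString rewrite above is just the three-argument mkString(start, sep, end) overload doing the work of the old StringBuilder loop; the same idiom applies anywhere a trailing separator is wanted:

    import scala.compat.Platform.EOL

    // Each frame on its own line, with a trailing newline after the last one.
    def traceString(t: Throwable): String =
      t.getStackTrace.mkString("", EOL, EOL)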
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index ac47dcc934..e8e351bdaa 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
@@ -21,13 +20,13 @@ final class RichFloat(x: Float) extends Proxy with Ordered[Float] {
//def compare(y: Float): Int = if (x < y) -1 else if (x > y) 1 else 0
def compare(y: Float): Int = java.lang.Float.compare(x, y)
- def min(y: Float) = Math.min(x, y)
- def max(y: Float) = Math.max(x, y)
- def abs: Float = Math.abs(x)
+ def min(y: Float) = math.min(x, y)
+ def max(y: Float) = math.max(x, y)
+ def abs: Float = math.abs(x)
- def round: Int = Math.round(x)
- def ceil: Float = Math.ceil(x).toFloat
- def floor: Float = Math.floor(x).toFloat
+ def round: Int = math.round(x)
+ def ceil: Float = math.ceil(x).toFloat
+ def floor: Float = math.floor(x).toFloat
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
@@ -35,7 +34,7 @@ final class RichFloat(x: Float) extends Proxy with Ordered[Float] {
* @param x an angle, in degrees
* @return the measurement of the angle <code>x</code> in radians.
*/
- def toRadians: Float = Math.toRadians(x).toFloat
+ def toRadians: Float = math.toRadians(x).toFloat
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
@@ -43,7 +42,7 @@ final class RichFloat(x: Float) extends Proxy with Ordered[Float] {
* @param x angle, in radians
* @return the measurement of the angle <code>x</code> in degrees.
*/
- def toDegrees: Float = Math.toDegrees(x).toFloat
+ def toDegrees: Float = math.toDegrees(x).toFloat
// isNaN is provided by the implicit conversion to java.lang.Float
// def isNaN: Boolean = java.lang.Float.isNaN(x)
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index 697b3dcf8f..30fc010d17 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index 7e8a5d71ba..bf573abc62 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala
index 382969547e..0fa34b27c4 100644
--- a/src/library/scala/runtime/RichShort.scala
+++ b/src/library/scala/runtime/RichShort.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/RichString.scala b/src/library/scala/runtime/RichString.scala
deleted file mode 100644
index a1058cf979..0000000000
--- a/src/library/scala/runtime/RichString.scala
+++ /dev/null
@@ -1,261 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-
-import scala.util.matching.Regex
-import collection.generic._
-import collection.IndexedSeqLike
-import collection.immutable.IndexedSeq
-import collection.mutable.{Builder, StringBuilder}
-
-object RichString {
-
- def newBuilder: Builder[Char, RichString] = new StringBuilder() mapResult (new RichString(_))
- implicit def canBuildFrom: CanBuildFrom[RichString, Char, RichString] =
- new CanBuildFrom[RichString, Char, RichString] {
- def apply(from: RichString) = newBuilder
- def apply() = newBuilder
- }
- implicit def canBuildFrom2: CanBuildFrom[String, Char, RichString] =
- new CanBuildFrom[String, Char, RichString] {
- def apply(from: String) = newBuilder
- def apply() = newBuilder
- }
-
- // just statics for rich string.
- private final val LF: Char = 0x0A
- private final val FF: Char = 0x0C
- private final val CR: Char = 0x0D
- private final val SU: Char = 0x1A
-}
-
-import RichString._
-
-class RichString(val self: String) extends Proxy with IndexedSeq[Char] with IndexedSeqLike[Char, RichString] with PartialFunction[Int, Char] with Ordered[String] with Boxed {
-
- /** Creates a string builder buffer as builder for this class */
- override protected[this] def newBuilder = RichString.newBuilder
-
- /** Return element at index `n`
- * @throws IndexOutofBoundsException if the index is not valid
- */
- def apply(n: Int): Char = self charAt n
-
- def length: Int = self.length
-
- override def mkString = self
- override def toString = self
-
- /** return n times the current string
- */
- def * (n: Int): String = {
- val buf = new StringBuilder
- for (i <- 0 until n) buf append self
- buf.toString
- }
-
- override def compare(other: String) = self compareTo other
-
- private def isLineBreak(c: Char) = c == LF || c == FF
-
- /** <p>
- * Strip trailing line end character from this string if it has one.
- * A line end character is one of
- * </p>
- * <ul style="list-style-type: none;">
- * <li>LF - line feed (0x0A hex)</li>
- * <li>FF - form feed (0x0C hex)</li>
- * </ul>
- * <p>
- * If a line feed character LF is preceded by a carriage return CR
- * (0x0D hex), the CR character is also stripped (Windows convention).
- * </p>
- */
- def stripLineEnd: String = {
- val len = self.length
- if (len == 0) self
- else {
- val last = apply(len - 1)
- if (isLineBreak(last))
- self.substring(0, if (last == LF && len >= 2 && apply(len - 2) == CR) len - 2 else len - 1)
- else
- self
- }
- }
-
- /** <p>
- * Return all lines in this string in an iterator, including trailing
- * line end characters.
- * </p>
- * <p>
- * The number of strings returned is one greater than the number of line
- * end characters in this string. For an empty string, a single empty
- * line is returned. A line end character is one of
- * </p>
- * <ul style="list-style-type: none;">
- * <li>LF - line feed (0x0A hex)</li>
- * <li>FF - form feed (0x0C hex)</li>
- * </ul>
- */
- def linesWithSeparators: Iterator[String] = new Iterator[String] {
- private val len = self.length
- private var index = 0
- def hasNext: Boolean = index < len
- def next(): String = {
- if (index >= len) throw new NoSuchElementException("next on empty iterator")
- val start = index
- while (index < len && !isLineBreak(apply(index))) index += 1
- index += 1
- self.substring(start, index min len)
- }
- }
-
- /** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e. apply <code>.stripLineEnd</code> to all lines
- * returned by <code>linesWithSeparators</code>.
- */
- def lines: Iterator[String] =
- linesWithSeparators map (line => new RichString(line).stripLineEnd)
-
- /** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e. apply <code>.stripLineEnd</code> to all lines
- * returned by <code>linesWithSeparators</code>.
- */
- def linesIterator: Iterator[String] =
- linesWithSeparators map (line => new RichString(line).stripLineEnd)
-
- /** Returns this string with first character converted to upper case */
- def capitalize: String =
- if (self == null) null
- else if (self.length == 0) ""
- else {
- val chars = self.toCharArray
- chars(0) = chars(0).toUpper
- new String(chars)
- }
-
- /** Returns this string with the given <code>prefix</code> stripped. */
- def stripPrefix(prefix: String) =
- if (self.startsWith(prefix)) self.substring(prefix.length)
- else self
-
- /** Returns this string with the given <code>suffix</code> stripped. */
- def stripSuffix(suffix: String) =
- if (self.endsWith(suffix)) self.substring(0, self.length() - suffix.length)
- else self
-
- /** <p>
- * For every line in this string:
- * </p>
- * <blockquote>
- * Strip a leading prefix consisting of blanks or control characters
- * followed by <code>marginChar</code> from the line.
- * </blockquote>
- */
- def stripMargin(marginChar: Char): String = {
- val buf = new StringBuilder
- for (line <- linesWithSeparators) {
- val len = line.length
- var index = 0
- while (index < len && line.charAt(index) <= ' ') index += 1
- buf append
- (if (index < len && line.charAt(index) == marginChar) line.substring(index + 1) else line)
- }
- buf.toString
- }
-
- /** <p>
- * For every line in this string:
- * </p>
- * <blockquote>
- * Strip a leading prefix consisting of blanks or control characters
- * followed by <code>|</code> from the line.
- * </blockquote>
- */
- def stripMargin: String = stripMargin('|')
-
- private def escape(ch: Char): String = "\\Q" + ch + "\\E"
-
- @throws(classOf[java.util.regex.PatternSyntaxException])
- def split(separator: Char): Array[String] = self.split(escape(separator))
-
- @throws(classOf[java.util.regex.PatternSyntaxException])
- def split(separators: Array[Char]): Array[String] = {
- val re = separators.foldLeft("[")(_+escape(_)) + "]"
- self.split(re)
- }
-
- /** You can follow a string with `.r', turning
- * it into a Regex. E.g.
- *
- * """A\w*""".r is the regular expression for identifiers starting with `A'.
- */
- def r: Regex = new Regex(self)
-
- def toBoolean: Boolean = parseBoolean(self)
- def toByte: Byte = java.lang.Byte.parseByte(self)
- def toShort: Short = java.lang.Short.parseShort(self)
- def toInt: Int = java.lang.Integer.parseInt(self)
- def toLong: Long = java.lang.Long.parseLong(self)
- def toFloat: Float = java.lang.Float.parseFloat(self)
- def toDouble: Double = java.lang.Double.parseDouble(self)
-
- private def parseBoolean(s: String): Boolean =
- if (s != null) s.toLowerCase match {
- case "true" => true
- case "false" => false
- case _ => throw new NumberFormatException("For input string: \""+s+"\"")
- }
- else
- throw new NumberFormatException("For input string: \"null\"")
-
- def toArray: Array[Char] = {
- val result = new Array[Char](length)
- self.getChars(0, length, result, 0)
- result
- }
-
- /** <p>
- * Uses the underlying string as a pattern (in a fashion similar to
- * printf in C), and uses the supplied arguments to fill in the
- * holes.
- * </p>
- * <p>
- * The interpretation of the formatting patterns is described in
- * <a href="" target="contentFrame" class="java/util/Formatter">
- * <code>java.util.Formatter</code></a>.
- * </p>
- *
- * @param args the arguments used to instantiating the pattern.
- * @throws java.lang.IllegalArgumentException
- */
- def format(args : Any*) : String =
- java.lang.String.format(self, args.asInstanceOf[Seq[AnyRef]]: _*)
-
- /** <p>
- * Like format(args*) but takes an initial Locale parameter
- * which influences formatting as in java.lang.String's format.
- * </p>
- * <p>
- * The interpretation of the formatting patterns is described in
- * <a href="" target="contentFrame" class="java/util/Formatter">
- * <code>java.util.Formatter</code></a>.
- * </p>
- *
- * @param locale an instance of java.util.Locale
- * @param args the arguments used to instantiating the pattern.
- * @throws java.lang.IllegalArgumentException
- */
- def format(l: java.util.Locale, args: Any*): String =
- java.lang.String.format(l, self, args.asInstanceOf[Seq[AnyRef]]: _*)
-}
-
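RichString is deleted outright here. The same operations remain available on plain Strings through the 2.8 collections' string wrapper (StringOps / WrappedString; this hunk only shows the removal, so treat that destination as an assumption). The familiar calls still work:

    def stringOpsDemo(): Unit = {
      assert(("ab" * 3) == "ababab")
      assert("  |hello".stripMargin == "hello")
      assert("scala".capitalize == "Scala")
      assert("v2.8.0".stripPrefix("v") == "2.8.0")
    }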
diff --git a/src/library/scala/runtime/RichUnit.scala b/src/library/scala/runtime/RichUnit.scala
index b60bc3db3e..982ec8b963 100644
--- a/src/library/scala/runtime/RichUnit.scala
+++ b/src/library/scala/runtime/RichUnit.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index a93ff61a41..a8cb2340ff 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -1,28 +1,26 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime
import scala.reflect.ClassManifest
-import scala.collection.Seq
-import scala.collection.mutable._
-import scala.collection.immutable.{List, Stream, Nil, ::}
+import scala.collection.{ Seq, IndexedSeq, TraversableView }
+import scala.collection.mutable.WrappedArray
+import scala.collection.immutable.{ NumericRange, List, Stream, Nil, :: }
+import scala.xml.{ Node, MetaData }
+import scala.util.control.ControlThrowable
/* The object <code>ScalaRunTime</code> provides ...
*/
object ScalaRunTime {
-
- def isArray(x: AnyRef): Boolean = // !!! remove once newarrays
- x != null && (x.getClass.isArray || x.isInstanceOf[BoxedArray[_]])
-
+ def isArray(x: AnyRef): Boolean = isArray(x, 1)
def isArray(x: Any, atLevel: Int): Boolean =
x != null && isArrayClass(x.asInstanceOf[AnyRef].getClass, atLevel)
@@ -31,21 +29,67 @@ object ScalaRunTime {
def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
- // todo: remove?
- def forceBoxedArray[A <: Any](xs: Seq[A]): Array[A] =
- throw new Error(" not implemented: forceBoxedArray")
-
/** Retrieve generic array element */
- def array_apply(xs: AnyRef, idx: Int): Any = java.lang.reflect.Array.get(xs, idx)
+ def array_apply(xs: AnyRef, idx: Int): Any = xs match {
+ case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
+ case x: Array[Int] => x(idx).asInstanceOf[Any]
+ case x: Array[Double] => x(idx).asInstanceOf[Any]
+ case x: Array[Long] => x(idx).asInstanceOf[Any]
+ case x: Array[Float] => x(idx).asInstanceOf[Any]
+ case x: Array[Char] => x(idx).asInstanceOf[Any]
+ case x: Array[Byte] => x(idx).asInstanceOf[Any]
+ case x: Array[Short] => x(idx).asInstanceOf[Any]
+ case x: Array[Boolean] => x(idx).asInstanceOf[Any]
+ case x: Array[Unit] => x(idx).asInstanceOf[Any]
+ case null => throw new NullPointerException
+ }
/** update generic array element */
- def array_update(xs: AnyRef, idx: Int, value: Any): Unit = java.lang.reflect.Array.set(xs, idx, value)
+ def array_update(xs: AnyRef, idx: Int, value: Any): Unit = xs match {
+ case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef]
+ case x: Array[Int] => x(idx) = value.asInstanceOf[Int]
+ case x: Array[Double] => x(idx) = value.asInstanceOf[Double]
+ case x: Array[Long] => x(idx) = value.asInstanceOf[Long]
+ case x: Array[Float] => x(idx) = value.asInstanceOf[Float]
+ case x: Array[Char] => x(idx) = value.asInstanceOf[Char]
+ case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte]
+ case x: Array[Short] => x(idx) = value.asInstanceOf[Short]
+ case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean]
+ case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit]
+ case null => throw new NullPointerException
+ }
/** Get generic array length */
- def array_length(xs: AnyRef): Int = java.lang.reflect.Array.getLength(xs)
+ def array_length(xs: AnyRef): Int = xs match {
+ case x: Array[AnyRef] => x.length
+ case x: Array[Int] => x.length
+ case x: Array[Double] => x.length
+ case x: Array[Long] => x.length
+ case x: Array[Float] => x.length
+ case x: Array[Char] => x.length
+ case x: Array[Byte] => x.length
+ case x: Array[Short] => x.length
+ case x: Array[Boolean] => x.length
+ case x: Array[Unit] => x.length
+ case null => throw new NullPointerException
+ }
+
+ def array_clone(xs: AnyRef): AnyRef = xs match {
+ case x: Array[AnyRef] => ArrayRuntime.cloneArray(x)
+ case x: Array[Int] => ArrayRuntime.cloneArray(x)
+ case x: Array[Double] => ArrayRuntime.cloneArray(x)
+ case x: Array[Long] => ArrayRuntime.cloneArray(x)
+ case x: Array[Float] => ArrayRuntime.cloneArray(x)
+ case x: Array[Char] => ArrayRuntime.cloneArray(x)
+ case x: Array[Byte] => ArrayRuntime.cloneArray(x)
+ case x: Array[Short] => ArrayRuntime.cloneArray(x)
+ case x: Array[Boolean] => ArrayRuntime.cloneArray(x)
+ case x: Array[Unit] => x
+ case null => throw new NullPointerException
+ }
/** Convert a numeric value array to an object array.
- * Needed to deal with vararg arguments of primtive types that are passed
+ * Needed to deal with vararg arguments of primitive types that are passed
* to a generic Java vararg parameter T ...
*/
def toObjectArray(src: AnyRef): Array[Object] = {
@@ -59,62 +103,58 @@ object ScalaRunTime {
def toArray[T](xs: scala.collection.Seq[T]) = {
val arr = new Array[AnyRef](xs.length)
var i = 0
- for (x <- xs) arr(i) = x.asInstanceOf[AnyRef]
+ for (x <- xs) {
+ arr(i) = x.asInstanceOf[AnyRef]
+ i += 1
+ }
arr
}
def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
- abstract class Try[a] {
- def Catch[b >: a](handler: PartialFunction[Throwable, b]): b
- def Finally(handler: Unit): a
+ abstract class Try[+A] {
+ def Catch[B >: A](handler: PartialFunction[Throwable, B]): B
+ def Finally(fin: => Unit): A
}
- def Try[a](block: => a): Try[a] = new Try[a] with Runnable {
- var result: a = _
- var exception: Throwable = ExceptionHandling.tryCatch(this)
-
- def run(): Unit = result = block
-
- def Catch[b >: a](handler: PartialFunction[Throwable, b]): b =
- if (exception eq null)
- result.asInstanceOf[b]
- // !!! else if (exception is LocalReturn)
- // !!! // ...
- else if (handler isDefinedAt exception)
- handler(exception)
- else
- throw exception
-
- def Finally(handler: Unit): a =
- if (exception eq null)
- result.asInstanceOf[a]
- else
- throw exception
- }
+ def Try[A](block: => A): Try[A] = new Try[A] with Runnable {
+ private var result: A = _
+ private var exception: Throwable =
+ try { run() ; null }
+ catch {
+ case e: ControlThrowable => throw e // don't catch non-local returns etc
+ case e: Throwable => e
+ }
+
+ def run() { result = block }
- def caseFields(x: Product): List[Any] = {
- val arity = x.productArity
- def fields(from: Int): List[Any] =
- if (from == arity) List()
- else x.productElement(from) :: fields(from + 1)
- fields(0)
+ def Catch[B >: A](handler: PartialFunction[Throwable, B]): B =
+ if (exception == null) result
+ else if (handler isDefinedAt exception) handler(exception)
+ else throw exception
+
+ def Finally(fin: => Unit): A = {
+ fin
+
+ if (exception == null) result
+ else throw exception
+ }
}
def _toString(x: Product): String =
- caseFields(x).mkString(x.productPrefix + "(", ",", ")")
+ x.productIterator.mkString(x.productPrefix + "(", ",", ")")
- def _hashCodeJenkins(x: Product): Int =
- scala.util.JenkinsHash.hashSeq(x.productPrefix.toSeq ++ x.productIterator.toSeq)
+ // def _hashCodeJenkins(x: Product): Int =
+ // scala.util.JenkinsHash.hashSeq(x.productPrefix.toSeq ++ x.productIterator.toSeq)
def _hashCode(x: Product): Int = {
- var code = x.productPrefix.hashCode()
val arr = x.productArity
+ var code = arr
var i = 0
while (i < arr) {
val elem = x.productElement(i)
- code = code * 41 + (if (elem == null) 0 else elem.hashCode())
+ code = code * 41 + (if (elem == null) 0 else elem.##)
i += 1
}
code
@@ -123,68 +163,63 @@ object ScalaRunTime {
/** Fast path equality method for inlining; used when -optimise is set.
*/
@inline def inlinedEquals(x: Object, y: Object): Boolean =
- if (x eq null) y eq null
- else if (x.isInstanceOf[Number] || x.isInstanceOf[Character]) BoxesRunTime.equals(x, y)
+ if (x eq y) true
+ else if (x eq null) false
+ else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y)
+ else if (x.isInstanceOf[java.lang.Character]) BoxesRunTime.equalsCharObject(x.asInstanceOf[java.lang.Character], y)
else x.equals(y)
def _equals(x: Product, y: Any): Boolean = y match {
- case y1: Product if x.productArity == y1.productArity =>
- val arity = x.productArity
- var i = 0
- while (i < arity && x.productElement(i) == y1.productElement(i))
- i += 1
- i == arity
- case _ =>
- false
+ case y: Product if x.productArity == y.productArity => x.productIterator sameElements y.productIterator
+ case _ => false
}
- def _equalsWithVarArgs(x: Product, y: Any): Boolean = y match {
- case y1: Product if x.productArity == y1.productArity =>
- val arity = x.productArity
- var i = 0
- while (i < arity - 1 && x.productElement(i) == y1.productElement(i))
- i += 1
- i == arity - 1 && {
- x.productElement(i) match {
- case xs: Seq[_] =>
- y1.productElement(i) match {
- case ys: Seq[_] => xs sameElements ys
- }
- }
- }
- case _ =>
- false
- }
-
- //def checkDefined[T >: Null](x: T): T =
- // if (x == null) throw new UndefinedException else x
+ // hashcode -----------------------------------------------------------
+ //
+ // Note that these are the implementations called by ##, so they
+ // must not call ## themselves.
- def Seq[a](xs: a*): Seq[a] = null // interpreted specially by new backend.
+ @inline def hash(x: Any): Int =
+ if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
+ else x.hashCode
- def arrayValue[A](x: BoxedArray[A], elemClass: Class[_]): AnyRef =
- if (x eq null) null else x.unbox(elemClass)
+ @inline def hash(dv: Double): Int = {
+ val iv = dv.toInt
+ if (iv == dv) return iv
- /** Temporary method to go to new array representation
- * !!! can be reomved once bootstrap is complete !!!
- */
- def unboxedArray[A](x: AnyRef): AnyRef = x match {
- case ba: BoxedArray[_] => ba.value
- case _ => x
+ val lv = dv.toLong
+ if (lv == dv) return lv.hashCode
+ else dv.hashCode
}
+ @inline def hash(fv: Float): Int = {
+ val iv = fv.toInt
+ if (iv == fv) return iv
- def boxArray(value: AnyRef): BoxedArray[_] = value match {
- case x: Array[AnyRef] => new BoxedObjectArray(x, ClassManifest.classType(x.getClass.getComponentType))
- case x: Array[Int] => new BoxedIntArray(x)
- case x: Array[Double] => new BoxedDoubleArray(x)
- case x: Array[Long] => new BoxedLongArray(x)
- case x: Array[Float] => new BoxedFloatArray(x)
- case x: Array[Char] => new BoxedCharArray(x)
- case x: Array[Byte] => new BoxedByteArray(x)
- case x: Array[Short] => new BoxedShortArray(x)
- case x: Array[Boolean] => new BoxedBooleanArray(x)
- case x: BoxedArray[_] => x
- case null => null
+ val lv = fv.toLong
+ if (lv == fv) return lv.hashCode
+ else fv.hashCode
}
+ @inline def hash(lv: Long): Int = {
+ val iv = lv.toInt
+ if (iv == lv) iv else lv.hashCode
+ }
+ @inline def hash(x: Int): Int = x
+ @inline def hash(x: Short): Int = x.toInt
+ @inline def hash(x: Byte): Int = x.toInt
+ @inline def hash(x: Char): Int = x.toInt
+
+ @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
+ @inline def hash(x: java.lang.Long): Int = {
+ val iv = x.intValue
+ if (iv == x.longValue) iv else x.hashCode
+ }
+
+ /** A helper method for constructing case class equality methods,
+ * because existential types get in the way of a clean outcome and
+ * it's performing a series of Any/Any equals comparisons anyway.
+ * See ticket #2867 for specifics.
+ */
+ def sameElements(xs1: Seq[Any], xs2: Seq[Any]) = xs1 sameElements xs2
/** Given any Scala value, convert it to a String.
*
@@ -199,12 +234,35 @@ object ScalaRunTime {
* @return a string representation of <code>arg</code>
*
*/
- def stringOf(arg : Any): String = arg match {
- case null => "null"
- case arg: AnyRef if isArray(arg) =>
- val d: collection.IndexedSeq[Any] = WrappedArray.make(arg).deep
- d.toString
- case arg: WrappedArray[_] => arg.deep.toString
- case arg => arg.toString
+ def stringOf(arg: Any): String = {
+ import collection.{SortedSet, SortedMap}
+ def mapTraversable(x: Traversable[_], f: Any => String) = x match {
+ case ss: SortedSet[_] => ss.map(f)
+ case ss: SortedMap[_, _] => ss.map(f)
+ case _ => x.map(f)
+ }
+ def inner(arg: Any): String = arg match {
+ case null => "null"
+ // Node extends NodeSeq extends Seq[Node] strikes again
+ case x: Node => x toString
+ // Not to mention MetaData extends Iterable[MetaData]
+ case x: MetaData => x toString
+ // Range/NumericRange have a custom toString to avoid walking a gazillion elements
+ case x: Range => x toString
+ case x: NumericRange[_] => x toString
+ case x: AnyRef if isArray(x) => WrappedArray make x map inner mkString ("Array(", ", ", ")")
+ case x: TraversableView[_, _] => x.toString
+ case x: Traversable[_] if !x.hasDefiniteSize => x.toString
+ case x: Traversable[_] =>
+ // Some subclasses of AbstractFile implement Iterable, then throw an
+ // exception if you call iterator. What a world.
+ // And they can't be infinite either.
+ if (x.getClass.getName startsWith "scala.tools.nsc.io") x.toString
+ else (mapTraversable(x, inner)) mkString (x.stringPrefix + "(", ", ", ")")
+ case x => x toString
+ }
+ val s = inner(arg)
+ val nl = if (s contains "\n") "\n" else ""
+ nl + s + "\n"
}
}
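The hash overloads added above back the `##` operator: numerically equal values should hash identically no matter whether they arrive boxed as Int, Long, Float or Double. A minimal sketch of that contract, for orientation only (the exact hashCode values of the boxed types are JDK-defined, and the normalization is done by BoxesRunTime.hashFromNumber as wired up above):

    import scala.runtime.ScalaRunTime

    object HashContractDemo {
      def main(args: Array[String]) {
        val ones: List[Any] = List(1, 1L, 1.0, 1.0f)
        // ScalaRunTime.hash (i.e. ##) should agree for all four values,
        // while plain hashCode differs for the boxed Double and Float.
        ones foreach { x =>
          println(x + ": hash = " + ScalaRunTime.hash(x) + ", hashCode = " + x.hashCode)
        }
      }
    }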
diff --git a/src/library/scala/runtime/ShortRef.java b/src/library/scala/runtime/ShortRef.java
index 2de8dc02b4..76db3de29a 100644
--- a/src/library/scala/runtime/ShortRef.java
+++ b/src/library/scala/runtime/ShortRef.java
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.runtime;
@@ -17,4 +16,5 @@ public class ShortRef implements java.io.Serializable {
public short elem;
public ShortRef(short elem) { this.elem = elem; }
+ public String toString() { return java.lang.Short.toString(elem); }
}
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index c8f22a8fdb..5ec865eac8 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -1,12 +1,11 @@
/* *\
** ________ ___ __ ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ |_| **
** **
\* */
-// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/TraitSetter.java b/src/library/scala/runtime/TraitSetter.java
new file mode 100755
index 0000000000..d9907c0ac0
--- /dev/null
+++ b/src/library/scala/runtime/TraitSetter.java
@@ -0,0 +1,6 @@
+package scala.runtime;
+
+/** A marker annotation to tag a setter of a mutable variable in a trait
+ */
+public @interface TraitSetter {
+}
\ No newline at end of file
diff --git a/src/library/scala/net/Utility.scala b/src/library/scala/runtime/VolatileBooleanRef.java
index 59ce1c0ceb..5cb308b0fd 100644..100755
--- a/src/library/scala/net/Utility.scala
+++ b/src/library/scala/runtime/VolatileBooleanRef.java
@@ -1,23 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.net
-import java.net.{ URL, MalformedURLException }
-import scala.util.control.Exception._
-/**
- * Skeleton in anticipation of more convenience methods.
- *
- * @since 2.8
- */
-object Utility
-{
- def parseURL(s: String): Option[URL] =
- catching(classOf[MalformedURLException]) opt new URL(s)
+package scala.runtime;
+
+
+public class VolatileBooleanRef implements java.io.Serializable {
+ private static final long serialVersionUID = -5730524563015615974L;
+
+ volatile public boolean elem;
+ public VolatileBooleanRef(boolean elem) { this.elem = elem; }
+ public String toString() { return "" + elem; }
}
diff --git a/src/library/scala/runtime/VolatileByteRef.java b/src/library/scala/runtime/VolatileByteRef.java
new file mode 100755
index 0000000000..4cddb3ecca
--- /dev/null
+++ b/src/library/scala/runtime/VolatileByteRef.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+
+public class VolatileByteRef implements java.io.Serializable {
+ private static final long serialVersionUID = -100666928446877072L;
+
+ volatile public byte elem;
+ public VolatileByteRef(byte elem) { this.elem = elem; }
+ public String toString() { return java.lang.Byte.toString(elem); }
+}
diff --git a/src/library/scala/runtime/VolatileCharRef.java b/src/library/scala/runtime/VolatileCharRef.java
new file mode 100755
index 0000000000..76cc1267fd
--- /dev/null
+++ b/src/library/scala/runtime/VolatileCharRef.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+
+public class VolatileCharRef implements java.io.Serializable {
+ private static final long serialVersionUID = 6537214938268005702L;
+
+ volatile public char elem;
+ public VolatileCharRef(char elem) { this.elem = elem; }
+ public String toString() { return java.lang.Character.toString(elem); }
+}
diff --git a/src/library/scala/runtime/VolatileDoubleRef.java b/src/library/scala/runtime/VolatileDoubleRef.java
new file mode 100755
index 0000000000..4720638399
--- /dev/null
+++ b/src/library/scala/runtime/VolatileDoubleRef.java
@@ -0,0 +1,19 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+public class VolatileDoubleRef implements java.io.Serializable {
+ private static final long serialVersionUID = 8304402127373655534L;
+
+ volatile public double elem;
+ public VolatileDoubleRef(double elem) { this.elem = elem; }
+ public String toString() { return java.lang.Double.toString(elem); }
+}
diff --git a/src/library/scala/runtime/VolatileFloatRef.java b/src/library/scala/runtime/VolatileFloatRef.java
new file mode 100755
index 0000000000..e69dd3a2c9
--- /dev/null
+++ b/src/library/scala/runtime/VolatileFloatRef.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+
+public class VolatileFloatRef implements java.io.Serializable {
+ private static final long serialVersionUID = -5793980990371366933L;
+
+ volatile public float elem;
+ public VolatileFloatRef(float elem) { this.elem = elem; }
+ public String toString() { return java.lang.Float.toString(elem); }
+}
diff --git a/src/library/scala/runtime/VolatileIntRef.java b/src/library/scala/runtime/VolatileIntRef.java
new file mode 100755
index 0000000000..bf306a2c4c
--- /dev/null
+++ b/src/library/scala/runtime/VolatileIntRef.java
@@ -0,0 +1,19 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+public class VolatileIntRef implements java.io.Serializable {
+ private static final long serialVersionUID = 1488197132022872888L;
+
+ volatile public int elem;
+ public VolatileIntRef(int elem) { this.elem = elem; }
+ public String toString() { return java.lang.Integer.toString(elem); }
+}
diff --git a/src/library/scala/runtime/VolatileLongRef.java b/src/library/scala/runtime/VolatileLongRef.java
new file mode 100755
index 0000000000..d7e094e296
--- /dev/null
+++ b/src/library/scala/runtime/VolatileLongRef.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+
+public class VolatileLongRef implements java.io.Serializable {
+ private static final long serialVersionUID = -3567869820105829499L;
+
+ volatile public long elem;
+ public VolatileLongRef(long elem) { this.elem = elem; }
+ public String toString() { return java.lang.Long.toString(elem); }
+}
diff --git a/src/library/scala/runtime/VolatileObjectRef.java b/src/library/scala/runtime/VolatileObjectRef.java
new file mode 100755
index 0000000000..2549b58713
--- /dev/null
+++ b/src/library/scala/runtime/VolatileObjectRef.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+
+public class VolatileObjectRef implements java.io.Serializable {
+ private static final long serialVersionUID = -9055728157600312291L;
+
+ volatile public Object elem;
+ public VolatileObjectRef(Object elem) { this.elem = elem; }
+ public String toString() { return "" + elem; }
+}
diff --git a/src/library/scala/runtime/VolatileShortRef.java b/src/library/scala/runtime/VolatileShortRef.java
new file mode 100755
index 0000000000..ccb160fae9
--- /dev/null
+++ b/src/library/scala/runtime/VolatileShortRef.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+
+public class VolatileShortRef implements java.io.Serializable {
+ private static final long serialVersionUID = 4218441291229072313L;
+
+ volatile public short elem;
+ public VolatileShortRef(short elem) { this.elem = elem; }
+ public String toString() { return java.lang.Short.toString(elem); }
+}
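The Volatile*Ref classes added in this group mirror the existing *Ref boxes, with a volatile elem field; presumably they exist so that a captured mutable variable marked @volatile keeps its visibility guarantees once the compiler lifts it onto the heap. A rough conceptual sketch of that lowering (not the compiler's literal output):

    def makeCounter(): () => Int = {
      var n = 0             // conceptually lifted to: val n = new scala.runtime.IntRef(0)
      () => { n += 1; n }   // the closure reads and writes n.elem
    }
    // With `@volatile var n = 0` the box would be a VolatileIntRef instead,
    // so updates made inside the closure stay visible across threads.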
diff --git a/src/library/scala/serializable.scala b/src/library/scala/serializable.scala
index 2fbec64ef5..6269359cd8 100644
--- a/src/library/scala/serializable.scala
+++ b/src/library/scala/serializable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index e082c2cdfa..f4ce207c4b 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -18,20 +17,17 @@ package scala
* </code>
*
* Type T can be specialized on a subset of the primitive types by
- * specifying a comma-separated string argument:
+ * specifying a list of primitive types to specialize at:
*
* <code>
- * class MyList[@specialized("Int, Double, Boolean") T] ..
+ * class MyList[@specialized(Int, Double, Boolean) T] ..
* </code>
- * Only primitive types are supported and no name resolution is currently
- * done on the string arguments (meaning imports and type aliases are
- * not resolved).
*
* @since 2.8
*/
-class specialized(types: String) extends StaticAnnotation {
+class specialized(types: runtime.AnyValCompanion*) extends StaticAnnotation {
def this() {
- this("Boolean, Byte, Short, Char, Int, Long, Float, Double")
+ this(Unit, Boolean, Byte, Short, Char, Int, Long, Float, Double)
}
}
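With the signature change above, specialization targets are written as types rather than as a string argument. A short usage sketch:

    // Specialize T only at Int, Double and Boolean:
    class MyList[@specialized(Int, Double, Boolean) T](val head: T, val tail: MyList[T])

    // The no-argument form falls back to the full set of primitive types:
    def firstOf[@specialized T](xs: Array[T]): T = xs(0)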
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
index 7d2da0633b..8e29d740db 100644
--- a/src/library/scala/testing/Benchmark.scala
+++ b/src/library/scala/testing/Benchmark.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.testing
diff --git a/src/library/scala/testing/SUnit.scala b/src/library/scala/testing/SUnit.scala
index b4d2636c5e..1ea54f4982 100644
--- a/src/library/scala/testing/SUnit.scala
+++ b/src/library/scala/testing/SUnit.scala
@@ -1,17 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.testing
import scala.collection.mutable.ArrayBuffer
+import xml.{ Node, NodeSeq }
/**
* <p>
@@ -237,6 +237,25 @@ object SUnit {
assertTrue("(no message)", actual)
}
+ /** Temporary patchwork trying to nurse xml forward. */
+ def assertEqualsXML(msg: String, expected: NodeSeq, actual: NodeSeq) {
+ if (!expected.xml_==(actual))
+ fail(msg, expected, actual)
+ }
+ def assertEqualsXML(msg: String, expected: Seq[Node], actual: Seq[Node]) {
+ assertEqualsXML(msg, expected: NodeSeq, actual: NodeSeq)
+ }
+
+ def assertEqualsXML(expected: NodeSeq, actual: NodeSeq) {
+ assertEqualsXML("(no message)", expected, actual)
+ }
+
+ def assertSameElementsXML(actual: Seq[Node], expected: Seq[Node]) {
+ val res = (actual: NodeSeq) xml_sameElements expected
+
+ assert(res, "\nassertSameElementsXML:\n actual = %s\n expected = %s".format(actual, expected))
+ }
+
/** throws <code>AssertFailed</code> with given message <code>msg</code>.
*/
def fail(msg: String) {
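A hedged sketch of how the new XML assertions might be used, assuming they sit next to the existing assertions in SUnit's Assert trait as the surrounding hunk suggests (the object name below is made up):

    import scala.testing.SUnit._
    import scala.xml.NodeSeq

    object XmlChecks extends Assert {
      def run() {
        val expected: NodeSeq = <p><b>hi</b></p>
        val actual: NodeSeq   = <p><b>hi</b></p>
        assertEqualsXML("same structure", expected, actual)   // structural xml_== comparison
        assertSameElementsXML(actual, expected)
      }
    }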
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
index ed1684d99b..f0efa691ac 100644
--- a/src/library/scala/testing/Show.scala
+++ b/src/library/scala/testing/Show.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.testing
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index d7da0b7ffb..b44741dfe8 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.text
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 34728a094c..62dfc8fc96 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
@@ -14,13 +13,13 @@ package scala
/** <p>
* Annotation for specifying the exceptions thrown by a method.
* For example:
- * </p><pre>
- * <b>class</b> Reader(fname: String) {
- * <b>private val</b> in =
- * <b>new</b> BufferedReader(<b>new</b> <a class="java/io/FileReader" href="" target="_top">FileReader</a>(fname))
- * @throws(classOf[<a class="java/io/IOException" href="" target="_top">IOException</a>])
- * <b>def</b> read() = in.read()
- * }</pre>
+ * {{{
+ * class Reader(fname: String) {
+ * private val in = new BufferedReader(new FileReader(fname))
+ * @throws(classOf[IOException])
+ * def read() = in.read()
+ * }
+ * }}}
*
* @author Nikolay Mihaylov
* @version 1.0, 19/05/2006
diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala
index 8ed82f2691..12aa15fd1f 100644
--- a/src/library/scala/transient.scala
+++ b/src/library/scala/transient.scala
@@ -1,14 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
+import annotation.target._
+
+@field
class transient extends StaticAnnotation
diff --git a/src/library/scala/unchecked.scala b/src/library/scala/unchecked.scala
index a417030c39..7cdce63380 100644
--- a/src/library/scala/unchecked.scala
+++ b/src/library/scala/unchecked.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala
index 51d017888a..52d1763123 100644
--- a/src/library/scala/util/DynamicVariable.scala
+++ b/src/library/scala/util/DynamicVariable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util
diff --git a/src/library/scala/util/Hashable.scala b/src/library/scala/util/Hashable.scala
deleted file mode 100644
index 117d749316..0000000000
--- a/src/library/scala/util/Hashable.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.util
-
-/** <p>
- * A convenience trait for simplifying hashCode creation.
- * Mix this into a class and define <code>val hashValues = Seq(x1, x2, ...)</code>
- * and your <code>hashCode</code> will be derived from those values.
- * If you define <code>equals</code> in terms of <code>equalHashValues</code>
- * then your <code>hashCode</code> and <code>equals</code> methods will
- * never be out of sync. Something like:
- * </p><pre>
- * <b>override def</b> equals(other: Any) = other <b>match</b> {
- * <b>case</b> x: YourClass => <b>this</b> equalHashValues x
- * <b>case</b> _ => <b>false</b>
- * }</pre>
- *
- * @author Paul Phillips
- */
-abstract trait Hashable extends AnyRef
-{
- import Hashable._
- protected def hashValues: Seq[Any] // in an ideal universe this would be more like Seq[Hashable]
- protected def hashSeed: Int = 1
-
- override def hashCode: Int =
- (hashValues map calculateHashCode).foldLeft(hashSeed)((x, y) => x * 41 + y)
-
- protected def equalHashValues(other: Any) = other match {
- case x: Hashable => hashValues sameElements x.hashValues
- case _ => false
- }
-}
-abstract trait StrictHashable extends Hashable
-{
- protected def hashValues: Seq[Hashable]
-}
-
-object Hashable
-{
- /** This implicit is for StrictHashable's benefit, so your hashValues Seq
- * can contain both explicitly Hashable classes and value types.
- */
- implicit def anyVal2Hashable(x: AnyVal): Hashable =
- new Hashable { protected def hashValues = Seq(x) }
-
- private def calculateHashCode(x: Any) = x match {
- case null => 0
- case x: AnyRef => x.hashCode
- case x => x.asInstanceOf[AnyRef].hashCode
- }
-}
-
diff --git a/src/library/scala/util/JenkinsHash.scala b/src/library/scala/util/JenkinsHash.scala
deleted file mode 100644
index 471774439c..0000000000
--- a/src/library/scala/util/JenkinsHash.scala
+++ /dev/null
@@ -1,191 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util
-
-import java.nio.ByteBuffer
-
-/**
- * Original algorithm due to Bob Jenkins.
- * http://burtleburtle.net/bob/c/lookup3.c
- * Scala version partially adapted from java version by Gray Watson.
- * http://256.com/sources/jenkins_hash_java/JenkinsHash.java
- *
- * This is based on the 1996 version, not the 2006 version, and
- * could most likely stand some improvement; the collision rate is
- * negligible in my tests, but performance merits investigation.
- *
- * @author Paul Phillips
- */
-
-object JenkinsHash {
- final val MAX_VALUE = 0xFFFFFFFFL
-
- private def bytesProvided(v: Any) = v match {
- case x: Byte => 1
- case x: Short => 2
- case x: Int => 4
- case x: Long => 8
- case x: Float => 4
- case x: Double => 8
- case x: Boolean => 1
- case x: Char => 2
- case x: Unit => 0
- case _ => 4
- }
-
- private def putAnyVal(bb: ByteBuffer, v: AnyVal) = v match {
- case x: Byte => bb put x
- case x: Short => bb putShort x
- case x: Int => bb putInt x
- case x: Long => bb putLong x
- case x: Float => bb putFloat x
- case x: Double => bb putDouble x
- case x: Boolean => bb.put(if (x) Byte.MaxValue else Byte.MinValue)
- case x: Char => bb putChar x
- case x: Unit =>
- }
-
- /** Not entirely sure how else one might do this these days, since
- * matching on x: AnyVal is a compile time error.
- */
- private def classifyAny(x: Any): (Option[AnyVal], Option[AnyRef]) = x match {
- case x: Byte => (Some(x), None)
- case x: Short => (Some(x), None)
- case x: Int => (Some(x), None)
- case x: Long => (Some(x), None)
- case x: Float => (Some(x), None)
- case x: Double => (Some(x), None)
- case x: Boolean => (Some(x), None)
- case x: Char => (Some(x), None)
- case x: Unit => (Some(x), None)
- case x: AnyRef => (None, Some(x))
- }
-
- private def partitionValuesAndRefs(xs: Seq[Any]): (Seq[AnyVal], Seq[AnyRef]) = {
- val (avs, ars) = xs map classifyAny unzip
-
- (avs.flatten, ars.flatten)
- }
-
- private def hashAnyValSeq(xs: Seq[AnyVal]): Int = {
- val arr = new Array[Byte](xs map bytesProvided sum)
- val bb = ByteBuffer wrap arr
- xs foreach (x => putAnyVal(bb, x))
-
- hash(bb.array()).toInt
- }
-
- /**
- * Convert a byte into a long value without making it negative.
- */
- private def byteToLong(b: Byte): Long = {
- val res = b & 0x7F
- if ((b & 0x80) != 0L) res + 128
- else res
- }
-
- /**
- * Do addition and turn into 4 bytes.
- */
- private def add(x1: Long, x2: Long) = (x1 + x2) & MAX_VALUE
-
- /**
- * Do subtraction and turn into 4 bytes.
- */
- private def subtract(x1: Long, x2: Long) = (x1 - x2) & MAX_VALUE
-
- /**
- * Left shift val by shift bits and turn in 4 bytes.
- */
- private def xor(x1: Long, x2: Long) = (x1 ^ x2) & MAX_VALUE
-
- /**
- * Left shift val by shift bits. Cut down to 4 bytes.
- */
- private def leftShift(x: Long, shift: Int) = (x << shift) & MAX_VALUE
-
- /**
- * Convert 4 bytes from the buffer at offset into a long value.
- */
- private def fourByteToLong(bytes: Array[Byte], offset: Int) =
- 0 to 3 map (i => byteToLong(bytes(offset + i)) << (i * 8)) sum
-
- /**
- * Hash a sequence of anything into a 32-bit value. Descendants
- * of AnyVal are broken down into individual bytes and mixed with
- * some vigor, and this is summed with the hashCodes provided by
- * the descendants of AnyRef.
- */
- def hashSeq(xs: Seq[Any]): Int = {
- val (values, refs) = partitionValuesAndRefs(xs)
- val refsSum = refs map (x => if (x == null) 0 else x.hashCode) sum
-
- hashAnyValSeq(values) + refsSum
- }
-
- /**
- * Hash a variable-length key into a 32-bit value. Every bit of the
- * key affects every bit of the return value. Every 1-bit and 2-bit
- * delta achieves avalanche. The best hash table sizes are powers of 2.
- *
- * @param buffer Byte array that we are hashing on.
- * @param initialValue Initial value of the hash if we are continuing from
- * a previous run. 0 if none.
- * @return Hash value for the buffer.
- */
- def hash(buffer: Array[Byte], initialValue: Long = 0L): Long = {
- var a, b = 0x09e3779b9L
- var c = initialValue
-
- def hashMix(): Long = {
- a = subtract(a, b); a = subtract(a, c); a = xor(a, c >> 13);
- b = subtract(b, c); b = subtract(b, a); b = xor(b, leftShift(a, 8));
- c = subtract(c, a); c = subtract(c, b); c = xor(c, (b >> 13));
- a = subtract(a, b); a = subtract(a, c); a = xor(a, (c >> 12));
- b = subtract(b, c); b = subtract(b, a); b = xor(b, leftShift(a, 16));
- c = subtract(c, a); c = subtract(c, b); c = xor(c, (b >> 5));
- a = subtract(a, b); a = subtract(a, c); a = xor(a, (c >> 3));
- b = subtract(b, c); b = subtract(b, a); b = xor(b, leftShift(a, 10));
- c = subtract(c, a); c = subtract(c, b); c = xor(c, (b >> 15));
-
- c
- }
-
- def mixTwelve(pos: Int) = {
- a = add(a, fourByteToLong(buffer, pos));
- b = add(b, fourByteToLong(buffer, pos + 4));
- c = add(c, fourByteToLong(buffer, pos + 8));
- hashMix()
- }
-
- // mix in blocks of 12
- var pos: Int = buffer.length
- while (pos >= 12) {
- pos -= 12
- mixTwelve(pos)
- }
- c += buffer.length
-
- // mix any leftover bytes (0-11 remaining)
- if (pos > 10) c = add(c, leftShift(byteToLong(buffer(10)), 24))
- if (pos > 9) c = add(c, leftShift(byteToLong(buffer(9)), 16))
- if (pos > 8) c = add(c, leftShift(byteToLong(buffer(8)), 8))
- if (pos > 7) b = add(b, leftShift(byteToLong(buffer(7)), 24))
- if (pos > 6) b = add(b, leftShift(byteToLong(buffer(6)), 16))
- if (pos > 5) b = add(b, leftShift(byteToLong(buffer(5)), 8))
- if (pos > 4) b = add(b, byteToLong(buffer(4)))
- if (pos > 3) a = add(a, leftShift(byteToLong(buffer(3)), 24))
- if (pos > 2) a = add(a, leftShift(byteToLong(buffer(2)), 16))
- if (pos > 1) a = add(a, leftShift(byteToLong(buffer(1)), 8))
- if (pos > 0) a = add(a, byteToLong(buffer(0)))
-
- // final mix and result
- hashMix()
- }
-}
diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala
index 63d2004769..32c0a15ef3 100644
--- a/src/library/scala/util/Marshal.scala
+++ b/src/library/scala/util/Marshal.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2008-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2008-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
new file mode 100644
index 0000000000..26e86ef14d
--- /dev/null
+++ b/src/library/scala/util/MurmurHash.scala
@@ -0,0 +1,196 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util
+
+/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm
+ * (32 bit version); reference: http://code.google.com/p/smhasher
+ *
+ * This is the hash used by collections and case classes (including
+ * tuples).
+ *
+ * @author Rex Kerr
+ * @version 2.9
+ * @since 2.9
+ */
+
+import java.lang.Integer.{ rotateLeft => rotl }
+import scala.collection.Iterator
+
+/** A class designed to generate well-distributed non-cryptographic
+ * hashes. It is designed to be passed to a collection's foreach method,
+ * or can take individual hash values with append. Its own hash code is
+ * set equal to the hash code of whatever it is hashing.
+ */
+class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) {
+ import MurmurHash._
+
+ private var h = startHash(seed)
+ private var c = hiddenMagicA
+ private var k = hiddenMagicB
+ private var hashed = false
+ private var hashvalue = h
+
+ /** Begin a new hash using the same seed. */
+ def reset() {
+ h = startHash(seed)
+ c = hiddenMagicA
+ k = hiddenMagicB
+ hashed = false
+ }
+
+ /** Incorporate the hash value of one item. */
+ def apply(t: T) {
+ h = extendHash(h,t.##,c,k)
+ c = nextMagicA(c)
+ k = nextMagicB(k)
+ hashed = false
+ }
+
+ /** Incorporate a known hash value. */
+ def append(i: Int) {
+ h = extendHash(h,i,c,k)
+ c = nextMagicA(c)
+ k = nextMagicB(k)
+ hashed = false
+ }
+
+ /** Retrieve the hash value */
+ def hash = {
+ if (!hashed) {
+ hashvalue = finalizeHash(h)
+ hashed = true
+ }
+ hashvalue
+ }
+ override def hashCode = hash
+}
+
+/** An object designed to generate well-distributed non-cryptographic
+ * hashes. It is designed to hash a collection of integers; along with
+ * the integers to hash, it generates two magic streams of integers to
+ * increase the distribution of repetitive input sequences. Thus,
+ * three methods need to be called at each step (to start and to
+ * incorporate a new integer) to update the values. Only one method
+ * needs to be called to finalize the hash.
+ */
+
+object MurmurHash {
+ // Magic values used for MurmurHash's 32 bit hash.
+ // Don't change these without consulting a hashing expert!
+ final private val visibleMagic = 0x971e137b
+ final private val hiddenMagicA = 0x95543787
+ final private val hiddenMagicB = 0x2ad7eb25
+ final private val visibleMixer = 0x52dce729
+ final private val hiddenMixerA = 0x7b7d159c
+ final private val hiddenMixerB = 0x6bce6396
+ final private val finalMixer1 = 0x85ebca6b
+ final private val finalMixer2 = 0xc2b2ae35
+
+ // Arbitrary values used for hashing certain classes
+ final private val seedString = 0xf7ca7fd2
+ final private val seedArray = 0x3c074a61
+
+ /** The first 23 magic integers from the first stream are stored here */
+ val storedMagicA =
+ Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray
+
+ /** The first 23 magic integers from the second stream are stored here */
+ val storedMagicB =
+ Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray
+
+ /** Begin a new hash with a seed value. */
+ def startHash(seed: Int) = seed ^ visibleMagic
+
+ /** The initial magic integers in the first stream. */
+ def startMagicA = hiddenMagicA
+
+ /** The initial magic integer in the second stream. */
+ def startMagicB = hiddenMagicB
+
+ /** Incorporates a new value into an existing hash.
+ *
+ * @param hash the prior hash value
+ * @param value the new value to incorporate
+ * @param magicA a magic integer from the stream
+ * @param magicB a magic integer from a different stream
+ * @return the updated hash value
+ */
+ def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int) = {
+ (hash ^ rotl(value*magicA,11)*magicB)*3 + visibleMixer
+ }
+
+ /** Given a magic integer from the first stream, compute the next */
+ def nextMagicA(magicA: Int) = magicA*5 + hiddenMixerA
+
+ /** Given a magic integer from the second stream, compute the next */
+ def nextMagicB(magicB: Int) = magicB*5 + hiddenMixerB
+
+ /** Once all hashes have been incorporated, this performs a final mixing */
+ def finalizeHash(hash: Int) = {
+ var i = (hash ^ (hash>>>16))
+ i *= finalMixer1
+ i ^= (i >>> 13)
+ i *= finalMixer2
+ i ^= (i >>> 16)
+ i
+ }
+
+ /** Compute a high-quality hash of an array */
+ def arrayHash[T](a: Array[T]) = {
+ var h = startHash(a.length * seedArray)
+ var c = hiddenMagicA
+ var k = hiddenMagicB
+ var j = 0
+ while (j < a.length) {
+ h = extendHash(h, a(j).##, c, k)
+ c = nextMagicA(c)
+ k = nextMagicB(k)
+ j += 1
+ }
+ finalizeHash(h)
+ }
+
+ /** Compute a high-quality hash of a string */
+ def stringHash(s: String) = {
+ var h = startHash(s.length * seedString)
+ var c = hiddenMagicA
+ var k = hiddenMagicB
+ var j = 0
+ while (j+1 < s.length) {
+ val i = (s.charAt(j)<<16) + s.charAt(j+1);
+ h = extendHash(h,i,c,k)
+ c = nextMagicA(c)
+ k = nextMagicB(k)
+ j += 2
+ }
+ if (j < s.length) h = extendHash(h,s.charAt(j),c,k)
+ finalizeHash(h)
+ }
+
+ /** Compute a hash that is symmetric in its arguments--that is,
+ * where the order of appearance of elements does not matter.
+ * This is useful for hashing sets, for example.
+ */
+ def symmetricHash[T](xs: collection.TraversableOnce[T], seed: Int) = {
+ var a,b,n = 0
+ var c = 1
+ xs.foreach(i => {
+ val h = i.##
+ a += h
+ b ^= h
+ if (h != 0) c *= h
+ n += 1
+ })
+ var h = startHash(seed * n)
+ h = extendHash(h, a, storedMagicA(0), storedMagicB(0))
+ h = extendHash(h, b, storedMagicA(1), storedMagicB(1))
+ h = extendHash(h, c, storedMagicA(2), storedMagicB(2))
+ finalizeHash(h)
+ }
+}
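For orientation, a small example of the incremental and one-shot entry points defined above (the seed values here are arbitrary):

    object MurmurHashDemo {
      import scala.util.MurmurHash

      def main(args: Array[String]) {
        val words = List("lorem", "ipsum", "dolor")

        // Incremental use: MurmurHash[T] is a T => Unit, so it can be handed to foreach.
        val mh = new MurmurHash[String](words.length)
        words foreach mh
        println(mh.hash)

        // One-shot helpers on the companion object:
        println(MurmurHash.stringHash("lorem ipsum"))
        println(MurmurHash.symmetricHash(Set(1, 2, 3), 0x2a))
      }
    }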
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 0f9e30621e..e4da90ee28 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -1,19 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.util
+import java.io.{ IOException, PrintWriter }
+
+/** Loads library.properties from the jar. */
+object Properties extends PropertiesTrait {
+ protected def propCategory = "library"
+ protected def pickJarBasedOn = classOf[ScalaObject]
+}
+
private[scala] trait PropertiesTrait
{
- import java.io.{ IOException, PrintWriter }
protected def propCategory: String // specializes the remainder of the values
protected def pickJarBasedOn: Class[_] // props file comes from jar containing this
@@ -21,7 +26,7 @@ private[scala] trait PropertiesTrait
protected val propFilename = "/" + propCategory + ".properties"
/** The loaded properties */
- protected lazy val props: java.util.Properties = {
+ protected lazy val scalaProps: java.util.Properties = {
val props = new java.util.Properties
val stream = pickJarBasedOn getResourceAsStream propFilename
if (stream ne null)
@@ -30,7 +35,6 @@ private[scala] trait PropertiesTrait
props
}
- protected def onull[T <: AnyRef](x: T) = if (x eq null) None else Some(x)
private def quietlyDispose(action: => Unit, disposal: => Unit) =
try { action }
finally {
@@ -38,51 +42,86 @@ private[scala] trait PropertiesTrait
catch { case _: IOException => }
}
- // for values based on system properties
- def sysprop(name: String): String = sysprop(name, "")
- def sysprop(name: String, default: String): String = System.getProperty(name, default)
- def syspropset(name: String, value: String) = System.setProperty(name, value)
+ def propIsSet(name: String) = System.getProperty(name) != null
+ def propIsSetTo(name: String, value: String) = propOrNull(name) == value
+ def propOrElse(name: String, alt: String) = System.getProperty(name, alt)
+ def propOrEmpty(name: String) = propOrElse(name, "")
+ def propOrNull(name: String) = propOrElse(name, null)
+ def propOrNone(name: String) = Option(propOrNull(name))
+ def propOrFalse(name: String) = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase)
+ def setProp(name: String, value: String) = System.setProperty(name, value)
+ def clearProp(name: String) = System.clearProperty(name)
+
+ def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt
+ def envOrNone(name: String) = Option(System getenv name)
// for values based on propFilename
- def prop(name: String): String = props.getProperty(name, "")
- def prop(name: String, default: String): String = props.getProperty(name, default)
+ def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
+ def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
/** The version number of the jar this was loaded from plus "version " prefix,
* or "version (unknown)" if it cannot be determined.
*/
- val versionString = "version " + prop("version.number", "(unknown)")
- val copyrightString = prop("copyright.string", "(c) 2002-2009 LAMP/EPFL")
+ val versionString = "version " + scalaPropOrElse("version.number", "(unknown)")
+ val copyrightString = scalaPropOrElse("copyright.string", "(c) 2002-2010 LAMP/EPFL")
/** This is the encoding to use reading in source files, overridden with -encoding
* Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
*/
- val sourceEncoding = prop("file.encoding", "UTF8")
+ def sourceEncoding = scalaPropOrElse("file.encoding", "UTF-8")
/** This is the default text encoding, overridden (unreliably) with
* JAVA_OPTS="-Dfile.encoding=Foo"
*/
- val encodingString = sysprop("file.encoding", "UTF8")
-
- val isWin = sysprop("os.name") startsWith "Windows"
- val isMac = sysprop("java.vendor") startsWith "Apple"
- val javaClassPath = sysprop("java.class.path")
- val javaHome = sysprop("java.home")
- val javaVmName = sysprop("java.vm.name")
- val javaVmVersion = sysprop("java.vm.version")
- val javaVmInfo = sysprop("java.vm.info")
- val javaVersion = sysprop("java.version")
- val tmpDir = sysprop("java.io.tmpdir")
- val userName = sysprop("user.name")
- val scalaHome = sysprop("scala.home", null) // XXX places do null checks...
+ def encodingString = propOrElse("file.encoding", "UTF-8")
+
+ /** The default end of line character.
+ */
+ def lineSeparator = propOrElse("line.separator", "\n")
+
+ /** Various well-known properties.
+ */
+ def javaClassPath = propOrEmpty("java.class.path")
+ def javaHome = propOrEmpty("java.home")
+ def javaVendor = propOrEmpty("java.vendor")
+ def javaVersion = propOrEmpty("java.version")
+ def javaVmInfo = propOrEmpty("java.vm.info")
+ def javaVmName = propOrEmpty("java.vm.name")
+ def javaVmVendor = propOrEmpty("java.vm.vendor")
+ def javaVmVersion = propOrEmpty("java.vm.version")
+ def osName = propOrEmpty("os.name")
+ def scalaHome = propOrEmpty("scala.home")
+ def tmpDir = propOrEmpty("java.io.tmpdir")
+ def userDir = propOrEmpty("user.dir")
+ def userHome = propOrEmpty("user.home")
+ def userName = propOrEmpty("user.name")
+
+ /** Some derived values.
+ */
+ def isWin = osName startsWith "Windows"
+ def isMac = javaVendor startsWith "Apple"
- // provide a main method so version info can be obtained by running this
- private val writer = new java.io.PrintWriter(Console.err, true)
def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
- def main(args: Array[String]) { writer println versionMsg }
-}
+ def scalaCmd = if (isWin) "scala.bat" else "scala"
+ def scalacCmd = if (isWin) "scalac.bat" else "scalac"
-/** Loads library.properties from the jar. */
-object Properties extends PropertiesTrait {
- protected def propCategory = "library"
- protected def pickJarBasedOn = classOf[Application]
+ /** Can the java version be determined to be at least as high as the argument?
+ * Hard to properly future proof this but at the rate 1.7 is going we can leave
+ * the issue for our cyborg grandchildren to solve.
+ */
+ def isJavaAtLeast(version: String) = {
+ val okVersions = version match {
+ case "1.5" => List("1.5", "1.6", "1.7")
+ case "1.6" => List("1.6", "1.7")
+ case "1.7" => List("1.7")
+ case _ => Nil
+ }
+ okVersions exists (javaVersion startsWith _)
+ }
+
+ // provide a main method so version info can be obtained by running this
+ def main(args: Array[String]) {
+ val writer = new PrintWriter(Console.err, true)
+ writer println versionMsg
+ }
}
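The accessors introduced above are plain defs, so a quick smoke test of the new API (REPL or script style; the property name on the last line is a made-up example) looks like this:

    import scala.util.Properties._

    println(versionMsg)
    println("java.home   = " + javaHome)
    println("encoding    = " + encodingString)
    println("on Windows? " + isWin + "  (scala command: " + scalaCmd + ")")
    println("Java 1.6+?  " + isJavaAtLeast("1.6"))
    println("my.flag     = " + propOrElse("my.flag", "unset"))   // hypothetical property name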
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index d1c04e996f..53e721dcda 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util
@@ -18,6 +17,8 @@ import collection.immutable.List
*
*/
class Random(val self: java.util.Random) {
+ import collection.mutable.ArrayBuffer
+ import collection.generic.CanBuildFrom
/** Creates a new random number generator using a single long seed. */
def this(seed: Long) = this(new java.util.Random(seed))
@@ -89,37 +90,23 @@ class Random(val self: java.util.Random) {
List.fill(length)(safeChar()).mkString
}
- /** Returns a pseudorandomly generated String drawing upon
- * only ASCII characters between 33 and 126.
+ /** Returns the next pseudorandom, uniformly distributed value
+ * from the ASCII range 33-126.
*/
- def nextASCIIString(length: Int) = {
- val (min, max) = (33, 126)
- def nextDigit = nextInt(max - min) + min
-
- new String(Array.fill(length)(nextDigit.toByte), "ASCII")
+ def nextPrintableChar(): Char = {
+ val (low, high) = (33, 126)
+ (self.nextInt(high - low) + low).toChar
}
def setSeed(seed: Long) { self.setSeed(seed) }
-}
-
-/** The object <code>Random</code> offers a default implementation
- * of scala.util.Random and random-related convenience methods.
- *
- * @since 2.8
- */
-object Random extends Random
-{
- import collection.Traversable
- import collection.mutable.ArrayBuffer
- import collection.generic.CanBuildFrom
/** Returns a new collection of the same type in a randomly chosen order.
*
- * @param coll the Traversable to shuffle
- * @return the shuffled Traversable
+ * @param coll the TraversableOnce to shuffle
+ * @return the shuffled TraversableOnce
*/
- def shuffle[T, CC[X] <: Traversable[X]](coll: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
- val buf = new ArrayBuffer[T] ++ coll
+ def shuffle[T, CC[X] <: TraversableOnce[X]](xs: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
+ val buf = new ArrayBuffer[T] ++= xs
def swap(i1: Int, i2: Int) {
val tmp = buf(i1)
@@ -132,6 +119,27 @@ object Random extends Random
swap(n - 1, k)
}
- bf(coll) ++= buf result
+ bf(xs) ++= buf result
+ }
+
+}
+
+/** The object <code>Random</code> offers a default implementation
+ * of scala.util.Random and random-related convenience methods.
+ *
+ * @since 2.8
+ */
+object Random extends Random {
+
+ /** Returns a Stream of pseudorandomly chosen alphanumeric characters,
+ * equally chosen from A-Z, a-z, and 0-9.
+ *
+ * @since 2.8
+ */
+ def alphanumeric: Stream[Char] = {
+ def isAlphaNum(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
+
+ Stream continually nextPrintableChar filter isAlphaNum
}
+
}
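A short tour of the reshuffled API (shuffle and nextPrintableChar now live in class Random, alphanumeric on the companion); the values shown in comments are of course random:

    import scala.util.Random

    val xs       = List(1, 2, 3, 4, 5)
    val shuffled = Random.shuffle(xs)                     // still a List[Int], via CanBuildFrom
    val c        = Random.nextPrintableChar()             // an ASCII character in the 33-126 range
    val token    = Random.alphanumeric.take(8).mkString   // e.g. "k3WpR0aZ"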
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index a7c83a5f43..8174a0e711 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -6,52 +6,48 @@
** |/ **
\* */
-// $Id$
package scala.util
import scala.reflect.ClassManifest
-/** <p>
- * The Sorting object provides functions that can sort various kinds of
- * objects. You can provide a comparison function, or you can request a sort
- * of items that are viewable as <code>Ordered</code>. Some sorts that
- * operate directly on a subset of value types are also provided. These
- * implementations are derived from those in the Sun JDK.
- * </p>
- * <p>
- * Note that stability doesn't matter for value types, so use the quickSort
- * variants for those. <code>stableSort</code> is intended to be used with
- * objects when the prior ordering should be preserved, where possible.
- * </p>
- *
- * @author Ross Judson
- * @version 1.0
- */
+/** The Sorting object provides functions that can sort various kinds of
+ * objects. You can provide a comparison function, or you can request a sort
+ * of items that are viewable as <code>Ordered</code>. Some sorts that
+ * operate directly on a subset of value types are also provided. These
+ * implementations are derived from those in the Sun JDK.
+ *
+ * Note that stability doesn't matter for value types, so use the quickSort
+ * variants for those. <code>stableSort</code> is intended to be used with
+ * objects when the prior ordering should be preserved, where possible.
+ *
+ * @author Ross Judson
+ * @version 1.0
+ */
object Sorting {
/** Provides implicit access to sorting on arbitrary sequences of orderable
* items. This doesn't quite work the way that I want yet -- K should be
* bounded as viewable, but the compiler rejects that.
*/
- implicit def seq2RichSort[K <: Ordered[K] : ClassManifest](s: Seq[K]) = new RichSorting[K](s)
+ // implicit def seq2RichSort[K <: Ordered[K] : ClassManifest](s: Seq[K]) = new RichSorting[K](s)
/** Quickly sort an array of Doubles. */
- def quickSort(a: Array[Double]) = sort1(a, 0, a.length)
+ def quickSort(a: Array[Double]) { sort1(a, 0, a.length) }
- /** Quickly sort an array of items that are viewable as ordered. */
- def quickSort[K <% Ordered[K]](a: Array[K]) = sort1(a, 0, a.length)
+ /** Quickly sort an array of items with an implicit Ordering. */
+ def quickSort[K](a: Array[K])(implicit ord: Ordering[K]) { sort1(a, 0, a.length) }
/** Quickly sort an array of Ints. */
- def quickSort(a: Array[Int]) = sort1(a, 0, a.length)
+ def quickSort(a: Array[Int]) { sort1(a, 0, a.length) }
/** Quickly sort an array of Floats. */
- def quickSort(a: Array[Float]) = sort1(a, 0, a.length)
+ def quickSort(a: Array[Float]) { sort1(a, 0, a.length) }
/** Sort an array of K where K is Ordered, preserving the existing order
- where the values are equal. */
- def stableSort[K <% Ordered[K] : ClassManifest](a: Array[K]) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), (a:K, b:K) => a < b)
+ * where the values are equal. */
+ def stableSort[K](a: Array[K])(implicit m: ClassManifest[K], ord: Ordering[K]) {
+ stableSort(a, 0, a.length-1, new Array[K](a.length), ord.lt _)
}
/** Sorts an array of <code>K</code> given an ordering function
@@ -77,8 +73,8 @@ object Sorting {
}
/** Sorts an arbitrary sequence of items that are viewable as ordered. */
- def stableSort[K <% Ordered[K] : ClassManifest](a: Seq[K]): Array[K] =
- stableSort(a, (a:K, b:K) => a < b)
+ def stableSort[K](a: Seq[K])(implicit m: ClassManifest[K], ord: Ordering[K]): Array[K] =
+ stableSort(a, ord.lt _)
/** Stably sorts a sequence of items given an extraction function that will
* return an ordered key from an item.
@@ -87,10 +83,11 @@ object Sorting {
* @param f the comparison function.
* @return the sorted sequence of items.
*/
- def stableSort[K : ClassManifest, M <% Ordered[M]](a: Seq[K], f: K => M): Array[K] =
- stableSort(a, (a: K, b: K) => f(a) < f(b))
+ def stableSort[K, M](a: Seq[K], f: K => M)(implicit m: ClassManifest[K], ord: Ordering[M]): Array[K] =
+ stableSort(a)(m, ord on f)
- private def sort1[K <% Ordered[K]](x: Array[K], off: Int, len: Int) {
+ private def sort1[K](x: Array[K], off: Int, len: Int)(implicit ord: Ordering[K]) {
+ import ord._
def swap(a: Int, b: Int) {
val t = x(a)
x(a) = x(b)
@@ -174,9 +171,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
@@ -275,9 +272,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
@@ -383,9 +380,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
@@ -491,9 +488,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
@@ -532,51 +529,6 @@ object Sorting {
}
}
}
-
- // for testing
- def main(args: Array[String]) {
- val tuples = Array(
- (1, "one"), (1, "un"), (3, "three"), (2, "deux"),
- (2, "two"), (0, "zero"), (3, "trois")
- )
- val integers = Array(
- 3, 4, 0, 4, 5, 0, 3, 3, 0
- )
- val doubles = Array(
- 3.4054752250314283E9,
- 4.9663151227666664E10,
-// 0.0/0.0 is interpreted as Nan
-// 0.0/0.0,
- 4.9663171987125E10,
- 5.785996973446602E9,
-// 0.0/0.0,
- 3.973064849653333E10,
- 3.724737288678125E10
-// 0.0/0.0
- )
- val floats = Array(
- 3.4054752250314283E9f,
- 4.9663151227666664E10f,
-// 0.0f/0.0f,
- 4.9663171987125E10f,
- 5.785996973446602E9f,
-// 0.0f/0.0f,
- 3.973064849653333E10f,
- 3.724737288678125E10f
-// 0.0f/0.0f
- )
- Sorting quickSort tuples
- println(tuples.toList)
-
- Sorting quickSort integers
- println(integers.toList)
-
- Sorting quickSort doubles
- println(doubles.toList)
-
- Sorting quickSort floats
- println(floats.toList)
- }
}
/** <p>
@@ -585,8 +537,7 @@ object Sorting {
* the items are ordered.
* </p>
*/
-class RichSorting[K <: Ordered[K] : ClassManifest](s: Seq[K]) {
-
+class RichSorting[K](s: Seq[K])(implicit m: ClassManifest[K], ord: Ordering[K]) {
/** Returns an array with a sorted copy of the RichSorting's sequence.
*/
def sort = Sorting.stableSort(s)
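For reference, a minimal usage sketch of the reworked Sorting signatures (illustrative only, not part of the patch, and assuming the implicit Ordering/ClassManifest parameters shown above); the Person class and field names are made up:

import scala.util.Sorting

object SortingUsageSketch {
  // hypothetical record type, used only for this example
  case class Person(name: String, age: Int)

  def main(args: Array[String]) {
    val ints = Array(3, 1, 2)
    Sorting.quickSort(ints)                       // Int-specific overload
    println(ints.toList)                          // List(1, 2, 3)

    val names = Array("bob", "alice", "carol")
    Sorting.quickSort(names)                      // generic overload, Ordering[String] found implicitly
    println(names.toList)                         // List(alice, bob, carol)

    // key-extraction overload: Ordering[Int] is summoned implicitly and
    // composed with the extractor, as `ord on f` does in the patch
    val people = Seq(Person("bob", 30), Person("alice", 25))
    val byAge = Sorting.stableSort(people, (p: Person) => p.age)
    println(byAge.toList)                         // List(Person(alice,25), Person(bob,30))
  }
}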
diff --git a/src/library/scala/util/automata/BaseBerrySethi.scala b/src/library/scala/util/automata/BaseBerrySethi.scala
index 09c6fd2835..63eb0aba9f 100644
--- a/src/library/scala/util/automata/BaseBerrySethi.scala
+++ b/src/library/scala/util/automata/BaseBerrySethi.scala
@@ -1,39 +1,36 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.automata
-import scala.util.regexp.Base
+import scala.util.regexp.{ Base }
-import scala.collection.mutable
-import scala.collection.immutable
-import collection.immutable.{List, Nil}
-import collection.{Seq, Iterator}
+import scala.collection.{ mutable, immutable }
+import mutable.{ HashMap }
+import immutable.{ Set }
+
+// todo: replace global variable pos with acc
/** this turns a regexp over A into a NondetWorkAutom over A using the
* celebrated position automata construction (also called Berry-Sethi or
* Glushkov)
*/
abstract class BaseBerrySethi {
-
val lang: Base
- import lang.{Alt,Eps,Meta,RegExp,Sequ,Star}
+ import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star }
protected var pos = 0
- protected var globalFirst: immutable.Set[Int] = _
-
// results which hold all info for the NondetWordAutomaton
- protected var follow: mutable.HashMap[Int, immutable.Set[Int]] = _
+ protected var follow: HashMap[Int, Set[Int]] = _
protected var finalTag: Int = _
@@ -41,63 +38,24 @@ abstract class BaseBerrySethi {
// constants --------------------------
- final val emptySet:immutable.Set[Int] = immutable.Set[Int]()
+ final val emptySet: Set[Int] = Set()
+
+ private def doComp(r: RegExp, compFunction: RegExp => Set[Int]) = r match {
+ case x: Alt => (x.rs map compFirst).foldLeft(emptySet)(_ ++ _)
+ case Eps => emptySet
+ case x: Meta => compFunction(x.r)
+ case x: Sequ =>
+ val (l1, l2) = x.rs span (_.isNullable)
+ ((l1 ++ (l2 take 1)) map compFunction).foldLeft(emptySet)(_ ++ _)
+ case Star(t) => compFunction(t)
+ case _ => throw new IllegalArgumentException("unexpected pattern " + r.getClass())
+ }
/** computes first( r ) for the word regexp r */
- protected def compFirst(r: RegExp): immutable.Set[Int] = r match {
- case x:Alt =>
- var tmp = emptySet
- val it = x.rs.iterator // union
- while (it.hasNext) { tmp = tmp ++ compFirst(it.next) }
- tmp
- case Eps =>
- emptySet
- //case x:Letter => emptySet + posMap(x); // singleton set
- case x:Meta =>
- compFirst(x.r)
- case x:Sequ =>
- var tmp = emptySet;
- val it = x.rs.iterator; // union
- while (it.hasNext) {
- val z = it.next
- tmp = tmp ++ compFirst(z)
- if (!z.isNullable)
- return tmp
- }
- tmp
- case Star(t) =>
- compFirst(t)
- case _ =>
- throw new IllegalArgumentException("unexpected pattern " + r.getClass())
- }
+ protected def compFirst(r: RegExp): Set[Int] = doComp(r, compFirst)
/** computes last( r ) for the regexp r */
- protected def compLast(r: RegExp): immutable.Set[Int] = r match {
- case x:Alt =>
- var tmp = emptySet
- val it = x.rs.iterator // union
- while (it.hasNext) { tmp = tmp ++ compFirst(it.next) }
- tmp
- case Eps =>
- emptySet
- //case x:Letter => emptySet + posMap(x) // singleton set
- case x:Meta =>
- compLast(x.r)
- case x:Sequ =>
- var tmp = emptySet
- val it = x.rs.iterator.toList.reverse.iterator // union
- while (it.hasNext) {
- val z = it.next
- tmp = tmp ++ compLast(z)
- if (!z.isNullable)
- return tmp
- }
- tmp
- case Star(t) =>
- compLast(t)
- case _ =>
- throw new IllegalArgumentException("unexpected pattern " + r.getClass())
- }
+ protected def compLast(r: RegExp): Set[Int] = doComp(r, compLast)
/** Starts from the right-to-left
* precondition: pos is final
@@ -106,24 +64,17 @@ abstract class BaseBerrySethi {
* @param r ...
* @return ...
*/
- protected def compFollow(r: Seq[RegExp]): immutable.Set[Int] = {
- var first = emptySet
- var fol = emptySet
- if (r.length > 0) {//non-empty expr
-
- val it = r.iterator.toList.reverse.iterator
-
- fol = fol + pos // don't modify pos !
- while (it.hasNext) {
- val p = it.next
- first = compFollow1(fol, p)
- fol =
- if (p.isNullable) fol ++ first
- else first
- }
- }
- this.follow.update(0, fol /*first*/)
- fol
+ protected def compFollow(rs: Seq[RegExp]): Set[Int] = {
+ follow(0) =
+ if (rs.isEmpty) emptySet
+ else rs.foldRight(Set(pos))((p, fol) => {
+ val first = compFollow1(fol, p)
+
+ if (p.isNullable) fol ++ first
+ else first
+ })
+
+ follow(0)
}
/** returns the first set of an expression, setting the follow set along
@@ -133,45 +84,20 @@ abstract class BaseBerrySethi {
* @param r ...
* @return ...
*/
- protected def compFollow1(fol1: immutable.Set[Int], r: RegExp): immutable.Set[Int] = {
- var fol = fol1
- r match {
-
- case x:Alt =>
- var first = emptySet
- val it = x.rs.iterator.toList.reverse.iterator
- while (it.hasNext)
- first = first ++ compFollow1(fol, it.next);
- first
-
- /*
- case x:Letter =>
- val i = posMap( x );
- this.follow.update( i, fol );
- emptySet + i;
- */
- case x:Meta =>
- compFollow1(fol1, x.r)
-
- case x:Star =>
- fol = fol ++ compFirst(x.r)
- compFollow1(fol, x.r)
-
- case x:Sequ =>
- var first = emptySet
- val it = x.rs.iterator.toList.reverse.iterator
- while (it.hasNext) {
- val p = it.next
- first = compFollow1(fol, p)
- fol =
- if (p.isNullable) fol ++ first
- else first
- }
- first
-
- case _ =>
- throw new IllegalArgumentException("unexpected pattern: " + r.getClass())
- }
+ protected def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
+ case x: Alt => Set(x.rs reverseMap (compFollow1(fol1, _)) flatten: _*)
+ case x: Meta => compFollow1(fol1, x.r)
+ case x: Star => compFollow1(fol1 ++ compFirst(x.r), x.r)
+ case x: Sequ =>
+ var first = emptySet
+ x.rs.foldRight(fol1) { (p, fol) =>
+        first = compFollow1(fol, p)
+
+ if (p.isNullable) fol ++ first
+ else first
+ }
+ first
+ case _ => throw new IllegalArgumentException("unexpected pattern: " + r.getClass())
}
/** returns "Sethi-length" of a pattern, creating the set of position
@@ -179,21 +105,12 @@ abstract class BaseBerrySethi {
*
* @param r ...
*/
- // todo: replace global variable pos with acc
protected def traverse(r: RegExp): Unit = r match {
// (is tree automaton stuff, more than Berry-Sethi)
- case x:Alt =>
- val it = x.rs.iterator
- while (it.hasNext) traverse(it.next)
- case x:Sequ =>
- val it = x.rs.iterator
- while (it.hasNext) traverse(it.next)
- case x:Meta =>
- traverse(x.r)
- case Star(t) =>
- traverse(t)
- case _ =>
- throw new IllegalArgumentException("unexp pattern " + r.getClass())
+ case x: Alt => x.rs foreach traverse
+ case x: Sequ => x.rs foreach traverse
+ case x: Meta => traverse(x.r)
+ case Star(t) => traverse(t)
+ case _ => throw new IllegalArgumentException("unexp pattern " + r.getClass())
}
-
}
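The rewrite above replaces the hand-rolled reverse-iterator loops with foldRight over the subterms. A self-contained sketch (illustrative only, using plain integer sets rather than the automaton types) of why the two formulations agree:

object FoldRightSketch {
  def main(args: Array[String]) {
    val rs = List(Set(1), Set(2, 3), Set(4))

    // imperative style, as in the removed code: reverse, then thread an accumulator
    var acc = Set(0)
    val it = rs.reverse.iterator
    while (it.hasNext) acc = acc ++ it.next()

    // foldRight starts from the rightmost element and threads the same
    // accumulator, without materialising the reversed list
    val acc2 = rs.foldRight(Set(0))((s, fol) => fol ++ s)

    println(acc == acc2)  // true
  }
}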
diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/util/automata/DetWordAutom.scala
index 07ffdaf21b..e1e19589c2 100644
--- a/src/library/scala/util/automata/DetWordAutom.scala
+++ b/src/library/scala/util/automata/DetWordAutom.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.automata
@@ -31,23 +30,9 @@ abstract class DetWordAutom[T <: AnyRef] {
val delta: Array[Map[T,Int]]
val default: Array[Int]
- /**
- * @param q ...
- * @return ...
- */
def isFinal(q: Int) = finals(q) != 0
-
- /**
- * @param q ...
- * @return ...
- */
def isSink(q: Int) = delta(q).isEmpty && default(q) == q
- /**
- * @param q ...
- * @param label ...
- * @return ...
- */
def next(q: Int, label: T) = {
delta(q).get(label) match {
case Some(p) => p
@@ -59,25 +44,15 @@ abstract class DetWordAutom[T <: AnyRef] {
val sb = new StringBuilder("[DetWordAutom nstates=")
sb.append(nstates)
sb.append(" finals=")
- var map = scala.collection.immutable.Map[Int,Int]()
- var j = 0; while( j < nstates ) {
- if (j < finals.length)
- map = map.updated(j, finals(j))
- j += 1
- }
+ val map = Map(finals.zipWithIndex map (_.swap): _*)
sb.append(map.toString())
sb.append(" delta=\n")
+
for (i <- 0 until nstates) {
- sb.append( i )
- sb.append("->")
- sb.append(delta(i).toString())
- sb.append('\n')
- if (i < default.length) {
- sb.append("_>")
- sb.append(default(i).toString())
- sb.append('\n')
- }
+ sb append "%d->%s\n".format(i, delta(i))
+ if (i < default.length)
+ sb append "_>%s\n".format(default(i))
}
- sb.toString()
+ sb.toString
}
}
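The new toString builds the finals map with zipWithIndex plus swap; a tiny standalone sketch (illustrative only) of that idiom:

object ZipSwapSketch {
  def main(args: Array[String]) {
    val finals = Array(0, 0, 1, 2)
    // zipWithIndex pairs each value with its position; swap turns
    // (value, index) into (index, value); Map(...: _*) collects the pairs
    val map = Map(finals.zipWithIndex map (_.swap): _*)
    println(map)  // Map(0 -> 0, 1 -> 0, 2 -> 1, 3 -> 2)
  }
}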
diff --git a/src/library/scala/util/automata/Inclusion.scala b/src/library/scala/util/automata/Inclusion.scala
index d5cdc5f318..55ca4e0c37 100644
--- a/src/library/scala/util/automata/Inclusion.scala
+++ b/src/library/scala/util/automata/Inclusion.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.automata
diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/util/automata/NondetWordAutom.scala
index fd2bb657e6..4434ed395d 100644
--- a/src/library/scala/util/automata/NondetWordAutom.scala
+++ b/src/library/scala/util/automata/NondetWordAutom.scala
@@ -1,17 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.automata
-import scala.collection.{immutable, mutable, Set, Seq, Map}
+import scala.collection.{ immutable, mutable, Set, Seq, Map }
+import immutable.{ BitSet }
/** A nondeterministic automaton. States are integers, where
* 0 is always the only initial state. Transitions are represented
@@ -26,8 +26,8 @@ abstract class NondetWordAutom[T <: AnyRef]
val labels: Seq[T]
val finals: Array[Int] // 0 means not final
- val delta: Array[Map[T, immutable.BitSet]]
- val default: Array[immutable.BitSet]
+ val delta: Array[Map[T, BitSet]]
+ val default: Array[BitSet]
/** returns true if the state is final */
final def isFinal(state: Int) = finals(state) > 0
@@ -36,25 +36,27 @@ abstract class NondetWordAutom[T <: AnyRef]
final def finalTag(state: Int) = finals(state)
/** returns true if the set of states contains at least one final state */
- final def containsFinal(Q: immutable.BitSet): Boolean = Q exists isFinal
+ final def containsFinal(Q: BitSet): Boolean = Q exists isFinal
/** returns true if there are no accepting states */
final def isEmpty = (0 until nstates) forall (x => !isFinal(x))
/** returns a bitset with the next states for given state and label */
- def next(q: Int, a: T): immutable.BitSet = delta(q).get(a) getOrElse default(q)
+ def next(q: Int, a: T): BitSet = delta(q).getOrElse(a, default(q))
/** returns a bitset with the next states for given state and label */
- def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a))
- def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default)
+ def next(Q: BitSet, a: T): BitSet = next(Q, next(_, a))
+ def nextDefault(Q: BitSet): BitSet = next(Q, default)
- private def next(Q: immutable.BitSet, f: (Int) => immutable.BitSet): immutable.BitSet =
- (Q map f).foldLeft(immutable.BitSet.empty)(_ ++ _)
+ private def next(Q: BitSet, f: (Int) => BitSet): BitSet =
+ (Q map f).foldLeft(BitSet.empty)(_ ++ _)
+ private def finalStates = 0 until nstates filter isFinal
override def toString = {
- val finalString = Map(0 until nstates filter isFinal map (j => j -> finals(j)) : _*).toString
+
+ val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString
val deltaString = (0 until nstates) .
- map (i => " %d->%s\n _>%s\n".format(i, delta(i).toString, default(i).toString)) mkString
+ map (i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))) mkString
"[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString)
}
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 2dcad6d006..d5b1a19083 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.automata
@@ -57,7 +56,7 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
invIndexMap = invIndexMap.updated(ix, P)
ix += 1
- // make transitiion map
+ // make transition map
val Pdelta = new mutable.HashMap[T, BitSet]
delta.update(P, Pdelta)
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
index 9fb2cb71cc..4a41cbbf5a 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/util/automata/WordBerrySethi.scala
@@ -1,20 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.automata
-import scala.collection.{immutable, mutable, Map}
+import scala.collection.{ immutable, mutable, Map }
+import immutable.{ Set }
+import mutable.{ HashSet, HashMap }
import scala.util.regexp.WordExp
-import collection.immutable.{List, Nil}
-import collection.{Seq, Iterator}
/** This class turns a regexp into a NondetWordAutom using the
* celebrated position automata construction (also called Berry-Sethi or
@@ -24,38 +23,28 @@ import collection.{Seq, Iterator}
* @version 1.0
*/
abstract class WordBerrySethi extends BaseBerrySethi {
-
override val lang: WordExp
- type _labelT = this.lang._labelT
-
- import lang.{Alt, Eps, Letter, Meta, RegExp, Sequ, Star}
+ import lang.{ Alt, Eps, Letter, Meta, RegExp, Sequ, Star }
-
- protected var labels:mutable.HashSet[_labelT] = _
+ protected var labels: HashSet[lang._labelT] = _
// don't let this fool you, only labelAt is a real, surjective mapping
- protected var labelAt: immutable.Map[Int, _labelT] = _ // new alphabet "gamma"
+ protected var labelAt: immutable.Map[Int, lang._labelT] = _ // new alphabet "gamma"
- protected var deltaq: Array[mutable.HashMap[_labelT,List[Int]]] = _ // delta
+ protected var deltaq: Array[HashMap[lang._labelT, List[Int]]] = _ // delta
protected var defaultq: Array[List[Int]] = _ // default transitions
- protected var initials:immutable.Set[Int] = _
- //NondetWordAutom revNfa
-
- // maps a letter to an Integer ( the position )
- // is not *really* needed (preorder determines position!)
- //protected var posMap: mutable.HashMap[RegExp, Int] = _;
+ protected var initials: Set[Int] = _
/** Computes <code>first(r)</code> where the word regexp <code>r</code>.
*
* @param r the regular expression
* @return the computed set <code>first(r)</code>
*/
- protected override def compFirst(r: RegExp): immutable.Set[Int] = r match {
- case x:Letter => emptySet + x.pos //posMap(x); // singleton set
- case Eps => emptySet /*ignore*/
- case _ => super.compFirst(r)
+ protected override def compFirst(r: RegExp): Set[Int] = r match {
+ case x: Letter => Set(x.pos)
+ case _ => super.compFirst(r)
}
/** Computes <code>last(r)</code> where the word regexp <code>r</code>.
@@ -63,10 +52,9 @@ abstract class WordBerrySethi extends BaseBerrySethi {
* @param r the regular expression
* @return the computed set <code>last(r)</code>
*/
- protected override def compLast(r: RegExp): immutable.Set[Int] = r match {
- case x:Letter => emptySet + x.pos //posMap(x) // singleton set
- case Eps => emptySet /*ignore*/
- case _ => super.compLast(r)
+ protected override def compLast(r: RegExp): Set[Int] = r match {
+ case x: Letter => Set(x.pos)
+ case _ => super.compLast(r)
}
/** Returns the first set of an expression, setting the follow set along
@@ -76,38 +64,25 @@ abstract class WordBerrySethi extends BaseBerrySethi {
* @param r the regular expression
* @return the computed set
*/
- protected override def compFollow1(fol1: immutable.Set[Int], r: RegExp): immutable.Set[Int] =
- r match {
- case x:Letter =>
- //val i = posMap(x)
- val i = x.pos
- this.follow.update(i, fol1)
- emptySet + i
- case Eps =>
- emptySet /*ignore*/
- case _ =>
- super.compFollow1(fol1, r)
+ protected override def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
+ case x: Letter => follow(x.pos) = fol1 ; Set(x.pos)
+ case Eps => emptySet
+ case _ => super.compFollow1(fol1, r)
}
/** returns "Sethi-length" of a pattern, creating the set of position
* along the way
*/
-
/** called at the leaves of the regexp */
- protected def seenLabel(r: RegExp, i: Int, label: _labelT) {
- //Console.println("seenLabel (1)");
- //this.posMap.add(r, i)
- this.labelAt = this.labelAt.updated(i, label)
- //@ifdef if( label != Wildcard ) {
- this.labels += label
- //@ifdef }
+ protected def seenLabel(r: RegExp, i: Int, label: lang._labelT) {
+ labelAt = labelAt.updated(i, label)
+ this.labels += label
}
- // overriden in BindingBerrySethi
- protected def seenLabel(r: RegExp, label: _labelT): Int = {
- //Console.println("seenLabel (2)");
- pos = pos + 1
+ // overridden in BindingBerrySethi
+ protected def seenLabel(r: RegExp, label: lang._labelT): Int = {
+ pos += 1
seenLabel(r, pos, label)
pos
}
@@ -115,82 +90,55 @@ abstract class WordBerrySethi extends BaseBerrySethi {
// todo: replace global variable pos with acc
override def traverse(r: RegExp): Unit = r match {
case a @ Letter(label) => a.pos = seenLabel(r, label)
- case Eps => /*ignore*/
+ case Eps => // ignore
case _ => super.traverse(r)
}
- protected def makeTransition(src: Int, dest: Int, label: _labelT ) {
- //@ifdef compiler if( label == Wildcard )
- //@ifdef compiler defaultq.add(src, dest::defaultq( src ))
- //@ifdef compiler else
+ protected def makeTransition(src: Int, dest: Int, label: lang._labelT) {
val q = deltaq(src)
- q.update(label, dest::(q.get(label) match {
- case Some(x) => x
- case _ => Nil
- }))
+ q.update(label, dest :: q.getOrElse(label, Nil))
}
protected def initialize(subexpr: Seq[RegExp]): Unit = {
- //this.posMap = new mutable.HashMap[RegExp,Int]()
- this.labelAt = immutable.Map[Int, _labelT]()
- this.follow = new mutable.HashMap[Int, immutable.Set[Int]]()
- this.labels = new mutable.HashSet[_labelT]()
-
+ this.labelAt = immutable.Map()
+ this.follow = HashMap()
+ this.labels = HashSet()
this.pos = 0
// determine "Sethi-length" of the regexp
- //activeBinders = new IndexedSeq()
- var it = subexpr.iterator
- while (it.hasNext)
- traverse(it.next)
+ subexpr foreach traverse
- //assert(activeBinders.isEmpty())
- this.initials = emptySet + 0
+ this.initials = Set(0)
}
protected def initializeAutom() {
- finals = immutable.Map.empty[Int, Int] // final states
- deltaq = new Array[mutable.HashMap[_labelT, List[Int]]](pos) // delta
- defaultq = new Array[List[Int]](pos) // default transitions
+ finals = immutable.Map.empty[Int, Int] // final states
+ deltaq = new Array[HashMap[lang._labelT, List[Int]]](pos) // delta
+ defaultq = new Array[List[Int]](pos) // default transitions
- var j = 0
- while (j < pos) {
- deltaq(j) = new mutable.HashMap[_labelT,List[Int]]()
+ for (j <- 0 until pos) {
+ deltaq(j) = HashMap[lang._labelT, List[Int]]()
defaultq(j) = Nil
- j += 1
}
}
- protected def collectTransitions(): Unit = { // make transitions
- //Console.println("WBS.collectTrans, this.follow.keys = "+this.follow.keys)
- //Console.println("WBS.collectTrans, pos = "+this.follow.keys)
- var j = 0; while (j < pos) {
- //Console.println("WBS.collectTrans, j = "+j)
- val fol = this.follow(j)
- val it = fol.iterator
- while (it.hasNext) {
- val k = it.next
- if (pos == k)
- finals = finals.updated(j, finalTag)
- else
- makeTransition( j, k, labelAt(k))
- }
- j += 1
+ protected def collectTransitions(): Unit = // make transitions
+ for (j <- 0 until pos ; val fol = follow(j) ; k <- fol) {
+ if (pos == k) finals = finals.updated(j, finalTag)
+ else makeTransition(j, k, labelAt(k))
}
- }
- def automatonFrom(pat: RegExp, finalTag: Int): NondetWordAutom[_labelT] = {
+ def automatonFrom(pat: RegExp, finalTag: Int): NondetWordAutom[lang._labelT] = {
this.finalTag = finalTag
pat match {
- case x:Sequ =>
+ case x: Sequ =>
// (1,2) compute follow + first
initialize(x.rs)
- pos = pos + 1
- globalFirst = compFollow(x.rs)
+ pos += 1
+ compFollow(x.rs) // this used to be assigned to var globalFirst and then never used.
- //System.out.print("someFirst:");debugPrint(someFirst);
// (3) make automaton from follow sets
initializeAutom()
collectTransitions()
@@ -198,68 +146,18 @@ abstract class WordBerrySethi extends BaseBerrySethi {
if (x.isNullable) // initial state is final
finals = finals.updated(0, finalTag)
- var delta1: immutable.Map[Int, Map[_labelT, List[Int]]] =
- immutable.Map[Int, Map[_labelT, List[Int]]]()
+ val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
+ val finalsArr = 0 until pos map (k => finals.getOrElse(k, 0)) toArray // 0 == not final
+ val initialsArr = initials.toArray
- var i = 0
- while (i < deltaq.length) {
- delta1 = delta1.updated(i, deltaq(i))
- i += 1
- }
- val finalsArr = new Array[Int](pos)
-
- {
- var k = 0; while (k < pos) {
- finalsArr(k) = finals.get(k) match {
- case Some(z) => z
- case None => 0 // 0 == not final
- };
- k += 1
- }
- }
-
- val initialsArr = new Array[Int](initials.size)
- val it = initials.iterator
+ val deltaArr: Array[Map[lang._labelT, immutable.BitSet]] =
+ (0 until pos map { x =>
+ HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*)
+ }) toArray
- {
- var k = 0; while (k < initials.size) {
- initialsArr(k) = it.next
- k += 1
- }
- }
+ val defaultArr = 0 until pos map (k => immutable.BitSet(defaultq(k): _*)) toArray
- val deltaArr = new Array[Map[_labelT, immutable.BitSet]](pos)
-
- {
- var k = 0; while(k < pos) {
- val labels = delta1(k).keysIterator
- val hmap =
- new mutable.HashMap[_labelT, immutable.BitSet]
- for (lab <- labels) {
- val trans = delta1(k)
- val x = new mutable.BitSet(pos)
- for (q <- trans(lab))
- x += q
- hmap.update(lab, x.toImmutable)
- }
- deltaArr(k) = hmap
- k += 1
- }
- }
- val defaultArr = new Array[immutable.BitSet](pos)
-
- {
- var k = 0; while(k < pos) {
- val x = new mutable.BitSet(pos)
- for (q <- defaultq(k))
- x += q
- defaultArr(k) = x.toImmutable
- k += 1
- }
- }
-
- new NondetWordAutom[_labelT] {
- type _labelT = WordBerrySethi.this._labelT
+ new NondetWordAutom[lang._labelT] {
val nstates = pos
val labels = WordBerrySethi.this.labels.toList
val initials = initialsArr
@@ -268,35 +166,7 @@ abstract class WordBerrySethi extends BaseBerrySethi {
val default = defaultArr
}
case z =>
- val z1 = z.asInstanceOf[this.lang._regexpT]
- automatonFrom(Sequ(z1), finalTag)
+ automatonFrom(Sequ(z.asInstanceOf[this.lang._regexpT]), finalTag)
}
}
-
- /*
- void print1() {
- System.out.println("after sethi-style processing");
- System.out.println("#positions:" + pos);
- System.out.println("posMap:");
-
- for (Iterator it = this.posMap.keySet().iterator();
- it.hasNext(); ) {
- Tree t = (Tree) it.next();
- switch(t) {
- case Literal( _ ):
- System.out.print( "(" + t.toString() + " -> ");
- String s2 = ((Integer) posMap.get(t)).toString();
- System.out.print( s2 +") ");
- }
- }
- System.out.println("\nfollow: ");
- for (int j = 1; j < pos; j++ ) {
- TreeSet fol = (TreeSet) this.follow.get(new Integer(j));
- System.out.print("("+j+" -> "+fol.toString()+") ");
- //debugPrint( fol );
- System.out.println();
- }
-
- }
- */
-}
+}
\ No newline at end of file
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index 6e7f4d6267..dedf721cd4 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.control
@@ -28,14 +27,14 @@ package scala.util.control
*/
class Breaks {
- private val breakException = new BreakException
+ private val breakException = new BreakControl
/** A block from which one can exit with a `break''. */
def breakable(op: => Unit) {
try {
op
} catch {
- case ex: BreakException =>
+ case ex: BreakControl =>
if (ex ne breakException) throw ex
}
}
@@ -61,5 +60,5 @@ class Breaks {
*/
object Breaks extends Breaks
-private class BreakException extends RuntimeException with ControlException
+private class BreakControl extends ControlThrowable
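For context, a minimal sketch (illustrative only, not from the change itself) of the breakable/break pattern that now travels on BreakControl rather than BreakException:

import scala.util.control.Breaks._

object BreaksSketch {
  def main(args: Array[String]) {
    breakable {
      for (x <- 1 to 10) {
        if (x > 3) break  // throws the shared BreakControl, caught by breakable
        println(x)        // prints 1, 2, 3
      }
    }
  }
}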
diff --git a/src/library/scala/util/control/ControlException.scala b/src/library/scala/util/control/ControlThrowable.scala
index 070d33dff4..dd09793b5f 100644
--- a/src/library/scala/util/control/ControlException.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.control
@@ -21,19 +20,19 @@ package scala.util.control
*
* <p>Instances of <code>Throwable</code> subclasses marked in
* this way should not normally be caught. Where catch-all behaviour is
- * required <code>ControlException</code>s should be propagated, for
+ * required <code>ControlThrowable</code>s should be propagated, for
* example,</p>
*
* <pre>
- * import scala.util.control.ControlException
+ * import scala.util.control.ControlThrowable
*
* try {
* // Body might throw arbitrarily
* } catch {
- * case ce : ControlException => throw ce // propagate
+ * case ce : ControlThrowable => throw ce // propagate
* case t : Exception => log(t) // log and suppress
* </pre>
*
* @author Miles Sabin
*/
-trait ControlException extends Throwable with NoStackTrace
+trait ControlThrowable extends Throwable with NoStackTrace
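The propagation pattern described in the scaladoc above, written out as a runnable sketch (illustrative only; the helper name is made up):

import scala.util.control.ControlThrowable

object ControlSketch {
  // catch-all handler that never swallows control-flow throwables
  def safely(body: => Unit) {
    try body
    catch {
      case ce: ControlThrowable => throw ce                  // propagate
      case e: Exception         => println("logged: " + e)   // log and suppress
    }
  }

  def main(args: Array[String]) {
    safely { throw new RuntimeException("boom") }            // prints "logged: ..."
  }
}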
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index 8658304f0f..6693891e69 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
new file mode 100644
index 0000000000..59e9618028
--- /dev/null
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -0,0 +1,56 @@
+package scala.util.control
+
+/** Methods exported by this object implement tail calls via trampolining.
+ * Tail calling methods have to return their result using `done` or call the next
+ * method using `tailcall`. Both return a `TailRec` object. The result of evaluating
+ *  a tailcalling function can be retrieved from a `TailRec` value using the method `result`.
+ * Here's a usage example:
+ * {{{
+ * import scala.util.control.TailCalls._
+ *
+ * def isEven(xs: List[Int]): TailRec[Boolean] =
+ * if (xs.isEmpty) done(true) else tailcall(isOdd(xs.tail))
+ *
+ * def isOdd(xs: List[Int]): TailRec[Boolean] =
+ * if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
+ *
+ * isEven((1 to 100000).toList).result
+ * }}}
+ */
+object TailCalls {
+
+ /** This class represents a tailcalling computation.
+ */
+ abstract class TailRec[+A] {
+ /** Returns the result of the tailcalling computation
+ */
+ def result: A = {
+ def loop(body: TailRec[A]): A = body match {
+ case Call(rest) => loop(rest())
+ case Done(result) => result
+ }
+ loop(this)
+ }
+ }
+
+ /** Internal class representing a tailcall */
+ protected case class Call[A](rest: () => TailRec[A]) extends TailRec[A]
+
+  /** Internal class representing the final result returned from a tailcalling computation */
+ protected case class Done[A](override val result: A) extends TailRec[A]
+
+ /** Performs a tailcall
+ * @param rest the expression to be evaluated in the tailcall
+ * @return a `TailRec` object representing the expression `rest`
+ */
+ def tailcall[A](rest: => TailRec[A]): TailRec[A] = new Call(() => rest)
+
+  /** Used to return the final result from a tailcalling computation
+   * @param result the result value
+ * @return a `TailRec` object representing a computation which immediately returns `result`
+ */
+ def done[A](result: A): TailRec[A] = new Done(result)
+
+}
+
+
diff --git a/src/library/scala/util/control/TailRec.scala b/src/library/scala/util/control/TailRec.scala
deleted file mode 100644
index db6cbfa2ed..0000000000
--- a/src/library/scala/util/control/TailRec.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.util.control
-
-abstract class TailRec[+A]
-
-object TailRec {
-
- case class Call[A](rest: () => TailRec[A]) extends TailRec[A]
- case class Done[A](result: A) extends TailRec[A]
-
- def tailcall[A](rest: => TailRec[A]) = new Call(() => rest)
- def done [A](result: A) = new Done(result)
- def trampoline[A](body: TailRec[A]): A = {
- def loop(body: TailRec[A]): A = body match {
- case Call(rest) => loop(rest())
- case Done(result) => result
- }
- loop(body)
- }
- def loop[A](body: TailRec[A]): A = body match {
- case Call(rest) => loop[A](rest())
- case Done(result) => result
- }
-}
-
diff --git a/src/library/scala/util/grammar/HedgeRHS.scala b/src/library/scala/util/grammar/HedgeRHS.scala
index 938508af0e..6d72bbb5d5 100644
--- a/src/library/scala/util/grammar/HedgeRHS.scala
+++ b/src/library/scala/util/grammar/HedgeRHS.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.grammar
diff --git a/src/library/scala/util/grammar/TreeRHS.scala b/src/library/scala/util/grammar/TreeRHS.scala
index d0d4c1f628..36f3884034 100644
--- a/src/library/scala/util/grammar/TreeRHS.scala
+++ b/src/library/scala/util/grammar/TreeRHS.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.grammar
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
index 4e055b64e8..61950a315c 100644
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ b/src/library/scala/util/logging/ConsoleLogger.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.logging
@@ -21,8 +20,6 @@ package scala.util.logging
trait ConsoleLogger extends Logged {
/** logs argument to Console using <code>Console.println</code>
- *
- * @param msg ...
*/
override def log(msg: String): Unit = Console.println(msg)
}
diff --git a/src/library/scala/util/logging/Logged.scala b/src/library/scala/util/logging/Logged.scala
index 477ffa3d39..ecf25e230d 100644
--- a/src/library/scala/util/logging/Logged.scala
+++ b/src/library/scala/util/logging/Logged.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.logging
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index e4ef708608..cf2c1d2cea 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.matching
@@ -107,6 +106,32 @@ class Regex(regex: String, groupNames: String*) {
m.replaceAll(replacement)
}
+ /**
+ * Replaces all matches using a replacer function.
+ *
+ * @param target The string to match.
+ * @param replacer The function which maps a match to another string.
+ * @return The target string after replacements.
+ */
+ def replaceAllIn(target: java.lang.CharSequence, replacer: Match => String): String = {
+ val it = new Regex.MatchIterator(target, this, groupNames).replacementData
+ while (it.hasNext) {
+ val matchdata = it.next
+ it.replace(replacer(matchdata))
+ }
+ it.replaced
+ }
+
+ def replaceSomeIn(target: java.lang.CharSequence, replacer: Match => Option[String]): String = {
+ val it = new Regex.MatchIterator(target, this, groupNames).replacementData
+ while (it.hasNext) {
+ val matchdata = it.next
+ val replaceopt = replacer(matchdata)
+ if (replaceopt != None) it.replace(replaceopt.get)
+ }
+ it.replaced
+ }
+
/** Replaces the first match by a string.
*
* @param target The string to match
@@ -227,7 +252,7 @@ object Regex {
}
- /** A case class for a succesful match.
+ /** A case class for a successful match.
*/
class Match(val source: java.lang.CharSequence,
matcher: Matcher,
@@ -264,12 +289,17 @@ object Regex {
def unapply(m: Match): Some[String] = Some(m.matched)
}
+ /** An extractor object that yields groups in the match. */
+ object Groups {
+ def unapplySeq(m: Match): Option[Seq[String]] = if (m.groupCount > 0) Some(1 to m.groupCount map m.group) else None
+ }
+
/** A class to step through a sequence of regex matches
*/
class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String])
extends Iterator[String] with MatchData { self =>
- private val matcher = regex.pattern.matcher(source)
+ protected val matcher = regex.pattern.matcher(source)
private var nextSeen = false
/** Is there another match? */
@@ -307,6 +337,31 @@ object Regex {
def hasNext = self.hasNext
def next = { self.next; new Match(source, matcher, groupNames).force }
}
+
+ /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */
+ private[matching] def replacementData = new Iterator[Match] with Replacement {
+ def matcher = self.matcher
+ def hasNext = self.hasNext
+ def next = { self.next; new Match(source, matcher, groupNames).force }
+ }
+ }
+
+ /**
+ * A trait able to build a string with replacements assuming it has a matcher.
+ * Meant to be mixed in with iterators.
+ */
+ private[matching] trait Replacement {
+ protected def matcher: Matcher
+
+ private var sb = new java.lang.StringBuffer
+
+ def replaced = {
+ val newsb = new java.lang.StringBuffer(sb)
+ matcher.appendTail(newsb)
+ newsb.toString
+ }
+
+ def replace(rs: String) = matcher.appendReplacement(sb, rs)
}
}
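A short usage sketch (illustrative only; the pattern and sample text are made up) of the new replaceAllIn overload and the Groups extractor added above:

import scala.util.matching.Regex

object RegexSketch {
  def main(args: Array[String]) {
    val date = new Regex("""(\d{4})-(\d{2})-(\d{2})""", "year", "month", "day")

    // functional replacement: each Match is mapped to its replacement string
    val reordered = date.replaceAllIn("due 2010-03-01 and 2010-04-15",
      m => m.group("day") + "/" + m.group("month") + "/" + m.group("year"))
    println(reordered)  // due 01/03/2010 and 15/04/2010

    // Groups extracts just the captured groups of a Match
    val Regex.Groups(y, m, d) = date.findFirstMatchIn("2010-03-01").get
    println(y + " " + m + " " + d)  // 2010 03 01
  }
}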
diff --git a/src/library/scala/util/parsing/ast/AbstractSyntax.scala b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
index 2d39411557..4dc1a26859 100644
--- a/src/library/scala/util/parsing/ast/AbstractSyntax.scala
+++ b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index e34690c46a..7a9b8e5dcd 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -127,10 +127,8 @@ trait Binders extends AbstractSyntax with Mappable {
* (e.g. the variable name in a local variable declaration)
*
* @param b a new binder that is distinct from the existing binders in this scope,
- * and shares their conceptual scope
- * @pre canAddBinder(b)
- * @post binds(b)
- * @post getElementFor(b) eq b
+ *        and shares their conceptual scope. `canAddBinder(b)` must hold.
+ * @return `binds(b)` and `getElementFor(b) eq b` will hold.
*/
def addBinder(b: binderType) { substitution += Pair(b, b) }
@@ -140,7 +138,7 @@ trait Binders extends AbstractSyntax with Mappable {
* linked to its `UnderBinder' (i.e., while parsing, BoundElements may be added to the Scope
* associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
*
- * @returns true if `b' had not been added yet
+ * @return true if `b' had not been added yet
*/
def canAddBinder(b: binderType): Boolean = !binds(b)
@@ -150,17 +148,15 @@ trait Binders extends AbstractSyntax with Mappable {
* a proxy for the element it is bound to by its binder, `substitute' may thus be thought of
* as replacing all the bound occurrences of the given binder `b' by their new value `value'.
*
- * @param b the binder whose bound occurrences should be given a new value
+ * @param b the binder whose bound occurrences should be given a new value. `binds(b)` must hold.
* @param value the new value for the bound occurrences of `b'
- * @pre binds(b)
- * @post getElementFor(b) eq value
+ * @return `getElementFor(b) eq value` will hold.
*/
def substitute(b: binderType, value: Element): Unit = substitution(b) = value
/** Returns the current value for the bound occurrences of `b'.
*
- * @param b the contained binder whose current value should be returned
- * @pre binds(b)
+ * @param b the contained binder whose current value should be returned. `binds(b)` must hold.
*/
def getElementFor(b: binderType): Element = substitution(b)
@@ -173,7 +169,7 @@ trait Binders extends AbstractSyntax with Mappable {
def allowForwardRef: Scope[binderType] = this // TODO
/** Return a nested scope -- binders entered into it won't be visible in this scope, but if this scope allows forward references,
- the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left **/
+ * the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left **/
def nested: Scope[binderType] = this // TODO
def onEnter {}
@@ -193,7 +189,7 @@ trait Binders extends AbstractSyntax with Mappable {
* A `BoundElement' is represented textually by its bound element, followed by its scope's `id'.
* For example: `x@1' represents the variable `x' that is bound in the scope with `id' `1'.
*
- * @invar scope.binds(el)
+ * @note `scope.binds(el)` holds before and after.
*/
case class BoundElement[boundElement <: NameElement](el: boundElement, scope: Scope[boundElement]) extends NameElement with Proxy with BindingSensitive {
/** Returns the element this `BoundElement' stands for.
@@ -300,7 +296,7 @@ trait Binders extends AbstractSyntax with Mappable {
*
* The name `sequence' comes from the fact that this method's type is equal to the type of monadic sequence.
*
- * @pre !orig.isEmpty implies orig.forall(ub => ub.scope eq orig(0).scope)
+ * @note `!orig.isEmpty` implies `orig.forall(ub => ub.scope eq orig(0).scope)`
*
*/
def sequence[bt <: NameElement, st <% Mappable[st]](orig: List[UnderBinder[bt, st]]): UnderBinder[bt, List[st]] =
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index e01de41b0d..1ba9270ec6 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.combinator
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index 8070cb110e..02fb85e81a 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.combinator
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index fc8200a390..1e2975b615 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.combinator
@@ -289,7 +288,7 @@ to update each parser involved in the recursion.
//all setupLR does is change the heads of the recursions, so the seed will stay the same
recDetect match {case LR(seed, _, _) => seed.asInstanceOf[ParseResult[T]]}
}
- case MemoEntry(Right(res: ParseResult[T])) => res
+ case MemoEntry(Right(res: ParseResult[_])) => res.asInstanceOf[ParseResult[T]]
}
}
}
@@ -316,7 +315,7 @@ to update each parser involved in the recursion.
//we're done with growing, we can remove data from recursion head
rest.recursionHeads -= rest.pos
rest.getFromCache(p).get match {
- case MemoEntry(Right(x: ParseResult[T])) => x
+ case MemoEntry(Right(x: ParseResult[_])) => x.asInstanceOf[ParseResult[T]]
case _ => throw new Exception("impossible match")
}
}
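The two MemoEntry pattern changes above avoid matching on an erased type parameter; a standalone sketch (illustrative only, with a made-up Entry type) of the wildcard-plus-cast idiom:

object ErasureSketch {
  case class Entry[T](result: T)

  def payload[T](e: Any): T = e match {
    // `case x: Entry[T]` could not verify T at runtime (type erasure), so the
    // wildcard pattern plus an explicit cast states the assumption instead
    case x: Entry[_] => x.result.asInstanceOf[T]
    case _           => throw new IllegalArgumentException("not an Entry: " + e)
  }

  def main(args: Array[String]) {
    val n: Int = payload[Int](Entry(42))
    println(n)  // 42
  }
}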
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index b2c72153fe..0df8871f82 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -1,17 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.combinator
import scala.util.parsing.input._
-import scala.collection.mutable.{Map=>MutableMap}
+import scala.collection.mutable.ListBuffer
+import scala.annotation.tailrec
// TODO: better error handling (labelling like parsec's <?>)
@@ -47,35 +47,21 @@ import scala.collection.mutable.{Map=>MutableMap}
* of the input.
* </p>
*
- * @requires Elem the type of elements the provided parsers consume
- * (When consuming invidual characters, a parser is typically called a ``scanner'',
- * which produces ``tokens'' that are consumed by what is normally called a ``parser''.
- * Nonetheless, the same principles apply, regardless of the input type.)</p>
- *<p>
- * @provides Input = Reader[Elem]
- * The type of input the parsers in this component expect.</p>
- *<p>
- * @provides Parser[+T] extends (Input => ParseResult[T])
- * Essentially, a `Parser[T]' is a function from `Input' to `ParseResult[T]'.</p>
- *<p>
- * @provides ParseResult[+T] is like an `Option[T]', in the sense that it is either
- * `Success[T]', which consists of some result (:T) (and the rest of the input) or
- * `Failure[T]', which provides an error message (and the rest of the input).</p>
- *
* @author Martin Odersky, Iulian Dragos, Adriaan Moors
*/
trait Parsers {
- /** the type of input elements */
+  /** the type of input elements the provided parsers consume (when consuming individual characters, a parser is typically
+ * called a ``scanner'', which produces ``tokens'' that are consumed by what is normally called a ``parser''.
+ * Nonetheless, the same principles apply, regardless of the input type.) */
type Elem
- /** The parser input is an abstract reader of input elements */
+ /** The parser input is an abstract reader of input elements, i.e. the type of input the parsers in this component
+ * expect. */
type Input = Reader[Elem]
- /** A base class for parser results.
- * A result is either successful or not (failure may be fatal, i.e.,
- * an Error, or not, i.e., a Failure)
- * On success, provides a result of type <code>T</code>.
- */
+ /** A base class for parser results. A result is either successful or not (failure may be fatal, i.e., an Error, or
+ * not, i.e., a Failure). On success, provides a result of type `T` which consists of some result (and the rest of
+ * the input). */
sealed abstract class ParseResult[+T] {
/** Functional composition of ParseResults
*
@@ -153,9 +139,7 @@ trait Parsers {
def get: Nothing = error("No result when parsing failed")
}
- /** An extractor so NoSuccess(msg, next) can be used in matches
- * Note: case class inheritance is currently sketchy and may be
- * deprecated, so an explicit extractor is better.
+ /** An extractor so NoSuccess(msg, next) can be used in matches.
*/
object NoSuccess {
def unapply[T](x: ParseResult[T]) = x match {
@@ -303,7 +287,7 @@ trait Parsers {
* characters accepts.</p>
*
* @param q a parser that accepts if p consumes less characters.
- * @return a `Parser' that returns the result of the parser consuming the most characteres (out of `p' and `q').
+ * @return a `Parser' that returns the result of the parser consuming the most characters (out of `p' and `q').
*/
def ||| [U >: T](q: => Parser[U]): Parser[U] = new Parser[U] {
def apply(in: Input) = {
@@ -363,7 +347,7 @@ trait Parsers {
def ^? [U](f: PartialFunction[T, U]): Parser[U] = ^?(f, r => "Constructor function not defined at "+r)
- /** A parser combinator that parameterises a subsequent parser with the result of this one
+ /** A parser combinator that parameterizes a subsequent parser with the result of this one
*
*<p>
* Use this combinator when a parser depends on the result of a previous parser. `p' should be
@@ -541,7 +525,6 @@ trait Parsers {
r
}
-
/** A parser generator for repetitions.
*
* <p> rep(p) repeatedly uses `p' to parse the input until `p' fails (the result is a List
@@ -591,37 +574,25 @@ trait Parsers {
* @return A parser that returns a list of results produced by first applying `f' and then
* repeatedly `p' to the input (it only succeeds if `f' matches).
*/
- def rep1[T](first: => Parser[T], p: => Parser[T]): Parser[List[T]] = Parser{ in0 =>
- val xs = new scala.collection.mutable.ListBuffer[T]
- var in = in0
-
- var res = first(in)
+ def rep1[T](first: => Parser[T], p: => Parser[T]): Parser[List[T]] = Parser { in =>
+ val elems = new ListBuffer[T]
+
+ def continue(in: Input): ParseResult[List[T]] = {
+ val p0 = p // avoid repeatedly re-evaluating by-name parser
+ @tailrec def applyp(in0: Input): ParseResult[List[T]] = p0(in0) match {
+ case Success(x, rest) => elems += x ; applyp(rest)
+ case _ => Success(elems.toList, in0)
+ }
- while(res.successful) {
- xs += res.get
- in = res.next
- res = p(in)
+ applyp(in)
}
- // assert(res.isInstanceOf[NoSuccess])
-
- res match {
- case e: Error => e
- case _ =>
- if (!xs.isEmpty) {
- // the next parser should start parsing where p failed,
- // since `!p(in).successful', the next input to be consumed is `in'
- Success(xs.toList, in) // TODO: I don't think in == res.next holds
- }
- else {
- Failure(res.asInstanceOf[NoSuccess].msg, in0)
- }
+ first(in) match {
+ case Success(x, rest) => elems += x ; continue(rest)
+ case ns: NoSuccess => ns
}
}
- //= first ~ rep(p) ^^ { case ~(x, xs) => x :: xs }
-
-
/** A parser generator for a specified number of repetitions.
*
* <p> repN(n, p) uses `p' exactly `n' time to parse the input
@@ -632,8 +603,20 @@ trait Parsers {
* @return A parser that returns a list of results produced by repeatedly applying `p' to the input
* (and that only succeeds if `p' matches exactly `n' times).
*/
- def repN[T](n: Int, p: => Parser[T]): Parser[List[T]] =
- if(n==0) success(Nil) else p ~ repN(n-1, p) ^^ { case ~(x, xs) => x :: xs }
+ def repN[T](num: Int, p: => Parser[T]): Parser[List[T]] =
+ if (num == 0) success(Nil) else Parser { in =>
+ val elems = new ListBuffer[T]
+ val p0 = p // avoid repeatedly re-evaluating by-name parser
+
+ @tailrec def applyp(in0: Input): ParseResult[List[T]] =
+ if (elems.length == num) Success(elems.toList, in0)
+ else p0(in0) match {
+ case Success(x, rest) => elems += x ; applyp(rest)
+ case ns: NoSuccess => return ns
+ }
+
+ applyp(in)
+ }
/** A parser generator for non-empty repetitions.
*
@@ -678,7 +661,7 @@ trait Parsers {
/** A parser generator that generalises the rep1sep generator so that `q', which parses the separator,
* produces a right-associative function that combines the elements it separates. Additionally,
- * The right-most (last) element and the left-most combinating function have to be supplied.
+ * The right-most (last) element and the left-most combining function have to be supplied.
*
* rep1sep(p: Parser[T], q) corresponds to chainr1(p, q ^^ cons, cons, Nil) (where val cons = (x: T, y: List[T]) => x :: y)
*
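The rep1 and repN rewrites above replace mutable while loops with a @tailrec helper that accumulates into a ListBuffer; a self-contained sketch (illustrative only, outside the parser machinery) of that pattern:

import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer

object RepeatSketch {
  // Applies `step` until it returns None, collecting results; the recursive
  // call is in tail position, so @tailrec guarantees constant stack usage.
  def repeat[A, S](start: S)(step: S => Option[(A, S)]): List[A] = {
    val elems = new ListBuffer[A]
    @tailrec def loop(s: S): List[A] = step(s) match {
      case Some((a, rest)) => elems += a; loop(rest)
      case None            => elems.toList
    }
    loop(start)
  }

  def main(args: Array[String]) {
    // consume leading digits of a string, one "parse step" at a time
    val digits = repeat("123abc")(s =>
      if (s.nonEmpty && s.head.isDigit) Some((s.head, s.tail)) else None)
    println(digits)  // List(1, 2, 3)
  }
}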
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index 4f4a8e6fc7..ec056f9b4f 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.combinator
@@ -67,6 +66,25 @@ trait RegexParsers extends Parsers {
}
}
+ /** `positioned' decorates a parser's result with the start position of the input it consumed.
+ * If whitespace is being skipped, then it is skipped before the start position is recorded.
+ *
+ * @param p a `Parser' whose result conforms to `Positional'.
+ * @return A parser that has the same behaviour as `p', but which marks its result with the
+ * start position of the input it consumed after whitespace has been skipped, if it
+ * didn't already have a position.
+ */
+ override def positioned[T <: Positional](p: => Parser[T]): Parser[T] = {
+ val pp = super.positioned(p)
+ new Parser[T] {
+ def apply(in: Input) = {
+ val offset = in.offset
+ val start = handleWhiteSpace(in.source, offset)
+ pp(in.drop (start - offset))
+ }
+ }
+ }
+
override def phrase[T](p: Parser[T]): Parser[T] =
super.phrase(p <~ opt("""\z""".r))
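A tiny grammar sketch (illustrative only; the Word class is made up) showing the effect of the new positioned override, which skips whitespace before recording the start position:

import scala.util.parsing.combinator.RegexParsers
import scala.util.parsing.input.Positional

object PositionedSketch extends RegexParsers {
  case class Word(s: String) extends Positional

  // whitespace before the token is skipped, then the position is recorded
  def word: Parser[Word] = positioned("""\w+""".r ^^ (s => Word(s)))

  def main(args: Array[String]) {
    val res = parseAll(word, "   hello")
    println(res.get + " at " + res.get.pos)  // Word(hello) at 1.4
  }
}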
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
index f6a85ac452..2a44a1c98c 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala.util.parsing.combinator.lexical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package lexical
-import scala.util.parsing.syntax._
-import scala.util.parsing.input.CharArrayReader.EofCh
+import token._
+import input.CharArrayReader.EofCh
/** <p>
* This component complements the <code>Scanners</code> component with
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
index 2318f660f6..90e9fa9b82 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala.util.parsing.combinator.lexical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package lexical
-import scala.util.parsing.syntax._
-import scala.util.parsing.input._
+import token._
+import input._
/** <p>
* This component provides core functionality for lexical parsers.
@@ -23,13 +23,6 @@ import scala.util.parsing.input._
* {@see StdLexical}, for more functionality.
* </p>
*
- * @requires token a parser that produces a token (from a stream of characters)
- * @requires whitespace a unit-parser for white-space
- * @provides Scanner essentially a parser that parses a stream of characters
- * to produce `Token's, which are typically passed to a
- * syntactical parser (which operates on `Token's, not on
- * individual characters).
- *
* @author Martin Odersky, Adriaan Moors
*/
trait Scanners extends Parsers {
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index 924e4d16d7..8d286d9aef 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala.util.parsing.combinator.lexical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package lexical
-import scala.util.parsing.syntax._
-import scala.util.parsing.input.CharArrayReader.EofCh
+import token._
+import input.CharArrayReader.EofCh
import collection.mutable.HashSet
/** <p>
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index 2809b503a7..a3f7c24c0a 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+package scala.util.parsing
+package combinator
+package syntactical
-package scala.util.parsing.combinator.syntactical
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.syntax._
-import scala.util.parsing.combinator.lexical.StdLexical
+import token._
+import lexical.StdLexical
/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
*
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 18f51b1511..bc37ce7b4d 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-package scala.util.parsing.combinator.syntactical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package syntactical
-import scala.util.parsing.syntax._
-import scala.collection.mutable.HashMap
+import token._
+import collection.mutable.HashMap
/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
*
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
index 5c90c46df7..d9353b6b27 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
@@ -1,30 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
+package scala.util.parsing
+package combinator
+package syntactical
-package scala.util.parsing.combinator.syntactical
-import scala.util.parsing.combinator._
-
-/** <p>
- * This is the core component for token-based parsers.
- * </p>
- * <p>
- * @requires lexical a component providing the tokens consumed by the
- * parsers in this component.
- * </p>
+/** This is the core component for token-based parsers.
*
* @author Martin Odersky, Adriaan Moors
*/
trait TokenParsers extends Parsers {
/** Tokens is the abstract type of the `Token's consumed by the parsers in this component*/
- type Tokens <: scala.util.parsing.syntax.Tokens
+ type Tokens <: token.Tokens
/** lexical is the component responsible for consuming some basic kind of
* input (usually character-based) and turning it into the tokens
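For orientation, the token machinery moved above is normally consumed through StandardTokenParsers. A minimal sketch (the grammar and names are illustrative only, not part of this patch):

    import scala.util.parsing.combinator.syntactical.StandardTokenParsers

    // Sums of numeric literals with parentheses, e.g. "1 + (2 + 3)".
    object SumParser extends StandardTokenParsers {
      lexical.delimiters ++= List("+", "(", ")")

      def expr: Parser[Int] = term ~ rep("+" ~> term) ^^ { case t ~ ts => t + ts.sum }
      def term: Parser[Int] = numericLit ^^ (_.toInt) | "(" ~> expr <~ ")"

      def main(args: Array[String]) {
        // lexical is the StdLexical instance; its Scanner turns characters into tokens
        println(phrase(expr)(new lexical.Scanner("1 + (2 + 3)")))   // parsed: 6
      }
    }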
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
index 3a024013c9..299736046e 100644
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
@@ -1,4 +1,3 @@
-// $Id$
package scala.util.parsing.combinator.testing
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
index 8af0ce2d4b..7709cc0896 100644
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ b/src/library/scala/util/parsing/combinator/testing/Tester.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.combinator.testing
import scala.util.parsing.combinator._
diff --git a/src/library/scala/util/parsing/syntax/StdTokens.scala b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
index aeda120646..ea565235d1 100644
--- a/src/library/scala/util/parsing/syntax/StdTokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
@@ -1,12 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.util.parsing.syntax
+package scala.util.parsing
+package combinator
+package token
/** This component provides the standard `Token's for a simple, Scala-like language.
*
diff --git a/src/library/scala/util/parsing/syntax/Tokens.scala b/src/library/scala/util/parsing/combinator/token/Tokens.scala
index c2eb634b28..b7a568efea 100644
--- a/src/library/scala/util/parsing/syntax/Tokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/Tokens.scala
@@ -1,12 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.util.parsing.syntax
+package scala.util.parsing
+package combinator
+package token
/** This component provides the notion of `Token', the unit of information that is passed from lexical
* parsers in the `Lexical' component to the parsers in the `TokenParsers' component.
diff --git a/src/library/scala/util/parsing/input/CharArrayPosition.scala b/src/library/scala/util/parsing/input/CharArrayPosition.scala
deleted file mode 100644
index 54907e037e..0000000000
--- a/src/library/scala/util/parsing/input/CharArrayPosition.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.util.parsing.input
-
-/** <code>CharArrayPosition</code> implements the general <code>Position</code>
- * class for documents represented by an <code>Array</code> of `char's.
- *
- * @param source The contents of the document in which this position is contained
- * @param line The line number of the position (1-based)
- * @param columm The column number of the position (1-based)
- *
- * @author Martin Odersky, Adriaan Moors
- */
-@deprecated("use OffsetPosition instead")
-class CharArrayPosition(val source: Array[Char], val line: Int, val column: Int) extends Position {
-
- // TODO: this could be implemented more high-level:
- // return the string representation of the sub-array of source that starts
- // after the (lnum-1)'ed '\n' up to (but not including) the (lnum)'ed '\n'
- protected def lineContents = {
- var i = 0
- var l = 1
- while (i < source.length && l < line) {
- while (i < source.length && source(i) != '\n') i += 1
- i += 1
- l += 1
- }
- var chars = new StringBuffer
- while (i < source.length && source(i) != '\n') {
- chars append source(i)
- i += 1
- }
- chars.toString
- }
-}
-
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/library/scala/util/parsing/input/CharArrayReader.scala
index 360820608c..772dd533ae 100644
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ b/src/library/scala/util/parsing/input/CharArrayReader.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.input
@@ -16,9 +15,6 @@ package scala.util.parsing.input
*/
object CharArrayReader {
final val EofCh = '\032'
-
- @deprecated("This should probably be LF instead?")
- final val CR = '\015'
}
/** A character array reader reads a stream of characters (keeping track of their positions)
diff --git a/src/library/scala/util/parsing/input/CharSequenceReader.scala b/src/library/scala/util/parsing/input/CharSequenceReader.scala
index 1886133e5d..e47b213ad3 100644
--- a/src/library/scala/util/parsing/input/CharSequenceReader.scala
+++ b/src/library/scala/util/parsing/input/CharSequenceReader.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.input
diff --git a/src/library/scala/util/parsing/input/NoPosition.scala b/src/library/scala/util/parsing/input/NoPosition.scala
index 6dc2aecddc..311dde7b9a 100644
--- a/src/library/scala/util/parsing/input/NoPosition.scala
+++ b/src/library/scala/util/parsing/input/NoPosition.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.input
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 109249dbf8..bcc33d8aef 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/util/parsing/input/PagedSeqReader.scala b/src/library/scala/util/parsing/input/PagedSeqReader.scala
index c8816afab5..71e1395f24 100644
--- a/src/library/scala/util/parsing/input/PagedSeqReader.scala
+++ b/src/library/scala/util/parsing/input/PagedSeqReader.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.input
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index 1706b8ea71..482610ca28 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -53,7 +53,7 @@ trait Position {
*<pre> List(this, is, a, line, from, the, document)
* ^</pre>
*/
- def longString = lineContents+"\n"+(" " * (column - 1))+"^"
+ def longString = lineContents+"\n"+lineContents.take(column-1).map{x => if (x == '\t') x else ' ' } + "^"
/** Compare this position to another, by first comparing their line numbers,
* and then -- if necessary -- using the columns to break a tie.
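The longString change above copies tab characters from the offending line into the caret line, so the "^" marker stays aligned when the input contains tabs. A small REPL-style check (the input string is arbitrary):

    import scala.util.parsing.input.OffsetPosition

    val source = "\tval x = ?"
    val pos = new OffsetPosition(source, source indexOf '?')
    println(pos.longString)
    // prints the line, then a pointer line starting with the same tab,
    // so the ^ lands under the '?' instead of drifting left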
diff --git a/src/library/scala/util/parsing/input/Positional.scala b/src/library/scala/util/parsing/input/Positional.scala
index 31c2ad5291..b422b216c2 100644
--- a/src/library/scala/util/parsing/input/Positional.scala
+++ b/src/library/scala/util/parsing/input/Positional.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/util/parsing/input/Reader.scala b/src/library/scala/util/parsing/input/Reader.scala
index 4c9fe0ce5d..2249f8867a 100644
--- a/src/library/scala/util/parsing/input/Reader.scala
+++ b/src/library/scala/util/parsing/input/Reader.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.input
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/library/scala/util/parsing/input/StreamReader.scala
index 7cf97c4a13..6dc15f25c8 100644
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ b/src/library/scala/util/parsing/input/StreamReader.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.input
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala
index 648aa5993a..6d3761af52 100644
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ b/src/library/scala/util/parsing/json/JSON.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.json
@@ -42,8 +41,33 @@ object JSON extends Parser {
*
* @param input the given JSON string.
 * @return an optional list of elements.
+ *
+ * @deprecated Use parseFull or parseRaw as needed.
+ */
+ def parse(input: String): Option[List[Any]] = parseRaw(input).map(unRaw).flatMap({
+ case l : List[_] => Some(l)
+ case _ => None
+ })
+
+ /**
+ * This method converts "raw" results back into the original, deprecated
+ * form.
*/
- def parse(input: String): Option[List[Any]] =
+ private def unRaw (in : Any) : Any = in match {
+ case JSONObject(obj) => obj.map({ case (k,v) => (k,unRaw(v))}).toList
+ case JSONArray(list) => list.map(unRaw)
+ case x => x
+ }
+
+ /**
+ * Parse the given JSON string and return a list of elements. If the
+ * string is a JSON object it will be a JSONObject. If it's a JSON
+ * array it will be a JSONArray.
+ *
+ * @param input the given JSON string.
+ * @return an optional JSONType element.
+ */
+ def parseRaw(input : String) : Option[JSONType] =
phrase(root)(new lexical.Scanner(input)) match {
case Success(result, _) => Some(result)
case _ => None
@@ -58,7 +82,7 @@ object JSON extends Parser {
* @return an optional list or map.
*/
def parseFull(input: String): Option[Any] =
- parse(input) match {
+ parseRaw(input) match {
case Some(data) => Some(resolveType(data))
case None => None
}
@@ -67,23 +91,12 @@ object JSON extends Parser {
* A utility method to resolve a parsed JSON list into objects or
* arrays. See the parse method for details.
*/
- def resolveType(input: List[_]): Any = {
- var objMap = Map[String, Any]()
-
- if (input.forall {
- case (key: String, value: List[_]) =>
- objMap = objMap.+[Any](key -> resolveType(value))
- true
- case (key : String, value) =>
- objMap += key -> value
- true
- case _ => false
- }) objMap
- else
- input.map {
- case l : List[_] => resolveType(l)
- case x => x
- }
+ def resolveType(input: Any): Any = input match {
+ case JSONObject(data) => data.transform {
+ case (k,v) => resolveType(v)
+ }
+ case JSONArray(data) => data.map(resolveType)
+ case x => x
}
/**
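With the split above, parseFull stays the convenience entry point: it runs parseRaw and then resolveType, so callers get back plain Maps and Lists. A REPL-style sketch with arbitrary input:

    import scala.util.parsing.json.JSON

    JSON.parseFull("""{"name": "scala", "tags": ["jvm", "fp"]}""") match {
      case Some(m: Map[_, _]) => println(m)   // e.g. Map(name -> scala, tags -> List(jvm, fp))
      case _                  => println("not valid JSON")
    }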
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala
index 9026f45f11..347d317f11 100644
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ b/src/library/scala/util/parsing/json/Lexer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.json
@@ -83,7 +82,7 @@ class Lexer extends StdLexical with ImplicitConversions {
private def unicodeBlock = hexDigit ~ hexDigit ~ hexDigit ~ hexDigit ^^ {
case a ~ b ~ c ~ d =>
- new String(io.UTF8Codec.encode(Integer.parseInt(List(a, b, c, d) mkString "", 16)), "UTF-8")
+ new String(Array(Integer.parseInt(List(a, b, c, d) mkString "", 16)), 0, 1)
}
//private def lift[T](f: String => T)(xs: List[Any]): T = f(xs mkString "")
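The unicodeBlock change replaces the UTF-8 encode/decode round trip with java.lang.String's code-point constructor; for the four-hex-digit escapes the lexer feeds it, that yields the single corresponding character. Illustration only:

    val cp = Integer.parseInt("266e", 16)     // what unicodeBlock computes for the escape "266e"
    println(new String(Array(cp), 0, 1))      // a one-character String for this BMP code point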
diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/library/scala/util/parsing/json/Parser.scala
index cd6713170b..ce9c1dd3fe 100644
--- a/src/library/scala/util/parsing/json/Parser.scala
+++ b/src/library/scala/util/parsing/json/Parser.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.parsing.json
@@ -16,6 +15,31 @@ import scala.util.parsing.combinator.syntactical._
import scala.util.parsing.combinator.lexical._
/**
+ * A marker class for the JSON result types.
+ *
+ * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
+ */
+sealed abstract class JSONType
+
+/**
+ * Represents a JSON Object (map).
+ * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
+ */
+case class JSONObject (obj : Map[Any,Any]) extends JSONType {
+ override def toString = "{" + obj.map({ case (k,v) => k + " : " + v }).mkString(", ") + "}"
+}
+
+/**
+ * Represents a JSON Array (list).
+ * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
+ */
+case class JSONArray (list : List[Any]) extends JSONType {
+ override def toString = "[" + list.mkString(", ") + "]"
+}
+
+/**
+ * The main JSON Parser.
+ *
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
class Parser extends StdTokenParsers with ImplicitConversions {
@@ -40,8 +64,8 @@ class Parser extends StdTokenParsers with ImplicitConversions {
// Define the grammar
def root = jsonObj | jsonArray
- def jsonObj = "{" ~> repsep(objEntry, ",") <~ "}"
- def jsonArray = "[" ~> repsep(value, ",") <~ "]"
+ def jsonObj = "{" ~> repsep(objEntry, ",") <~ "}" ^^ { case vals : List[_] => JSONObject(Map(vals : _*)) }
+ def jsonArray = "[" ~> repsep(value, ",") <~ "]" ^^ { case vals : List[_] => JSONArray(vals) }
def objEntry = stringVal ~ (":" ~> value) ^^ { case x ~ y => (x, y) }
def value: Parser[Any] = (jsonObj | jsonArray | number | "true" ^^^ true | "false" ^^^ false | "null" ^^^ null | stringVal)
def stringVal = accept("string", { case lexical.StringLit(n) => n} )
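The new JSONType hierarchy means parseRaw results can be pattern matched before any conversion. A REPL-style sketch using the classes introduced above:

    import scala.util.parsing.json.{ JSON, JSONObject, JSONArray }

    JSON.parseRaw("""[1, 2, 3]""") match {
      case Some(JSONArray(xs)) => println("array of " + xs.length)     // array of 3
      case Some(JSONObject(m)) => println("object with keys " + m.keys)
      case None                => println("parse error")
    }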
diff --git a/src/library/scala/util/parsing/syntax/package.scala b/src/library/scala/util/parsing/syntax/package.scala
new file mode 100644
index 0000000000..9dc909ca60
--- /dev/null
+++ b/src/library/scala/util/parsing/syntax/package.scala
@@ -0,0 +1,19 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.parsing
+
+import scala.util.parsing.combinator.token
+
+/** If deprecating the whole package worked, that's what would best
+ * be done, but it doesn't (yet) so it isn't.
+ */
+package object syntax {
+ @deprecated("Moved to scala.util.parsing.combinator.token") type Tokens = token.Tokens
+ @deprecated("Moved to scala.util.parsing.combinator.token") type StdTokens = token.StdTokens
+}
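The aliases above keep old client code compiling, with a deprecation warning, while the definitions themselves now live under combinator.token. Hypothetical client code, for illustration:

    // still compiles, but warns: "Moved to scala.util.parsing.combinator.token"
    def legacy(ts: scala.util.parsing.syntax.Tokens) = ts

    // the spelling to migrate to after this patch
    def current(ts: scala.util.parsing.combinator.token.Tokens) = ts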
diff --git a/src/library/scala/util/regexp/Base.scala b/src/library/scala/util/regexp/Base.scala
index 6ef0392c30..359416b20b 100644
--- a/src/library/scala/util/regexp/Base.scala
+++ b/src/library/scala/util/regexp/Base.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.regexp
@@ -24,22 +23,27 @@ abstract class Base
val isNullable: Boolean
}
- /** Alt( R,R,R* ) */
- case class Alt(rs: _regexpT*) extends RegExp {
- // check rs \in R,R,R*
- // @todo: flattening
- if (rs.size < 2)
- throw new SyntaxError("need at least 2 branches in Alt")
+ object Alt {
+ /** Alt( R,R,R* ) */
+ def apply(rs: _regexpT*) =
+ if (rs.size < 2) throw new SyntaxError("need at least 2 branches in Alt")
+ else new Alt(rs: _*)
+ // Can't enforce that statically without changing the interface
+ // def apply(r1: _regexpT, r2: _regexpT, rs: _regexpT*) = new Alt(Seq(r1, r2) ++ rs: _*)
+ def unapplySeq(x: Alt) = Some(x.rs)
+ }
- final val isNullable = rs forall (_.isNullable)
+ class Alt private (val rs: _regexpT*) extends RegExp {
+ final val isNullable = rs exists (_.isNullable)
}
- case class Sequ(rs: _regexpT*) extends RegExp {
- // @todo: flattening
- // check rs \in R,R*
- if (rs.isEmpty)
- throw new SyntaxError("need at least 1 item in Sequ")
+ object Sequ {
+ /** Sequ( R,R* ) */
+ def apply(rs: _regexpT*) = if (rs.isEmpty) Eps else new Sequ(rs: _*)
+ def unapplySeq(x: Sequ) = Some(x.rs)
+ }
+ class Sequ private (val rs: _regexpT*) extends RegExp {
final val isNullable = rs forall (_.isNullable)
}
@@ -47,6 +51,7 @@ abstract class Base
final lazy val isNullable = true
}
+ // The empty Sequ.
case object Eps extends RegExp {
final lazy val isNullable = true
override def toString() = "Eps"
@@ -57,7 +62,4 @@ abstract class Base
final val isNullable = r1.isNullable
def r = r1
}
-
- final def mkSequ(rs: _regexpT *): RegExp =
- if (rs.isEmpty) Eps else Sequ(rs : _*)
}
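Alt and Sequ above become plain classes with factory objects: Sequ() now collapses to Eps (replacing the removed mkSequ), Alt still demands at least two branches, and unapplySeq keeps pattern matching intact. Note also that Alt.isNullable switches from forall to exists, i.e. an alternation is nullable as soon as one branch is. A minimal REPL-style sketch; Base is abstract, so the Toy object here is purely an illustration:

    import scala.util.regexp.Base

    object Toy extends Base { type _regexpT = RegExp }   // hypothetical concrete Base
    import Toy._

    val empty = Sequ()                 // Eps; no separate mkSequ needed any more
    val alt   = Alt(Eps, Star(Eps))    // fewer than two branches would throw SyntaxError
    println(alt.isNullable)            // true: at least one branch is nullable

    alt match {
      case Alt(branches @ _*) => println(branches.length)   // 2; unapplySeq still works
    }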
diff --git a/src/library/scala/util/regexp/PointedHedgeExp.scala b/src/library/scala/util/regexp/PointedHedgeExp.scala
index ce9ff95282..83b9dbff06 100644
--- a/src/library/scala/util/regexp/PointedHedgeExp.scala
+++ b/src/library/scala/util/regexp/PointedHedgeExp.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.regexp
diff --git a/src/library/scala/util/regexp/SyntaxError.scala b/src/library/scala/util/regexp/SyntaxError.scala
index 7080886974..367d2d1295 100644
--- a/src/library/scala/util/regexp/SyntaxError.scala
+++ b/src/library/scala/util/regexp/SyntaxError.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.regexp
diff --git a/src/library/scala/util/regexp/WordExp.scala b/src/library/scala/util/regexp/WordExp.scala
index 43a6178978..411a588297 100644
--- a/src/library/scala/util/regexp/WordExp.scala
+++ b/src/library/scala/util/regexp/WordExp.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.util.regexp
diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala
index 6799174349..7c9d9726bd 100644
--- a/src/library/scala/volatile.scala
+++ b/src/library/scala/volatile.scala
@@ -1,14 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala
+import annotation.target._
+
+@field
class volatile extends StaticAnnotation
diff --git a/src/library/scala/xml/Atom.scala b/src/library/scala/xml/Atom.scala
index b3dc9578a7..8d8667eae6 100644
--- a/src/library/scala/xml/Atom.scala
+++ b/src/library/scala/xml/Atom.scala
@@ -1,16 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.mutable.StringBuilder
/** The class <code>Atom</code> provides an XML node for text (PCDATA).
* It is used in both non-bound and bound XML representations.
@@ -24,17 +21,21 @@ class Atom[+A](val data: A) extends SpecialNode
if (data == null)
throw new IllegalArgumentException("cannot construct Atom(null)")
+ override def basisForHashCode: Seq[Any] = Seq(data)
+ override def strict_==(other: Equality) = other match {
+ case x: Atom[_] => data == x.data
+ case _ => false
+ }
+ override def canEqual(other: Any) = other match {
+ case _: Atom[_] => true
+ case _ => false
+ }
+
final override def doCollectNamespaces = false
final override def doTransform = false
def label = "#PCDATA"
- override def equals(x: Any) = x match {
- case s:Atom[_] => data == s.data
- case _ => false
- }
- override def hashCode() = data.hashCode()
-
/** Returns text, with some characters escaped according to the XML
* specification.
*
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index 222642fe47..17780fc024 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -1,19 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-
-
/** Attribute defines the interface shared by both
* PrefixedAttribute and UnprefixedAttribute
*/
@@ -46,6 +41,7 @@ object Attribute {
abstract trait Attribute extends MetaData
{
+ def pre: String // will be null if unprefixed
val key: String
val value: Seq[Node]
val next: MetaData
@@ -53,13 +49,43 @@ abstract trait Attribute extends MetaData
def apply(key: String): Seq[Node]
def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node]
def copy(next: MetaData): Attribute
- def remove(key: String): MetaData
- def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData
- def isPrefixed: Boolean
+ def remove(key: String) =
+ if (!isPrefixed && this.key == key) next
+ else copy(next remove key)
+
+ def remove(namespace: String, scope: NamespaceBinding, key: String) =
+ if (isPrefixed && this.key == key && (scope getURI pre) == namespace) next
+ else next.remove(namespace, scope, key)
+
+ def isPrefixed: Boolean = pre != null
def getNamespace(owner: Node): String
- def wellformed(scope: NamespaceBinding): Boolean
+ def wellformed(scope: NamespaceBinding): Boolean = {
+ val arg = if (isPrefixed) scope getURI pre else null
+ (next(arg, scope, key) == null) && (next wellformed scope)
+ }
- def equals1(m: MetaData): Boolean
- def toString1(sb: StringBuilder): Unit
+ override def canEqual(other: Any) = other match {
+ case _: Attribute => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case x: Attribute => (pre == x.pre) && (key == x.key) && (value sameElements x.value)
+ case _ => false
+ }
+ override def basisForHashCode = List(pre, key, value)
+
+ /** Appends string representation of only this attribute to stringbuffer.
+ */
+ def toString1(sb: StringBuilder) {
+ if (value == null)
+ return
+ if (isPrefixed)
+ sb append pre append ':'
+
+ sb append key append '='
+ val sb2 = new StringBuilder()
+ Utility.sequenceToXML(value, TopScope, sb2, true)
+ Utility.appendQuoted(sb2.toString(), sb)
+ }
}
diff --git a/src/library/scala/xml/Comment.scala b/src/library/scala/xml/Comment.scala
index a417f82220..f52f626c57 100644
--- a/src/library/scala/xml/Comment.scala
+++ b/src/library/scala/xml/Comment.scala
@@ -1,16 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import collection.mutable.StringBuilder
+
/** The class <code>Comment</code> implements an XML node for comments.
*
diff --git a/src/library/scala/xml/Document.scala b/src/library/scala/xml/Document.scala
index 4ddcc2bf66..60e4790139 100644
--- a/src/library/scala/xml/Document.scala
+++ b/src/library/scala/xml/Document.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
@@ -19,6 +18,7 @@ package scala.xml
* @author Burak Emir
* @version 1.0, 26/04/2005
*/
+@serializable @SerialVersionUID(-2289320563321795109L)
class Document extends NodeSeq with pull.XMLEvent {
/** An ordered list of child information items, in document
@@ -86,4 +86,8 @@ class Document extends NodeSeq with pull.XMLEvent {
def theSeq: Seq[Node] = this.docElem
+ override def canEqual(other: Any) = other match {
+ case _: Document => true
+ case _ => false
+ }
}
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index f9346cff3a..c65608f5fb 100644
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -1,18 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.Seq
-
/** This singleton object contains the apply and unapplySeq methods for convenient construction and
* deconstruction. It is possible to deconstruct any Node instance (that is not a SpecialNode or
* a Group) using the syntax
@@ -26,8 +22,10 @@ object Elem
def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) =
new Elem(prefix,label,attributes,scope,child:_*)
- def unapplySeq(n:Node) = if (n.isInstanceOf[SpecialNode] || n.isInstanceOf[Group]) None else
- Some((n.prefix, n.label, n.attributes, n.scope, n.child))
+ def unapplySeq(n: Node) = n match {
+ case _: SpecialNode | _: Group => None
+ case _ => Some((n.prefix, n.label, n.attributes, n.scope, n.child))
+ }
}
/** The case class <code>Elem</code> extends the <code>Node</code> class,
@@ -54,18 +52,17 @@ extends Node
final override def doCollectNamespaces = true
final override def doTransform = true
- if ((null != prefix) && 0 == prefix.length())
+ if (prefix == "")
throw new IllegalArgumentException("prefix of zero length, use null instead")
- if (null == scope)
- throw new IllegalArgumentException("scope is null, try xml.TopScope for empty scope")
+ if (scope == null)
+ throw new IllegalArgumentException("scope is null, use xml.TopScope for empty scope")
//@todo: copy the children,
// setting namespace scope if necessary
// cleaning adjacent text nodes if necessary
- override def hashCode(): Int =
- Utility.hashCode(prefix, label, attributes.hashCode(), scope.hashCode(), child)
+ override def basisForHashCode: Seq[Any] = prefix :: label :: attributes :: child.toList
/** Returns a new element with updated attributes, resolving namespace uris from this element's scope.
* See MetaData.update for details.
diff --git a/src/library/scala/xml/EntityRef.scala b/src/library/scala/xml/EntityRef.scala
index 8eba6b7c88..cd84fca7f4 100644
--- a/src/library/scala/xml/EntityRef.scala
+++ b/src/library/scala/xml/EntityRef.scala
@@ -1,16 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import collection.mutable.StringBuilder
+
/** The class <code>EntityRef</code> implements an XML node for entity
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
new file mode 100644
index 0000000000..210029b3bd
--- /dev/null
+++ b/src/library/scala/xml/Equality.scala
@@ -0,0 +1,115 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.xml
+
+/** In an attempt to contain the damage being inflicted on
+ * consistency by the ad hoc equals methods spread around
+ * xml, the logic is centralized and all the xml classes
+ * go through the xml.Equality trait. There are two forms
+ * of xml comparison.
+ *
+ * 1) def strict_==(other: xml.Equality)
+ *
+ * This one tries to honor the little things like symmetry
+ * and hashCode contracts. The equals method routes all
+ * comparisons through this.
+ *
+ * 2) xml_==(other: Any)
+ *
+ * This one picks up where strict_== leaves off. It might
+ * declare any two things equal.
+ *
+ * As things stood, the logic not only made a mockery of
+ * the collections equals contract, but also laid waste to
+ * that of case classes.
+ *
+ * Among the obstacles to sanity are/were:
+ *
+ * Node extends NodeSeq extends Seq[Node]
+ * MetaData extends Iterable[MetaData]
+ * The hacky "Group" xml node which throws exceptions
+ * with wild abandon, so don't get too close
+ * Rampant asymmetry and impossible hashCodes
+ * Most classes claiming to be equal to "String" if
+ * some specific stringification of it was the same.
+ * String was never going to return the favor.
+ */
+
+object Equality {
+ def asRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
+
+ /** Note - these functions assume strict equality has already failed.
+ */
+ def compareBlithely(x1: AnyRef, x2: String): Boolean = x1 match {
+ case x: Atom[_] => x.data == x2
+ case x: NodeSeq => x.text == x2
+ case _ => false
+ }
+ def compareBlithely(x1: AnyRef, x2: Node): Boolean = x1 match {
+ case x: NodeSeq if x.length == 1 => x2 == x(0)
+ case _ => false
+ }
+ def compareBlithely(x1: AnyRef, x2: AnyRef): Boolean = {
+ if (x1 == null || x2 == null)
+ return (x1 eq x2)
+
+ x2 match {
+ case s: String => compareBlithely(x1, s)
+ case n: Node => compareBlithely(x1, n)
+ case _ => false
+ }
+ }
+}
+import Equality._
+
+private[xml]
+trait Equality extends scala.Equals {
+ def basisForHashCode: Seq[Any]
+ def strict_==(other: Equality): Boolean
+ def strict_!=(other: Equality) = !strict_==(other)
+
+ private def hashOf(x: Any) = if (x == null) 1 else x.##
+
+ /** We insist we're only equal to other xml.Equality implementors,
+ * which heads off a lot of inconsistency up front.
+ */
+ override def canEqual(other: Any): Boolean = other match {
+ case x: Equality => true
+ case _ => false
+ }
+
+ /** It'd be nice to make these final, but there are probably
+ * people out there subclassing the XML types, especially when
+ * it comes to equals. However WE at least can pretend they
+ * are final since clearly individual classes cannot be trusted
+ * to maintain a semblance of order.
+ */
+ override def hashCode() = basisForHashCode match {
+ case Nil => 0
+ case x :: xs => hashOf(x) * 41 + (xs map hashOf).foldLeft(0)(_ * 7 + _)
+ }
+ override def equals(other: Any) = doComparison(other, false)
+ final def xml_==(other: Any) = doComparison(other, true)
+ final def xml_!=(other: Any) = !xml_==(other)
+
+ /** The "blithe" parameter expresses the caller's unconcerned attitude
+ * regarding the usual constraints on equals. The method is thereby
+ * given carte blanche to declare any two things equal.
+ */
+ private def doComparison(other: Any, blithe: Boolean) = {
+ val strictlyEqual = other match {
+ case x: AnyRef if this eq x => true
+ case x: Equality => (x canEqual this) && (this strict_== x)
+ case _ => false
+ }
+
+ strictlyEqual || (blithe && compareBlithely(this, asRef(other)))
+ }
+}
+
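In practice the trait means == on xml values goes through strict_== (symmetric, consistent with hashCode), while the old "equal to its own stringification" behaviour is still reachable through xml_==. REPL-style illustration:

    import scala.xml.Text

    println(Text("1") == "1")        // false: strict equality no longer matches Strings
    println(Text("1") xml_== "1")    // true: the blithe comparison still does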
diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala
index 11f064e67c..614adc98a9 100644
--- a/src/library/scala/xml/Group.scala
+++ b/src/library/scala/xml/Group.scala
@@ -1,16 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.Seq
/** A hack to group XML nodes in one node for output.
*
@@ -18,49 +15,27 @@ import collection.Seq
* @version 1.0
*/
@serializable
-case class Group(val nodes: Seq[Node]) extends Node {
- // final override def doTransform = false
+final case class Group(val nodes: Seq[Node]) extends Node {
override def theSeq = nodes
- /** XXX this is ridiculous, we can't do equality like this. */
- override def equals(x: Any) = x match {
- case z:Group => (length == z.length) && sameElements(z)
- case z:Node => (length == 1) && z == apply(0)
- case z:Seq[_] => sameElements(z)
- case z:String => text == z
- case _ => false
+ override def canEqual(other: Any) = other match {
+ case x: Group => true
+ case _ => false
}
- /* As if there were a hashCode which could back up the above implementation! */
- override def hashCode = nodes.hashCode
-
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- final def label =
- throw new UnsupportedOperationException("class Group does not support method 'label'")
-
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- final override def attributes =
- throw new UnsupportedOperationException("class Group does not support method 'attributes'")
-
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- final override def namespace =
- throw new UnsupportedOperationException("class Group does not support method 'namespace'")
+ override def strict_==(other: Equality) = other match {
+ case Group(xs) => nodes sameElements xs
+ case _ => false
+ }
+ override def basisForHashCode = nodes
- /**
- * @throws Predef.UnsupportedOperationException (always)
+ /** Since Group is very much a hack it throws an exception if you
+ * try to do anything with it.
*/
- final override def child =
- throw new UnsupportedOperationException("class Group does not support method 'child'")
+ private def fail(msg: String) = throw new UnsupportedOperationException("class Group does not support method '%s'" format msg)
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- def buildString(sb: StringBuilder) =
- throw new UnsupportedOperationException(
- "class Group does not support method toString(StringBuilder)")
+ def label = fail("label")
+ override def attributes = fail("attributes")
+ override def namespace = fail("namespace")
+ override def child = fail("child")
+ def buildString(sb: StringBuilder) = fail("toString(StringBuilder)")
}
diff --git a/src/library/scala/xml/HasKeyValue.scala b/src/library/scala/xml/HasKeyValue.scala
index 0522924270..1ec117a6b6 100644
--- a/src/library/scala/xml/HasKeyValue.scala
+++ b/src/library/scala/xml/HasKeyValue.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
@@ -20,6 +19,7 @@ package scala.xml
*
* @author Burak Emir
*/
+@deprecated("Use UnprefixedAttribute's extractor")
class HasKeyValue(key: String) {
def unapplySeq(x: MetaData): Option[Seq[Node]] = x.get(key)
}
diff --git a/src/library/scala/xml/MalformedAttributeException.scala b/src/library/scala/xml/MalformedAttributeException.scala
index 53c6be9785..a85995d960 100644
--- a/src/library/scala/xml/MalformedAttributeException.scala
+++ b/src/library/scala/xml/MalformedAttributeException.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index 8bf2f87dd9..ab3d476deb 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -1,21 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
import Utility.sbToString
import annotation.tailrec
-import collection.immutable.List
-import collection.{Seq, Iterator, Iterable}
-import collection.mutable.StringBuilder
/**
@@ -77,7 +72,7 @@ object MetaData {
* @author Burak Emir <bqe@google.com>
*/
@serializable
-abstract class MetaData extends Iterable[MetaData]
+abstract class MetaData extends Iterable[MetaData] with Equality
{
/** Updates this MetaData with the MetaData given as argument. All attributes that occur in updates
* are part of the resulting MetaData. If an attribute occurs in both this instance and
@@ -118,13 +113,6 @@ abstract class MetaData extends Iterable[MetaData]
*/
def apply(namespace_uri:String, scp:NamespaceBinding, k:String): Seq[Node]
- /**
- * @param m ...
- * @return <code>true</code> iff ...
- */
- def containedIn1(m: MetaData): Boolean =
- m != null && (m.equals1(this) || containedIn1(m.next))
-
/** returns a copy of this MetaData item with next field set to argument.
*
* @param next ...
@@ -143,22 +131,20 @@ abstract class MetaData extends Iterable[MetaData]
def isPrefixed: Boolean
- /** deep equals method - XXX */
- override def equals(that: Any) = that match {
- case m: MetaData =>
- (this.length == m.length) &&
- (this.hashCode == m.hashCode) &&
- (this forall (_ containedIn1 m))
+ override def canEqual(other: Any) = other match {
+ case _: MetaData => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case m: MetaData => this.toSet == m.toSet
case _ => false
}
+ def basisForHashCode: Seq[Any] = List(this.toSet)
/** Returns an iterator on attributes */
- def iterator: Iterator[MetaData] = Iterator.iterate(this)(_.next) takeWhile (_ != Null)
+ def iterator: Iterator[MetaData] = Iterator.single(this) ++ next.iterator
override def size: Int = 1 + iterator.length
- /** shallow equals method */
- def equals1(that: MetaData): Boolean
-
/** filters this sequence of meta data */
override def filter(f: MetaData => Boolean): MetaData =
if (f(this)) copy(next filter f)
@@ -170,8 +156,18 @@ abstract class MetaData extends Iterable[MetaData]
/** returns value of this MetaData item */
def value: Seq[Node]
- /** maps this sequence of meta data */
- def map(f: MetaData => Text): List[Text] = (iterator map f).toList
+ /** Returns a String containing "prefix:key" if the first key is
+ * prefixed, and "key" otherwise.
+ */
+ def prefixedKey = this match {
+ case x: Attribute if x.isPrefixed => x.pre + ":" + key
+ case _ => key
+ }
+
+ /** Returns a Map containing the attributes stored as key/value pairs.
+ */
+ def asAttrMap: Map[String, String] =
+ iterator map (x => (x.prefixedKey, x.value.text)) toMap
/** returns Null or the next MetaData item */
def next: MetaData
@@ -198,8 +194,6 @@ abstract class MetaData extends Iterable[MetaData]
final def get(uri: String, scope: NamespaceBinding, key: String): Option[Seq[Node]] =
Option(apply(uri, scope, key))
- override def hashCode(): Int
-
def toString1(): String = sbToString(toString1)
// appends string representations of single attribute to StringBuilder
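Of the additions above, asAttrMap is the user-facing one: it flattens the MetaData chain into an ordinary Map keyed by the (possibly prefixed) attribute name. REPL-style sketch with arbitrary sample data:

    val user = <user name="adriaan" id="42"/>
    println(user.attributes.asAttrMap)    // e.g. Map(name -> adriaan, id -> 42)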
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
index 93485a17fe..3b29f2e90a 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/library/scala/xml/NamespaceBinding.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
import Utility.sbToString
-import collection.mutable.StringBuilder
+
/** The class <code>NamespaceBinding</code> represents namespace bindings
* and scopes. The binding for the default namespace is treated as a null
@@ -23,9 +22,9 @@ import collection.mutable.StringBuilder
* @version 1.0
*/
@SerialVersionUID(0 - 2518644165573446725L)
-case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBinding) extends AnyRef
+case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBinding) extends AnyRef with Equality
{
- if (prefix != null && prefix.isEmpty)
+ if (prefix == "")
throw new IllegalArgumentException("zero length prefix not allowed")
def getURI(_prefix: String): String =
@@ -41,6 +40,15 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin
if (_uri == uri) prefix else parent getPrefix _uri
override def toString(): String = sbToString(buildString(_, TopScope))
+ override def canEqual(other: Any) = other match {
+ case _: NamespaceBinding => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case x: NamespaceBinding => (prefix == x.prefix) && (uri == x.uri) && (parent == x.parent)
+ case _ => false
+ }
+ def basisForHashCode: Seq[Any] = List(prefix, uri, parent)
def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop))
def buildString(sb: StringBuilder, stop: NamespaceBinding): Unit = {
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index 5636c7ddcc..6e3c2acebf 100644
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -1,20 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.Seq
-import collection.immutable.{List, Nil}
-import collection.mutable.StringBuilder
-
/**
* This object provides methods ...
*
@@ -22,7 +16,6 @@ import collection.mutable.StringBuilder
* @version 1.0
*/
object Node {
-
/** the constant empty attribute sequence */
final def NoAttributes: MetaData = Null
@@ -30,7 +23,6 @@ object Node {
val EmptyNamespace = ""
def unapplySeq(n: Node) = Some((n.label, n.attributes, n.child))
-
}
/**
@@ -116,6 +108,10 @@ abstract class Node extends NodeSeq {
*/
def child: Seq[Node]
+ /** Children which do not stringify to "" (needed for equality)
+ */
+ def nonEmptyChildren: Seq[Node] = child filterNot (_.toString == "")
+
/**
* Descendant axis (all descendants of this node, not including node itself)
* includes all text nodes, element nodes, comments and processing instructions.
@@ -129,42 +125,24 @@ abstract class Node extends NodeSeq {
*/
def descendant_or_self: List[Node] = this :: descendant
- /**
- * Returns true if x is structurally equal to this node. Compares prefix,
- * label, attributes and children.
- *
- * @param x ...
- * @return <code>true</code> if ..
- */
- override def equals(x: Any): Boolean = x match {
- case g: Group => false
- case that: Node =>
- this.prefix == that.prefix &&
- this.label == that.label &&
- this.attributes == that.attributes &&
- equalChildren(that)
+ override def canEqual(other: Any) = other match {
+ case x: Group => false
+ case x: Node => true
case _ => false
}
-
- // children comparison has to be done carefully - see bug #1773.
- // It would conceivably be a better idea for a scala block which
- // generates the empty string not to generate a child rather than
- // our having to filter it later, but that approach would be more
- // delicate to implement.
- private def equalChildren(that: Node) = {
- def noEmpties(xs: Seq[Node]) = xs filter (_.toString() != "")
- noEmpties(this.child) sameElements noEmpties(that.child)
+ override def basisForHashCode: Seq[Any] = prefix :: label :: attributes :: nonEmptyChildren.toList
+ override def strict_==(other: Equality) = other match {
+ case _: Group => false
+ case x: Node =>
+ (prefix == x.prefix) &&
+ (label == x.label) &&
+ (attributes == x.attributes) &&
+ // (scope == x.scope) // note - original code didn't compare scopes so I left it as is.
+ (nonEmptyChildren sameElements x.nonEmptyChildren)
+ case _ =>
+ false
}
- /** <p>
- * Returns a hashcode. The default implementation here calls only
- * super.hashcode (which is the same as for objects). A more useful
- * implementation can be invoked by calling
- * <code>Utility.hashCode(pre, label, attributes.hashCode(), child)</code>.
- * </p>
- */
- override def hashCode(): Int = super.hashCode
-
// implementations of NodeSeq methods
/**
@@ -214,9 +192,10 @@ abstract class Node extends NodeSeq {
* Martin to Burak: to do: if you make this method abstract, the compiler will now
* complain if there's no implementation in a subclass. Is this what we want? Note that
* this would break doc/DocGenator and doc/ModelToXML, with an error message like:
-doc\DocGenerator.scala:1219: error: object creation impossible, since there is a deferred declaration of method text in class Node of type => String which is not implemented in a subclass
- new SpecialNode {
- ^
- */
+ * {{{
+ * doc\DocGenerator.scala:1219: error: object creation impossible, since there is a deferred declaration of method text in class Node of type => String which is not implemented in a subclass
+ * new SpecialNode {
+ * ^
+ * }}} */
override def text: String = super.text
}
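nonEmptyChildren exists so that equality keeps ignoring children that stringify to "", the bug #1773 case described in the removed comment. REPL-style illustration (behaviour as sketched, assuming the embedded {""} produces an Atom that prints as the empty string):

    val a = <a>{ "" }</a>    // one child, an Atom[String] that prints as ""
    val b = <a/>             // no children
    println(a == b)          // true: empty-printing children are filtered out before comparison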
diff --git a/src/library/scala/xml/NodeBuffer.scala b/src/library/scala/xml/NodeBuffer.scala
index d8b5927435..e9c82b0451 100644
--- a/src/library/scala/xml/NodeBuffer.scala
+++ b/src/library/scala/xml/NodeBuffer.scala
@@ -1,18 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.{Iterator, Seq, Iterable}
-
/**
* <p>
* This class acts as a Buffer for nodes. If it is used as a sequence
@@ -42,9 +38,10 @@ class NodeBuffer extends scala.collection.mutable.ArrayBuffer[Node] {
def &+(o: Any): NodeBuffer = {
o match {
case null | _: Unit | Text("") => // ignore
- case it: Iterator[_] => it foreach (this &+ _)
+ case it: Iterator[_] => it foreach &+
case n: Node => super.+=(n)
case ns: Iterable[_] => this &+ ns.iterator
+ case ns: Array[_] => this &+ ns.iterator
case d => super.+=(new Atom(d))
}
this
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index de6e38d5b7..cf343c55e8 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -1,21 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import collection.immutable
-import collection.immutable.{List, Nil, ::}
-import collection.{Seq, SeqLike}
-import collection.mutable.{Builder, ListBuffer}
-import collection.generic.CanBuildFrom
+import collection.{ mutable, immutable, generic, SeqLike }
+import mutable.{ Builder, ListBuffer }
+import generic.{ CanBuildFrom }
/** This object ...
*
@@ -43,7 +40,7 @@ object NodeSeq {
* @author Burak Emir
* @version 1.0
*/
-abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
+abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] with Equality {
import NodeSeq.seqToNodeSeq // import view magic for NodeSeq wrappers
/** Creates a list buffer as builder for this class */
@@ -56,32 +53,45 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
def apply(i: Int): Node = theSeq(i)
def apply(f: Node => Boolean): NodeSeq = filter(f)
- /** structural equality */
- override def equals(x: Any): Boolean = x match {
- case z:Node => (length == 1) && z == apply(0)
- case z:Seq[_] => sameElements(z)
- case z:String => text == z
- case _ => false
+ def xml_sameElements[A](that: Iterable[A]): Boolean = {
+ val these = this.iterator
+ val those = that.iterator
+ while (these.hasNext && those.hasNext)
+ if (these.next xml_!= those.next)
+ return false
+
+ !these.hasNext && !those.hasNext
+ }
+ def basisForHashCode: Seq[Any] = theSeq
+ override def canEqual(other: Any) = other match {
+ case _: NodeSeq => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case x: NodeSeq => (length == x.length) && (theSeq sameElements x.theSeq)
+ case _ => false
}
- /** Projection function. Similar to XPath, use <code>this \ "foo"</code>
- * to get a list of all elements of this sequence that are labelled with
- * <code>"foo"</code>. Use <code>\ "_"</code> as a wildcard. Use
- * <code>ns \ "@foo"</code> to get the unprefixed attribute "foo".
- * Use <code>ns \ "@{uri}foo"</code> to get the prefixed attribute
- * "pre:foo" whose prefix "pre" is resolved to the namespace "uri".
- * For attribute projections, the resulting NodeSeq attribute values are
- * wrapped in a Group.
- * There is no support for searching a prefixed attribute by
- * its literal prefix.
+ /** Projection function, which returns elements of `this` sequence based on the string `that`. Use:
+ * - `this \ "foo"` to get a list of all elements that are labelled with `"foo"`;
+ * - `\ "_"` to get a list of all elements (wildcard);
+ * - `ns \ "@foo"` to get the unprefixed attribute `"foo"`;
+ * - `ns \ "@{uri}foo"` to get the prefixed attribute `"pre:foo"` whose prefix `"pre"` is resolved to the
+ * namespace `"uri"`.
+ *
+ * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute values are wrapped in a
+ * [[scala.xml.Group]].
+ *
+ * There is no support for searching a prefixed attribute by its literal prefix.
+ *
* The document order is preserved.
*
* @param that ...
* @return ...
*/
def \(that: String): NodeSeq = {
+ def fail = throw new IllegalArgumentException(that)
def atResult = {
- def fail = throw new IllegalArgumentException(that)
lazy val y = this(0)
val attr =
if (that.length == 1) fail
@@ -92,7 +102,7 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
if (uri == "" || key == "") fail
else y.attribute(uri, key)
}
- else y.attribute(that.substring(1))
+ else y.attribute(that drop 1)
attr match {
case Some(x) => Group(x)
@@ -104,22 +114,26 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
NodeSeq fromSeq (this flatMap (_.child) filter cond)
that match {
+ case "" => fail
case "_" => makeSeq(!_.isAtom)
case _ if (that(0) == '@' && this.length == 1) => atResult
case _ => makeSeq(_.label == that)
}
}
- /** projection function. Similar to XPath, use <code>this \\ 'foo</code>
- * to get a list of all elements of this sequence that are labelled with
- * <code>"foo"</code>. Use <code>\\ "_"</code> as a wildcard. Use
- * <code>ns \\ "@foo"</code> to get the unprefixed attribute "foo".
- * Use <code>ns \\ "@{uri}foo"</code> to get each prefixed attribute
- * "pre:foo" whose prefix "pre" is resolved to the namespace "uri".
- * For attribute projections, the resulting NodeSeq attribute values are
- * wrapped in a Group.
- * There is no support for searching a prefixed attribute by
- * its literal prefix.
+ /** Projection function, which returns elements of `this` sequence and of all its subsequences, based on
+ * the string `that`. Use:
+ * - `this \\ 'foo` to get a list of all elements that are labelled with `"foo"`;
+ * - `\\ "_"` to get a list of all elements (wildcard);
+ * - `ns \\ "@foo"` to get the unprefixed attribute `"foo"`;
+ * - `ns \\ "@{uri}foo"` to get each prefixed attribute `"pre:foo"` whose prefix `"pre"` is resolved to the
+ * namespace `"uri"`.
+ *
+ * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute values are wrapped in a
+ * [[scala.xml.Group]].
+ *
+ * There is no support for searching a prefixed attribute by its literal prefix.
+ *
* The document order is preserved.
*
* @param that ...
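A corresponding sketch for the deep projection, continuing the hypothetical `recipe` value from the example above:

    recipe \\ "step"   // <step> elements found at any depth
    recipe \\ "@n"     // every unprefixed "n" attribute in the whole subtree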
diff --git a/src/library/scala/xml/Null.scala b/src/library/scala/xml/Null.scala
index 1bbcd0713d..6d2939a9a1 100644
--- a/src/library/scala/xml/Null.scala
+++ b/src/library/scala/xml/Null.scala
@@ -1,81 +1,59 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
import Utility.{ isNameStart }
-import collection.Iterator
-import collection.immutable.{Nil, List}
-import collection.mutable.StringBuilder
+/** Essentially, every method in here is a dummy, returning Zero[T].
+ * It provides a backstop for the unusual collection defined by MetaData,
+ * sort of a linked list of tails.
+ */
case object Null extends MetaData {
-
- /** appends given MetaData items to this MetaData list */
- override def append(m: MetaData, scope: NamespaceBinding = TopScope): MetaData = m
-
- override def containedIn1(m: MetaData): Boolean = false
-
- /** returns its argument */
- def copy(next: MetaData) = next
-
override def iterator = Iterator.empty
-
+ override def append(m: MetaData, scope: NamespaceBinding = TopScope): MetaData = m
override def filter(f: MetaData => Boolean): MetaData = this
+ def copy(next: MetaData) = next
def getNamespace(owner: Node) = null
- final override def hasNext = false
+ override def hasNext = false
def next = null
def key = null
def value = null
-
- final override def length = 0
- final override def length(i: Int) = i
-
def isPrefixed = false
- /** deep equals method - XXX */
- override def equals(that: Any) = that match {
- case m: MetaData => m.length == 0
- case _ => false
- }
+ override def length = 0
+ override def length(i: Int) = i
- def equals1(that:MetaData) = that.length == 0
-
- override def map(f: MetaData => Text): List[Text] = Nil
+ override def strict_==(other: Equality) = other match {
+ case x: MetaData => x.length == 0
+ case _ => false
+ }
+ override def basisForHashCode: Seq[Any] = Nil
- /** null */
+ def apply(namespace: String, scope: NamespaceBinding, key: String) = null
def apply(key: String) = {
- if(!isNameStart(key charAt 0))
+ if (!isNameStart(key.head))
throw new IllegalArgumentException("not a valid attribute name '"+key+"', so can never match !")
+
null
}
- /** gets value of qualified (prefixed) attribute with given key */
- def apply(namespace: String, scope: NamespaceBinding, key: String) = null
-
- override def hashCode(): Int = 0
-
+ def toString1(sb: StringBuilder) = ()
override def toString1(): String = ""
-
- //appends string representations of single attribute to StringBuilder
- def toString1(sb:StringBuilder) = {}
-
override def toString(): String = ""
override def buildString(sb: StringBuilder): StringBuilder = sb
-
override def wellformed(scope: NamespaceBinding) = true
def remove(key: String) = this
-
def remove(namespace: String, scope: NamespaceBinding, key: String) = this
}
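A small sketch of the backstop role described in the comment above: Null terminates the linked list of attributes that MetaData forms (the attribute names are illustrative).

    val attrs = new UnprefixedAttribute("a", "1", new UnprefixedAttribute("b", "2", Null))
    attrs("a")         // Seq(Text("1"))
    attrs("missing")   // null, because the lookup eventually reaches Null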
diff --git a/src/library/scala/xml/PCData.scala b/src/library/scala/xml/PCData.scala
index 5cf4bda070..330ad897f9 100644
--- a/src/library/scala/xml/PCData.scala
+++ b/src/library/scala/xml/PCData.scala
@@ -1,4 +1,3 @@
-// $Id$
package scala.xml
@@ -7,16 +6,9 @@ package scala.xml
 * and is to be preserved as a CDATA section in the output.
*/
case class PCData(_data: String) extends Atom[String](_data) {
- /* The following code is a derivative work of scala.xml.Text */
if (null == data)
throw new IllegalArgumentException("tried to construct PCData with null")
- final override def equals(x: Any) = x match {
- case s: String => s.equals(data)
- case s: Atom[_] => data == s.data
- case _ => false
- }
-
/** Returns text, with some characters escaped according to the XML
* specification.
*
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index cd89456628..c739dbe1fa 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -1,20 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-
-
/** prefixed attributes always have a non-null namespace.
*
* @param pre ...
@@ -36,24 +30,12 @@ extends Attribute
def this(pre: String, key: String, value: String, next: MetaData) =
this(pre, key, Text(value), next)
- /*
- // the problem here is the fact that we cannot remove the proper attribute from
- // next, and thus cannot guarantee that hashcodes are computed properly
- def this(pre: String, key: String, value: scala.AllRef, next: MetaData) =
- throw new UnsupportedOperationException("can't construct prefixed nil attributes")
- */
-
/** Returns a copy of this unprefixed attribute with the given
* next field.
*/
def copy(next: MetaData) =
new PrefixedAttribute(pre, key, value, next)
- def equals1(m: MetaData) =
- (m.isPrefixed &&
- (m.asInstanceOf[PrefixedAttribute].pre == pre) &&
- (m.key == key) && (m.value sameElements value))
-
def getNamespace(owner: Node) =
owner.getNamespace(pre)
@@ -68,41 +50,8 @@ extends Attribute
else
next(namespace, scope, key)
}
-
- /** returns true */
- final def isPrefixed = true
-
- /** returns the hashcode.
- */
- override def hashCode() =
- pre.hashCode() * 41 + key.hashCode() * 7 + next.hashCode()
-
-
- /** appends string representation of only this attribute to stringbuffer */
- def toString1(sb:StringBuilder): Unit = if(value ne null) {
- sb.append(pre)
- sb.append(':')
- sb.append(key)
- sb.append('=')
- val sb2 = new StringBuilder()
- Utility.sequenceToXML(value, TopScope, sb2, true)
- Utility.appendQuoted(sb2.toString(), sb)
- }
-
- def wellformed(scope: NamespaceBinding): Boolean =
- (null == next(scope.getURI(pre), scope, key) &&
- next.wellformed(scope))
-
- def remove(key: String) =
- copy(next.remove(key))
-
- def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData =
- if (key == this.key && scope.getURI(pre) == namespace)
- next
- else
- next.remove(namespace, scope, key)
-
}
+
object PrefixedAttribute {
def unapply(x: PrefixedAttribute) = Some(x.pre, x.key, x.value, x.next)
}
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index 8b2193fba8..5927ae0d11 100644
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -1,17 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import scala.collection.Map
import Utility.sbToString
/** Class for pretty printing. After instantiating, you can use the
@@ -23,7 +20,7 @@ import Utility.sbToString
* @version 1.0
*
* @param width the width to fit the output into
- * @step indentation
+ * @param step indentation
*/
class PrettyPrinter(width: Int, step: Int) {
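For orientation, a typical use of this class; the `format` method and the sample node are assumed here for illustration:

    val printer = new PrettyPrinter(80, 2)   // wrap at 80 columns, indent by 2
    printer.format(<html><body><p>hello</p></body></html>)   // returns the indented string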
@@ -39,7 +36,6 @@ class PrettyPrinter(width: Int, step: Int) {
protected var items: List[Item] = Nil
protected var cur = 0
- //protected var pmap:Map[String,String] = _
protected def reset() = {
cur = 0
@@ -84,20 +80,13 @@ class PrettyPrinter(width: Int, step: Int) {
* @param s ...
* @return ...
*/
- protected def makeBox(ind: Int, s: String) = {
- // XXX um...
- if (cur < ind)
- cur == ind
+ protected def makeBox(ind: Int, s: String) =
if (cur + s.length > width) { // fits in this line
- items = Box(ind, s) :: items
+ items ::= Box(ind, s)
cur += s.length
- } else try {
- for (b <- cut(s, ind).iterator) // break it up
- items = b :: items
- } catch {
- case _:BrokenException => makePara(ind, s) // give up, para
}
- }
+ else try cut(s, ind) foreach (items ::= _) // break it up
+ catch { case _: BrokenException => makePara(ind, s) } // give up, para
// dont respect indent in para, but afterwards
protected def makePara(ind: Int, s: String) = {
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/library/scala/xml/ProcInstr.scala
index 0f7877049c..72b9ba00f2 100644
--- a/src/library/scala/xml/ProcInstr.scala
+++ b/src/library/scala/xml/ProcInstr.scala
@@ -1,15 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import collection.mutable.StringBuilder
/** an XML node for processing instructions (PI)
*
@@ -36,5 +34,5 @@ case class ProcInstr(target: String, proctext: String) extends SpecialNode
* to this stringbuffer.
*/
override def buildString(sb: StringBuilder) =
- sb append "<?%s%s?>".format(target, (if (proctext.isEmpty) "" else " " + proctext))
+ sb append "<?%s%s?>".format(target, (if (proctext == "") "" else " " + proctext))
}
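Given the buildString above, a ProcInstr should serialize roughly as follows (the target and proctext are illustrative, and toString is assumed to delegate to buildString):

    ProcInstr("xml-stylesheet", "href='style.css' type='text/css'").toString
    // <?xml-stylesheet href='style.css' type='text/css'?>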
diff --git a/src/library/scala/xml/QNode.scala b/src/library/scala/xml/QNode.scala
index 7a47d3ef9d..331f138f25 100644
--- a/src/library/scala/xml/QNode.scala
+++ b/src/library/scala/xml/QNode.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/SpecialNode.scala b/src/library/scala/xml/SpecialNode.scala
index 1c49467773..1b5a8a7d9a 100644
--- a/src/library/scala/xml/SpecialNode.scala
+++ b/src/library/scala/xml/SpecialNode.scala
@@ -1,19 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.immutable.{List, Nil, ::}
-import collection.mutable.StringBuilder
-
/** <p>
* <code>SpecialNode</code> is a special XML node which
* represents either text (PCDATA), a comment, a PI, or an entity ref.
diff --git a/src/library/scala/xml/Text.scala b/src/library/scala/xml/Text.scala
index ffb5cf5155..aebb0d4897 100644
--- a/src/library/scala/xml/Text.scala
+++ b/src/library/scala/xml/Text.scala
@@ -1,28 +1,30 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.mutable.StringBuilder
-
-object Text {
- def apply(data: String) =
- if (data != null) new Text(data)
- else throw new IllegalArgumentException("tried to construct Text with null")
-
- def unapply(other: Any) = other match {
- case x: Text => Some(x.data)
- case _ => None
- }
-}
+// XXX This attempt to make Text not a case class revealed a bug in the pattern
+// matcher (see ticket #2883) so I've put the case back. (It was/is desirable that
+// it not be a case class because it is using the antipattern of passing constructor
+// parameters to the superclass where they become vals, but since they will also be
+// vals in the subclass, it acquires an underscore to avoid a name clash.)
+//
+// object Text {
+// def apply(data: String) =
+// if (data != null) new Text(data)
+// else throw new IllegalArgumentException("tried to construct Text with null")
+//
+// def unapply(other: Any): Option[String] = other match {
+// case x: Text => Some(x.data)
+// case _ => None
+// }
+// }
/** The class <code>Text</code> implements an XML node for text (PCDATA).
* It is used in both non-bound and bound XML representations.
@@ -31,18 +33,11 @@ object Text {
*
* @param text the text contained in this node, may not be null.
*/
-class Text(data: String) extends Atom[String](data)
+case class Text(_data: String) extends Atom[String](_data)
{
- if (data == null)
+ if (_data == null)
throw new IllegalArgumentException("tried to construct Text with null")
- /** XXX More hashCode flailing. */
- final override def equals(x: Any) = x match {
- case s:String => s == data
- case s:Atom[_] => data == s.data
- case _ => false
- }
-
/** Returns text, with some characters escaped according to the XML
* specification.
*
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/library/scala/xml/TextBuffer.scala
index 633c90e94f..265851a04b 100644
--- a/src/library/scala/xml/TextBuffer.scala
+++ b/src/library/scala/xml/TextBuffer.scala
@@ -1,19 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-import collection.immutable.{List, Nil, ::}
import Utility.isSpace
object TextBuffer {
diff --git a/src/library/scala/xml/TopScope.scala b/src/library/scala/xml/TopScope.scala
index c458248ae3..33b7bbfa01 100644
--- a/src/library/scala/xml/TopScope.scala
+++ b/src/library/scala/xml/TopScope.scala
@@ -1,23 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import collection.mutable.StringBuilder
-
-
/** top level namespace scope. only contains the predefined binding
* for the &quot;xml&quot; prefix which is bound to
* &quot;http://www.w3.org/XML/1998/namespace&quot;
*/
-case object TopScope extends NamespaceBinding(null, null, null)
+object TopScope extends NamespaceBinding(null, null, null)
{
import XML.{ xml, namespace }
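A small illustration of that predefined binding (getURI/getPrefix are the usual NamespaceBinding accessors, assumed here):

    TopScope.getURI("xml")    // "http://www.w3.org/XML/1998/namespace"
    TopScope.getPrefix("http://www.w3.org/XML/1998/namespace")   // "xml"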
diff --git a/src/library/scala/xml/TypeSymbol.scala b/src/library/scala/xml/TypeSymbol.scala
index 826e967fa1..b9bd6960cc 100644
--- a/src/library/scala/xml/TypeSymbol.scala
+++ b/src/library/scala/xml/TypeSymbol.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/Unparsed.scala b/src/library/scala/xml/Unparsed.scala
index 08e7f373e9..7911310363 100644
--- a/src/library/scala/xml/Unparsed.scala
+++ b/src/library/scala/xml/Unparsed.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
@@ -22,13 +21,6 @@ class Unparsed(data: String) extends Atom[String](data)
if (null == data)
throw new IllegalArgumentException("tried to construct Unparsed with null")
- /** XXX another hashCode fail */
- final override def equals(x: Any) = x match {
- case s:String => s == data
- case s:Atom[_] => data == s.data
- case _ => false
- }
-
/** returns text, with some characters escaped according to XML spec */
override def buildString(sb: StringBuilder) = sb append data
}
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
index 8f4129b25b..a4bbef37b0 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/library/scala/xml/UnprefixedAttribute.scala
@@ -1,20 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-
-
/** Unprefixed attributes have the null namespace, and no prefix field
*
* @author Burak Emir
@@ -25,6 +19,7 @@ class UnprefixedAttribute(
next1: MetaData)
extends Attribute
{
+ final val pre = null
val next = if (value ne null) next1 else next1.remove(key)
/** same as this(key, Text(value), next) */
@@ -38,9 +33,6 @@ extends Attribute
/** returns a copy of this unprefixed attribute with the given next field*/
def copy(next: MetaData) = new UnprefixedAttribute(key, value, next)
- def equals1(m: MetaData) =
- !m.isPrefixed && (m.key == key) && (m.value sameElements value)
-
final def getNamespace(owner: Node): String = null
/**
@@ -62,33 +54,6 @@ extends Attribute
*/
def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node] =
next(namespace, scope, key)
-
- override def hashCode() =
- key.hashCode() * 7 + { if(value ne null) value.hashCode() * 53 else 0 } + next.hashCode()
-
- final def isPrefixed = false
-
- /** appends string representation of only this attribute to stringbuffer.
- *
- * @param sb ..
- */
- def toString1(sb: StringBuilder): Unit = if (value ne null) {
- sb.append(key)
- sb.append('=')
- val sb2 = new StringBuilder()
- Utility.sequenceToXML(value, TopScope, sb2, true)
- Utility.appendQuoted(sb2.toString(), sb)
- }
-
- def wellformed(scope: NamespaceBinding): Boolean =
- (null == next(null, scope, key)) && next.wellformed(scope)
-
- def remove(key: String) =
- if (this.key == key) next else copy(next.remove(key))
-
- def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData =
- next.remove(namespace, scope, key)
-
}
object UnprefixedAttribute {
def unapply(x: UnprefixedAttribute) = Some(x.key, x.value, x.next)
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 0628de8922..65d7179fa2 100644
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import collection.mutable.{Set, HashSet, StringBuilder}
-import collection.Seq
+import collection.mutable
+import mutable.{ Set, HashSet }
import parsing.XhtmlEntities
/**
@@ -74,9 +73,6 @@ object Utility extends AnyRef with parsing.TokenTests
case _ => n
}
- @deprecated("a string might also be Atom(s) - define your own conversion")
- def view(s: String): Text = Text(s)
-
/**
* Escapes the characters &lt; &gt; &amp; and &quot; from string.
*
@@ -87,7 +83,7 @@ object Utility extends AnyRef with parsing.TokenTests
object Escapes {
/** For reasons unclear escape and unescape are a long ways from
- being logical inverses. */
+ * being logical inverses. */
val pairs = Map(
"lt" -> '<',
"gt" -> '>',
@@ -109,11 +105,29 @@ object Utility extends AnyRef with parsing.TokenTests
* @param s ...
* @return ...
*/
- final def escape(text: String, s: StringBuilder): StringBuilder =
- text.foldLeft(s)((s, c) => escMap.get(c) match {
- case Some(str) => s append str
- case None => s append c
- })
+ final def escape(text: String, s: StringBuilder): StringBuilder = {
+ // Implemented per XML spec:
+ // http://www.w3.org/International/questions/qa-controls
+ // imperative code 3x-4x faster than current implementation
+ // dpp (David Pollak) 2010/02/03
+ val len = text.length
+ var pos = 0
+ while (pos < len) {
+ text.charAt(pos) match {
+ case '<' => s.append("&lt;")
+ case '>' => s.append("&gt;")
+ case '&' => s.append("&amp;")
+ case '"' => s.append("&quot;")
+ case '\n' => s.append('\n')
+ case '\r' => s.append('\r')
+ case '\t' => s.append('\t')
+ case c => if (c >= ' ') s.append(c)
+ }
+
+ pos += 1
+ }
+ s
+ }
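A quick check of the mapping implemented above, using the two-argument escape shown here:

    val sb = new StringBuilder
    Utility.escape("1 < 2 & 3 > 2", sb).toString   // "1 &lt; 2 &amp; 3 &gt; 2"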
/**
* Appends unescaped string to <code>s</code>, amp becomes &amp;
@@ -125,7 +139,7 @@ object Utility extends AnyRef with parsing.TokenTests
* entity.
*/
final def unescape(ref: String, s: StringBuilder): StringBuilder =
- (unescMap get ref) map (s append _) getOrElse null
+ (unescMap get ref) map (s append _) orNull
/**
* Returns a set of all namespaces used in a sequence of nodes
@@ -134,7 +148,7 @@ object Utility extends AnyRef with parsing.TokenTests
* @param nodes ...
* @return ...
*/
- def collectNamespaces(nodes: Seq[Node]): Set[String] =
+ def collectNamespaces(nodes: Seq[Node]): mutable.Set[String] =
nodes.foldLeft(new HashSet[String]) { (set, x) => collectNamespaces(x, set) ; set }
/**
@@ -143,7 +157,7 @@ object Utility extends AnyRef with parsing.TokenTests
* @param n ...
* @param set ...
*/
- def collectNamespaces(n: Node, set: Set[String]) {
+ def collectNamespaces(n: Node, set: mutable.Set[String]) {
if (n.doCollectNamespaces) {
set += n.namespace
for (a <- n.attributes) a match {
@@ -179,22 +193,24 @@ object Utility extends AnyRef with parsing.TokenTests
minimizeTags: Boolean = false): StringBuilder =
{
x match {
- case c: Comment if !stripComments => c buildString sb
- case x: SpecialNode => x buildString sb
- case g: Group => for (c <- g.nodes) toXML(c, x.scope, sb) ; sb
+ case c: Comment => if (!stripComments) c buildString sb else sb
+ case x: SpecialNode => x buildString sb
+ case g: Group =>
+ g.nodes foreach {toXML(_, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)}
+ sb
case _ =>
// print tag with namespace declarations
sb.append('<')
x.nameToString(sb)
if (x.attributes ne null) x.attributes.buildString(sb)
x.scope.buildString(sb, pscope)
- if (x.child.isEmpty && minimizeTags)
+ if (x.child.isEmpty && minimizeTags) {
// no children, so use short form: <xyz .../>
sb.append(" />")
- else {
+ } else {
// children, so use long form: <xyz ...>...</xyz>
sb.append('>')
- sequenceToXML(x.child, x.scope, sb, stripComments)
+ sequenceToXML(x.child, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
sb.append("</")
x.nameToString(sb)
sb.append('>')
@@ -206,20 +222,23 @@ object Utility extends AnyRef with parsing.TokenTests
children: Seq[Node],
pscope: NamespaceBinding = TopScope,
sb: StringBuilder = new StringBuilder,
- stripComments: Boolean = false): Unit =
+ stripComments: Boolean = false,
+ decodeEntities: Boolean = true,
+ preserveWhitespace: Boolean = false,
+ minimizeTags: Boolean = false): Unit =
{
if (children.isEmpty) return
else if (children forall isAtomAndNotText) { // add space
val it = children.iterator
val f = it.next
- toXML(f, pscope, sb)
+ toXML(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
while (it.hasNext) {
val x = it.next
sb.append(' ')
- toXML(x, pscope, sb)
+ toXML(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
}
}
- else children foreach { toXML(_, pscope, sb) }
+ else children foreach { toXML(_, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
}
/**
@@ -242,14 +261,14 @@ object Utility extends AnyRef with parsing.TokenTests
* @param children
*/
def hashCode(pre: String, label: String, attribHashCode: Int, scpeHash: Int, children: Seq[Node]) = (
- ( if(pre ne null) {41 * pre.hashCode() % 7} else {0})
- + label.hashCode() * 53
+ ( if(pre ne null) {41 * pre.## % 7} else {0})
+ + label.## * 53
+ attribHashCode * 7
+ scpeHash * 31
+ {
var c = 0
val i = children.iterator
- while(i.hasNext) c = c * 41 + i.next.hashCode
+ while(i.hasNext) c = c * 41 + i.next.##
c
}
)
@@ -292,9 +311,10 @@ object Utility extends AnyRef with parsing.TokenTests
*/
def getName(s: String, index: Int): String = {
if (index >= s.length) null
- else (s drop index) match {
- case Seq(x, xs @ _*) if isNameStart(x) => x.toString + (xs takeWhile isNameChar).mkString
- case _ => ""
+ else {
+ val xs = s drop index
+ if (xs.nonEmpty && isNameStart(xs.head)) xs takeWhile isNameChar
+ else ""
}
}
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index afe7354106..b3ecc6ba7b 100644
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -1,22 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-import scala.xml.parsing.NoBindingFactoryAdapter
-import scala.xml.factory.XMLLoader
-import org.xml.sax.InputSource
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
-import java.io.{File, FileDescriptor, FileInputStream, FileOutputStream}
-import java.io.{InputStream, Reader, StringReader, Writer}
+import parsing.NoBindingFactoryAdapter
+import factory.XMLLoader
+import java.io.{ File, FileDescriptor, FileInputStream, FileOutputStream }
+import java.io.{ InputStream, Reader, StringReader, Writer }
import java.nio.channels.Channels
import scala.util.control.Exception.ultimately
@@ -56,11 +53,11 @@ object XML extends XMLLoader[Elem]
@deprecated("Use save() instead")
final def saveFull(filename: String, node: Node, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
- saveFull(filename, node, encoding, xmlDecl, doctype)
+ save(filename, node, encoding, xmlDecl, doctype)
@deprecated("Use save() instead")
final def saveFull(filename: String, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
- saveFull(filename, node, enc, xmlDecl, doctype)
+ save(filename, node, enc, xmlDecl, doctype)
/** Saves a node to a file with given filename using given encoding
* optionally with xmldecl and doctype declaration.
@@ -82,7 +79,7 @@ object XML extends XMLLoader[Elem]
val fos = new FileOutputStream(filename)
val w = Channels.newWriter(fos.getChannel(), enc)
- ultimately({ w.close() ; fos.close() })(
+ ultimately(w.close())(
write(w, node, enc, xmlDecl, doctype)
)
}
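A sketch of a direct call to save, using the positional (filename, node, encoding, xmlDecl, doctype) shape that the deprecated forwarders above rely on; the file name is hypothetical:

    val doc = <config><entry key="a"/></config>
    XML.save("out.xml", doc, "UTF-8", true, null)   // write with an XML declaration and no DOCTYPE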
diff --git a/src/library/scala/xml/Xhtml.scala b/src/library/scala/xml/Xhtml.scala
index 744fe260c2..6730548b73 100644
--- a/src/library/scala/xml/Xhtml.scala
+++ b/src/library/scala/xml/Xhtml.scala
@@ -1,4 +1,3 @@
-// $Id$
package scala.xml
@@ -14,7 +13,7 @@ object Xhtml
*
* @param node the node
*/
- def toXhtml(node: Node): String = sbToString(toXhtml(x = node, sb = _))
+ def toXhtml(node: Node): String = sbToString(sb => toXhtml(x = node, sb = sb))
/**
* Convenience function: amounts to calling toXhtml(node) on each
@@ -22,7 +21,7 @@ object Xhtml
*
* @param nodeSeq the node sequence
*/
- def toXhtml(nodeSeq: NodeSeq): String = sbToString(sequenceToXML(nodeSeq: Seq[Node], sb = _))
+ def toXhtml(nodeSeq: NodeSeq): String = sbToString(sb => sequenceToXML(nodeSeq: Seq[Node], sb = sb))
/** Elements which we believe are safe to minimize if minimizeTags is true.
* See http://www.w3.org/TR/xhtml1/guidelines.html#C_3
@@ -49,11 +48,11 @@ object Xhtml
(minimizableElements contains x.label)
x match {
- case c: Comment if !stripComments => c buildString sb
+ case c: Comment => if (!stripComments) c buildString sb
case er: EntityRef if decodeEntities => decode(er)
case x: SpecialNode => x buildString sb
case g: Group =>
- g.nodes foreach { toXhtml(_, x.scope, sb) }
+ g.nodes foreach { toXhtml(_, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
case _ =>
sb.append('<')
@@ -64,7 +63,7 @@ object Xhtml
if (shortForm) sb.append(" />")
else {
sb.append('>')
- sequenceToXML(x.child, x.scope, sb)
+ sequenceToXML(x.child, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
sb.append("</")
x.nameToString(sb)
sb.append('>')
@@ -89,9 +88,9 @@ object Xhtml
val doSpaces = children forall isAtomAndNotText // interleave spaces
for (c <- children.take(children.length - 1)) {
- toXhtml(c, pscope, sb)
+ toXhtml(c, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
if (doSpaces) sb append ' '
}
- toXhtml(children.last, pscope, sb)
+ toXhtml(children.last, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
}
}
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index d5d45bdaa0..d864d4630d 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -1,22 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package dtd
import util.regexp.WordExp
-import util.automata.{DetWordAutom, SubsetConstruction, WordBerrySethi}
-import collection.mutable.{HashSet, StringBuilder}
-import collection.immutable.{List, Nil}
-import collection.Seq
+import util.automata._
import Utility.sbToString
import PartialFunction._
@@ -51,11 +47,9 @@ object ContentModel extends WordExp {
/* precond: rs.length >= 1 */
private def buildString(rs: Seq[RegExp], sb: StringBuilder, sep: Char) {
- val it = rs.iterator
- val fst = it.next
- buildString(fst, sb)
- for (z <- it) {
- sb.append(sep)
+ buildString(rs.head, sb)
+ for (z <- rs.tail) {
+ sb append sep
buildString(z, sb)
}
sb
@@ -121,7 +115,7 @@ case class MIXED(r: ContentModel.RegExp) extends DFAContentModel {
}
}
-case class ELEMENTS(r: ContentModel.RegExp) extends DFAContentModel {
+case class ELEMENTS(r: ContentModel.RegExp) extends DFAContentModel {
override def buildString(sb: StringBuilder): StringBuilder =
ContentModel.buildString(r, sb)
}
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index 4f0edb9cae..199aa3492d 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -1,19 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
package dtd
-import collection.immutable.List
-
/** Parser for regexps (content models in DTD element declarations) */
object ContentModelParser extends Scanner { // a bit too permissive concerning #PCDATA
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/library/scala/xml/dtd/DTD.scala
index 4c1b03bb93..bfb9ad19ba 100644
--- a/src/library/scala/xml/dtd/DTD.scala
+++ b/src/library/scala/xml/dtd/DTD.scala
@@ -1,38 +1,31 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
package dtd
-import scala.collection.mutable.{HashMap, Map}
+import collection.mutable
+import mutable.HashMap
/** A document type declaration.
*
* @author Burak Emir
*/
abstract class DTD {
-
- var externalID: ExternalID = null
-
- def notations: Seq[NotationDecl] = Nil
-
+ var externalID: ExternalID = null
+ var decls: List[Decl] = Nil
+ def notations: Seq[NotationDecl] = Nil
def unparsedEntities: Seq[EntityDecl] = Nil
- var elem: Map[String, ElemDecl] = new HashMap[String, ElemDecl]()
-
- var attr: Map[String, AttListDecl] = new HashMap[String, AttListDecl]()
-
- var ent: Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
-
- var decls: List[Decl] = Nil
+ var elem: mutable.Map[String, ElemDecl] = new HashMap[String, ElemDecl]()
+ var attr: mutable.Map[String, AttListDecl] = new HashMap[String, AttListDecl]()
+ var ent: mutable.Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
override def toString() =
"DTD [\n%s%s]".format(
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
index 417b9407f0..0badde91c4 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/library/scala/xml/dtd/Decl.scala
@@ -1,21 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
- ** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+ ** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
package dtd
import Utility.sbToString
-import collection.immutable.List
-import collection.mutable.StringBuilder
-
abstract class Decl
@@ -114,7 +109,7 @@ case class IntDef(value:String) extends EntityDef {
val n = tmp.substring(ix, iz);
if( !Utility.isName( n ))
- throw new IllegalArgumentException("interal entity def: \""+n+"\" must be an XML Name");
+ throw new IllegalArgumentException("internal entity def: \""+n+"\" must be an XML Name");
tmp = tmp.substring(iz+1, tmp.length());
ix = tmp.indexOf('%');
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index 31c246fa41..9aa2e7504d 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -1,19 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
package dtd
-import collection.Seq
-
/** An XML node for document type declaration.
*
* @author Burak Emir
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index 8c375ca1c8..872fb58393 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
@@ -14,22 +13,26 @@ package dtd
import PartialFunction._
import ContentModel.ElemName
+import MakeValidationException._ // @todo other exceptions
+
import scala.util.automata._
+import scala.collection.mutable.BitSet
/** validate children and/or attributes of an element;
 * exceptions are created but not thrown.
*/
class ElementValidator() extends Function1[Node,Boolean] {
- var exc: List[ValidationException] = Nil
+ private var exc: List[ValidationException] = Nil
protected var contentModel: ContentModel = _
protected var dfa: DetWordAutom[ElemName] = _
protected var adecls: List[AttrDecl] = _
/** set content model, enabling element validation */
- def setContentModel(cm:ContentModel) = {
- contentModel = cm; cm match {
+ def setContentModel(cm: ContentModel) = {
+ contentModel = cm
+ cm match {
case ELEMENTS(r) =>
val nfa = ContentModel.Translator.automatonFrom(r, 1)
dfa = new SubsetConstruction(nfa).determinize
@@ -43,8 +46,8 @@ class ElementValidator() extends Function1[Node,Boolean] {
/** set meta data, enabling attribute validation */
def setMetaData(adecls: List[AttrDecl]) { this.adecls = adecls }
- def getIterator(nodes: Seq[Node], skipPCDATA: Boolean): Iterator[ElemName] = {
- def isAllWhitespace(a: Atom[_]) = cond(a.data) { case s: String if s.trim.isEmpty => true }
+ def getIterable(nodes: Seq[Node], skipPCDATA: Boolean): Iterable[ElemName] = {
+ def isAllWhitespace(a: Atom[_]) = cond(a.data) { case s: String if s.trim == "" => true }
nodes.filter {
case y: SpecialNode => y match {
@@ -52,91 +55,71 @@ class ElementValidator() extends Function1[Node,Boolean] {
case _ => !skipPCDATA
}
case x => x.namespace eq null
- } . map (x => ElemName(x.label)) iterator
+ } . map (x => ElemName(x.label))
}
/** check attributes, return true if md corresponds to attribute declarations in adecls.
*/
def check(md: MetaData): Boolean = {
- //@todo other exceptions
- import MakeValidationException._;
- val len: Int = exc.length;
- var j = 0;
- var ok = new scala.collection.mutable.BitSet(adecls.length);
- def find(Key:String): AttrDecl = {
- var attr: AttrDecl = null;
- val jt = adecls.iterator; while(j < adecls.length) {
- jt.next match {
- case a @ AttrDecl(Key, _, _) => attr = a; ok += j; j = adecls.length;
- case _ => j = j + 1;
+ val len: Int = exc.length
+ var ok = new BitSet(adecls.length)
+
+ for (attr <- md) {
+ def attrStr = attr.value.toString
+ def find(Key: String): Option[AttrDecl] = {
+ adecls.zipWithIndex find {
+ case (a @ AttrDecl(Key, _, _), j) => ok += j ; return Some(a)
+ case _ => false
}
+ None
}
- attr
- }
- val it = md.iterator; while(it.hasNext) {
- val attr = it.next
- j = 0
- find(attr.key) match {
- case null =>
- //Console.println("exc");
- exc = fromUndefinedAttribute( attr.key ) :: exc;
-
- case AttrDecl(_, tpe, DEFAULT(true, fixedValue)) if attr.value.toString != fixedValue =>
- exc = fromFixedAttribute( attr.key, fixedValue, attr.value.toString) :: exc;
+ find(attr.key) match {
+ case None =>
+ exc ::= fromUndefinedAttribute(attr.key)
- case s =>
- //Console.println("s: "+s);
+ case Some(AttrDecl(_, tpe, DEFAULT(true, fixedValue))) if attrStr != fixedValue =>
+ exc ::= fromFixedAttribute(attr.key, fixedValue, attrStr)
+ case _ =>
}
}
- //val missing = ok.toSet(false); FIXME: it doesn't seem to be used anywhere
- j = 0
- var kt = adecls.iterator
- while (kt.hasNext) {
- kt.next match {
- case AttrDecl(key, tpe, REQUIRED) if !ok(j) =>
- exc = fromMissingAttribute( key, tpe ) :: exc;
- j = j + 1;
- case _ =>
- j = j + 1;
- }
+ adecls.zipWithIndex foreach {
+ case (AttrDecl(key, tpe, REQUIRED), j) if !ok(j) => exc ::= fromMissingAttribute(key, tpe)
+ case _ =>
}
+
exc.length == len //- true if no new exception
}
/** check children, return true if conform to content model
- * @pre contentModel != null
+ * @note contentModel != null
*/
def check(nodes: Seq[Node]): Boolean = contentModel match {
case ANY => true
- case EMPTY => !getIterator(nodes, false).hasNext
- case PCDATA => !getIterator(nodes, true).hasNext
+ case EMPTY => getIterable(nodes, false).isEmpty
+ case PCDATA => getIterable(nodes, true).isEmpty
case MIXED(ContentModel.Alt(branches @ _*)) => // @todo
val j = exc.length
def find(Key: String): Boolean =
branches exists { case ContentModel.Letter(ElemName(Key)) => true ; case _ => false }
- getIterator(nodes, true) map (_.name) filterNot find foreach {
+ getIterable(nodes, true) map (_.name) filterNot find foreach {
exc ::= MakeValidationException fromUndefinedElement _
}
(exc.length == j) // - true if no new exception
case _: ELEMENTS =>
- var q = 0
- getIterator(nodes, false) foreach { e =>
- (dfa delta q get e) match {
- case Some(p) => q = p
- case _ => throw ValidationException("element %s not allowed here" format e)
+ dfa isFinal {
+ getIterable(nodes, false).foldLeft(0) { (q, e) =>
+ (dfa delta q).getOrElse(e, throw ValidationException("element %s not allowed here" format e))
}
}
-
- dfa isFinal q // - true if arrived in final state
}
/** applies various validations - accumulates error messages in exc
- * @todo: fail on first error, ignore other errors (rearranging conditions)
+ * @todo fail on first error, ignore other errors (rearranging conditions)
*/
def apply(n: Node): Boolean =
//- ? check children
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
index 01302e716b..df5d611690 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/library/scala/xml/dtd/ExternalID.scala
@@ -1,21 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
package dtd
-import collection.immutable.{List, Nil}
-import collection.mutable.StringBuilder
-
-
/** an ExternalIDs - either PublicID or SystemID
*
* @author Burak Emir
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 71cb93b39e..b8dc2b070b 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -1,20 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
package dtd
-import collection.{Seq, Iterator}
-import collection.immutable.{List, Nil}
-
/** Scanner for regexps (content models in DTD element declarations)
* todo: cleanup
*/
diff --git a/src/library/scala/xml/dtd/Tokens.scala b/src/library/scala/xml/dtd/Tokens.scala
index c3466ee0e7..94f6b9aa11 100644
--- a/src/library/scala/xml/dtd/Tokens.scala
+++ b/src/library/scala/xml/dtd/Tokens.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala
index 8f27b5d7f3..09142e9770 100644
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ b/src/library/scala/xml/dtd/ValidationException.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index e2e0821aaf..b0e38a39cb 100644
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package factory
-import scala.xml.parsing.ValidatingMarkupHandler
+import parsing.ValidatingMarkupHandler
/**
* @author Burak Emir
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index 1b12b63dcc..2159f8c106 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
@@ -59,7 +58,7 @@ with scala.util.logging.Logged {
if (logNode)
log("[makeNode for "+label+"]");
- val hash = Utility.hashCode(pre, label, attrSeq.hashCode(), scope.hashCode(), children)
+ val hash = Utility.hashCode(pre, label, attrSeq.##, scope.##, children)
/*
if(logCompressLevel >= FULL) {
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 911ca620a4..4178a38cd9 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -1,22 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package factory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import collection.Seq
-import collection.immutable.{List, Nil}
-import org.xml.sax.InputSource
import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
trait NodeFactory[A <: Node]
{
@@ -45,7 +40,7 @@ trait NodeFactory[A <: Node]
eqElements(n.child, children)
def makeNode(pre: String, name: String, attrSeq: MetaData, scope: NamespaceBinding, children: Seq[Node]): A = {
- val hash = Utility.hashCode( pre, name, attrSeq.hashCode(), scope.hashCode(), children)
+ val hash = Utility.hashCode( pre, name, attrSeq.##, scope.##, children)
def cons(old: List[A]) = construct(hash, old, pre, name, attrSeq, scope, children)
(cache get hash) match {
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
index dc2b07f6d3..006edca054 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/library/scala/xml/factory/XMLLoader.scala
@@ -1,20 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package factory
+import javax.xml.parsers.SAXParserFactory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import org.xml.sax.InputSource
import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
+import java.net.URL
/** Presents a collection of XML loading methods which use the parser
* created by "def parser".
@@ -49,11 +48,12 @@ trait XMLLoader[T <: Node]
def loadFile(fd: FileDescriptor): T = loadXML(fromFile(fd), parser)
def loadFile(name: String): T = loadXML(fromFile(name), parser)
- /** loads XML from given InputStream, Reader, sysID, or InputSource. */
+ /** loads XML from given InputStream, Reader, sysID, InputSource, or URL. */
def load(is: InputStream): T = loadXML(fromInputStream(is), parser)
def load(reader: Reader): T = loadXML(fromReader(reader), parser)
def load(sysID: String): T = loadXML(fromSysId(sysID), parser)
def load(source: InputSource): T = loadXML(source, parser)
+ def load(url: URL): T = loadXML(fromInputStream(url.openStream()), parser)
/** Loads XML from the given String. */
def loadString(string: String): T = loadXML(fromString(string), parser)
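For example (the URL is hypothetical; loadString and the new load(URL) overload are shown above):

    XML.loadString("<a><b/></a>")                              // parse from a String
    XML.load(new java.net.URL("http://example.com/doc.xml"))   // parse from a URL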
diff --git a/src/library/scala/xml/include/CircularIncludeException.scala b/src/library/scala/xml/include/CircularIncludeException.scala
index 94efca2b02..5c6e327de9 100644
--- a/src/library/scala/xml/include/CircularIncludeException.scala
+++ b/src/library/scala/xml/include/CircularIncludeException.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package include
diff --git a/src/library/scala/xml/include/UnavailableResourceException.scala b/src/library/scala/xml/include/UnavailableResourceException.scala
index d2e26e0d19..da1c5d452c 100644
--- a/src/library/scala/xml/include/UnavailableResourceException.scala
+++ b/src/library/scala/xml/include/UnavailableResourceException.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package include
diff --git a/src/library/scala/xml/include/XIncludeException.scala b/src/library/scala/xml/include/XIncludeException.scala
index 83a339562a..3faf587cd9 100644
--- a/src/library/scala/xml/include/XIncludeException.scala
+++ b/src/library/scala/xml/include/XIncludeException.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package include
@@ -43,7 +42,7 @@ class XIncludeException(message: String) extends Exception(message) {
* This method allows you to store the original exception.
*
* @param nestedException the underlying exception which
- caused the XIncludeException to be thrown
+ * caused the XIncludeException to be thrown
*/
def setRootCause(nestedException: Throwable ) {
this.rootCause = nestedException
diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
index ad08ef68cf..95fe240635 100644
--- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala
+++ b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package include.sax
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
index 4df95d1046..ed2e5ebf37 100644
--- a/src/library/scala/xml/include/sax/Main.scala
+++ b/src/library/scala/xml/include/sax/Main.scala
@@ -1,23 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package include.sax
import scala.xml.include._
import scala.util.control.Exception.{ catching, ignoring }
-import org.xml.sax.{ SAXException, SAXParseException, EntityResolver, XMLReader }
+import org.xml.sax.XMLReader
import org.xml.sax.helpers.XMLReaderFactory
object Main {
- private val xercesClass = "org.apache.xerces.parsers.SAXParser"
private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes"
private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler"
@@ -27,7 +25,7 @@ object Main {
* </p>
*
* @param args contains the URLs and/or filenames
- * of the documents to be procesed.
+ * of the documents to be processed.
*/
def main(args: Array[String]) {
def saxe[T](body: => T) = catching[T](classOf[SAXException]) opt body
@@ -35,7 +33,7 @@ object Main {
val parser: XMLReader =
saxe[XMLReader](XMLReaderFactory.createXMLReader()) getOrElse (
- saxe[XMLReader](XMLReaderFactory.createXMLReader(xercesClass)) getOrElse (
+ saxe[XMLReader](XMLReaderFactory.createXMLReader(XercesClassName)) getOrElse (
return error("Could not find an XML parser")
)
)
@@ -47,13 +45,15 @@ object Main {
if (args.isEmpty)
return
- val (resolver, args2): (Option[EntityResolver], Array[String]) =
- if (args.size < 2 || args(0) != "-r") (None, args)
+ def dashR = args.size >= 2 && args(0) == "-r"
+ val args2 = if (dashR) args drop 2 else args
+ val resolver: Option[EntityResolver] =
+ if (dashR) None
else catching(classOf[Exception]) opt {
- val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver]
- parser setEntityResolver r
- (r, args drop 2)
- } orElse (return error("Could not load requested EntityResolver"))
+ val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver]
+ parser setEntityResolver r
+ r
+ } orElse (return error("Could not load requested EntityResolver"))
for (arg <- args2) {
try {
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 222c9f39f8..ce0fc4bf50 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -1,23 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package include.sax
+
import scala.xml.include._
-import org.xml.sax.{ Attributes, SAXException, XMLReader, EntityResolver, Locator }
+import org.xml.sax.{ Attributes, XMLReader, Locator }
import org.xml.sax.helpers.{ XMLReaderFactory, XMLFilterImpl, NamespaceSupport, AttributesImpl }
-import java.net.{ URL, URLConnection, MalformedURLException }
-import java.io.{ UnsupportedEncodingException, IOException, InputStream, BufferedInputStream, InputStreamReader }
+import java.io.{ InputStream, BufferedInputStream, InputStreamReader, IOException, UnsupportedEncodingException }
import java.util.Stack
+import java.net.{ URL, MalformedURLException }
/**
* <p>
@@ -351,61 +351,49 @@ class XIncludeFilter extends XMLFilterImpl {
be downloaded from the specified URL.
*/
private def includeXMLDocument(url: String) {
- var source: URL = null
- try {
- val base = bases.peek().asInstanceOf[URL]
- source = new URL(base, url)
- }
- catch {
- case e:MalformedURLException =>
- val ex = new UnavailableResourceException("Unresolvable URL " + url
- + getLocation());
- ex.setRootCause(e)
- throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
- }
+ val source =
+ try new URL(bases.peek(), url)
+ catch {
+ case e: MalformedURLException =>
+ val ex = new UnavailableResourceException("Unresolvable URL " + url + getLocation())
+ ex setRootCause e
+ throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
+ }
try {
- // make this more robust
- var parser: XMLReader = null
- try {
- parser = XMLReaderFactory.createXMLReader()
- } catch {
- case e:SAXException =>
- try {
- parser = XMLReaderFactory.createXMLReader(
- "org.apache.xerces.parsers.SAXParser"
- );
- } catch {
- case e2: SAXException =>
- System.err.println("Could not find an XML parser")
- }
- }
- if(parser != null) {
- parser.setContentHandler(this)
- val resolver = this.getEntityResolver()
- if (resolver != null) parser.setEntityResolver(resolver);
- // save old level and base
- val previousLevel = level
- this.level = 0
- if (bases.contains(source)) {
- val e = new CircularIncludeException(
- "Circular XInclude Reference to " + source + getLocation()
- );
- throw new SAXException("Circular XInclude Reference", e)
+ val parser: XMLReader =
+ try XMLReaderFactory.createXMLReader()
+ catch {
+ case e: SAXException =>
+ try XMLReaderFactory.createXMLReader(XercesClassName)
+ catch { case _: SAXException => return System.err.println("Could not find an XML parser") }
}
- bases.push(source)
- atRoot = true
- parser.parse(source.toExternalForm())
- // restore old level and base
- this.level = previousLevel
- bases.pop()
- }
+
+ parser setContentHandler this
+ val resolver = this.getEntityResolver()
+ if (resolver != null)
+ parser setEntityResolver resolver
+
+ // save old level and base
+ val previousLevel = level
+ this.level = 0
+ if (bases contains source)
+ throw new SAXException(
+ "Circular XInclude Reference",
+ new CircularIncludeException("Circular XInclude Reference to " + source + getLocation())
+ )
+
+ bases push source
+ atRoot = true
+ parser parse source.toExternalForm()
+
+ // restore old level and base
+ this.level = previousLevel
+ bases.pop()
}
catch {
- case e:IOException =>
- throw new SAXException("Document not found: "
- + source.toExternalForm() + getLocation(), e)
+ case e: IOException =>
+ throw new SAXException("Document not found: " + source.toExternalForm() + getLocation(), e)
}
-
}
}
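
The rewritten includeXMLDocument above folds the old null-then-assign dance into a single expression that resolves the include target against the base URL sitting on top of the stack. A minimal standalone sketch of that resolution step, using only java.net.URL (the URLs are made up for illustration):

    import java.net.URL

    object ResolveDemo {
      def main(args: Array[String]) {
        val base     = new URL("http://example.com/docs/main.xml") // hypothetical including document
        val resolved = new URL(base, "chapter1.xml")               // relative href from an xi:include
        println(resolved)                                          // http://example.com/docs/chapter1.xml
      }
    }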
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index d27c518488..64d91b21ca 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -1,31 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package include.sax
+
import scala.xml.include._
+import collection.mutable.Stack
-import org.xml.sax.SAXException
-import org.xml.sax.SAXParseException
-import org.xml.sax.ContentHandler
-import org.xml.sax.EntityResolver
-import org.xml.sax.helpers.XMLReaderFactory
-import org.xml.sax.XMLReader
-import org.xml.sax.Locator
-import org.xml.sax.Attributes
+import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes }
import org.xml.sax.ext.LexicalHandler
-
-import java.io.{File, IOException, OutputStream, OutputStreamWriter,
- UnsupportedEncodingException, Writer}
-import java.net.{MalformedURLException, URL}
-import java.util.Stack
+import java.io.{ File, OutputStream, OutputStreamWriter, Writer, IOException }
/** XIncluder is a SAX <code>ContentHandler</code>
* that writes its XML document onto an output stream after resolving
@@ -35,8 +25,7 @@ import java.util.Stack
* based on Eliotte Rusty Harold's SAXXIncluder
* </p>
*/
-class XIncluder(outs:OutputStream, encoding:String) extends Object
-with ContentHandler with LexicalHandler {
+class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler with LexicalHandler {
var out = new OutputStreamWriter(outs, encoding)
@@ -153,7 +142,7 @@ with ContentHandler with LexicalHandler {
def startDTD(name: String, publicID: String, systemID: String) {
inDTD = true
// if this is the source document, output a DOCTYPE declaration
- if (entities.size() == 0) {
+ if (entities.isEmpty) {
var id = ""
if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"';
else if (systemID != null) id = " SYSTEM \"" + systemID + '"';
@@ -169,7 +158,7 @@ with ContentHandler with LexicalHandler {
def endDTD() {}
def startEntity(name: String) {
- entities.push(name)
+ entities push name
}
def endEntity(name: String) {
diff --git a/src/library/scala/xml/package.scala b/src/library/scala/xml/package.scala
new file mode 100644
index 0000000000..dec05abd2a
--- /dev/null
+++ b/src/library/scala/xml/package.scala
@@ -0,0 +1,11 @@
+package scala
+
+package object xml {
+ val XercesClassName = "org.apache.xerces.parsers.SAXParser"
+
+ type SAXException = org.xml.sax.SAXException
+ type SAXParseException = org.xml.sax.SAXParseException
+ type EntityResolver = org.xml.sax.EntityResolver
+ type InputSource = org.xml.sax.InputSource
+ type SAXParser = javax.xml.parsers.SAXParser
+} \ No newline at end of file
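
The package object above makes the listed SAX types available throughout scala.xml without explicit org.xml.sax imports, and gives the Xerces fallback class name a single home (it is what the XIncludeFilter change earlier refers to as XercesClassName). A hedged sketch of what code inside the package can now write; the object and file here are invented purely for illustration:

    package scala.xml
    package demo

    // SAXException and SAXParseException resolve through the package object's
    // type aliases, so no org.xml.sax import is needed in this file.
    object AliasDemo {
      def describe(e: SAXException): String = e match {
        case p: SAXParseException => "parse error at line " + p.getLineNumber
        case _                    => "SAX failure: " + e.getMessage
      }
    }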
diff --git a/src/library/scala/xml/parsing/ConstructingHandler.scala b/src/library/scala/xml/parsing/ConstructingHandler.scala
index a213a72590..f2dd7e58fd 100644
--- a/src/library/scala/xml/parsing/ConstructingHandler.scala
+++ b/src/library/scala/xml/parsing/ConstructingHandler.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
index 962c629663..b5a8f0ba25 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/library/scala/xml/parsing/ConstructingParser.scala
@@ -1,54 +1,49 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package parsing
import java.io.File
-
-import scala.io.{ Source, Codec }
+import scala.io.Source
object ConstructingParser {
def fromFile(inp: File, preserveWS: Boolean) =
- // XXX why does the default implicit not work here when building locker,
- // unless the empty parameter list is supplied?
- new ConstructingParser(Source.fromFile(inp)(), preserveWS) initialize
+ new ConstructingParser(Source.fromFile(inp), preserveWS) initialize
def fromSource(inp: Source, preserveWS: Boolean) =
new ConstructingParser(inp, preserveWS) initialize
}
/** An xml parser. parses XML and invokes callback methods of a MarkupHandler.
- * Don't forget to call next.ch on a freshly instantiated parser in order to
- * initialize it. If you get the parser from the object method, initialization
- * is already done for you.
- *
- *<pre>
-object parseFromURL {
- def main(args:Array[String]): Unit = {
- val url = args(0);
- val src = scala.io.Source.fromURL(url);
- val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false); // fromSource initializes automatically
- val doc = cpa.document();
-
- // let's see what it is
- val ppr = new scala.xml.PrettyPrinter(80,5);
- val ele = doc.docElem;
- Console.println("finished parsing");
- val out = ppr.format(ele);
- Console.println(out);
- }
-}
-</pre>
- */
+ * Don't forget to call next.ch on a freshly instantiated parser in order to
+ * initialize it. If you get the parser from the object method, initialization
+ * is already done for you.
+ *
+ * {{{
+ * object parseFromURL {
+ * def main(args:Array[String]): Unit = {
+ * val url = args(0);
+ * val src = scala.io.Source.fromURL(url);
+ * val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false); // fromSource initializes automatically
+ * val doc = cpa.document();
+ *
+ * // let's see what it is
+ * val ppr = new scala.xml.PrettyPrinter(80,5);
+ * val ele = doc.docElem;
+ * Console.println("finished parsing");
+ * val out = ppr.format(ele);
+ * Console.println(out);
+ * }
+ * }
+ * }}} */
class ConstructingParser(val input: Source, val preserveWS: Boolean)
extends ConstructingHandler
with ExternalSources
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
index 68750b6fa2..c215b8b639 100644
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package parsing
-/** default implemenation of markup handler always returns NodeSeq.Empty */
+/** default implementation of markup handler always returns NodeSeq.Empty */
abstract class DefaultMarkupHandler extends MarkupHandler {
def elem(pos: Int, pre: String, label: String, attrs: MetaData,
diff --git a/src/library/scala/xml/parsing/ExternalSources.scala b/src/library/scala/xml/parsing/ExternalSources.scala
index c3b5cb1f05..a1363b8b17 100644
--- a/src/library/scala/xml/parsing/ExternalSources.scala
+++ b/src/library/scala/xml/parsing/ExternalSources.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
@@ -21,8 +20,7 @@ import scala.io.Source
* @author Burak Emir
* @version 1.0
*/
-trait ExternalSources
-{
+trait ExternalSources {
self: ExternalSources with MarkupParser with MarkupHandler =>
/** ...
@@ -39,6 +37,6 @@ trait ExternalSources
case x => x take ((x lastIndexOf separator) + 1)
}
- Source.fromPath(fileStr + systemId)()
+ Source.fromFile(fileStr + systemId)
}
}
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
index 838cee008f..0e8ea2c5b6 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/FactoryAdapter.scala
@@ -1,31 +1,25 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package parsing
-import java.io.{InputStream, Reader, File, FileDescriptor, FileInputStream}
-import collection.mutable.{Stack, StringBuilder}
-import collection.immutable.{List, Nil}
-import collection.{Seq, Iterator}
+import java.io.{ InputStream, Reader, File, FileDescriptor, FileInputStream }
+import collection.mutable.Stack
-import org.xml.sax.{ Attributes, InputSource }
+import org.xml.sax.Attributes
import org.xml.sax.helpers.DefaultHandler
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
// can be mixed into FactoryAdapter if desired
trait ConsoleErrorHandler extends DefaultHandler
{
- import org.xml.sax.SAXParseException
-
// ignore warning, crimson warns even for entity resolution!
override def warning(ex: SAXParseException): Unit = { }
override def error(ex: SAXParseException): Unit = printError("Error", ex)
@@ -135,7 +129,9 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
hStack push null
var m: MetaData = Null
- var scpe: NamespaceBinding = TopScope
+ var scpe: NamespaceBinding =
+ if (scopeStack.isEmpty) TopScope
+ else scopeStack.top
for (i <- 0 until attributes.getLength()) {
val qname = attributes getQName i
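
The scpe change above makes a nested element start from the scope on top of scopeStack instead of always TopScope, so xmlns declarations made on an ancestor stay visible further down. A hypothetical check of the intended effect; the expected output is an assumption about the fixed behaviour, not something stated in the patch:

    object ScopeDemo {
      def main(args: Array[String]) {
        // p is declared on the parent; the nested p:b should resolve it
        // once the enclosing scope is inherited.
        val doc = scala.xml.XML.loadString("""<a xmlns:p="urn:demo"><p:b/></a>""")
        val b   = (doc \ "b").head
        println(b.namespace)   // expected: urn:demo
      }
    }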
diff --git a/src/library/scala/xml/parsing/FatalError.scala b/src/library/scala/xml/parsing/FatalError.scala
index 3a1f408df8..d2d6cf32b4 100644
--- a/src/library/scala/xml/parsing/FatalError.scala
+++ b/src/library/scala/xml/parsing/FatalError.scala
@@ -1,16 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
-package parsing;
+package parsing
-
-case class FatalError(msg:String) extends java.lang.RuntimeException(msg);
+/** !!! This is poorly named, but I guess it's in the API.
+ */
+case class FatalError(msg: String) extends java.lang.RuntimeException(msg)
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
index dbb89d6881..d53e5d3319 100644
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/library/scala/xml/parsing/MarkupHandler.scala
@@ -1,18 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package parsing
-import scala.collection.mutable.{HashMap, Map}
+import collection.mutable
+import mutable.HashMap
import scala.io.Source
import scala.util.logging.Logged
import scala.xml.dtd._
@@ -32,7 +32,7 @@ abstract class MarkupHandler extends Logged
val isValidating: Boolean = false
var decls: List[Decl] = Nil
- var ent: Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
+ var ent: mutable.Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
def lookupElemDecl(Label: String): ElemDecl = {
for (z @ ElemDecl(Label, _) <- decls)
@@ -69,7 +69,7 @@ abstract class MarkupHandler extends Logged
*/
def elemEnd(pos: Int, pre: String, label: String): Unit = ()
- /** callback method invoked by MarkupParser after parsing an elementm,
+ /** callback method invoked by MarkupParser after parsing an element,
* between the elemStart and elemEnd callbacks
*
* @param pos the position in the source file
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 846895a21b..4f6b89c07b 100644
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -1,14 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
-
package scala.xml
package parsing
@@ -30,10 +27,21 @@ import Utility.Escapes.{ pairs => unescape }
* @author Burak Emir
* @version 1.0
*/
-trait MarkupParser extends AnyRef with TokenTests
+trait MarkupParser extends MarkupParserCommon with TokenTests
{
self: MarkupParser with MarkupHandler =>
+ type PositionType = Int
+ type InputType = Source
+ type ElementType = NodeSeq
+ type AttributesType = (MetaData, NamespaceBinding)
+ type NamespaceType = NamespaceBinding
+
+ def truncatedError(msg: String): Nothing = throw FatalError(msg)
+ def errorNoEnd(tag: String) = throw FatalError("expected closing tag of " + tag)
+
+ def xHandleError(that: Char, msg: String) = reportSyntaxError(msg)
+
val input: Source
/** if true, does not remove surplus whitespace */
@@ -46,6 +54,15 @@ trait MarkupParser extends AnyRef with TokenTests
//
var curInput: Source = input
+ def lookahead(): BufferedIterator[Char] = new BufferedIterator[Char] {
+ val stream = curInput.toStream
+ curInput = Source.fromIterable(stream)
+ val underlying = Source.fromIterable(stream).buffered
+
+ def hasNext = underlying.hasNext
+ def next = underlying.next
+ def head = underlying.head
+ }
/** the handler of the markup, returns this */
private val handle: MarkupHandler = this
@@ -56,7 +73,6 @@ trait MarkupParser extends AnyRef with TokenTests
/** holds the position in the source file */
var pos: Int = _
-
/* used when reading external subset */
var extIndex = -1
@@ -92,30 +108,28 @@ trait MarkupParser extends AnyRef with TokenTests
md
}
- /** &lt;? prolog ::= xml S?
- * // this is a bit more lenient than necessary...
+ /** Factored out common code.
*/
- def prolog(): Tuple3[Option[String], Option[String], Option[Boolean]] = {
-
- //Console.println("(DEBUG) prolog")
- var n = 0
+ private def prologOrTextDecl(isProlog: Boolean): (Option[String], Option[String], Option[Boolean]) = {
var info_ver: Option[String] = None
var info_enc: Option[String] = None
var info_stdl: Option[Boolean] = None
var m = xmlProcInstr()
+ var n = 0
- xSpaceOpt
+ if (isProlog)
+ xSpaceOpt
m("version") match {
- case null => ;
+ case null => ;
case Text("1.0") => info_ver = Some("1.0"); n += 1
case _ => reportSyntaxError("cannot deal with versions != 1.0")
}
m("encoding") match {
case null => ;
- case Text(enc) =>
+ case Text(enc) =>
if (!isValidIANAEncoding(enc))
reportSyntaxError("\"" + enc + "\" is not a valid encoding")
else {
@@ -123,52 +137,33 @@ trait MarkupParser extends AnyRef with TokenTests
n += 1
}
}
- m("standalone") match {
- case null => ;
- case Text("yes") => info_stdl = Some(true); n += 1
- case Text("no") => info_stdl = Some(false); n += 1
- case _ => reportSyntaxError("either 'yes' or 'no' expected")
+
+ if (isProlog) {
+ m("standalone") match {
+ case null => ;
+ case Text("yes") => info_stdl = Some(true); n += 1
+ case Text("no") => info_stdl = Some(false); n += 1
+ case _ => reportSyntaxError("either 'yes' or 'no' expected")
+ }
}
if (m.length - n != 0) {
- reportSyntaxError("VersionInfo EncodingDecl? SDDecl? or '?>' expected!");
+ val s = if (isProlog) "SDDecl? " else ""
+ reportSyntaxError("VersionInfo EncodingDecl? %sor '?>' expected!" format s)
}
- //Console.println("[MarkupParser::prolog] finished parsing prolog!");
- Tuple3(info_ver,info_enc,info_stdl)
- }
- /** prolog, but without standalone */
- def textDecl(): Tuple2[Option[String],Option[String]] = {
-
- var info_ver: Option[String] = None
- var info_enc: Option[String] = None
-
- var m = xmlProcInstr()
- var n = 0
-
- m("version") match {
- case null => ;
- case Text("1.0") => info_ver = Some("1.0"); n += 1
- case _ => reportSyntaxError("cannot deal with versions != 1.0")
- }
+ (info_ver, info_enc, info_stdl)
+ }
- m("encoding") match {
- case null => ;
- case Text(enc) =>
- if (!isValidIANAEncoding(enc))
- reportSyntaxError("\"" + enc + "\" is not a valid encoding")
- else {
- info_enc = Some(enc)
- n += 1
- }
- }
+ /** &lt;? prolog ::= xml S?
+ * // this is a bit more lenient than necessary...
+ */
+ def prolog(): (Option[String], Option[String], Option[Boolean]) =
+ prologOrTextDecl(true)
- if (m.length - n != 0) {
- reportSyntaxError("VersionInfo EncodingDecl? or '?>' expected!");
- }
- //Console.println("[MarkupParser::textDecl] finished parsing textdecl");
- Tuple2(info_ver, info_enc);
- }
+ /** prolog, but without standalone */
+ def textDecl(): (Option[String], Option[String]) =
+ prologOrTextDecl(false) match { case (x1, x2, _) => (x1, x2) }
/**
*[22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
@@ -180,8 +175,6 @@ trait MarkupParser extends AnyRef with TokenTests
*/
def document(): Document = {
-
- //Console.println("(DEBUG) document")
doc = new Document()
this.dtd = null
@@ -194,7 +187,6 @@ trait MarkupParser extends AnyRef with TokenTests
nextch // is prolog ?
var children: NodeSeq = null
if ('?' == ch) {
- //Console.println("[MarkupParser::document] starts with xml declaration");
nextch;
info_prolog = prolog()
doc.version = info_prolog._1
@@ -202,10 +194,8 @@ trait MarkupParser extends AnyRef with TokenTests
doc.standAlone = info_prolog._3
children = content(TopScope) // DTD handled as side effect
- } else {
- //Console.println("[MarkupParser::document] does not start with xml declaration");
- //
-
+ }
+ else {
val ts = new NodeBuffer();
content1(TopScope, ts); // DTD handled as side effect
ts &+ content(TopScope);
@@ -218,7 +208,7 @@ trait MarkupParser extends AnyRef with TokenTests
case _:ProcInstr => ;
case _:Comment => ;
case _:EntityRef => // todo: fix entities, shouldn't be "special"
- reportSyntaxError("no entity references alllowed here");
+ reportSyntaxError("no entity references allowed here");
case s:SpecialNode =>
if (s.toString().trim().length > 0) //non-empty text nodes not allowed
elemCount = elemCount + 2;
@@ -239,8 +229,6 @@ trait MarkupParser extends AnyRef with TokenTests
/** append Unicode character to name buffer*/
protected def putChar(c: Char) = cbuf.append(c)
- //var xEmbeddedBlock = false;
-
/** As the current code requires you to call nextch once manually
* after construction, this method formalizes that suboptimal reality.
*/
@@ -249,8 +237,16 @@ trait MarkupParser extends AnyRef with TokenTests
this
}
+ def ch_returning_nextch = { val res = ch ; nextch ; res }
+ def mkProcInstr(position: Int, name: String, text: String): NodeSeq =
+ handle.procInstr(position, name, text)
+
+ def mkAttributes(name: String, pscope: NamespaceBinding) =
+ if (isNameStart (ch)) xAttributes(pscope)
+ else (Null, pscope)
+
/** this method assign the next character to ch and advances in input */
- def nextch {
+ def nextch = {
if (curInput.hasNext) {
ch = curInput.next
pos = curInput.pos
@@ -265,23 +261,9 @@ trait MarkupParser extends AnyRef with TokenTests
ch = 0.asInstanceOf[Char]
}
}
+ ch
}
- //final val enableEmbeddedExpressions: Boolean = false;
-
- /** munch expected XML token, report syntax error for unexpected
- */
- def xToken(that: Char) {
- if (ch == that)
- nextch
- else {
- reportSyntaxError("'" + that + "' expected instead of '" + ch + "'")
- error("FATAL")
- }
- }
-
- def xToken(that: Seq[Char]): Unit = that foreach xToken
-
/** parse attribute and create namespace scope, metadata
* [41] Attributes ::= { S Name Eq AttValue }
*/
@@ -321,27 +303,6 @@ trait MarkupParser extends AnyRef with TokenTests
(aMap,scope)
}
- /** attribute value, terminated by either ' or ". value may not contain &lt;.
- * AttValue ::= `'` { _ } `'`
- * | `"` { _ } `"`
- */
- def xAttributeValue(): String = {
- val endch = ch
- nextch
- while (ch != endch) {
- if ('<' == ch)
- reportSyntaxError( "'<' not allowed in attrib value" );
- putChar(ch)
- nextch
- }
- nextch
- val str = cbuf.toString()
- cbuf.length = 0
-
- // well-formedness constraint
- normalizeAttributeValue(str)
- }
-
/** entity value, terminated by either ' or ". value may not contain &lt;.
* AttValue ::= `'` { _ } `'`
* | `"` { _ } `"`
@@ -349,7 +310,7 @@ trait MarkupParser extends AnyRef with TokenTests
def xEntityValue(): String = {
val endch = ch
nextch
- while (ch != endch) {
+ while (ch != endch && !eof) {
putChar(ch)
nextch
}
@@ -359,65 +320,16 @@ trait MarkupParser extends AnyRef with TokenTests
str
}
-
- /** parse a start or empty tag.
- * [40] STag ::= '&lt;' Name { S Attribute } [S]
- * [44] EmptyElemTag ::= '&lt;' Name { S Attribute } [S]
- */
- protected def xTag(pscope:NamespaceBinding): Tuple3[String, MetaData, NamespaceBinding] = {
- val qname = xName
-
- xSpaceOpt
- val (aMap: MetaData, scope: NamespaceBinding) = {
- if (isNameStart(ch))
- xAttributes(pscope)
- else
- (Null, pscope)
- }
- (qname, aMap, scope)
- }
-
- /** [42] '&lt;' xmlEndTag ::= '&lt;' '/' Name S? '&gt;'
- */
- def xEndTag(n: String) = {
- xToken('/')
- val m = xName
- if (n != m)
- reportSyntaxError("expected closing tag of " + n/* +", not "+m*/);
- xSpaceOpt
- xToken('>')
- }
-
/** '&lt;! CharData ::= [CDATA[ ( {char} - {char}"]]&gt;"{char} ) ']]&gt;'
*
* see [15]
*/
def xCharData: NodeSeq = {
xToken("[CDATA[")
- val pos1 = pos
- val sb: StringBuilder = new StringBuilder()
- while (true) {
- if (ch==']' &&
- { sb.append(ch); nextch; ch == ']' } &&
- { sb.append(ch); nextch; ch == '>' } ) {
- sb.setLength(sb.length - 2);
- nextch;
- return PCData(sb.toString)
- } else sb.append( ch );
- nextch;
- }
- // bq: (todo) increase grace when meeting CDATA section
- throw FatalError("this cannot happen");
+ def mkResult(pos: Int, s: String): NodeSeq = PCData(s)
+ xTakeUntil(mkResult, () => pos, "]]>")
}
- /** CharRef ::= "&amp;#" '0'..'9' {'0'..'9'} ";"
- * | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
- *
- * see [66]
- */
- def xCharRef(ch: () => Char, nextch: () => Unit): String =
- Utility.parseCharRef(ch, nextch, reportSyntaxError _)
-
/** Comment ::= '&lt;!--' ((Char - '-') | ('-' (Char - '-')))* '--&gt;'
*
* see [15]
@@ -469,42 +381,32 @@ trait MarkupParser extends AnyRef with TokenTests
def content(pscope: NamespaceBinding): NodeSeq = {
var ts = new NodeBuffer
var exit = eof
- while (! exit) {
- //Console.println("in content, ch = '"+ch+"' line="+scala.io.Position.line(pos));
- /* if( xEmbeddedBlock ) {
- ts.append( xEmbeddedExpr );
- } else {*/
- tmppos = pos;
- exit = eof;
- if(!eof)
- ch match {
- case '<' => // another tag
- //Console.println("before ch = '"+ch+"' line="+scala.io.Position.line(pos)+" pos="+pos);
- nextch;
- //Console.println("after ch = '"+ch+"' line="+scala.io.Position.line(pos)+" pos="+pos);
-
- if('/' ==ch)
- exit = true; // end tag
- else
- content1(pscope, ts)
- //case '{' =>
-/* if( xCheckEmbeddedBlock ) {
- ts.appendAll(xEmbeddedExpr);
- } else {*/
- // val str = new StringBuilder("{");
- // str.append(xText);
- // appendText(tmppos, ts, str.toString());
- /*}*/
- // postcond: xEmbeddedBlock == false!
- case '&' => // EntityRef or CharRef
- nextch;
- if (ch == '#') { // CharacterRef
+ // todo: optimize seq repr.
+ def done = new NodeSeq { val theSeq = ts.toList }
+
+ while (!exit) {
+ tmppos = pos
+ exit = eof
+
+ if (eof)
+ return done
+
+ ch match {
+ case '<' => // another tag
+ nextch match {
+ case '/' => exit = true // end tag
+ case _ => content1(pscope, ts)
+ }
+
+ // postcond: xEmbeddedBlock == false!
+ case '&' => // EntityRef or CharRef
+ nextch match {
+ case '#' => // CharacterRef
nextch
val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch))
xToken(';');
ts &+ theChar
- }
- else { // EntityRef
+ case _ => // EntityRef
val n = xName
xToken(';')
@@ -512,17 +414,12 @@ trait MarkupParser extends AnyRef with TokenTests
handle.entityRef(tmppos, n)
ts &+ unescape(n)
} else push(n)
- }
- case _ => // text content
- appendText(tmppos, ts, xText);
}
- /*}*/
- }
- val list = ts.toList
- // 2do: optimize seq repr.
- new NodeSeq {
- val theSeq = list
+ case _ => // text content
+ appendText(tmppos, ts, xText);
+ }
}
+ done
} // content(NamespaceBinding)
/** externalID ::= SYSTEM S syslit
@@ -572,47 +469,17 @@ trait MarkupParser extends AnyRef with TokenTests
if ((null != extID) && isValidating) {
pushExternal(extID.systemId)
- //val extSubsetSrc = externalSource( extID.systemId );
-
extIndex = inpStack.length
- /*
- .indexOf(':') != -1) { // assume URI
- Source.fromFile(new java.net.URI(extID.systemLiteral));
- } else {
- Source.fromFile(extID.systemLiteral);
- }
- */
- //Console.println("I'll print it now");
- //val old = curInput;
- //tmppos = curInput.pos;
- //val oldch = ch;
- //curInput = extSubsetSrc;
- //pos = 0;
- //nextch;
extSubset()
-
pop()
-
extIndex = -1
-
- //curInput = old;
- //pos = curInput.pos;
- //ch = curInput.ch;
- //eof = false;
- //while(extSubsetSrc.hasNext)
- //Console.print(extSubsetSrc.next);
-
- //Console.println("returned from external, current ch = "+ch )
}
if ('[' == ch) { // internal subset
nextch
/* TODO */
- //Console.println("hello");
intSubset()
- //while(']' != ch)
- // nextch;
// TODO: do the DTD parsing?? ?!?!?!?!!
xToken(']')
xSpaceOpt
@@ -639,15 +506,14 @@ trait MarkupParser extends AnyRef with TokenTests
*/
def element1(pscope: NamespaceBinding): NodeSeq = {
val pos = this.pos
- val Tuple3(qname, aMap, scope) = xTag(pscope)
- val Tuple2(pre, local) = Utility.prefix(qname) match {
- case Some(p) => (p,qname.substring(p.length+1, qname.length))
- case _ => (null,qname)
+ val (qname, (aMap, scope)) = xTag(pscope)
+ val (pre, local) = Utility.prefix(qname) match {
+ case Some(p) => (p, qname drop p.length+1)
+ case _ => (null, qname)
}
val ts = {
if (ch == '/') { // empty element
- xToken('/')
- xToken('>')
+ xToken("/>")
handle.elemStart(pos, pre, local, aMap, scope)
NodeSeq.Empty
}
@@ -664,88 +530,21 @@ trait MarkupParser extends AnyRef with TokenTests
res
}
- //def xEmbeddedExpr: MarkupType;
-
- /** Name ::= (Letter | '_' | ':') (NameChar)*
- *
- * see [5] of XML 1.0 specification
- */
- def xName: String = {
- if (isNameStart(ch)) {
- while (isNameChar(ch)) {
- putChar(ch)
- nextch
- }
- val n = cbuf.toString().intern()
- cbuf.length = 0
- n
- } else {
- reportSyntaxError("name expected")
- ""
- }
- }
-
- /** scan [S] '=' [S]*/
- def xEQ = { xSpaceOpt; xToken('='); xSpaceOpt }
-
- /** skip optional space S? */
- def xSpaceOpt = while (isSpace(ch) && !eof) { nextch; }
-
- /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
- def xSpace =
- if (isSpace(ch)) { nextch; xSpaceOpt }
- else reportSyntaxError("whitespace expected")
-
- /** '&lt;?' ProcInstr ::= Name [S ({Char} - ({Char}'&gt;?' {Char})]'?&gt;'
- *
- * see [15]
- */
- def xProcInstr: NodeSeq = {
- val sb:StringBuilder = new StringBuilder()
- val n = xName
- if (isSpace(ch)) {
- xSpace
- while (true) {
- if (ch == '?' && { sb.append( ch ); nextch; ch == '>' }) {
- sb.length = sb.length - 1;
- nextch;
- return handle.procInstr(tmppos, n, sb.toString);
- } else
- sb.append(ch);
- nextch
- }
- };
- xToken('?')
- xToken('>')
- handle.procInstr(tmppos, n, sb.toString)
- }
-
/** parse character data.
* precondition: xEmbeddedBlock == false (we are not in a scala block)
*/
def xText: String = {
- //if( xEmbeddedBlock ) throw FatalError("internal error: encountered embedded block"); // assert
-
- /*if( xCheckEmbeddedBlock )
- return ""
- else {*/
- //Console.println("in xText! ch = '"+ch+"'");
- var exit = false;
- while (! exit) {
- //Console.println("LOOP in xText! ch = '"+ch+"' + pos="+pos);
- putChar(ch);
- val opos = pos;
- nextch;
-
- //Console.println("STILL LOOP in xText! ch = '"+ch+"' + pos="+pos+" opos="+opos);
-
+ var exit = false;
+ while (! exit) {
+ putChar(ch);
+ val opos = pos;
+ nextch;
- exit = eof || /*{ nextch; xCheckEmbeddedBlock }||*/( ch == '<' ) || ( ch == '&' );
- }
- val str = cbuf.toString();
- cbuf.length = 0;
- str
- /*}*/
+ exit = eof || ( ch == '<' ) || ( ch == '&' )
+ }
+ val str = cbuf.toString();
+ cbuf.length = 0;
+ str
}
/** attribute value, terminated by either ' or ". value may not contain &lt;.
@@ -757,7 +556,7 @@ trait MarkupParser extends AnyRef with TokenTests
if (ch != '\'' && ch != '"')
reportSyntaxError("quote ' or \" expected");
nextch
- while (ch != endch) {
+ while (ch != endch && !eof) {
putChar(ch)
nextch
}
@@ -767,14 +566,13 @@ trait MarkupParser extends AnyRef with TokenTests
str
}
-
/* [12] PubidLiteral ::= '"' PubidChar* '"' | "'" (PubidChar - "'")* "'" */
def pubidLiteral(): String = {
val endch = ch
if (ch!='\'' && ch != '"')
reportSyntaxError("quote ' or \" expected");
nextch
- while (ch != endch) {
+ while (ch != endch && !eof) {
putChar(ch)
//Console.println("hello '"+ch+"'"+isPubIDChar(ch));
if (!isPubIDChar(ch))
@@ -846,34 +644,16 @@ trait MarkupParser extends AnyRef with TokenTests
val ent = xName
xToken(';')
xSpaceOpt
- /*
- Console.println("hello, pushing!");
- {
- val test = replacementText(ent);
- while(test.hasNext)
- Console.print(test.next);
- } */
+
push(ent)
xSpaceOpt
- //Console.println("hello, getting name");
val stmt = xName
- //Console.println("hello, got name");
xSpaceOpt
- //Console.println("how can we be eof = "+eof);
- // eof = true because not external?!
- //if(!eof)
- // error("expected only INCLUDE or IGNORE");
-
- //pop();
-
- //Console.println("hello, popped");
stmt match {
// parameter entity
- case "INCLUDE" =>
- doInclude()
- case "IGNORE" =>
- doIgnore()
+ case "INCLUDE" => doInclude()
+ case "IGNORE" => doIgnore()
}
case 'I' =>
nextch
@@ -921,8 +701,7 @@ trait MarkupParser extends AnyRef with TokenTests
nextch
}
- /** "rec-xml/#ExtSubset" pe references may not occur within markup
- declarations
+ /** "rec-xml/#ExtSubset" pe references may not occur within markup declarations
*/
def intSubset() {
//Console.println("(DEBUG) intSubset()")
@@ -958,11 +737,10 @@ trait MarkupParser extends AnyRef with TokenTests
val n = xName
xSpace
var attList: List[AttrDecl] = Nil
+
// later: find the elemDecl for n
while ('>' != ch) {
val aname = xName
- //Console.println("attribute name: "+aname);
- var defdecl: DefaultDecl = null
xSpace
// could be enumeration (foo,bar) parse this later :-/
while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) {
@@ -972,29 +750,24 @@ trait MarkupParser extends AnyRef with TokenTests
}
val atpe = cbuf.toString()
cbuf.length = 0
- //Console.println("attr type: "+atpe);
- ch match {
+
+ val defdecl: DefaultDecl = ch match {
case '\'' | '"' =>
- val defValue = xAttributeValue() // default value
- defdecl = DEFAULT(false, defValue)
+ DEFAULT(false, xAttributeValue())
case '#' =>
nextch
xName match {
- case "FIXED" =>
- xSpace
- val defValue = xAttributeValue() // default value
- defdecl = DEFAULT(true, defValue)
- case "IMPLIED" =>
- defdecl = IMPLIED
- case "REQUIRED" =>
- defdecl = REQUIRED
+ case "FIXED" => xSpace ; DEFAULT(true, xAttributeValue())
+ case "IMPLIED" => IMPLIED
+ case "REQUIRED" => REQUIRED
}
case _ =>
+ null
}
xSpaceOpt
- attList = AttrDecl(aname, atpe, defdecl) :: attList
+ attList ::= AttrDecl(aname, atpe, defdecl)
cbuf.length = 0
}
nextch
@@ -1086,9 +859,6 @@ trait MarkupParser extends AnyRef with TokenTests
def reportValidationError(pos: Int, str: String): Unit = reportSyntaxError(pos, str)
def push(entityName: String) {
- //Console.println("BEFORE PUSHING "+ch)
- //Console.println("BEFORE PUSHING "+pos)
- //Console.print("[PUSHING "+entityName+"]")
if (!eof)
inpStack = curInput :: inpStack
@@ -1111,50 +881,4 @@ trait MarkupParser extends AnyRef with TokenTests
pos = curInput.pos
eof = false // must be false, because of places where entity refs occur
}
-
- /** for the moment, replace only character references
- * see spec 3.3.3
- * precond: cbuf empty
- */
- def normalizeAttributeValue(attval: String): String = {
- val s: Seq[Char] = attval
- val it = s.iterator
- while (it.hasNext) {
- it.next match {
- case ' '|'\t'|'\n'|'\r' =>
- cbuf.append(' ');
- case '&' => it.next match {
- case '#' =>
- var c = it.next
- val s = xCharRef ({ () => c }, { () => c = it.next })
- cbuf.append(s)
- case nchar =>
- val nbuf = new StringBuilder()
- var d = nchar
- do {
- nbuf.append(d)
- d = it.next
- } while(d != ';');
- nbuf.toString() match {
- case "lt" => cbuf.append('<')
- case "gt" => cbuf.append('>')
- case "amp" => cbuf.append('&')
- case "apos" => cbuf.append('\'')
- case "quot" => cbuf.append('"')
- case "quote" => cbuf.append('"')
- case name =>
- cbuf.append('&')
- cbuf.append(name)
- cbuf.append(';')
- }
- }
- case c =>
- cbuf.append(c)
- }
- }
- val name = cbuf.toString()
- cbuf.length = 0
- name
- }
-
}
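
Since prolog parsing, attribute handling and character data all moved or changed shape in the hunks above, a quick end-to-end run through ConstructingParser is the easiest way to exercise them together. A small sketch under the assumption that the 2.8-era entry points keep their signatures:

    import scala.io.Source
    import scala.xml.parsing.ConstructingParser

    object ParseDemo {
      def main(args: Array[String]) {
        val src = Source.fromString("""<?xml version="1.0"?><doc><msg>hello</msg></doc>""")
        val doc = ConstructingParser.fromSource(src, false).document() // false = do not preserve whitespace
        println(doc.docElem)                                           // <doc><msg>hello</msg></doc>
      }
    }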
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
new file mode 100644
index 0000000000..936515852b
--- /dev/null
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -0,0 +1,263 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.xml
+package parsing
+
+import scala.io.Source
+import scala.xml.dtd._
+import scala.annotation.switch
+import Utility.Escapes.{ pairs => unescape }
+
+object MarkupParserCommon {
+ final val SU = '\u001A'
+}
+import MarkupParserCommon._
+
+/** This is not a public trait - it contains common code shared
+ * between the library level XML parser and the compiler's.
+ * All members should be accessed through those.
+ */
+private[scala] trait MarkupParserCommon extends TokenTests {
+ protected def unreachable = Predef.error("Cannot be reached.")
+
+ // type HandleType // MarkupHandler, SymbolicXMLBuilder
+ type InputType // Source, CharArrayReader
+ type PositionType // Int, Position
+ type ElementType // NodeSeq, Tree
+ type NamespaceType // NamespaceBinding, Any
+ type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+ def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+ def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ */
+ protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+ val name = xName
+ xSpaceOpt
+
+ (name, mkAttributes(name, pscope))
+ }
+
+ /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
+ *
+ * see [15]
+ */
+ def xProcInstr: ElementType = {
+ val n = xName
+ xSpaceOpt
+ xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+ }
+
+ /** attribute value, terminated by either ' or ". value may not contain <.
+   * @param endCh either ' or "
+ */
+ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ // well-formedness constraint
+ if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+ else if (ch == SU) truncatedError("")
+ else buf append ch_returning_nextch
+ }
+ ch_returning_nextch
+ // @todo: normalize attribute value
+ buf.toString
+ }
+
+ def xAttributeValue(): String = {
+ val str = xAttributeValue(ch_returning_nextch)
+ // well-formedness constraint
+ normalizeAttributeValue(str)
+ }
+
+ private def takeUntilChar(it: Iterator[Char], end: Char): String = {
+ val buf = new StringBuilder
+ while (it.hasNext) it.next match {
+ case `end` => return buf.toString
+ case ch => buf append ch
+ }
+ error("Expected '%s'".format(end))
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ */
+ def xEndTag(startName: String) {
+ xToken('/')
+ if (xName != startName)
+ errorNoEnd(startName)
+
+ xSpaceOpt
+ xToken('>')
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+   * post-condition: name neither starts nor ends in ':'
+ */
+ def xName: String = {
+ if (ch == SU)
+ truncatedError("")
+ else if (!isNameStart(ch))
+ return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+ val buf = new StringBuilder
+
+ do buf append ch_returning_nextch
+ while (isNameChar(ch))
+
+ if (buf.last == ':') {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.toString dropRight 1
+ }
+ else buf.toString
+ }
+
+ private def attr_unescape(s: String) = s match {
+ case "lt" => "<"
+ case "gt" => ">"
+ case "amp" => "&"
+ case "apos" => "'"
+ case "quot" => "\""
+ case "quote" => "\""
+ case _ => "&" + s + ";"
+ }
+
+ /** Replaces only character references right now.
+ * see spec 3.3.3
+ */
+ private def normalizeAttributeValue(attval: String): String = {
+ val buf = new StringBuilder
+ val it = attval.iterator.buffered
+
+ while (it.hasNext) buf append (it.next match {
+ case ' ' | '\t' | '\n' | '\r' => " "
+ case '&' if it.head == '#' => it.next ; xCharRef(it)
+ case '&' => attr_unescape(takeUntilChar(it, ';'))
+ case c => c
+ })
+
+ buf.toString
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ */
+ def xCharRef(ch: () => Char, nextch: () => Unit): String =
+ Utility.parseCharRef(ch, nextch, reportSyntaxError _)
+
+ def xCharRef(it: Iterator[Char]): String = {
+ var c = it.next
+ Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _)
+ }
+
+ def xCharRef: String = xCharRef(() => ch, () => nextch)
+
+ /** Create a lookahead reader which does not influence the input */
+ def lookahead(): BufferedIterator[Char]
+
+ /** The library and compiler parsers had the interesting distinction of
+ * different behavior for nextch (a function for which there are a total
+ * of two plausible behaviors, so we know the design space was fully
+ * explored.) One of them returned the value of nextch before the increment
+ * and one of them the new value. So to unify code we have to at least
+ * temporarily abstract over the nextchs.
+ */
+ def ch: Char
+ def nextch: Char
+ def ch_returning_nextch: Char
+ def eof: Boolean
+
+ // def handle: HandleType
+ var tmppos: PositionType
+
+ def xHandleError(that: Char, msg: String): Unit
+ def reportSyntaxError(str: String): Unit
+ def reportSyntaxError(pos: Int, str: String): Unit
+
+ def truncatedError(msg: String): Nothing
+ def errorNoEnd(tag: String): Nothing
+
+ protected def errorAndResult[T](msg: String, x: T): T = {
+ reportSyntaxError(msg)
+ x
+ }
+
+ def xToken(that: Char) {
+ if (ch == that) nextch
+ else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
+ }
+ def xToken(that: Seq[Char]) { that foreach xToken }
+
+ /** scan [S] '=' [S]*/
+ def xEQ = { xSpaceOpt; xToken('='); xSpaceOpt }
+
+ /** skip optional space S? */
+ def xSpaceOpt = while (isSpace(ch) && !eof) nextch
+
+ /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+ def xSpace =
+ if (isSpace(ch)) { nextch; xSpaceOpt }
+ else xHandleError(ch, "whitespace expected")
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+
+ /** Execute body with a variable saved and restored after execution */
+ def saving[A,B](getter: A, setter: (A) => Unit)(body: => B): B = {
+ val saved = getter
+ try body
+ finally setter(saved)
+ }
+
+ /** Take characters from input stream until given String "until"
+ * is seen. Once seen, the accumulated characters are passed
+ * along with the current Position to the supplied handler function.
+ */
+ protected def xTakeUntil[T](
+ handler: (PositionType, String) => T,
+ positioner: () => PositionType,
+ until: String): T =
+ {
+ val sb = new StringBuilder
+ val head = until charAt 0
+ val rest = until drop 1
+
+ while (true) {
+ if (ch == head && peek(rest))
+ return handler(positioner(), sb.toString)
+ else if (ch == SU)
+ truncatedError("") // throws TruncatedXMLControl in compiler
+
+ sb append ch
+ nextch
+ }
+ unreachable
+ }
+
+ /** Create a non-destructive lookahead reader and see if the head
+ * of the input would match the given String. If yes, return true
+ * and drop the entire String from input; if no, return false
+ * and leave input unchanged.
+ */
+ private def peek(lookingFor: String): Boolean =
+ (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
+ // drop the chars from the real reader (all lookahead + orig)
+ (0 to lookingFor.length) foreach (_ => nextch)
+ true
+ }
+}
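
Among the helpers collected into this new trait, saving captures a small but easy-to-get-wrong pattern: stash a mutable field, run a body, and restore the field on the way out even when the body throws. A self-contained sketch of the same combinator outside the trait; the object and field names are illustrative only:

    object SavingDemo {
      var depth = 0

      def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
        val saved = getter
        try body
        finally setter(saved)
      }

      def main(args: Array[String]) {
        saving(depth, (d: Int) => depth = d) {
          depth += 1
          println("inside: " + depth)   // 1
        }
        println("after:  " + depth)     // 0, restored by the finally
      }
    }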
diff --git a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
index e062658cc3..3854f66683 100644
--- a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
@@ -1,21 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package parsing
import factory.NodeFactory
-import collection.Seq
-import collection.immutable.List
-import org.xml.sax.InputSource
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
/** nobinding adaptor providing callbacks to parser to create elements.
* implements hash-consing
diff --git a/src/library/scala/xml/parsing/TokenTests.scala b/src/library/scala/xml/parsing/TokenTests.scala
index f53e849764..88a9ac3756 100644
--- a/src/library/scala/xml/parsing/TokenTests.scala
+++ b/src/library/scala/xml/parsing/TokenTests.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package parsing
-import collection.Seq
-import collection.immutable.List
/**
* Helper functions for parsing XML fragments
*/
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
index 1a46d7fc10..c02d1e1242 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
@@ -51,7 +50,7 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
log("advanceDFA(trans): " + trans)
trans.get(ContentModel.ElemName(label)) match {
case Some(qNew) => qCurrent = qNew
- case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keysIterator);
+ case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys);
}
}
// advance in current automaton
diff --git a/src/library/scala/xml/parsing/XhtmlEntities.scala b/src/library/scala/xml/parsing/XhtmlEntities.scala
index bb704b3c06..5ca2cf2b0c 100644
--- a/src/library/scala/xml/parsing/XhtmlEntities.scala
+++ b/src/library/scala/xml/parsing/XhtmlEntities.scala
@@ -1,18 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.xml
package parsing
-import scala.xml.dtd.{IntDef, ParsedEntityDecl}
+import scala.xml.dtd.{ IntDef, ParsedEntityDecl }
/** <p>
* (c) David Pollak 2007 WorldWide Conferencing, LLC.
diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala
index 8132c00cf4..0647addf8b 100644
--- a/src/library/scala/xml/parsing/XhtmlParser.scala
+++ b/src/library/scala/xml/parsing/XhtmlParser.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package parsing
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala
index 923e8c2557..5550259a09 100644
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ b/src/library/scala/xml/persistent/CachedFileStorage.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package persistent
@@ -74,7 +73,7 @@ extends java.lang.Thread with scala.util.logging.Logged {
import scala.io.Source
import scala.xml.parsing.ConstructingParser
log("[load]\nloading "+theFile)
- val src = Source.fromFile(theFile)()
+ val src = Source.fromFile(theFile)
log("parsing "+theFile)
val res = ConstructingParser.fromSource(src,false).document.docElem(0)
switch
diff --git a/src/library/scala/xml/persistent/Index.scala b/src/library/scala/xml/persistent/Index.scala
index b9d9debb7b..92825f92c7 100644
--- a/src/library/scala/xml/persistent/Index.scala
+++ b/src/library/scala/xml/persistent/Index.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala
index b30e0ee080..a49858487c 100644
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ b/src/library/scala/xml/persistent/SetStorage.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package persistent
diff --git a/src/library/scala/xml/pull/XMLEvent.scala b/src/library/scala/xml/pull/XMLEvent.scala
index 0f6edfad52..255bf3b8a8 100644
--- a/src/library/scala/xml/pull/XMLEvent.scala
+++ b/src/library/scala/xml/pull/XMLEvent.scala
@@ -1,38 +1,59 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package pull
-/** This class represents an XML event for pull parsing.
- * Pull parsing means that during the traversal of the XML
- * tree we are parsing, each "event" is returned to the caller
- * and the traversal is suspended.
+/** An XML event for pull parsing. All events received during
+ * parsing will be one of the subclasses of this trait.
*/
trait XMLEvent
-/** An element is encountered the first time */
+/**
+ * An Element's start tag was encountered.
+ * @param pre prefix, if any, on the element. This is the `xs` in `<xs:string>foo</xs:string>`.
+ * @param label the name of the element, not including the prefix
+ * @param attrs any attributes on the element
+ */
case class EvElemStart(pre: String, label: String, attrs: MetaData, scope: NamespaceBinding) extends XMLEvent
-/** An element is encountered the last time */
+/**
+ * An Element's end tag was encountered.
+ * @param pre prefix, if any, on the element. This is the `xs` in `<xs:string>foo</xs:string>`.
+ * @param label the name of the element, not including the prefix
+ */
case class EvElemEnd(pre: String, label: String) extends XMLEvent
-/** A text node is encountered */
+/**
+ * A text node was encountered.
+ * @param text the text that was found
+ */
case class EvText(text: String) extends XMLEvent
-/** An entity reference is encountered */
+/** An entity reference was encountered.
+ * @param entity the name of the entity, e.g. `gt` when encountering the entity `&gt;`
+ */
case class EvEntityRef(entity: String) extends XMLEvent
-/** A processing instruction is encountered */
+/**
+ * A processing instruction was encountered.
+ * @param target the "PITarget" of the processing instruction. For the instruction `<?foo bar="baz"?>`, the target would
+ * be `foo`
+ * @param text the remainder of the instruction. For the instruction `<?foo bar="baz"?>`, the text would
+ * be `bar="baz"`
+ * @see [[http://www.w3.org/TR/REC-xml/#sec-pi]]
+ */
case class EvProcInstr(target: String, text: String) extends XMLEvent
-/** A comment is encountered */
+/**
+ * A comment was encountered
+ * @param text the text of the comment
+ */
case class EvComment(text: String) extends XMLEvent
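
The event classes documented above are what an XMLEventReader hands back one at a time. A minimal consumption sketch, assuming the reader behaves as an Iterator[XMLEvent] as the XMLEventReader change below describes:

    import scala.io.Source
    import scala.xml.pull._

    object PullDemo {
      def main(args: Array[String]) {
        val reader = new XMLEventReader(Source.fromString("<greeting>hi<b/></greeting>"))
        for (event <- reader) event match {
          case EvElemStart(_, label, _, _) => println("start: " + label)
          case EvElemEnd(_, label)         => println("end:   " + label)
          case EvText(text)                => println("text:  " + text)
          case _                           => () // comments, PIs, entity refs
        }
      }
    }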
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index b09218a826..fa428a440f 100644
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
package pull
@@ -17,28 +16,14 @@ import java.nio.channels.ClosedChannelException
import scala.io.Source
import scala.xml.parsing.{ ExternalSources, MarkupHandler, MarkupParser }
-/** <p>
- * A pull parser that offers to view an XML document as a series of events.
- * Example usage:
- * </p><pre>
- * <b>import</b> scala.xml.pull._
- * <b>import</b> scala.io.Source
- *
- * <b>object</b> reader {
- * <b>val</b> src = Source.fromString("<hello><world/></hello>")
- * <b>val</b> er = new XMLEventReader(src)
- *
- * <b>def</b> main(args: Array[String]) {
- * while (er.hasNext)
- * Console.println(er.next)
- * }
- * }
- * </pre>
+/**
+ * Main entry point into creating an event-based XML parser. Treating this
+ * as a [[scala.collection.Iterator]] will provide access to the generated events.
+ * @param src A [[scala.io.Source]] for XML data to parse
*
* @author Burak Emir
* @author Paul Phillips
*/
-
class XMLEventReader(src: Source) extends ProducerConsumerIterator[XMLEvent]
{
// We implement a pull parser as an iterator, but since we may be operating on
diff --git a/src/library/scala/xml/pull/package.scala b/src/library/scala/xml/pull/package.scala
new file mode 100644
index 0000000000..3742c55513
--- /dev/null
+++ b/src/library/scala/xml/pull/package.scala
@@ -0,0 +1,41 @@
+package scala.xml
+
+/**
+ * Classes needed to view an XML document as a series of events. The document
+ * is parsed by an [[scala.xml.pull.XMLEventReader]] instance. You can treat it as
+ * an [[scala.collection.Iterator]] to retrieve the events, which are all
+ * subclasses of [[scala.xml.pull.XMLEvent]].
+ *
+ * {{{
+ * scala> val source = Source.fromString("""<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+ * <?instruction custom value="customvalue"?>
+ * <!DOCTYPE foo [
+ * <!ENTITY bar "BAR">
+ * ]><foo>Hello<!-- this is a comment --><bar>&bar;</bar><bar>&gt;</bar></foo>""")
+ *
+ * source: scala.io.Source = non-empty iterator
+ *
+ * scala> val reader = new XMLEventReader(source)
+ * reader: scala.xml.pull.XMLEventReader = non-empty iterator
+ *
+ * scala> reader.foreach{ println(_) }
+ * EvProcInstr(instruction,custom value="customvalue")
+ * EvText(
+ * )
+ * EvElemStart(null,foo,,)
+ * EvText(Hello)
+ * EvComment( this is a comment )
+ * EvElemStart(null,bar,,)
+ * EvText(BAR)
+ * EvElemEnd(null,bar)
+ * EvElemStart(null,bar,,)
+ * EvEntityRef(gt)
+ * EvElemEnd(null,bar)
+ * EvElemEnd(null,foo)
+ * EvText(
+ *
+ * )
+ *
+ * }}}
+ */
+package object pull
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
index 301f2e23a5..242549ebb2 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/library/scala/xml/transform/BasicTransformer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/library/scala/xml/transform/RewriteRule.scala
index d2712f4552..1b6a0cb16b 100644
--- a/src/library/scala/xml/transform/RewriteRule.scala
+++ b/src/library/scala/xml/transform/RewriteRule.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/library/scala/xml/transform/RuleTransformer.scala b/src/library/scala/xml/transform/RuleTransformer.scala
index 7275c562ba..2105d1437b 100644
--- a/src/library/scala/xml/transform/RuleTransformer.scala
+++ b/src/library/scala/xml/transform/RuleTransformer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.xml
diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala
index ec4625a7f6..abc2b42b95 100644
--- a/src/manual/scala/man1/Command.scala
+++ b/src/manual/scala/man1/Command.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
*/
-//$Id$
package scala.man1
diff --git a/src/manual/scala/man1/fsc.scala b/src/manual/scala/man1/fsc.scala
index a11ef6a936..c778038618 100644
--- a/src/manual/scala/man1/fsc.scala
+++ b/src/manual/scala/man1/fsc.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
*/
-//$Id$
package scala.man1
diff --git a/src/manual/scala/man1/sbaz.scala b/src/manual/scala/man1/sbaz.scala
index bf53e3803d..41675c84ff 100644
--- a/src/manual/scala/man1/sbaz.scala
+++ b/src/manual/scala/man1/sbaz.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
*/
-//$Id$
package scala.man1
diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala
index 084d40a5a7..0530185c30 100644
--- a/src/manual/scala/man1/scala.scala
+++ b/src/manual/scala/man1/scala.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
*/
-//$Id$
package scala.man1
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 330ea4fd82..d2f6183356 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
*/
-//$Id$
package scala.man1
diff --git a/src/manual/scala/man1/scaladoc.scala b/src/manual/scala/man1/scaladoc.scala
index 458ee8da37..420bb08c4d 100644
--- a/src/manual/scala/man1/scaladoc.scala
+++ b/src/manual/scala/man1/scaladoc.scala
@@ -1,8 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright LAMP/EPFL
* @author Stephane Micheloud
+ * @author Gilles Dubochet
*/
-//$Id$
package scala.man1
@@ -45,82 +45,92 @@ object scaladoc extends Command {
CmdOption("d") & "(see " & Link(Bold("OPTIONS"), "#options") & ", below).",
// tags are defined in class "scala.tools.nsc.doc.DocGenerator"
- "Supported tag comments are:",
-
- BulletList(
- Mono("@author"), Mono("@deprecated"),
- Mono("@exception") & " (two arguments)",
- Mono("@param") & " (two arguments)", Mono("@pre"),
- Mono("@return"), Mono("@see"), Mono("@since"),
- Mono("@throws") & " (two arguments)",
- Mono("@todo"), Mono("@version")),
-
- "See also online document \"" & Link("How to Write Doc Comments for the Javadoc Tool",
- "http://java.sun.com/j2se/javadoc/writingdoccomments/") & "\" from Sun.")
+ "The recognised format of comments in source is described in the " & Link("online documentation",
+ "http://lampsvn.epfl.ch/trac/scala/wiki/Scaladoc"))
val options = Section("OPTIONS",
- "The generator has a set of standard options that are supported on the " &
- "current development environment and will be supported in future releases.",
-
Section("Standard Options",
DefinitionList(
Definition(
CmdOption("d", Argument("directory")),
- "Specify where to place generated class files."),
+ "Specify where to generate documentation."),
Definition(
- CmdOption("access:<access>"),
- "Show only public, protected/public (default) or all classes " &
- "and members (" & Mono("public") & ",protected,private)"),
+ CmdOption("version"),
+ "Print product version and exit."),
Definition(
- CmdOption("windowtitle", Argument("windowtitle")),
- "Specify window title of generated HTML documentation"),
+ /*CmdOption("?") & "| " &*/ CmdOption("help"),
+ "Print a synopsis of available options."))),
+
+ Section("Documentation Options",
+ DefinitionList(
Definition(
- CmdOption("doctitle", Argument("doctitle")),
- "Include title for the overview page"),
+ CmdOption("doc-title", Argument("title")),
+ "Define the overall title of the documentation, typically the name of the library being documented."),
Definition(
- CmdOption("stylesheetfile", Argument("stylesheetfile")),
- "File to change style of the generated documentation"),
+ CmdOption("doc-version", Argument("version")),
+ "Define the overall version number of the documentation, typically the version of the library being documented."),
Definition(
- CmdOption("header", Argument("pageheader")),
- "Include header text for each page"),
+ CmdOption("doc-source-url", Argument("url")),
+ "Define a URL to be concatenated with source locations for links to source files."))),
+
+ Section("Compiler Options",
+ DefinitionList(
Definition(
- CmdOption("footer", Argument("pagefooter")),
- "Include footer text for each page"),
+ CmdOption("verbose"),
+ "Output messages about what the compiler is doing"),
Definition(
- CmdOption("top", Argument("pagetop")),
- "Include top text for each page"),
+ CmdOption("deprecation"),
+ SeqPara(
+ "Indicate whether source should be compiled with deprecation " &
+ "information; defaults to " & Mono("off") & " (" &
+ "accepted values are: " & Mono("on") & ", " & Mono("off") &
+ ", " & Mono("yes") & " and " & Mono("no") & ")",
+ "Available since Scala version 2.2.1")),
Definition(
- CmdOption("bottom", Argument("pagebottom")),
- "Include bottom text for each page"),
+ CmdOption("classpath", Argument("path")),
+ SeqPara(
+ "Specify where to find user class files (on Unix-based systems " &
+ "a colon-separated list of paths, on Windows-based systems, a " &
+ "semicolon-separated list of paths). This does not override the " &
+ "built-in (" & Mono("\"boot\"") & ") search path.",
+ "The default class path is the current directory. Setting the " &
+ Mono("CLASSPATH") & " variable or using the " & Mono("-classpath") & " " &
+ "command-line option overrides that default, so if you want to " &
+ "include the current directory in the search path, you must " &
+ "include " & Mono("\".\"") & " in the new settings.")),
Definition(
- CmdOption("version"),
- "Print product version and exit."),
+ CmdOption("sourcepath", Argument("path")),
+ "Specify where to find input source files."),
Definition(
- /*CmdOption("?") & "| " &*/ CmdOption("help"),
- "Print a synopsis of standard options."))))
-
- val examples = Section("EXAMPLES",
-
- DefinitionList(
- Definition(
- "Generate documentation for a Scala program",
- CmdLine("HelloWorld.scala")),
- Definition(
- "Generation documentation for a Scala program to the destination " &
- "directory " & Bold("classes"),
- CmdLine(CmdOption("d", "api") & "HelloWorld.scala")),
- Definition(
- "Generate documentation for all Scala files found in the source " &
- "directory " & Bold("src") & " to the destination directory " &
- Bold("api"),
- CmdLine(CmdOption("d", "api") & "src/*.scala"))))
+ CmdOption("bootclasspath", Argument("path")),
+ "Override location of bootstrap class files (where to find the " &
+ "standard built-in classes, such as \"" & Mono("scala.List") & "\")."),
+ Definition(
+ CmdOption("extdirs", Argument("dirs")),
+ "Override location of installed extensions."),
+ Definition(
+ CmdOption("encoding", Argument("encoding")),
+ SeqPara(
+ "Specify character encoding used by source files.",
+ "The default value is platform-specific (Linux: " & Mono("\"UTF8\"") &
+ ", Windows: " & Mono("\"Cp1252\"") & "). Executing the following " &
+ "code in the Scala interpreter will return the default value " &
+ "on your system:",
+ MBold(" scala> ") &
+ Mono("new java.io.InputStreamReader(System.in).getEncoding"))))))
val exitStatus = Section("EXIT STATUS",
MBold(command) & " returns a zero exist status if it succeeds to process " &
"the specified input files. Non zero is returned in case of failure.")
+ override val authors = Section("AUTHORS",
+
+ "This version of Scaladoc was written by Gilles Dubochet with contributions by Pedro Furlanetto and Johannes Rudolph. " &
+ "It is based on the original Scaladoc (Sean McDirmid, Geoffrey Washburn, Vincent Cremet and Stéphane Micheloud), " &
+ "on vScaladoc (David Bernard), as well as on an unreleased version of Scaladoc 2 (Manohar Jonnalagedda).")
+
val seeAlso = Section("SEE ALSO",
Link(Bold("fsc") & "(1)", "fsc.html") & ", " &
@@ -131,20 +141,17 @@ object scaladoc extends Command {
def manpage = new Document {
title = command
- date = "May 1, 2007"
- author = "Stephane Micheloud"
- version = "0.4"
+ date = "2 June 2010"
+ author = "Gilles Dubochet"
+ version = "2.0"
sections = List(
name,
synopsis,
parameters,
description,
options,
- examples,
exitStatus,
authors,
- bugs,
- copyright,
seeAlso)
}
}
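For orientation, here is an invocation in the spirit of the removed EXAMPLES section, but using the documentation options introduced above; the title, version and source paths are illustrative, not taken from the patch:

    scaladoc -d api -doc-title "My Library" -doc-version "1.0" src/*.scala

This places the generated documentation in the api directory, per the new meaning of -d ("Specify where to generate documentation").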
diff --git a/src/manual/scala/man1/scalap.scala b/src/manual/scala/man1/scalap.scala
index 465bc79037..d9fc6d162d 100644
--- a/src/manual/scala/man1/scalap.scala
+++ b/src/manual/scala/man1/scalap.scala
@@ -1,8 +1,7 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
*/
-//$Id$
package scala.man1
diff --git a/src/manual/scala/tools/docutil/EmitHtml.scala b/src/manual/scala/tools/docutil/EmitHtml.scala
index 4cd3703a4f..20348ad303 100644
--- a/src/manual/scala/tools/docutil/EmitHtml.scala
+++ b/src/manual/scala/tools/docutil/EmitHtml.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
* Adapted from Lex Spoon's sbaz manual
*/
-//$Id$
package scala.tools.docutil
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index a692e2047f..2183ae3cb9 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
* Adapted from Lex Spoon's sbaz manual
*/
-//$Id$
package scala.tools.docutil
@@ -149,7 +148,7 @@ object EmitManPage {
out.println(".\\\" ##########################################################################")
out.println(".\\\" # __ #")
out.println(".\\\" # ________ ___ / / ___ Scala 2 On-line Manual Pages #")
- out.println(".\\\" # / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL #")
+ out.println(".\\\" # / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL #")
out.println(".\\\" # __\\ \\/ /__/ __ |/ /__/ __ | #")
out.println(".\\\" # /____/\\___/_/ |_/____/_/ | | http://scala-lang.org/ #")
out.println(".\\\" # |/ #")
diff --git a/src/manual/scala/tools/docutil/ManPage.scala b/src/manual/scala/tools/docutil/ManPage.scala
index 644045089b..83790e18d3 100644
--- a/src/manual/scala/tools/docutil/ManPage.scala
+++ b/src/manual/scala/tools/docutil/ManPage.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
* Adapted from Lex Spoon's sbaz manual
*/
-//$Id$
package scala.tools.docutil
diff --git a/src/manual/scala/tools/docutil/resources/index.html b/src/manual/scala/tools/docutil/resources/index.html
index 248bb02a81..452c1fcd85 100644
--- a/src/manual/scala/tools/docutil/resources/index.html
+++ b/src/manual/scala/tools/docutil/resources/index.html
@@ -8,7 +8,7 @@
<meta http-equiv="Content-Style-Type" content="text/css"/>
<meta http-equiv="Content-Language" content="en"/>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>
- <meta name="Copyright" content="(C) 2002-2009 LAMP/EPFL"/>
+ <meta name="Copyright" content="(C) 2002-2010 LAMP/EPFL"/>
<meta name="Language" content="en"/>
<meta name="Description" content="The Scala Programming Language"/>
<meta name="Author" content="Stephane Micheloud"/>
@@ -191,7 +191,7 @@
<hr/>
<div style="font-size:x-small;">
- Copyright (c) 2002-2009 <a href="http://www.epfl.ch/">EPFL</a>,
+ Copyright (c) 2002-2010 <a href="http://www.epfl.ch/">EPFL</a>,
Lausanne, unless specified otherwise.<br/>
All rights reserved.
</div>
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
index 509575b26e..0d7102c305 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java b/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
index 3e1b6a6f57..acdcb32e33 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
index b136f9423e..aefb6fdaf1 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
@@ -55,7 +54,7 @@ public class Attribute {
return value;
}
- /**@return an array with the arguments to the attribute's contructor. */
+ /**@return an array with the arguments to the attribute's constructor. */
public Object[] getConstructorArguments() {
parseBlob();
Object[] cas = new Object[constrArgs.length];
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java b/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
index c9a024409c..cac2319b50 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java b/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
index 925c5b8693..50bf9fb5d5 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
index 3dea39c2cf..99e5c5fe69 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
index 32a50fdf9b..0e58c18114 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
index f931f7bad6..a183993cb9 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
index e2bea10f7f..3ccba7900b 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
index 5e6cc84f1e..d7d1bb3d54 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
index bd08e4a3e1..9649dabd4e 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
index 4eafc37ef3..927185962c 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
index 14aac59e84..65ff1b290b 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
index 4536dc997f..5f49ad3323 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
index 3e8b658d49..a703c38fb8 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
index 699edab6e3..793ee362e9 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
index 1ad3f2a133..8e8d879593 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
index 22b174d77b..5e227fba35 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Module.java b/src/msil/ch/epfl/lamp/compiler/msil/Module.java
index bd06c32223..8dd5e7119f 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/Module.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/Module.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java b/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
index 2d6d498bb0..a31db16c92 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
index 459bb39a20..f84598e20b 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
@@ -854,10 +853,18 @@ public class PEFile {
while (getByte() == ELEMENT_TYPE_CMOD_OPT
|| getByte() == ELEMENT_TYPE_CMOD_REQD)
{
- Type t = decodeType();
- System.err.println("CMOD: " + t);
- if (getByte() == ELEMENT_TYPE_CMOD_REQD)
- throw new RuntimeException("Reqired CMOD: " + t);
+ // skip the tag 23.2.7
+ readByte();
+ // skip the TypeDefOrRefEncoded (23.2.8)
+ readByte();
+ readByte();
+
+ // @FIXME: could be 4 bytes, not always 2...
+
+ //Type t = decodeType();
+ //System.err.println("CMOD: " + t);
+ //if (getByte() == ELEMENT_TYPE_CMOD_REQD)
+ //throw new RuntimeException("Reqired CMOD: " + t);
}
}
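The @FIXME above exists because the skipped TypeDefOrRefEncoded token is stored as an ECMA-335 compressed unsigned integer, which occupies one, two or four bytes depending on its leading bits. A minimal Scala sketch of that decoding, assuming a hypothetical readByte() that yields the next unsigned blob byte (not PEFile's actual reader):

    // ECMA-335 II.23.2 compressed unsigned integer:
    //   high bit 0          -> 1 byte,  7-bit value
    //   top two bits 10     -> 2 bytes, 14-bit value
    //   top three bits 110  -> 4 bytes, 29-bit value
    def readCompressedUInt(readByte: () => Int): Int = {
      val b0 = readByte() & 0xFF
      if ((b0 & 0x80) == 0) b0
      else if ((b0 & 0xC0) == 0x80)
        ((b0 & 0x3F) << 8) | (readByte() & 0xFF)
      else
        ((b0 & 0x1F) << 24) | ((readByte() & 0xFF) << 16) |
          ((readByte() & 0xFF) << 8) | (readByte() & 0xFF)
    }

Reading the token this way, instead of unconditionally consuming two bytes, would remove the fixed-length assumption the comment warns about.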
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
index a6e7bb31b2..18e9c37bb4 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
@@ -143,8 +142,17 @@ final class PEModule extends Module {
Assembly assem = getAssembly(name);
type = assem.GetType(typeName);
if (type == null) {
- throw new RuntimeException("Failed to locate type " +
- typeName + " in assembly " + assem);
+ // HACK: the IKVM.OpenJDK.Core assembly is compiled against mscorlib.dll v2.0
+ // The MSIL library cannot parse the v2.0 mscorlib because of generics, so we
+ // use the v1.0
+ // However, the java.io.FileDescriptor.FlushFileBuffers method uses a type
+ // Microsoft.Win32.SafeHandles.SafeFileHandle, which only exists in mscorlib
+ // v2.0
+ // For now, just return Object (fine as long as we don't use that method).
+ Assembly asmb = getAssembly("mscorlib");
+ type = asmb.GetType("System.Object");
+ //throw new RuntimeException("Failed to locate type " +
+ //typeName + " in assembly " + assem);
}
break;
case ModuleDef.ID:
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java b/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
index 67686baad2..ace364d2ed 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
index 7c48637d9f..d4360363fc 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
index df3b5f96da..877d7aa8a5 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
index ca137ba99f..b1bec64aff 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
index ba3d67be2e..4b7cef8bc1 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Type.java b/src/msil/ch/epfl/lamp/compiler/msil/Type.java
index 55125f0d96..f7d44980c4 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/Type.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/Type.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
index 45ef487e24..8f489fa46f 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Version.java b/src/msil/ch/epfl/lamp/compiler/msil/Version.java
index 7276498258..ad4b09b163 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/Version.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/Version.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for access to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
index 9fbfbb533b..2c4011eeb0 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -72,7 +71,7 @@ class AssemblyBuilder(name: AssemblyName)
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
index ad3a82d040..ddd4708ecd 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -21,7 +20,7 @@ import java.io.IOException
* @author Nikolay Mihaylov
* @version 1.0
*/
-class ConstructorBuilder(declType: Type, attrs: int, paramTypes: Array[Type])
+class ConstructorBuilder(declType: Type, attrs: Int, paramTypes: Array[Type])
extends ConstructorInfo(declType, attrs, paramTypes)
with ICustomAttributeSetter
with Visitable
@@ -31,7 +30,7 @@ class ConstructorBuilder(declType: Type, attrs: int, paramTypes: Array[Type])
// public interface
/** Defines a parameter of this constructor. */
- def DefineParameter(pos: int, attr: int, name: String): ParameterBuilder = {
+ def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
params(pos) = param
return param
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
index 1fc3e068b8..3ea06382e5 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -18,7 +17,7 @@ import java.io.IOException
* @author Nikolay Mihaylov
* @version 1.0
*/
-class FieldBuilder(name: String, declType: Type, attrs: int, fieldType: Type)
+class FieldBuilder(name: String, declType: Type, attrs: Int, fieldType: Type)
extends FieldInfo(name, declType, attrs, fieldType)
with ICustomAttributeSetter
with Visitable
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
index c19366cc33..5d74d3aa95 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -15,5 +14,5 @@ import ch.epfl.lamp.compiler.msil.ConstructorInfo
* @version 1.0
*/
trait ICustomAttributeSetter {
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte])
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte])
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
index bf8eade0cd..4ef7069254 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -381,7 +380,9 @@ import ILGenerator._
* (MSIL) instruction stream.
*/
def BeginFinallyBlock() {
- Emit(OpCodes.Leave, excStack.peekLabel())
+ val endExc: Label = excStack.popLabel()
+ Emit(OpCodes.Leave, endExc)
+ excStack.push(Label.Finally, endExc)
emitSpecialLabel(Label.Finally)
}
@@ -482,9 +483,7 @@ import ILGenerator._
// compute new lastLabel (next label)
val stackSize: Int = lastLabel.getStacksize() + overridePOPUSH
if (stackSize < 0) {
- throw new RuntimeException
- //System.err.println
- ("ILGenerator.emit(): Stack underflow in method: " + owner)
+ throw new RuntimeException("ILGenerator.emit(): Stack underflow in method: " + owner)
}
if (stackSize > maxstack)
maxstack = stackSize
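The check above adds overridePOPUSH, the instruction's net stack effect (cf. the CEE_popush deltas defined in OpCode.scala later in this patch), to the current label's stack size and keeps the running maximum, which is ultimately what the .maxstack directive must cover. A standalone sketch of that bookkeeping, using hypothetical names rather than ILGenerator's actual fields:

    // Folds per-opcode stack deltas, failing on underflow and
    // remembering the peak evaluation-stack depth.
    def computeMaxStack(deltas: Seq[Int], owner: String): Int = {
      var size = 0
      var maxstack = 0
      for (d <- deltas) {
        size += d
        if (size < 0)
          throw new RuntimeException("Stack underflow in method: " + owner)
        if (size > maxstack) maxstack = size
      }
      maxstack
    }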
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
index 28d0f7787a..ef1e3bc86a 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies in MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -20,7 +19,7 @@ import ch.epfl.lamp.compiler.msil._
import ch.epfl.lamp.compiler.msil.util.Table
/**
- * The MSIL printer Vistor. It prints a complete
+ * The MSIL printer Visitor. It prints a complete
* assembly in a single or multiple files. Then this file can be compiled by ilasm.
*
* @author Nikolay Mihaylov
@@ -52,7 +51,7 @@ abstract class ILPrinterVisitor extends Visitor {
private var newline = true
// print types without or with members?
- protected var nomembers: boolean = false
+ protected var nomembers: Boolean = false
// external assemblies
protected var as: Array[Assembly] = null
@@ -90,13 +89,13 @@ abstract class ILPrinterVisitor extends Visitor {
// methods to print code
protected def print(s: String) { align(); out.print(s)}
protected def print(o: Object) { align(); out.print(o) }
- protected def print(c: char) { align(); out.print(c) }
- protected def print(`val`: int) { align(); out.print(`val`)}
- protected def print(`val`: long){ align(); out.print(`val`)}
+ protected def print(c: Char) { align(); out.print(c) }
+ protected def print(`val`: Int) { align(); out.print(`val`)}
+ protected def print(`val`: Long){ align(); out.print(`val`)}
protected def println() { out.println(); newline = true; padding = 0 }
- protected def println(c: char) { print(c); println() }
- protected def println(i: int) { print(i); println() }
- protected def println(l: long) { print(l); println() }
+ protected def println(c: Char) { print(c); println() }
+ protected def println(i: Int) { print(i); println() }
+ protected def println(l: Long) { print(l); println() }
protected def println(s: String){ print(s); println() }
protected def println(o: Object){ print(o); println() }
protected def printName(name: String) {
@@ -489,7 +488,7 @@ abstract class ILPrinterVisitor extends Visitor {
//##########################################################################
- def printAssemblySignature(assem: Assembly, extern: boolean) {
+ def printAssemblySignature(assem: Assembly, extern: Boolean) {
print(".assembly ")
if (extern)
print("extern ")
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
index ac62dd9ccd..22c1b1150b 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -78,7 +77,7 @@ object Label {
// : "this.stacksize = " + stacksize + " that.stacksize = "
// + that.stacksize
// stacksize = that.stacksize
- val ss: Int = Math.max(stacksize, that.getStacksize())
+ val ss: Int = math.max(stacksize, that.getStacksize())
stacksize = ss
that.setStacksize(ss)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
index d19475401e..73bca4639f 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
index e22c1fca13..237d8fd728 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -40,12 +39,12 @@ class MethodBuilder(name: String, declType: Type, attrs: Int, returnType: Type,
def GetILGenerator(): ILGenerator = {
if (ilGenerator == null)
throw new RuntimeException
- ("No code generator avaiable for this method: " + this)
+ ("No code generator available for this method: " + this)
return ilGenerator
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
index 037b8660ee..a2d284865f 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -105,7 +104,7 @@ class ModuleBuilder(name: String, fullname: String, scopeName: String, assembly:
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
index 63776cc084..afd2d5d556 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies in MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -20,8 +19,8 @@ import ch.epfl.lamp.compiler.msil.emit
import ch.epfl.lamp.compiler.msil.util.Table
/**
- * The MSIL printer Vistor. It prints a complete
- * assembly into seperate files. Then these files can be compiled by ilasm.
+ * The MSIL printer Visitor. It prints a complete
+ * assembly into separate files. Then these files can be compiled by ilasm.
*
* @author Nikolay Mihaylov
* @author Daniel Lorch
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
index e7bff447cc..ef9e002495 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -23,23 +22,23 @@ class OpCode extends Visitable {
var CEE_string: String = _
/** The type of Microsoft intermediate language (MSIL) instruction. */
- var CEE_code: short = _
+ var CEE_code: Short = _
/** How the Microsoft intermediate language (MSIL) instruction pops the stack. */
- var CEE_pop: byte = _
+ var CEE_pop: Byte = _
/** How the Microsoft intermediate language (MSIL) instruction pushes operand onto the stack. */
- var CEE_push: byte = _
+ var CEE_push: Byte = _
/** Describes the type of flow control. */
- var CEE_flow: byte = _
+ var CEE_flow: Byte = _
/** ????? */
- var CEE_inline: byte = _
+ var CEE_inline: Byte = _
- var CEE_length: byte = _
+ var CEE_length: Byte = _
- var CEE_popush: byte = _
+ var CEE_popush: Byte = _
/**
* the apply method for a visitor
@@ -49,13 +48,13 @@ class OpCode extends Visitable {
v.caseOpCode(this)
}
- protected def length(): byte = {
+ protected def length(): Byte = {
val code = OpCode.length(CEE_code)
val inline = OpCode.INLINE_length(CEE_inline)
return if(inline < 0) { -1 } else { (code + inline).toByte }
}
- protected def popush(): byte = {
+ protected def popush(): Byte = {
val pop = OpCode.POP_size(CEE_pop)
val push = OpCode.PUSH_size(CEE_push)
return if(pop < 0 || push < 0) { OpCode.POPUSH_SPECIAL } else { (push - pop).toByte }
@@ -391,27 +390,27 @@ object OpCode {
//########################################################################
// Opcode's amount and type of poped data
- final val POP_NONE : byte = 0x00
- final val POP_1 : byte = 0x01
- final val POP_1_1 : byte = 0x02
- final val POP_I : byte = 0x03
- final val POP_I_1 : byte = 0x04
- final val POP_I_I : byte = 0x05
- final val POP_I_I8 : byte = 0x06
- final val POP_I_R4 : byte = 0x07
- final val POP_I_R8 : byte = 0x08
- final val POP_I_I_I : byte = 0x09
- final val POP_REF : byte = 0x0A
- final val POP_REF_1 : byte = 0x0B
- final val POP_REF_I : byte = 0x0C
- final val POP_REF_I_I : byte = 0x0D
- final val POP_REF_I_I8 : byte = 0x0E
- final val POP_REF_I_R4 : byte = 0x0F
- final val POP_REF_I_R8 : byte = 0x10
- final val POP_REF_I_REF : byte = 0x11
- final val POP_SPECIAL : byte = 0x12
+ final val POP_NONE : Byte = 0x00
+ final val POP_1 : Byte = 0x01
+ final val POP_1_1 : Byte = 0x02
+ final val POP_I : Byte = 0x03
+ final val POP_I_1 : Byte = 0x04
+ final val POP_I_I : Byte = 0x05
+ final val POP_I_I8 : Byte = 0x06
+ final val POP_I_R4 : Byte = 0x07
+ final val POP_I_R8 : Byte = 0x08
+ final val POP_I_I_I : Byte = 0x09
+ final val POP_REF : Byte = 0x0A
+ final val POP_REF_1 : Byte = 0x0B
+ final val POP_REF_I : Byte = 0x0C
+ final val POP_REF_I_I : Byte = 0x0D
+ final val POP_REF_I_I8 : Byte = 0x0E
+ final val POP_REF_I_R4 : Byte = 0x0F
+ final val POP_REF_I_R8 : Byte = 0x10
+ final val POP_REF_I_REF : Byte = 0x11
+ final val POP_SPECIAL : Byte = 0x12
final val POP_count : Int = 0x13
- final val POP_size : Array[byte] = new Array[byte](POP_count)
+ final val POP_size : Array[Byte] = new Array[Byte](POP_count)
POP_size(POP_NONE) = 0
POP_size(POP_1) = 1
@@ -436,17 +435,17 @@ object OpCode {
//########################################################################
// Opcode's amount and type of pushed data
- final val PUSH_NONE : byte = 0x00
- final val PUSH_1 : byte = 0x01
- final val PUSH_1_1 : byte = 0x02
- final val PUSH_I : byte = 0x03
- final val PUSH_I8 : byte = 0x04
- final val PUSH_R4 : byte = 0x05
- final val PUSH_R8 : byte = 0x06
- final val PUSH_REF : byte = 0x07
- final val PUSH_SPECIAL : byte = 0x08
+ final val PUSH_NONE : Byte = 0x00
+ final val PUSH_1 : Byte = 0x01
+ final val PUSH_1_1 : Byte = 0x02
+ final val PUSH_I : Byte = 0x03
+ final val PUSH_I8 : Byte = 0x04
+ final val PUSH_R4 : Byte = 0x05
+ final val PUSH_R8 : Byte = 0x06
+ final val PUSH_REF : Byte = 0x07
+ final val PUSH_SPECIAL : Byte = 0x08
final val PUSH_count : Int = 0x09
- final val PUSH_size : Array[byte] = new Array[byte](PUSH_count)
+ final val PUSH_size : Array[Byte] = new Array[Byte](PUSH_count)
PUSH_size(PUSH_NONE) = 0
PUSH_size(PUSH_1) = 1
@@ -461,30 +460,30 @@ object OpCode {
//########################################################################
// Opcode's amount of moved data
- final val POPUSH_SPECIAL : byte = -128
+ final val POPUSH_SPECIAL : Byte = -128
//########################################################################
// Opcode's inline argument types
- final val INLINE_NONE : byte = 0x00
- final val INLINE_VARIABLE_S : byte = 0x01
- final val INLINE_TARGET_S : byte = 0x02
- final val INLINE_I_S : byte = 0x03
- final val INLINE_VARIABLE : byte = 0x04
- final val INLINE_TARGET : byte = 0x05
- final val INLINE_I : byte = 0x06
- final val INLINE_I8 : byte = 0x07
- final val INLINE_R : byte = 0x08
- final val INLINE_R8 : byte = 0x09
- final val INLINE_STRING : byte = 0x0A
- final val INLINE_TYPE : byte = 0x0B
- final val INLINE_FIELD : byte = 0x0C
- final val INLINE_METHOD : byte = 0x0D
- final val INLINE_SIGNATURE : byte = 0x0E
- final val INLINE_TOKEN : byte = 0x0F
- final val INLINE_SWITCH : byte = 0x10
+ final val INLINE_NONE : Byte = 0x00
+ final val INLINE_VARIABLE_S : Byte = 0x01
+ final val INLINE_TARGET_S : Byte = 0x02
+ final val INLINE_I_S : Byte = 0x03
+ final val INLINE_VARIABLE : Byte = 0x04
+ final val INLINE_TARGET : Byte = 0x05
+ final val INLINE_I : Byte = 0x06
+ final val INLINE_I8 : Byte = 0x07
+ final val INLINE_R : Byte = 0x08
+ final val INLINE_R8 : Byte = 0x09
+ final val INLINE_STRING : Byte = 0x0A
+ final val INLINE_TYPE : Byte = 0x0B
+ final val INLINE_FIELD : Byte = 0x0C
+ final val INLINE_METHOD : Byte = 0x0D
+ final val INLINE_SIGNATURE : Byte = 0x0E
+ final val INLINE_TOKEN : Byte = 0x0F
+ final val INLINE_SWITCH : Byte = 0x10
final val INLINE_count : Int = 0x11
- final val INLINE_length : Array[byte] = new Array[byte](INLINE_count)
+ final val INLINE_length : Array[Byte] = new Array[Byte](INLINE_count)
INLINE_length(INLINE_NONE) = 0
INLINE_length(INLINE_VARIABLE_S) = 1
@@ -507,21 +506,21 @@ object OpCode {
//########################################################################
// Opcode's control flow implications
- final val FLOW_META : byte = 0x00
- final val FLOW_NEXT : byte = 0x01
- final val FLOW_BRANCH : byte = 0x02
- final val FLOW_COND_BRANCH : byte = 0x03
- final val FLOW_BREAK : byte = 0x04
- final val FLOW_CALL : byte = 0x05
- final val FLOW_RETURN : byte = 0x06
- final val FLOW_THROW : byte = 0x07
+ final val FLOW_META : Byte = 0x00
+ final val FLOW_NEXT : Byte = 0x01
+ final val FLOW_BRANCH : Byte = 0x02
+ final val FLOW_COND_BRANCH : Byte = 0x03
+ final val FLOW_BREAK : Byte = 0x04
+ final val FLOW_CALL : Byte = 0x05
+ final val FLOW_RETURN : Byte = 0x06
+ final val FLOW_THROW : Byte = 0x07
final val FLOW_count : Int = 0x08
//########################################################################
// Init methods for Opcode
- def opcode(that: OpCode, opcode: int, string: String, code: Int,
- pop: byte, push: byte, inline: byte, flow: byte) {
+ def opcode(that: OpCode, opcode: Int, string: String, code: Int,
+ pop: Byte, push: Byte, inline: Byte, flow: Byte) {
that.CEE_opcode = opcode
that.CEE_string = string
that.CEE_code = code.toShort
@@ -533,7 +532,7 @@ object OpCode {
that.CEE_popush = that.popush()
}
- def length(code: Int): byte = {
+ def length(code: Int): Byte = {
if ((code & 0xFFFFFF00) == 0xFFFFFF00) return 1
if ((code & 0xFFFFFF00) == 0xFFFFFE00) return 2
return 0
@@ -1139,7 +1138,7 @@ object OpCode {
opcode(Rem_Un, CEE_REM_UN, "rem.un" , 0xFFFFFF5E, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
/**
- * Computes the bitwise AND of two values and pushes the result onto the evalution stack.
+ * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
*/
final val And = new OpCode()
opcode(And, CEE_AND, "and" , 0xFFFFFF5F, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
@@ -1585,7 +1584,7 @@ object OpCode {
opcode(Conv_Ovf_I2, CEE_CONV_OVF_I2, "conv.ovf.i2", 0xFFFFFFB5, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
/**
- * Converts the signed value on top of the sevaluation tack to signed int32,
+ * Converts the signed value on top of the evaluation stack to signed int32,
* throwing OverflowException on overflow.
*/
final val Conv_Ovf_I4 = new OpCode()
@@ -1862,7 +1861,7 @@ object OpCode {
/**
* Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
+ * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
*/
final val Localloc = new OpCode()
opcode(Localloc, CEE_LOCALLOC, "localloc" , 0xFFFFFE0F, POP_I, PUSH_I, INLINE_NONE, FLOW_NEXT)
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
index 57ef69ba7f..db2a6fedc7 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -521,7 +520,7 @@ object OpCodes {
final val Rem_Un = OpCode.Rem_Un
/**
- * Computes the bitwise AND of two values and pushes the result onto the evalution stack.
+ * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
*/
final val And = OpCode.And
@@ -899,7 +898,7 @@ object OpCodes {
final val Conv_Ovf_I2 = OpCode.Conv_Ovf_I2
/**
- * Converts the signed value on top of the sevaluation tack to signed int32,
+ * Converts the signed value on top of the evaluation stack to signed int32,
* throwing OverflowException on overflow.
*/
final val Conv_Ovf_I4 = OpCode.Conv_Ovf_I4
@@ -1135,7 +1134,7 @@ object OpCodes {
/**
* Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
+ * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
*/
final val Localloc = OpCode.Localloc
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
index dabba58f0c..8f9d81a8b0 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -29,7 +28,7 @@ class ParameterBuilder(name: String, tpe: Type, attr: Int, pos: Int)
//##########################################################################
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
index 5ebc5ea32f..7288cf41a7 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies in MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -19,7 +18,7 @@ import ch.epfl.lamp.compiler.msil.emit
import ch.epfl.lamp.compiler.msil.util.Table
/**
- * The MSIL printer Vistor. It prints a complete
+ * The MSIL printer Visitor. It prints a complete
* assembly in a single file. Then this file can be compiled by ilasm.
*
* @author Nikolay Mihaylov
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
index 81bf28bc04..8667098cdb 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
@@ -18,7 +17,7 @@ import java.io.IOException
* @author Nikolay Mihaylov
* @version 1.0
*/
-class TypeBuilder (module: Module, attributes: int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type)
+class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type)
extends Type(module, attributes, fullName, baseType, interfaces, declType, 0)
with ICustomAttributeSetter
with Visitable
@@ -45,7 +44,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
* Adds a new field to the class, with the given name,
* attributes and field type.
*/
- def DefineField(name: String, `type`: Type, attrs: short): FieldBuilder = {
+ def DefineField(name: String, `type`: Type, attrs: Short): FieldBuilder = {
val field: FieldBuilder = new FieldBuilder(name, this, attrs, `type`)
fieldBuilders.add(field)
return field
@@ -55,7 +54,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
* Adds a new method to the class, with the given name and
* method signature.
*/
- def DefineMethod(name: String, attrs: short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = {
+ def DefineMethod(name: String, attrs: Short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = {
val method = new MethodBuilder(name, this, attrs, returnType, paramTypes)
val methods = methodBuilders.iterator()
while(methods.hasNext()) {
@@ -72,7 +71,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
* Adds a new constructor to the class, with the given attributes
* and signature.
*/
- def DefineConstructor(attrs: short, callingConvention: short, paramTypes: Array[Type]): ConstructorBuilder = {
+ def DefineConstructor(attrs: Short, callingConvention: Short, paramTypes: Array[Type]): ConstructorBuilder = {
val constr = new ConstructorBuilder(this, attrs, paramTypes)
constructorBuilders.add(constr)
return constr
@@ -81,7 +80,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
/**
* Defines a nested type given its name.
*/
- def DefineNestedType(name: String, attributes: int, baseType: Type, interfaces: Array[Type]): TypeBuilder = {
+ def DefineNestedType(name: String, attributes: Int, baseType: Type, interfaces: Array[Type]): TypeBuilder = {
val nested = nestedTypeBuilders.iterator()
while(nested.hasNext()) {
val nt = nested.next().asInstanceOf[TypeBuilder]
@@ -141,22 +140,27 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
/** Searches for the nested type with the specified name. */
override def GetNestedType(name: String): Type = {
- testRaw(name)
- return super.GetNestedType(name)
+ testRaw(name)
+ super.GetNestedType(name)
}
/** Returns all the types nested within the current Type. */
override def GetNestedTypes(): Array[Type] = {
- testRaw("<GetNestedTypes>")
- return super.GetNestedTypes()
+ testRaw("<GetNestedTypes>")
+ super.GetNestedTypes()
+ }
+
+ /** Returns a Type object that represents a one-dimensional array of the current type */
+ def MakeArrayType(): Type = {
+ Type.mkArray(this, 1)
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
- def setPosition(sourceLine: int, sourceFilename: String) {
+ def setPosition(sourceLine: Int, sourceFilename: String) {
this.sourceLine = sourceLine
this.sourceFilename = sourceFilename
}
@@ -184,8 +188,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
// i.e. not finalized by call to CreateType
protected def testRaw(member: String) {
if (raw)
- throw new RuntimeException
- ("Not supported for TypeBuilder before CreateType(): " +
+ throw new RuntimeException("Not supported for TypeBuilder before CreateType(): " +
FullName + "::" + member)
}
@@ -213,7 +216,7 @@ object TypeBuilder {
return s.toString()
}
- def methodsEqual(m1: MethodInfo, m2: MethodInfo): boolean = {
+ def methodsEqual(m1: MethodInfo, m2: MethodInfo): Boolean = {
if (!m1.Name.equals(m2.Name))
return false
if (m1.ReturnType != m2.ReturnType)
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
index 963c587bc7..28ec801dd4 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
index dc71123f85..d4b84cdd4e 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
@@ -2,7 +2,6 @@
* System.Reflection.Emit-like API for writing .NET assemblies to MSIL
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.emit
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
index 5067489693..9a6e28a545 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.compiler.msil.tests;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
index 0c2d12aa0f..96ec1bfeea 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.compiler.msil.tests;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
index 4f20632ce3..37a5c6ea90 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.compiler.msil.tests;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
index 1d3764dff2..1df389b011 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
@@ -1,4 +1,3 @@
-// $Id$
package ch.epfl.lamp.compiler.msil.tests;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
index d1c81f4fb5..2c5946a734 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
@@ -1,4 +1,3 @@
-// $Id$
package test;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
index 84f604af53..454a94e55c 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for acces to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.util;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
index 4feb0f7b1e..ef043875ec 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for acces to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.util;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
index ded2dd64a7..1b16508be5 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for acces to .NET assemblies (DLL & EXE)
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.util;
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
index a4860c369f..ba9d317dcf 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
@@ -2,7 +2,6 @@
* System.Reflection-like API for acces to .NET Assemblies
*/
-// $Id$
package ch.epfl.lamp.compiler.msil.util;
diff --git a/src/partest-alternative/README b/src/partest-alternative/README
new file mode 100644
index 0000000000..c7673fe2f8
--- /dev/null
+++ b/src/partest-alternative/README
@@ -0,0 +1,50 @@
+If you're looking for something to read, I suggest running ../test/partest
+with no arguments, which at this moment prints this:
+
+Usage: partest [<options>] [<test> <test> ...]
+ <test>: a path to a test designator, typically a .scala file or a directory.
+ Examples: files/pos/test1.scala, files/res/bug785
+
+ Test categories:
+ --all run all tests (default, unless no options given)
+ --pos Compile files that are expected to build
+ --neg Compile files that are expected to fail
+ --run Test JVM backend
+ --jvm Test JVM backend
+ --res Run resident compiler scenarii
+ --buildmanager Run Build Manager scenarii
+ --scalacheck Run Scalacheck tests
+ --script Run script files
+ --shootout Run shootout tests
+ --scalap Run scalap tests
+
+ Test "smart" categories:
+ --grep run all tests with a source file containing <expr>
+ --failed run all tests which failed on the last run
+
+ Specifying paths and additional flags, ~ means repository root:
+ --rootdir path from ~ to partest (default: test)
+ --builddir path from ~ to test build (default: build/pack)
+ --srcdir path from --rootdir to sources (default: files)
+ --javaopts flags to java on all runs (overrides JAVA_OPTS)
+ --scalacopts flags to scalac on all tests (overrides SCALAC_OPTS)
+ --pack alias for --builddir build/pack
+ --quick alias for --builddir build/quick
+
+ Options influencing output:
+ --trace show the individual steps taken by each test
+ --show-diff show diff between log and check file
+ --show-log show log on failures
+ --dry-run do not run tests, only show their traces.
+ --terse be less verbose (almost silent except for failures)
+ --verbose be more verbose (additive with --trace)
+ --debug maximum debugging output
+ --ansi print output in color
+
+ Other options:
+ --timeout Timeout in seconds
+ --cleanup delete all stale files and dirs before run
+ --nocleanup do not delete any logfiles or object dirs
+ --stats collect and print statistics about the tests
+ --validate examine test filesystem for inconsistencies
+ --version print version
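As a concrete illustration of how these flags combine (hypothetical invocation, using only options listed above), running

    test/partest --neg --failed --show-diff --pack

from the repository root reruns only the negative tests that failed on the previous run, against the build/pack build, and shows a diff for each failure.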
diff --git a/src/partest-alternative/scala/tools/partest/Actions.scala b/src/partest-alternative/scala/tools/partest/Actions.scala
new file mode 100644
index 0000000000..cb60152b71
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Actions.scala
@@ -0,0 +1,231 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+
+import util._
+import nsc.io._
+
+trait Actions {
+ partest: Universe =>
+
+ class TestSequence(val actions: List[TestStep]) extends AbsTestSequence {
+ }
+
+ implicit def createSequence(xs: List[TestStep]) = new TestSequence(xs)
+
+ trait ExecSupport {
+ self: TestEntity =>
+
+ def execEnv: Map[String, String] = {
+ val map = assembleEnvironment()
+ val cwd = execCwd.toList map ("CWD" -> _.path)
+
+ map ++ cwd
+ }
+ def execCwd = if (commandFile.isFile) Some(sourcesDir) else None
+
+ def runExec(args: List[String]): Boolean = {
+ val cmd = fromArgs(args)
+
+ if (isVerbose) {
+ trace("runExec: " + execEnv.mkString("ENV(", "\n", "\n)"))
+ execCwd foreach (x => trace("CWD(" + x + ")"))
+ }
+
+ trace("runExec: " + cmd)
+ isDryRun || execAndLog(cmd)
+ }
+
+ /** Runs <code>command</code> redirecting standard out and
+ * error out to <code>output</code> file.
+ */
+ private def runCommandOld(command: String, output: java.io.File): Int = {
+ import java.io._
+ import nest.StreamAppender
+
+ // NestUI.verbose("running command:\n"+command)
+ val proc = Runtime.getRuntime.exec(command)
+ val in = proc.getInputStream
+ val err = proc.getErrorStream
+ val writer = new PrintWriter(new FileWriter(output), true)
+ val inApp = StreamAppender(in, writer)
+ val errApp = StreamAppender(err, writer)
+ val async = new Thread(errApp)
+ async.start()
+ inApp.run()
+ async.join()
+ writer.close()
+
+ try proc.exitValue()
+ catch { case _: IllegalThreadStateException => 0 }
+ }
+
+ /** Exec a process to run a command. Assumes 0 exit value is success.
+ * Of necessity, also treats no available exit value as success.
+ */
+ protected def execAndLog(cmd: String): Boolean = {
+ runCommandOld(cmd, logFile.jfile) == 0
+
+ // var proc: Process = null
+ //
+ // val result = interruptMeIn(cmd, testTimeout) {
+ // loggingResult {
+ // proc = Process.exec(toArgs(cmd), execEnv, execCwd.orNull, true)
+ // proc.slurp()
+ // }
+ // proc != null && (proc.waitFor() == 0)
+ // }
+ // result getOrElse {
+ // warning("Process never terminated: '%s'" format cmd)
+ // if (proc != null)
+ // proc.destroy()
+ //
+ // false
+ // }
+ }
+ }
+
+ trait ScriptableTest {
+ self: TestEntity =>
+
+ /** Translates a line from a .cmds file into a teststep.
+ */
+ def customTestStep(line: String): TestStep = {
+ trace("customTestStep: " + line)
+ val (cmd, rest) = line span (x => !Character.isWhitespace(x))
+ def qualify(name: String) = sourcesDir / name path
+ val args = toArgs(rest) map qualify
+ def fail: TestStep = (_: TestEntity) => error("Parse error: did not understand '%s'" format line)
+
+ val f: TestEntity => Boolean = cmd match {
+ case "scalac" => _ scalac args
+ case "javac" => _ javac args
+ case "scala" => _ runScala args
+ case _ => fail
+ }
+ f
+ }
+ }
+
+ trait CompilableTest extends CompileExecSupport {
+ self: TestEntity =>
+
+ def sourceFiles = location.walk collect { case f: File if isJavaOrScala(f) => f } toList
+ def allSources = sourceFiles map (_.path)
+ def scalaSources = sourceFiles filter isScala map (_.path)
+ def javaSources = sourceFiles filter isJava map (_.path)
+
+ /** If there are mixed java and scala files, the standard compilation
+ * sequence is:
+ *
+ * scalac with all files
+ * javac with only java files
+ * scalac with only scala files
+ *
+ * This should be expanded to encompass other strategies so we know how
+ * well they're working or not working - notably, it would be very useful
+ * to know exactly when and how two-pass compilation fails.
+ */
+ def compile() = {
+ trace("compile: " + sourceFiles)
+
+ def compileJava() = javac(javaSources)
+ def compileScala() = scalac(scalaSources)
+ def compileAll() = scalac(allSources)
+ def compileMixed() = compileAll() && compileJava() && compileScala()
+
+ if (scalaSources.nonEmpty && javaSources.nonEmpty) compileMixed()
+ else compileScala()
+ }
+ }
+
+ trait DiffableTest {
+ self: TestEntity =>
+
+ def checkFile: File = withExtension("check").toFile
+ def checkFileRequired =
+ returning(checkFile.isFile)(res => if (!res) warnAndLog("A checkFile at '%s' is mandatory.\n" format checkFile.path))
+
+ lazy val sourceFileNames = sourceFiles map (_.name)
+
+ /** Given the difficulty of verifying that any selective approach works
+ * everywhere, the algorithm now is to look for the name of any known
+ * source file for this test, and if seen, remove all the non-whitespace
+ * preceding it. (Paths with whitespace don't work anyway.) This should
+ * wipe out all slashes, backslashes, C:\, cygwin/windows differences,
+ * and whatever else makes a simple diff not simple.
+ *
+ * The log and check file are both transformed, which I don't think is
+ * correct -- only the log should be -- but doing it this way until I
+ * can clarify martin's comments in #3283.
+ */
+ def normalizePaths(s: String) =
+ sourceFileNames.foldLeft(s)((res, name) => res.replaceAll("""\S+\Q%s\E""" format name, name))
+
+ /** The default cleanup normalizes paths relative to sourcesDir,
+ * absorbs line terminator differences by going to lines and back,
+ * and trims leading or trailing whitespace.
+ */
+ def diffCleanup(f: File) = safeLines(f) map normalizePaths mkString "\n" trim
+
+ /** diffFiles requires actual Files as arguments but the output we want
+ * is the post-processed versions of log/check, so we resort to tempfiles.
+ */
+ lazy val diffOutput = {
+ if (!checkFile.exists) "" else {
+ val input = diffCleanup(checkFile)
+ val output = diffCleanup(logFile)
+ def asFile(s: String) = returning(File.makeTemp("partest-diff"))(_ writeAll s)
+
+ if (input == output) ""
+ else diffFiles(asFile(input), asFile(output))
+ }
+ }
+ private def checkTraceName = tracePath(checkFile)
+ private def logTraceName = tracePath(logFile)
+ private def isDiffConfirmed = checkFile.exists && (diffOutput == "")
+
+ private def sendTraceMsg() {
+ def result =
+ if (isDryRun) ""
+ else if (isDiffConfirmed) " [passed]"
+ else if (checkFile.exists) " [failed]"
+ else " [unchecked]"
+
+ trace("diff %s %s%s".format(checkTraceName, logTraceName, result))
+ }
+
+ /** A missing check file is considered a successful
+ * diff. Necessary since many categories use
+ * checkfiles in an ad hoc manner.
+ */
+ def runDiff() = {
+ sendTraceMsg()
+
+ def updateCheck = (
+ isUpdateCheck && {
+ val formatStr = "** diff %s %s: " + (
+ if (checkFile.exists) "failed, updating '%s' and marking as passed."
+ else if (diffOutput == "") "not creating checkFile at '%s' as there is no output."
+ else "was unchecked, creating '%s' for future tests."
+ ) + "\n"
+
+ normal(formatStr.format(checkTraceName, logTraceName, checkFile.path))
+ if (diffOutput != "") normal(diffOutput)
+
+ checkFile.writeAll(diffCleanup(logFile), "\n")
+ true
+ }
+ )
+
+ isDryRun || isDiffConfirmed || (updateCheck || !checkFile.exists)
+ }
+ }
+}
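The path normalization in DiffableTest above is easiest to see on a concrete input. A minimal standalone sketch of the same substitution; the file name and log line are invented for illustration:

    object NormalizePathsDemo {
      // Same substitution as DiffableTest.normalizePaths: any run of non-whitespace
      // ending in a known source file name collapses to just the file name.
      def normalizePaths(sourceFileNames: List[String])(s: String) =
        sourceFileNames.foldLeft(s)((res, name) => res.replaceAll("""\S+\Q%s\E""" format name, name))

      def main(args: Array[String]): Unit = {
        val logLine = """C:\cygwin\repo\test\files\pos\Test.scala:5: error: not found: value x"""
        // prints: Test.scala:5: error: not found: value x
        println(normalizePaths(List("Test.scala"))(logLine))
      }
    }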
diff --git a/src/partest-alternative/scala/tools/partest/Alarms.scala b/src/partest-alternative/scala/tools/partest/Alarms.scala
new file mode 100644
index 0000000000..f38d8d6268
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Alarms.scala
@@ -0,0 +1,86 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package partest
+
+import java.util.{ Timer, TimerTask }
+
+trait Alarms {
+ self: Universe =>
+
+ def interruptMeIn[T](debugMsg: String, seconds: Int)(body: => T): Option[T] = {
+ val thisThread = currentThread
+ val alarm = new SimpleAlarm(seconds * 1000) set thisThread.interrupt()
+ debug("interruptMeIn(%d) '%s'".format(seconds, debugMsg))
+
+ try { Some(body) }
+ catch { case _: InterruptedException => debug("Received interrupted exception.") ; None }
+ finally { debug("Cancelling interruptMeIn '%s'" format debugMsg) ; alarm.cancel() ; Thread.interrupted() }
+ }
+
+ case class AlarmerAction(secs: Int, action: () => Unit) extends Runnable {
+ override def run() = action()
+ }
+
+ /** Set any number of alarms up with tuples of the form:
+ * seconds to alarm -> Function0[Unit] to execute
+ */
+ class Alarmer(alarms: AlarmerAction*) {
+ import java.util.concurrent._
+
+ val exec = Executors.newSingleThreadScheduledExecutor()
+ alarms foreach (x => exec.schedule(x, x.secs, TimeUnit.SECONDS))
+ exec.shutdown()
+
+ def cancelAll() = exec.shutdownNow()
+ }
+
+ class SimpleAlarm(timeout: Long) {
+ private val alarm = new Timer
+
+ /** Start a timer, running the given body if it goes off.
+ */
+ def set(body: => Unit) = returning(new TimerTask { def run() = body })(alarm.schedule(_, timeout))
+
+ /** Cancel the timer.
+ */
+ def cancel() = alarm.cancel()
+ }
+
+ trait TestAlarms {
+ test: TestEntity =>
+
+ private def warning1 = AlarmerAction(testWarning, () => warning(
+ """|I've been waiting %s seconds for this to complete:
+ | %s
+ |It may be stuck, or if not, it should be broken into smaller tests.
+ |""".stripMargin.format(testWarning, test))
+ )
+ private def warning2 = AlarmerAction(testWarning * 2, () => warning(
+ """|Now I've been waiting %s seconds for this to complete:
+ | %s
+ |If partest seems hung it would be a good place to look.
+ |""".stripMargin.format(testWarning * 2, test))
+ )
+
+ def startAlarms(onTimeout: => Unit) =
+ if (isNoAlarms) new Alarmer() // for alarm debugging
+ else new Alarmer(Seq(warning1, warning2, AlarmerAction(testTimeout, () => onTimeout)): _*)
+ }
+
+ // Thread.setDefaultUncaughtExceptionHandler(new UncaughtException)
+ // class UncaughtException extends Thread.UncaughtExceptionHandler {
+ // def uncaughtException(t: Thread, e: Throwable) {
+ // Console.println("Uncaught in %s: %s".format(t, e))
+ // }
+ // }
+ //
+ // lazy val logger = File("/tmp/partest.log").bufferedWriter()
+ // def flog(msg: String) = logger synchronized {
+ // logger write (msg + "\n")
+ // logger.flush()
+ // }
+}
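interruptMeIn and SimpleAlarm above amount to a timer that interrupts the calling thread unless the body finishes (and cancels the alarm) first. A standalone sketch of that pattern, not the trait itself, with made-up timings:

    import java.util.{ Timer, TimerTask }

    object InterruptAlarmSketch {
      def interruptMeIn[T](millis: Long)(body: => T): Option[T] = {
        val thisThread = Thread.currentThread
        val alarm = new Timer
        alarm.schedule(new TimerTask { def run() = thisThread.interrupt() }, millis)

        try Some(body)
        catch { case _: InterruptedException => None }      // the alarm fired first
        finally { alarm.cancel() ; Thread.interrupted() }   // stop the timer, clear the flag
      }

      def main(args: Array[String]): Unit = {
        println(interruptMeIn(1000) { 42 })                    // Some(42)
        println(interruptMeIn(100) { Thread.sleep(1000); 42 }) // None
      }
    }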
diff --git a/src/partest-alternative/scala/tools/partest/BuildContributors.scala b/src/partest-alternative/scala/tools/partest/BuildContributors.scala
new file mode 100644
index 0000000000..64c7e07bc3
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/BuildContributors.scala
@@ -0,0 +1,102 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+import nsc.util.ClassPath
+
+trait BuildContributors {
+ universe: Universe =>
+
+ /** A trait mixed into types which contribute a portion of the values.
+ * The basic mechanism is the TestBuild, TestCategory, and TestEntity
+ * can each contribute to each value. They are assembled at the last
+ * moment by the ContributorAssembler (presently the TestEntity.)
+ */
+ trait BuildContributor {
+ def javaFlags: List[String]
+ def scalacFlags: List[String]
+ def classpathPaths: List[Path]
+ def buildProperties: List[(String, Any)]
+ def buildEnvironment: Map[String, String]
+ }
+
+ trait ContributorAssembler {
+ def contributors: List[BuildContributor]
+ def assemble[T](what: BuildContributor => List[T]): List[T] = contributors flatMap what
+
+ /** !!! This will need work if we want to achieve real composability,
+ * but it can wait for the demand.
+ */
+ def assembleScalacArgs(args: List[String]) = assemble(_.scalacFlags) ++ args
+ def assembleJavaArgs(args: List[String]) = assemble(_.javaFlags) ++ args
+ def assembleProperties() = assemble(_.buildProperties)
+ def assembleClasspaths(paths: List[Path]) = assemble(_.classpathPaths) ++ paths
+ def assembleEnvironment() = assemble(_.buildEnvironment.toList).toMap
+
+ def createClasspathString() = ClassPath fromPaths (assembleClasspaths(Nil) : _*)
+ def createPropertyString() = assembleProperties() map { case (k, v) => "-D%s=%s".format(k, v.toString) }
+ }
+
+ trait BuildContribution extends BuildContributor {
+ self: TestBuild =>
+
+ /** The base classpath and system properties.
+ * !!! TODO - this should adjust itself depending on the build
+ * being tested, because pack and quick at least need different jars.
+ */
+ def classpathPaths = List[Path](library, compiler, partest, fjbg) ++ forkJoinPath
+ def buildProperties = List(
+ "scala.home" -> testBuildDir,
+ "partest.lib" -> library, // used in jvm/inner
+ "java.awt.headless" -> true,
+ "user.language" -> "en",
+ "user.country" -> "US",
+ "partest.debug" -> isDebug,
+ "partest.verbose" -> isVerbose
+ // Disabled because there are no native tests.
+ // "java.library.path" -> srcLibDir
+ )
+ def javaFlags: List[String] = toArgs(javaOpts)
+ def scalacFlags: List[String] = toArgs(scalacOpts)
+
+ /** We put the build being tested's /bin directory in the front of the
+ * path so the scripts and such written to execute "scala" will use this
+ * build and not whatever happens to be on their path.
+ */
+ private def modifiedPath = ClassPath.join(scalaBin.path, Properties.envOrElse("PATH", ""))
+ def buildEnvironment = Map("PATH" -> modifiedPath)
+ }
+
+ trait CategoryContribution extends BuildContributor {
+ self: DirBasedCategory =>
+
+ /** Category-wide classpath additions placed in <category>/lib. */
+ private def libContents = root / "lib" ifDirectory (_.list.toList)
+
+ def classpathPaths = libContents getOrElse Nil
+ def buildProperties = Nil
+ def javaFlags = Nil
+ def scalacFlags = Nil
+ def buildEnvironment = Map()
+ }
+
+ trait TestContribution extends BuildContributor with ContributorAssembler {
+ self: TestEntity =>
+
+ def jarsInTestDir = location.walk collect { case f: File if f hasExtension "jar" => f } toList
+
+ def contributors = List(build, category, self)
+ def javaFlags = safeArgs(javaOptsFile)
+ def scalacFlags = safeArgs(scalaOptsFile)
+ def classpathPaths = jarsInTestDir :+ outDir
+ def buildProperties = List(
+ "partest.output" -> outDir.toAbsolute, // used in jvm/inner
+ "partest.cwd" -> outDir.parent.toAbsolute // used in shootout tests
+ )
+ def buildEnvironment = Map("JAVA_OPTS" -> fromArgs(assembleJavaArgs(Nil)))
+ }
+}
\ No newline at end of file
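The contributor/assembler split above works by having each layer (build, category, test) supply its slice of the flags, which the assembler concatenates in order. A toy sketch outside the Universe cake; the objects and flags are purely illustrative:

    object AssembleSketch {
      trait Contributor { def scalacFlags: List[String] }

      object Build    extends Contributor { def scalacFlags = List("-deprecation") }
      object Category extends Contributor { def scalacFlags = Nil }
      object Test     extends Contributor { def scalacFlags = List("-Xcheckinit") }

      // Mirrors ContributorAssembler.assembleScalacArgs: flatten the contributions, append the args.
      def assembleScalacArgs(contributors: List[Contributor], args: List[String]) =
        (contributors flatMap (_.scalacFlags)) ++ args

      def main(args: Array[String]): Unit =
        // prints: List(-deprecation, -Xcheckinit, Foo.scala)
        println(assembleScalacArgs(List(Build, Category, Test), List("Foo.scala")))
    }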
diff --git a/src/partest-alternative/scala/tools/partest/Categories.scala b/src/partest-alternative/scala/tools/partest/Categories.scala
new file mode 100644
index 0000000000..172cca74b4
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Categories.scala
@@ -0,0 +1,70 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+
+import nsc.Settings
+import nsc.io._
+import nsc.util.{ ClassPath }
+
+trait Categories {
+ self: Universe =>
+
+ trait TestCategory extends AbsTestCategory {
+ def kind: String
+ def startMessage: String = "Executing test group"
+ def testSequence: TestSequence
+
+ class TestSettings(entity: TestEntity, error: String => Unit) extends Settings(error) {
+ def this(entity: TestEntity) = this(entity, Console println _)
+
+ deprecation.value = false
+ encoding.value = "ISO-8859-1"
+ classpath.value = entity.testClasspath
+ outdir.value = entity.outDir.path
+ }
+
+ def createSettings(entity: TestEntity): TestSettings = new TestSettings(entity)
+ def createTest(location: Path): TestEntity =
+ if (location.isFile) TestFile(this, location.toFile)
+ else if (location.isDirectory) TestDirectory(this, location.toDirectory)
+ else error("Failed to create test at '%s'" format location)
+
+ /** Category test identification.
+ */
+ def denotesTestFile(p: Path) = p.isFile && (p hasExtension "scala")
+ def denotesTestDir(p: Path) = p.isDirectory && !ignorePath(p)
+ def denotesTest(p: Path) = denotesTestDir(p) || denotesTestFile(p)
+
+ /** This should verify that all necessary files are present.
+ * By default it delegates to denotesTest.
+ */
+ def denotesValidTest(p: Path) = denotesTest(p)
+ }
+
+ abstract class DirBasedCategory(val kind: String) extends TestCategory with CategoryContribution {
+ lazy val root = Directory(src / kind).normalize
+ def enumerate = root.list filter denotesTest map createTest toList
+
+ /** Standard actions. These can be overridden either on the
+ * Category level or by individual tests.
+ */
+ def compile: TestStep = (_: TestEntity).compile()
+ def checkFileRequired: TestStep = (_: TestEntity).checkFileRequired
+ def diff: TestStep = (_: TestEntity).diff()
+ def run: TestStep = (_: TestEntity).run()
+ def exec: TestStep = (_: TestEntity).exec()
+
+ /** Combinators.
+ */
+ def not(f: TestStep): TestStep = !f(_: TestEntity)
+
+ override def toString = kind
+ }
+}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Compilable.scala b/src/partest-alternative/scala/tools/partest/Compilable.scala
new file mode 100644
index 0000000000..ddaa277842
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Compilable.scala
@@ -0,0 +1,106 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import scala.tools.nsc.io._
+import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
+import scala.tools.nsc.util.{ ClassPath }
+import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
+
+trait PartestCompilation {
+ self: Universe =>
+
+ trait CompileExecSupport extends ExecSupport {
+ self: TestEntity =>
+
+ def javacpArg = "-classpath " + testClasspath
+ def scalacpArg = "-usejavacp"
+
+ /** Not used, requires tools.jar.
+ */
+ // def javacInternal(args: List[String]) = {
+ // import com.sun.tools.javac.Main
+ // Main.compile(args.toArray, logWriter)
+ // }
+
+ def javac(args: List[String]): Boolean = {
+ val allArgString = fromArgs(javacpArg :: javacOpts :: args)
+
+ // javac -d outdir -classpath <basepath> <files>
+ val cmd = "%s -d %s %s".format(javacCmd, outDir, allArgString)
+ def traceMsg =
+ if (isVerbose) cmd
+ else "%s -d %s %s".format(tracePath(Path(javacCmd)), tracePath(outDir), fromArgs(args))
+
+ trace(traceMsg)
+
+ isDryRun || execAndLog(cmd)
+ }
+
+ def scalac(args: List[String]): Boolean = {
+ val allArgs = assembleScalacArgs(args)
+ val (global, files) = newGlobal(allArgs)
+ def nonFileArgs = if (isVerbose) global.settings.recreateArgs else assembleScalacArgs(Nil)
+ def traceArgs = fromArgs(nonFileArgs ++ (files map tracePath))
+ def traceMsg = "scalac " + traceArgs
+
+ trace(traceMsg)
+ isDryRun || global.partestCompile(files, true)
+ }
+
+ /** Actually running the test, post compilation.
+ * Normally args will be List("Test", "jvm"), main class and arg to it.
+ */
+ def runScala(args: List[String]): Boolean = {
+ val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
+
+ // java $JAVA_OPTS <javaopts> -classpath <cp>
+ val javaCmdAndOptions = javaCmd +: assembleJavaArgs(List(javacpArg))
+ // MainGenericRunner -usejavacp <scalacopts> Test jvm
+ val scalaCmdAndOptions = List(scalaRunnerClass, scalacpArg) ++ assembleScalacArgs(args)
+ // Assembled
+ val cmd = fromArgs(javaCmdAndOptions ++ createPropertyString() ++ scalaCmdAndOptions)
+
+ def traceMsg = if (isVerbose) cmd else fromArgs(javaCmd :: args)
+ trace("runScala: " + traceMsg)
+
+ isDryRun || execAndLog(cmd)
+ }
+
+ def newReporter(settings: Settings) = new ConsoleReporter(settings, Console.in, logWriter)
+
+ class PartestGlobal(settings: Settings, val creporter: ConsoleReporter) extends Global(settings, creporter) {
+ def partestCompile(files: List[String], printSummary: Boolean): Boolean = {
+ try { new Run compile files }
+ catch {
+ case FatalError(msg) => creporter.error(null, "fatal error: " + msg)
+ case ae: AssertionError => creporter.error(null, ""+ae)
+ case te: TypeError => creporter.error(null, ""+te)
+ case ex =>
+ creporter.error(null, ""+ex)
+ throw ex
+ }
+
+ if (printSummary)
+ creporter.printSummary
+
+ creporter.flush()
+ !creporter.hasErrors
+ }
+ }
+
+ def newGlobal(args: List[String]): (PartestGlobal, List[String]) = {
+ val settings = category createSettings self
+ val command = new CompilerCommand(args, settings)
+ val reporter = newReporter(settings)
+
+ if (!command.ok)
+ debug("Error parsing arguments: '%s'".format(args mkString ", "))
+
+ (new PartestGlobal(command.settings, reporter), command.files)
+ }
+ }
+}
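The scalac path above (newGlobal plus partestCompile) is the usual way of embedding the nsc compiler: build a Settings, a reporter, a Global, then run a compilation Run. A rough standalone sketch of that flow, outside the Universe cake; exact Settings fields vary between compiler versions, so treat the names as assumptions:

    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    object CompileSketch {
      def compile(outDir: String, files: List[String]): Boolean = {
        val settings = new Settings(msg => Console.err.println(msg))
        settings.usejavacp.value = true      // plays the role of scalacpArg above
        settings.outdir.value = outDir
        val reporter = new ConsoleReporter(settings)
        val global = new Global(settings, reporter)

        new global.Run compile files         // like partestCompile, minus the error wrapping
        reporter.printSummary()
        !reporter.hasErrors
      }

      def main(args: Array[String]): Unit =
        println(compile(".", args.toList))
    }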
diff --git a/src/partest-alternative/scala/tools/partest/Config.scala b/src/partest-alternative/scala/tools/partest/Config.scala
new file mode 100644
index 0000000000..288a3034e9
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Config.scala
@@ -0,0 +1,129 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import io._
+import nsc.io._
+import Properties._
+
+trait Config {
+ universe: Universe =>
+
+ lazy val src = absolutize(srcDir).toDirectory
+ lazy val build = new TestBuild()
+
+ def javaHomeEnv = envOrElse("JAVA_HOME", null)
+ def javaCmd = envOrElse("JAVACMD", "java")
+ def javacCmd = Option(javaHomeEnv) map (x => Path(x) / "bin" / "javac" path) getOrElse "javac"
+
+ /** Values related to actors. The timeouts are in seconds. On a dry
+ * run we only allocate one worker so the output isn't interspersed.
+ */
+ def workerTimeout = 3600 // 1 hour, probably overly generous
+ def numWorkers = if (isDryRun) 1 else propOrElse("partest.actors", "8").toInt
+ def expectedErrors = propOrElse("partest.errors", "0").toInt
+ def poolSize = (wrapAccessControl(propOrNone("actors.corePoolSize")) getOrElse "16").toInt
+
+ def allScalaFiles = src.deepFiles filter (_ hasExtension "scala")
+ def allObjDirs = src.deepDirs filter (_ hasExtension "obj")
+ def allLogFiles = src.deepFiles filter (_ hasExtension "log")
+ def allClassFiles = src.deepFiles filter (_ hasExtension "class")
+
+ class TestBuild() extends BuildContribution {
+ import nsc.util.ClassPath
+
+ /** Scala core libs.
+ */
+ val library = pathForComponent("library")
+ val compiler = pathForComponent("compiler")
+ val partest = pathForComponent("partest")
+ val scalap = pathForComponent("scalap", "%s.jar")
+
+ /** Scala supplementary libs - these are not all needed for all build targets,
+ * and some of them are copied inside other jars in later targets. However quick,
+ * for instance, cannot be run without some of these.
+ */
+ val fjbg = pathForLibrary("fjbg")
+ val msil = pathForLibrary("msil")
+ val forkjoin = pathForLibrary("forkjoin")
+ val scalacheck = pathForLibrary("scalacheck")
+
+ /** Other interesting paths.
+ */
+ val scalaBin = testBuildDir / "bin"
+
+ /** A hack for now to get quick running.
+ */
+ def needsForkJoin = {
+ val loader = nsc.util.ScalaClassLoader.fromURLs(List(library.toURL))
+ val fjMarker = "scala.concurrent.forkjoin.ForkJoinTask"
+ val clazz = loader.tryToLoadClass(fjMarker)
+
+ if (clazz.isDefined) debug("Loaded ForkJoinTask OK, don't need jar.")
+ else debug("Could not load ForkJoinTask, putting jar on classpath.")
+
+ clazz.isEmpty
+ }
+ lazy val forkJoinPath: List[Path] = if (needsForkJoin) List(forkjoin) else Nil
+
+ /** Internal **/
+ private def repo = partestDir.parent.normalize
+ // XXX - is this needed? Where?
+ //
+ // private val pluginOptionString = "-Xplugin:"
+ // private def updatedPluginPath(options: String): String = {
+ // val (pluginArgs, rest) = toArgs(options) partition (_ startsWith pluginOptionString)
+ // // join all plugin paths as one classpath
+ // val pluginPaths = ClassPath.join(pluginArgs map (_ stripPrefix pluginOptionString): _*)
+ // // map all paths to absolute
+ // val newPath = ClassPath.map(pluginPaths, x => absolutize(x).path)
+ // // recreate option
+ // val pluginOption = if (newPath == "") None else Some(pluginOptionString + newPath)
+ //
+ // fromArgs(rest ::: pluginOption.toList)
+ // }
+
+ private def pathForComponent(what: String, jarFormat: String = "scala-%s.jar"): Path = {
+ def asDir = testBuildDir / "classes" / what
+ def asJar = testBuildDir / "lib" / jarFormat.format(what)
+
+ if (asDir.isDirectory) asDir
+ else if (asJar.isFile) asJar
+ else ""
+ }
+ private def pathForLibrary(what: String) = File(repo / "lib" / (what + ".jar"))
+ }
+
+ def printConfigBanner() = {
+ debug("Java VM started with arguments: '%s'" format fromArgs(Process.javaVmArguments))
+ debug("System Properties:\n" + util.allPropertiesString())
+
+ normal(configBanner())
+ }
+
+ /** Treat an access control failure as None. */
+ private def wrapAccessControl[T](body: => Option[T]): Option[T] =
+ try body catch { case _: java.security.AccessControlException => None }
+
+ private def configBanner() = {
+ val javaBin = Path(javaHome) / "bin"
+ val javaInfoString = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
+
+ List(
+ "Scala compiler classes in: " + testBuildDir,
+ "Scala version is: " + nsc.Properties.versionMsg,
+ "Scalac options are: " + universe.scalacOpts,
+ "Java binaries in: " + javaBin,
+ "Java runtime is: " + javaInfoString,
+ "Java runtime options: " + (Process.javaVmArguments mkString " "),
+ "Javac options are: " + universe.javacOpts,
+ "Java options are: " + universe.javaOpts,
+ "Source directory is: " + src,
+ "Selected categories: " + (selectedCategories mkString " "),
+ ""
+ ) mkString "\n"
+ }
+}
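pathForComponent above prefers an unpacked classes directory and falls back to the corresponding jar. The same decision written as a toy against plain java.io.File, with hypothetical paths:

    import java.io.File

    object ComponentPathSketch {
      def pathForComponent(testBuildDir: File, what: String, jarFormat: String = "scala-%s.jar"): Option[File] = {
        val asDir = new File(testBuildDir, "classes/" + what)
        val asJar = new File(testBuildDir, "lib/" + jarFormat.format(what))

        if (asDir.isDirectory) Some(asDir)   // e.g. build/pack/classes/library
        else if (asJar.isFile) Some(asJar)   // e.g. build/pack/lib/scala-library.jar
        else None                            // the trait above falls back to ""
      }

      def main(args: Array[String]): Unit =
        println(pathForComponent(new File("build/pack"), "library"))
    }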
diff --git a/src/partest-alternative/scala/tools/partest/Dispatcher.scala b/src/partest-alternative/scala/tools/partest/Dispatcher.scala
new file mode 100644
index 0000000000..2a9d99ab60
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Dispatcher.scala
@@ -0,0 +1,162 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import scala.tools.nsc.io._
+import scala.actors.{ Actor, TIMEOUT }
+import scala.actors.Actor._
+import scala.collection.immutable
+import scala.util.control.Exception.ultimately
+
+/** The machinery for concurrent execution of tests. Each Worker
+ * is given a bundle of tests, which it runs sequentially and then
+ * sends a report back to the dispatcher.
+ */
+trait Dispatcher {
+ partest: Universe =>
+
+ /** The public entry point. The given filter narrows down the list of
+ * tests to run.
+ */
+ def runSelection(categories: List[TestCategory], filt: TestEntity => Boolean = _ => true): CombinedTestResults = {
+ // Setting scala.home informs tests where to obtain their jars.
+ setProp("scala.home", testBuildDir.path)
+
+ val allTests = allCategories flatMap (_.enumerate)
+ val selected = allTests filter filt
+ val groups = selected groupBy (_.category)
+ val count = selected.size
+
+ if (count == 0) return CombinedTestResults(0, 0, 0, Nil)
+ else if (count == allTests.size) verbose("Running all %d tests." format count)
+ else verbose("Running %d/%d tests: %s".format(count, allTests.size, toStringTrunc(selected map (_.label) mkString ", ")))
+
+ allCategories collect { case x if groups contains x => runCategory(x, groups(x)) } reduceLeft (_ ++ _)
+ }
+
+ private def parallelizeTests(tests: List[TestEntity]): immutable.Map[TestEntity, TestResult] = {
+ // propagate verbosity
+ if (isDebug) scala.actors.Debug.level = 3
+
+ // "If elected, I guarantee a slice of tests for every worker!"
+ val groups = tests grouped ((tests.size / numWorkers) + 1) toList
+
+ // "Workers, line up for assignments!"
+ val workers =
+ for ((slice, workerNum) <- groups.zipWithIndex) yield {
+ returning(new Worker(workerNum)) { worker =>
+ worker.start()
+ worker ! TestsToRun(slice)
+ }
+ }
+
+ normal("Started %d workers with ~%d tests each.\n".format(groups.size, groups.head.size))
+
+ /** Listening for news from the proletariat.
+ */
+ (workers map { w =>
+ receiveWithin(workerTimeout * 1000) {
+ case ResultsOfRun(resultMap) => resultMap
+ case TIMEOUT =>
+ warning("Worker %d timed out." format w.workerNum)
+ // mark all the worker's tests as having timed out - should be hard to miss
+ // immutable.Map[TestEntity, TestResult]()
+ groups(w.workerNum) map (x => (x -> new Timeout(x))) toMap
+ }
+ }) reduceLeft (_ ++ _)
+ }
+
+ private def runCategory(category: TestCategory, tests: List[TestEntity]): CombinedTestResults = {
+ val kind = category.kind
+ normal("%s (%s tests in %s)\n".format(category.startMessage, tests.size, category))
+
+ val (milliSeconds, resultMap) = timed2(parallelizeTests(tests))
+ val (passed, failed) = resultsToStatistics(resultMap mapValues (_.state))
+ val failures = resultMap.values filterNot (_.passed) toList
+
+ CombinedTestResults(passed, failed, milliSeconds, failures)
+ }
+
+ /** A Worker is given a bundle of tests and runs them all sequentially.
+ */
+ class Worker(val workerNum: Int) extends Actor {
+ def act() {
+ react { case TestsToRun(tests) =>
+ val master = sender
+ runTests(tests)(results => master ! ResultsOfRun(results))
+ }
+ }
+
+ /** Runs the tests. Passes the result Map to onCompletion when done.
+ */
+ private def runTests(tests: List[TestEntity])(onCompletion: immutable.Map[TestEntity, TestResult] => Unit) {
+ var results = new immutable.HashMap[TestEntity, TestResult] // maps tests to results
+ val numberOfTests = tests.size
+ val testIterator = tests.iterator
+ def processed = results.size
+ def isComplete = testIterator.isEmpty
+
+ def atThreshold(num: Double) = {
+ require(num >= 0 && num <= 1.0)
+ ((processed - 1).toDouble / numberOfTests <= num) && (processed.toDouble / numberOfTests >= num)
+ }
+
+ def extraMessage = {
+ // for now quiet for normal people
+ if (isVerbose || isTrace || isDebug) {
+ if (isComplete) "(#%d 100%%)" format workerNum
+ else if (isVerbose) "(#%d %d/%d)".format(workerNum, processed, numberOfTests)
+ else if (isTrace && atThreshold(0.5)) "(#%d 50%%)" format workerNum
+ else ""
+ }
+ else ""
+ }
+
+ def countAndReport(result: TestResult) {
+ val TestResult(test, state) = result
+ // refuse to count an entity twice
+ if (results contains test)
+ return warning("Received duplicate result for %s: was %s, now %s".format(test, results(test), state))
+
+ // increment the counter for this result state
+ results += (test -> result)
+
+ // show on screen
+ if (isDryRun) normal("\n") // blank line between dry run traces
+ else result show extraMessage
+
+ // remove log if successful
+ if (result.passed)
+ test.deleteLog()
+
+ // Respond to master if this Worker is complete
+ if (isComplete)
+ onCompletion(results)
+ }
+
+ Actor.loopWhile(testIterator.hasNext) {
+ val parent = self
+ // pick a test and set some alarms
+ val test = testIterator.next
+ val alarmer = test startAlarms (parent ! new Timeout(test))
+
+ actor {
+ ultimately(alarmer.cancelAll()) {
+ // Calling isSuccess forces the lazy val "process" inside the test, running it.
+ val res = test.isSuccess
+ // Cancel the alarms and alert the media.
+ parent ! TestResult(test, res)
+ }
+ }
+
+ react {
+ case x: TestResult => countAndReport(x)
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
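The slice size used by parallelizeTests, (tests.size / numWorkers) + 1, is easiest to sanity-check with small numbers; a standalone sketch with arbitrary counts:

    object SliceDemo {
      def main(args: Array[String]): Unit = {
        val tests = (1 to 10).toList
        val numWorkers = 3

        // Same grouping as parallelizeTests: 10 tests, 3 workers => slices of 4, 4, 2.
        val groups = tests.grouped((tests.size / numWorkers) + 1).toList

        groups.zipWithIndex foreach { case (slice, workerNum) =>
          println("worker %d gets %d tests: %s".format(workerNum, slice.size, slice mkString ", "))
        }
      }
    }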
diff --git a/src/partest-alternative/scala/tools/partest/Entities.scala b/src/partest-alternative/scala/tools/partest/Entities.scala
new file mode 100644
index 0000000000..bea505b594
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Entities.scala
@@ -0,0 +1,74 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+
+trait Entities {
+ self: Universe =>
+
+ abstract class TestEntity extends AbsTestEntity
+ with TestContribution
+ with TestHousekeeping
+ with TestAlarms
+ with EntityLogging
+ with CompilableTest
+ with ScriptableTest
+ with DiffableTest {
+ def location: Path
+ def category: TestCategory
+
+ lazy val label = location.stripExtension
+ lazy val testClasspath = returning(createClasspathString())(x => vtrace("testClasspath: " + x))
+
+ /** Was this test successful? Calling this for the first time forces
+ * lazy val "process" which actually runs the test.
+ */
+ def isSuccess = process
+
+ /** Some standard files, which may or may not be present.
+ */
+ def scalaOptsFile = withExtension("flags").toFile // opts to scalac
+ def javaOptsFile = withExtension("javaopts").toFile // opts to java (but not javac)
+ def commandFile = withExtension("cmds").toFile // sequence of commands to execute
+ def logFile = withExtension("log").toFile // collected output
+
+ /** Some standard directories.
+ */
+ def outDir = withExtension("obj").toDirectory // output dir, e.g. files/pos/t14.obj
+ def categoryDir = location.parent.normalize // category dir, e.g. files/pos/
+ def sourcesDir = location ifDirectory (_.normalize) getOrElse categoryDir
+
+ /** Standard arguments for run, exec, diff.
+ */
+ def argumentsToRun = List("Test", "jvm")
+ def argumentsToExec = List(location.path)
+
+ /** Using a .cmds file for a custom test sequence.
+ */
+ def commandList = safeLines(commandFile)
+ def testSequence =
+ if (commandFile.isFile && commandList.nonEmpty) commandList map customTestStep
+ else category.testSequence
+
+ def run() = runScala(argumentsToRun)
+ def exec() = runExec(argumentsToExec)
+ def diff() = runDiff() // checkFile, logFile
+
+ /** The memoized result of the test run.
+ */
+ private lazy val process = {
+ val outcome = runWrappers(testSequence.actions forall (f => f(this)))
+
+ // an empty outcome means we've been interrupted and are shutting down.
+ outcome getOrElse false
+ }
+ }
+
+ case class TestDirectory(category: TestCategory, location: Directory) extends TestEntity { }
+ case class TestFile(category: TestCategory, location: File) extends TestEntity { }
+}
diff --git a/src/partest-alternative/scala/tools/partest/Housekeeping.scala b/src/partest-alternative/scala/tools/partest/Housekeeping.scala
new file mode 100644
index 0000000000..a624ca8adb
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Housekeeping.scala
@@ -0,0 +1,187 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import scala.util.control.Exception.catching
+import util._
+import nsc.io._
+import Process.runtime
+import Properties._
+
+/** An agglomeration of code which is low on thrills. Hopefully
+ * it operates so quietly in the background that you never have to
+ * look at this file.
+ */
+trait Housekeeping {
+ self: Universe =>
+
+ /** Orderly shutdown on ctrl-C. */
+ @volatile private var _shuttingDown = false
+ protected def setShuttingDown() = {
+ /** Whatever we want to do as shutdown begins goes here. */
+ if (!_shuttingDown) {
+ warning("Received shutdown signal, partest is cleaning up...\n")
+ _shuttingDown = true
+ }
+ }
+ def isShuttingDown = _shuttingDown
+
+ /** Execute some code with a shutdown hook in place. This is
+ * motivated by the desire not to leave the filesystem full of
+ * junk when someone ctrl-Cs a test run.
+ */
+ def withShutdownHook[T](hook: => Unit)(body: => T): Option[T] =
+ /** Java doesn't like it if you keep adding and removing shutdown
+ * hooks after shutdown has begun, so we trap the failure.
+ */
+ catching(classOf[IllegalStateException]) opt {
+ val t = new Thread() {
+ override def run() = {
+ setShuttingDown()
+ hook
+ }
+ }
+ runtime addShutdownHook t
+
+ try body
+ finally runtime removeShutdownHook t
+ }
+
+ /** Search for a directory, possibly given only a name, by starting
+ * at the current dir and walking upward looking for it at each level.
+ */
+ protected def searchForDir(name: String): Directory = {
+ val result = Path(name) ifDirectory (x => x.normalize) orElse {
+ val cwd = Directory.Current getOrElse error("user.dir property not set")
+ val dirs = cwd :: cwd.parents map (_ / name)
+
+ Path onlyDirs dirs map (_.normalize) headOption
+ }
+
+ result getOrElse error("Fatal: could not find directory '%s'" format name)
+ }
+
+ /** Paths we ignore for most purposes.
+ */
+ def ignorePath(x: Path) = {
+ (x.name startsWith ".") ||
+ (x.isDirectory && ((x.name == "lib") || x.hasExtension("obj", "svn")))
+ }
+ /** Make a possibly relative path absolute using partestDir as the base.
+ */
+ def absolutize(path: String) = Path(path) toAbsoluteWithRoot partestDir
+
+ /** Go on a deleting binge.
+ */
+ def cleanupAll() {
+ if (isNoCleanup)
+ return
+
+ val (dirCount, fileCount) = (cleanupObjDirs(), cleanupLogs() + cleanupJunk())
+ if (dirCount + fileCount > 0)
+ normal("Cleaned up %d directories and %d files.\n".format(dirCount, fileCount))
+ }
+
+ def cleanupObjDirs() = countTrue(allObjDirs collect { case x if x.exists => x.deleteRecursively() })
+ def cleanupJunk() = countTrue(allClassFiles collect { case x if x.exists => x.delete() })
+ def cleanupLogs() = countTrue(allLogFiles collect { case x if x.exists => x.delete() })
+
+ /** Look through every file in the partest directory and ask around
+ * to make sure someone knows him. Complain about strangers.
+ */
+ def validateAll() {
+ def denotesTest(p: Path) = allCategories exists (_ denotesTest p)
+ def isMSILcheck(p: Path) = p.name endsWith "-msil.check"
+
+ def analyzeCategory(cat: DirBasedCategory) = {
+ val allTests = cat.enumerate
+ val otherPaths = cat.root walkFilter (x => !ignorePath(x)) filterNot (cat denotesTest _) filterNot isMSILcheck toList
+ val count = otherPaths.size
+
+ println("Validating %d non-test paths in %s.".format(count, cat.kind))
+
+ for (path <- otherPaths) {
+ (allTests find (_ acknowledges path)) match {
+ case Some(test) => if (isVerbose) println(" OK: '%s' is claimed by '%s'".format(path, test.label))
+ case _ => println(">> Unknown path '%s'" format path)
+ }
+ }
+ }
+
+ allCategories collect { case x: DirBasedCategory => analyzeCategory(x) }
+ }
+
+ trait TestHousekeeping {
+ self: TestEntity =>
+
+ /** Calculating derived files. Given a test like
+ * files/run/foo.scala or files/run/foo/
+ * This creates paths like foo.check, foo.flags, etc.
+ */
+ def withExtension(extension: String) = categoryDir / "%s.%s".format(label, extension)
+
+ /** True for a path if this test acknowledges it belongs to this test.
+ * Overridden by some categories.
+ */
+ def acknowledges(path: Path): Boolean = {
+ val loc = location.normalize
+ val knownPaths = List(scalaOptsFile, javaOptsFile, commandFile, logFile, checkFile) ++ jarsInTestDir
+ def isContainedSource = location.isDirectory && isJavaOrScala(path) && (path.normalize startsWith loc)
+
+ (knownPaths exists (_ isSame path)) || isContainedSource
+ }
+
+ /** This test "responds to" this String. This could mean anything -- it's a
+ * way of specifying ad-hoc collections of tests to exercise only a subset of tests.
+ * At present it looks for the given String in all the test sources.
+ */
+ def respondsToString(str: String) = containsString(str)
+ def containsString(str: String) = {
+ debug("Checking %s for \"%s\"".format(sourceFiles mkString ", ", str))
+ sourceFiles map safeSlurp exists (_ contains str)
+ }
+
+ def possiblyTimed[T](body: => T): T = {
+ if (isStats) timed(recordTestTiming(label, _))(body)
+ else body
+ }
+
+ private def prepareForTestRun() = {
+ // make sure we have a clean slate
+ deleteLog(force = true)
+ if (outDir.exists)
+ outDir.deleteRecursively()
+
+ // recreate object dir
+ outDir createDirectory true
+ }
+ def deleteOutDir() = outDir.deleteRecursively()
+ def deleteShutdownHook() = { debug("Shutdown hook deleting " + outDir) ; deleteOutDir() }
+
+ protected def runWrappers[T](body: => T): Option[T] = {
+ prepareForTestRun()
+
+ withShutdownHook(deleteShutdownHook()) {
+ loggingOutAndErr {
+ val result = possiblyTimed { body }
+ if (!isNoCleanup)
+ deleteOutDir()
+
+ result
+ }
+ }
+ }
+
+ override def toString = location.path
+ override def equals(other: Any) = other match {
+ case x: TestEntity => location.normalize == x.location.normalize
+ case _ => false
+ }
+ override def hashCode = location.normalize.hashCode
+ }
+
+ private def countTrue(f: => Iterator[Boolean]) = f filter (_ == true) length
+}
\ No newline at end of file
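searchForDir above resolves a bare directory name by trying the current directory and then each parent in turn. A self-contained sketch of the same upward walk using java.io.File; the directory name is just an example:

    import java.io.File

    object SearchUpSketch {
      def parents(f: File): List[File] =
        f :: (Option(f.getParentFile).toList flatMap parents)

      // First ancestor of the current directory (itself included) that contains <name>.
      def searchForDir(name: String): Option[File] = {
        val cwd = new File(System.getProperty("user.dir")).getAbsoluteFile
        parents(cwd) map (p => new File(p, name)) find (_.isDirectory)
      }

      def main(args: Array[String]): Unit =
        println(searchForDir("test") getOrElse "not found")
    }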
diff --git a/src/partest-alternative/scala/tools/partest/Partest.scala b/src/partest-alternative/scala/tools/partest/Partest.scala
new file mode 100644
index 0000000000..b3fe9a98ef
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Partest.scala
@@ -0,0 +1,81 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+import nsc.util._
+import category.AllCategories
+
+/** Global object for a Partest run. It is completely configured by the list
+ * of arguments passed to the constructor (although there are a few properties
+ * and environment variables which can influence matters.) See PartestSpec.scala
+ * for the complete list.
+ */
+class Partest(args: List[String]) extends {
+ val parsed = PartestSpec(args: _*)
+} with Universe with PartestSpec with cmd.Instance with AllCategories {
+
+ if (parsed.propertyArgs.nonEmpty)
+ debug("Partest property args: " + fromArgs(parsed.propertyArgs))
+
+ debug("Partest created with args: " + fromArgs(args))
+
+ def helpMsg = PartestSpec.helpMsg
+
+ // The abstract values from Universe.
+ lazy val testBuildDir = searchForDir(buildDir)
+ lazy val partestDir = searchForDir(rootDir)
+ lazy val allCategories = List(Pos, Neg, Run, Jvm, Res, Shootout, Scalap, Scalacheck, BuildManager, Script)
+ lazy val selectedCategories = if (isAllImplied) allCategories else specifiedCats
+
+ def specifiedTests = parsed.residualArgs map (x => Path(x).normalize)
+ def specifiedKinds = testKinds filter (x => isSet(x) || (runSets contains x))
+ def specifiedCats = specifiedKinds flatMap (x => allCategories find (_.kind == x))
+ def isAllImplied = isAll || (specifiedTests.isEmpty && specifiedKinds.isEmpty)
+
+ /** Assembles a filter based on command line options which restrict the test set
+ * --grep limits to only matching tests
+ * --failed limits to only recently failed tests (log file is present)
+ * --<category> limits to only the given tests and categories (but --all overrides)
+ * path/to/Test limits to only the given tests and categories
+ */
+ lazy val filter = {
+ def indivFilter(test: TestEntity) = specifiedTests contains test.location.normalize
+ def categoryFilter(test: TestEntity) = specifiedCats contains test.category
+ def indivOrCat(test: TestEntity) = isAllImplied || indivFilter(test) || categoryFilter(test) // combines previous two
+
+ def failedFilter(test: TestEntity) = !isFailed || (test.logFile exists)
+ def grepFilter(test: TestEntity) = grepExpr.isEmpty || (test containsString grepExpr.get)
+ def combinedFilter(x: TestEntity) = indivOrCat(x) && failedFilter(x) && grepFilter(x) // combines previous three
+
+ combinedFilter _
+ }
+
+ def launchTestSuite() = {
+ def onTimeout() = {
+ warning("Partest test run timed out after " + timeout + " seconds.\n")
+ System.exit(-1)
+ }
+ val alarm = new Alarmer(AlarmerAction(timeout, () => onTimeout()))
+
+ try runSelection(selectedCategories, filter)
+ finally alarm.cancelAll()
+ }
+}
+
+object Partest {
+ def fromBuild(dir: String, args: String*): Partest = apply("--builddir" +: dir +: args: _*)
+ def apply(args: String*): Partest = new Partest(args.toList)
+
+ // builds without partest jars won't actually work
+ def starr() = fromBuild("")
+ def locker() = fromBuild("build/locker")
+ def quick() = fromBuild("build/quick")
+ def pack() = fromBuild("build/pack")
+ def strap() = fromBuild("build/strap")
+ def dist() = fromBuild("dists/latest")
+}
+
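The companion object above also allows launching partest programmatically. A hedged sketch, assuming the partest classes and a built compiler are on the classpath; the arguments are illustrative:

    object LaunchSketch {
      def main(args: Array[String]): Unit = {
        // Roughly equivalent to: test/partest --builddir build/pack --pos --neg --terse
        val runner = scala.tools.partest.Partest("--builddir", "build/pack", "--pos", "--neg", "--terse")

        runner.printConfigBanner()
        val results = runner.launchTestSuite()   // a CombinedTestResults
        println(results)
        System.exit(results.exitCode)
      }
    }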
diff --git a/src/partest-alternative/scala/tools/partest/PartestSpec.scala b/src/partest-alternative/scala/tools/partest/PartestSpec.scala
new file mode 100644
index 0000000000..c25119b3af
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/PartestSpec.scala
@@ -0,0 +1,104 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+import cmd._
+
+/** This takes advantage of bits of scala goodness to fully define a command
+ * line program with a minimum of duplicated code. When the specification object
+ * is created, the vals are evaluated in order and each of them side effects
+ * a private accumulator. What emerges is a full list of the valid unary
+ * and binary arguments, as well as autogenerated help.
+ */
+trait PartestSpec extends Spec with Meta.StdOpts with Interpolation {
+ def referenceSpec = PartestSpec
+ def programInfo = Spec.Info("partest", "", "scala.tools.partest.Runner")
+ private val kind = new Spec.Accumulator[String]()
+ protected def testKinds = kind.get
+
+ private implicit val tokenizeString = FromString.ArgumentsFromString // String => List[String]
+
+ help("""
+ |# Pro Tip! Instant bash completion: `partest --bash` (note backticks)
+ |Usage: partest [<options>] [<test> <test> ...]
+ | <test>: a path to a test designator, typically a .scala file or a directory.
+ | Examples: files/pos/test1.scala, files/res/bug785
+ |
+ | Test categories:""".stripMargin)
+
+ val isAll = ("all" / "run all tests (default, unless no options given)" --?)
+ (kind("pos") / "Compile files that are expected to build" --?)
+ (kind("neg") / "Compile files that are expected to fail" --?)
+ (kind("run") / "Test JVM backend" --?)
+ (kind("jvm") / "Test JVM backend" --?)
+ (kind("res") / "Run resident compiler scenarii" --?)
+ (kind("buildmanager") / "Run Build Manager scenarii" --?)
+ (kind("scalacheck") / "Run Scalacheck tests" --?)
+ (kind("script") / "Run script files" --?)
+ (kind("shootout") / "Run shootout tests" --?)
+ (kind("scalap") / "Run scalap tests" --?)
+
+ heading ("""Test "smart" categories:""")
+ val grepExpr = "grep" / "run all tests with a source file containing <expr>" --|
+ val isFailed = "failed" / "run all tests which failed on the last run" --?
+
+ heading ("Specifying paths and additional flags, ~ means repository root:")
+
+ val rootDir = "rootdir" / "path from ~ to partest" defaultTo "test"
+ val buildDir = "builddir" / "path from ~ to test build" defaultTo "build/pack"
+ val srcDir = "srcdir" / "path from --rootdir to sources" defaultTo "files"
+ val javaOpts = "javaopts" / "flags to java on all runs" defaultToEnv "JAVA_OPTS"
+ val javacOpts = "javacopts" / "flags to javac on all runs" defaultToEnv "JAVAC_OPTS"
+ val scalacOpts = "scalacopts" / "flags to scalac on all tests" defaultToEnv "SCALAC_OPTS"
+
+ "pack" / "" expandTo ("--builddir", "build/pack")
+ "quick" / "" expandTo ("--builddir", "build/quick")
+
+ heading ("Options influencing output:")
+ val isTrace = "trace" / "show the individual steps taken by each test" --?
+ val isShowDiff = "show-diff" / "show diff between log and check file" --?
+ val isShowLog = "show-log" / "show log on failures" --?
+ val isDryRun = "dry-run" / "do not run tests, only show their traces." --?
+ val isTerse = "terse" / "be less verbose (almost silent except for failures)" --?
+ val isVerbose = "verbose" / "be more verbose (additive with --trace)" --?
+ val isDebug = "debug" / "maximum debugging output" --?
+ val isAnsi = "ansi" / "print output in color" --?
+
+ heading ("Other options:")
+ val timeout = "timeout" / "Overall timeout in seconds" defaultTo 7200
+ val testWarning = "test-warning" / "Test warning in seconds" defaultTo 90
+ val testTimeout = "test-timeout" / "Test timeout in seconds" defaultTo 900
+ val isCleanup = "cleanup" / "delete all stale files and dirs before run" --?
+ val isNoCleanup = "nocleanup" / "do not delete any logfiles or object dirs" --?
+ val isStats = "stats" / "collect and print statistics about the tests" --?
+ val isValidate = "validate" / "examine test filesystem for inconsistencies" --?
+ val isUpdateCheck = "update-check" / "overwrite checkFile if diff fails" --?
+
+ "version" / "print version" --> runAndExit(println(Properties.versionMsg))
+
+ // no help for anything below this line - secret options
+ // mostly intended for property configuration.
+ val runSets = ("runsets" --^) getOrElse Nil
+ val isNoAlarms = "noalarms" --?
+ val isInsideAnt = "is-in-ant" --?
+}
+
+object PartestSpec extends PartestSpec with Property {
+ lazy val propMapper = new PropertyMapper(PartestSpec) {
+ override def isPassThrough(key: String) = key == "partest.options"
+ }
+
+ type ThisCommandLine = PartestCommandLine
+ class PartestCommandLine(args: List[String]) extends SpecCommandLine(args) {
+ override def errorFn(msg: String) = printAndExit("Error: " + msg)
+
+ def propertyArgs = PartestSpec.propertyArgs
+ }
+
+ override def creator(args: List[String]): PartestCommandLine = new PartestCommandLine(args)
+}
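The "vals side-effect a private accumulator" mechanism described above can be shown in miniature; this toy accumulator is not the real Spec machinery from scala.tools.cmd, only the evaluation-order idea behind it:

    object SpecAccumulatorSketch {
      // Stand-in for Spec.Accumulator: each val registers itself as it is initialized.
      class Accumulator[T] {
        private var buf: List[T] = Nil
        def +=(x: T): T = { buf ::= x ; x }
        def get: List[T] = buf.reverse
      }

      val kind = new Accumulator[String]
      val pos  = kind += "pos"
      val neg  = kind += "neg"
      val run  = kind += "run"

      def main(args: Array[String]): Unit =
        println(kind.get)   // List(pos, neg, run) -- declaration order is preserved
    }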
diff --git a/src/partest-alternative/scala/tools/partest/Properties.scala b/src/partest-alternative/scala/tools/partest/Properties.scala
new file mode 100644
index 0000000000..a1ad1e5cbc
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Properties.scala
@@ -0,0 +1,17 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.tools
+package partest
+
+/** Loads partest.properties from the jar. */
+object Properties extends scala.util.PropertiesTrait {
+ protected def propCategory = "partest"
+ protected def pickJarBasedOn = classOf[Application]
+}
diff --git a/src/partest-alternative/scala/tools/partest/Results.scala b/src/partest-alternative/scala/tools/partest/Results.scala
new file mode 100644
index 0000000000..5d0e300136
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Results.scala
@@ -0,0 +1,121 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import scala.collection.immutable
+
+trait Results {
+ self: Universe =>
+
+ /** A collection of tests for a Worker.
+ */
+ case class TestsToRun(entities: List[TestEntity])
+
+ /** The response from a Worker who has been given TestsToRun.
+ */
+ case class ResultsOfRun(results: immutable.Map[TestEntity, TestResult])
+
+ /** The result of a single test. (0: OK, 1: FAILED, 2: TIMEOUT)
+ */
+ sealed abstract class TestResult(val state: Int, val description: String) {
+ def entity: TestEntity
+
+ def passed = state == 0
+ def colorize(s: String): String
+ def show(msg: String) =
+ if (!isShuttingDown)
+ showResult(colorize(description), msg)
+
+ private def outputPrefix = if (isInsideAnt) "" else markNormal("partest: ")
+ private def name = src relativize entity.location // e.g. "neg/test.scala"
+ private def showResult(status: String, extraMsg: String) =
+ normal(outputPrefix + "[...]/%-40s [%s] %s\n".format(name, status, extraMsg))
+
+ override def equals(other: Any) = other match {
+ case x: TestResult => entity == x.entity
+ case _ => false
+ }
+ override def hashCode = entity.hashCode
+ override def toString = "%s [%s]".format(entity, description)
+ }
+
+ class Success(val entity: TestEntity) extends TestResult(0, " OK ") {
+ def colorize(s: String) = markSuccess(s)
+ override def show(msg: String) = if (!isTerse) super.show(msg)
+ }
+ class Failure(val entity: TestEntity) extends TestResult(1, " FAILED ") {
+ def colorize(s: String) = markFailure(s)
+
+ override def show(msg: String) = {
+ super.show(msg)
+
+ if (isShowDiff || isTrace)
+ normal(entity.diffOutput)
+
+ if (isShowLog || isTrace)
+ normal(toStringTrunc(entity.failureMessage(), 1600))
+ }
+ override def toString = List(super.toString, toStringTrunc(entity.failureMessage(), 400)) mkString "\n"
+ }
+ class Timeout(val entity: TestEntity) extends TestResult(2, "TIME OUT") {
+ def colorize(s: String) = markFailure(s)
+ }
+
+ object TestResult {
+ def apply(entity: TestEntity, success: Boolean) =
+ if (success) new Success(entity)
+ else new Failure(entity)
+
+ def apply(entity: TestEntity, state: Int) = state match {
+ case 0 => new Success(entity)
+ case 1 => new Failure(entity)
+ case 2 => new Timeout(entity)
+ }
+ def unapply(x: Any) = x match {
+ case x: TestResult => Some((x.entity, x.state))
+ case _ => None
+ }
+ }
+
+ /** The combined results of any number of tests.
+ */
+ case class CombinedTestResults(
+ passed: Int,
+ failed: Int,
+ elapsedMilliseconds: Long,
+ failures: List[TestResult]
+ ) {
+ // housekeeping
+ val elapsedSecs = elapsedMilliseconds / 1000
+ val elapsedMins = elapsedSecs / 60
+ val elapsedHrs = elapsedMins / 60
+ val dispMins = elapsedMins - elapsedHrs * 60
+ val dispSecs = elapsedSecs - elapsedMins * 60
+
+ def total = passed + failed
+ def hasFailures = failed > 0
+ def exitCode = if (expectedErrors == failed) 0 else 1
+
+ def ++(x: CombinedTestResults) = CombinedTestResults(
+ passed + x.passed,
+ failed + x.failed,
+ elapsedMilliseconds + x.elapsedMilliseconds,
+ failures ::: x.failures
+ )
+
+ def elapsedString = "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
+ def failuresString = {
+ if (failures.isEmpty) ""
+ else "Summary of failures:" :: failures mkString ("\n", "\n", "")
+ }
+
+ override def toString =
+ if (total == 0) "There were no tests to run."
+ else if (isDryRun) "%d tests would be run." format total
+ else if (hasFailures) "%d of %d tests failed (elapsed time: %s)".format(failed, total, elapsedString) + failuresString
+ else "All %d tests were successful (elapsed time: %s)".format(total, elapsedString)
+ }
+}
\ No newline at end of file
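The elapsed-time fields of CombinedTestResults are plain integer division; a quick standalone check of the arithmetic with one made-up duration:

    object ElapsedDemo {
      // Same arithmetic as CombinedTestResults: hours, then leftover minutes and seconds.
      def elapsedString(elapsedMilliseconds: Long) = {
        val elapsedSecs = elapsedMilliseconds / 1000
        val elapsedMins = elapsedSecs / 60
        val elapsedHrs  = elapsedMins / 60
        val dispMins    = elapsedMins - elapsedHrs * 60
        val dispSecs    = elapsedSecs - elapsedMins * 60
        "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
      }

      def main(args: Array[String]): Unit =
        println(elapsedString(7384000L))   // 2 hours, 3 minutes, 4 seconds => 02:03:04
    }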
diff --git a/src/partest-alternative/scala/tools/partest/Runner.scala b/src/partest-alternative/scala/tools/partest/Runner.scala
new file mode 100644
index 0000000000..1a28e60896
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Runner.scala
@@ -0,0 +1,36 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+
+object Runner {
+ def main(args: Array[String]) {
+ val runner = Partest(args: _*)
+ import runner._
+
+ if (args.isEmpty) return println(helpMsg)
+ if (isValidate) return validateAll()
+
+ printConfigBanner()
+
+ if (isCleanup)
+ cleanupAll()
+
+ val result = launchTestSuite()
+ val exitCode = result.exitCode
+ val message = "\n" + result + "\n"
+
+ if (exitCode == 0) success(message)
+ else failure(message)
+
+ if (isStats)
+ showTestStatistics()
+
+ System exit exitCode
+ }
+}
diff --git a/src/partest-alternative/scala/tools/partest/Statistics.scala b/src/partest-alternative/scala/tools/partest/Statistics.scala
new file mode 100644
index 0000000000..2ea3c6e8f0
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Statistics.scala
@@ -0,0 +1,46 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import scala.collection.mutable.HashMap
+
+trait Statistics {
+ /** Only collected when --stats is given. */
+ lazy val testStatistics = new HashMap[String, Long]
+
+ /** Given function and block of code, evaluates code block,
+ * calls function with milliseconds elapsed, and returns block result.
+ */
+ def timed[T](f: Long => Unit)(body: => T): T = {
+ val start = System.currentTimeMillis
+ val result = body
+ val end = System.currentTimeMillis
+
+ f(end - start)
+ result
+ }
+ /** Times body and returns both values.
+ */
+ def timed2[T](body: => T): (Long, T) = {
+ var milliSeconds = 0L
+ val result = timed(x => milliSeconds = x)(body)
+
+ (milliSeconds, result)
+ }
+
+ def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) =
+ (results partition (_._2 == 0)) match {
+ case (winners, losers) => (winners.size, losers.size)
+ }
+
+ def recordTestTiming(name: String, milliseconds: Long) =
+ synchronized { testStatistics(name) = milliseconds }
+
+ def showTestStatistics() {
+ testStatistics.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %.2f seconds".format(k, (v.toDouble / 1000))) }
+ }
+}
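A small usage sketch of the timing helpers above, assuming it lives in the same package as the trait (StatisticsSketch is an illustrative name): timed2 recovers both the elapsed milliseconds and the block's value, which can then be recorded and reported.

object StatisticsSketch extends Statistics {
  def main(args: Array[String]): Unit = {
    // Time an arbitrary block and keep both the duration and the result.
    val (ms, sum) = timed2((1 to 1000000).sum)
    recordTestTiming("sum of 1..1000000", ms)
    println("result = " + sum)
    showTestStatistics()   // prints each recorded timing in seconds
  }
}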
diff --git a/src/partest-alternative/scala/tools/partest/Universe.scala b/src/partest-alternative/scala/tools/partest/Universe.scala
new file mode 100644
index 0000000000..942fc1a8be
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/Universe.scala
@@ -0,0 +1,96 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+
+import nsc.io._
+import category.AllCategories
+import io.Logging
+
+/** The high level view of the partest infrastructure.
+ */
+abstract class Universe
+ extends Entities
+ with BuildContributors
+ with Logging
+ with Dispatcher
+ with Statistics
+ with Housekeeping
+ with Results
+ with PartestCompilation
+ with PartestSpec
+ with Config
+ with Alarms
+ with Actions
+ with Categories {
+
+ /** The abstract values from which all else is derived. */
+ def partestDir: Directory
+ def testBuildDir: Directory
+ def allCategories: List[TestCategory]
+ def selectedCategories: List[TestCategory]
+
+ /** Some plausibly abstract types. */
+ type TestBuild <: BuildContributor // e.g. quick, pack
+ type TestCategory <: AbsTestCategory // e.g. pos, neg, run
+ type TestEntity <: AbsTestEntity // e.g. files/pos/test25.scala
+ type TestSequence <: AbsTestSequence // e.g. compile, run, diff
+
+ /** Although TestStep isn't much more than Function1 right now,
+ * it exists this way so it can become more capable.
+ */
+ implicit def f1ToTestStep(f: TestEntity => Boolean): TestStep =
+ new TestStep { def apply(test: TestEntity) = f(test) }
+
+ abstract class TestStep extends (TestEntity => Boolean) {
+ def apply(test: TestEntity): Boolean
+ }
+
+ /** An umbrella category of tests, such as "pos" or "run".
+ */
+ trait AbsTestCategory extends BuildContributor {
+ type TestSettings
+
+ def kind: String
+ def testSequence: TestSequence
+ def denotesTest(location: Path): Boolean
+
+ def createTest(location: Path): TestEntity
+ def createSettings(entity: TestEntity): TestSettings
+ def enumerate: List[TestEntity]
+ }
+
+ /** A single test. It may involve multiple files, but only a
+ * single path is used to designate it.
+ */
+ trait AbsTestEntity extends BuildContributor {
+ def category: TestCategory
+ def location: Path
+ def onException(x: Throwable): Unit
+ def testClasspath: String
+
+ /** Most tests will use the sequence defined by the category,
+ * but the test can override and define a custom sequence.
+ */
+ def testSequence: TestSequence
+
+ /** True if this test recognizes the given path as a piece of it.
+ * For validation purposes.
+ */
+ def acknowledges(path: Path): Boolean
+ }
+
+ /** Every TestEntity is partly characterized by a series of actions
+ * which are applied to the TestEntity in the given order. The test
+ * passes if all those actions return true, fails otherwise.
+ */
+ trait AbsTestSequence {
+ def actions: List[TestStep]
+ }
+} \ No newline at end of file
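The implicit f1ToTestStep above is what lets ordinary TestEntity => Boolean functions be composed into test sequences; a self-contained sketch of the same conversion pattern with simplified stand-in types (Test, Step, compile and run here are illustrative, not the partest definitions):

object TestStepSketch {
  case class Test(name: String)
  abstract class Step extends (Test => Boolean)
  // Same shape as f1ToTestStep: wrap a plain function in the richer Step type.
  implicit def f1ToStep(f: Test => Boolean): Step =
    new Step { def apply(t: Test) = f(t) }

  val compile: Step = (t: Test) => { println("compile " + t.name); true }
  val run: Step     = (t: Test) => { println("run " + t.name); true }

  def main(args: Array[String]): Unit =
    println(List(compile, run) forall (_ apply Test("demo")))
}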
diff --git a/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala b/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala
new file mode 100644
index 0000000000..6740554dd8
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala
@@ -0,0 +1,57 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+package ant
+
+import org.apache.tools.ant.Task
+import org.apache.tools.ant.taskdefs.Java
+import org.apache.tools.ant.types.Environment
+
+import scala.tools.nsc.io._
+import scala.tools.nsc.util.ClassPath
+import cmd.Spec._
+
+class JavaTask extends Java {
+ override def getTaskName() = "partest"
+ private val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
+ private val partestRunnerClass = "scala.tools.partest.Runner"
+ def defaultJvmArgs = "-Xms64M -Xmx768M -Xss768K -XX:MaxPermSize=96M"
+
+ protected def rootDir = prop("partest.rootdir") getOrElse (baseDir / "test").path
+ protected def partestJVMArgs = prop("partest.jvm.args") getOrElse defaultJvmArgs
+ protected def runnerArgs = List("-usejavacp", partestRunnerClass, "--javaopts", partestJVMArgs)
+
+ private def baseDir = Directory(getProject.getBaseDir)
+ private def prop(s: String) = Option(getProject getProperty s)
+ private def jvmline(s: String) = returning(createJvmarg())(_ setLine s)
+ private def addArg(s: String) = returning(createArg())(_ setValue s)
+
+ private def newKeyValue(key: String, value: String) =
+ returning(new Environment.Variable)(x => { x setKey key ; x setValue value })
+
+ def setDefaults() {
+ setFork(true)
+ setFailonerror(true)
+ getProject.setSystemProperties()
+ setClassname(scalaRunnerClass)
+ addSysproperty(newKeyValue("partest.is-in-ant", "true"))
+ jvmline(partestJVMArgs)
+ runnerArgs foreach addArg
+
+ // do we want basedir or rootDir to be the cwd?
+ // setDir(Path(rootDir).jfile)
+ }
+
+ override def init() = {
+ super.init()
+ setDefaults()
+ }
+}
+
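jvmline, addArg and newKeyValue above all lean on the returning side-effect combinator defined elsewhere in the partest sources; a minimal sketch of that shape using only standard-library types (ReturningSketch and its local returning are illustrative re-definitions, not the real one):

object ReturningSketch {
  // Apply a side-effecting configuration step, then hand back the same object.
  def returning[T](x: T)(f: T => Unit): T = { f(x); x }

  def main(args: Array[String]): Unit = {
    val sb = returning(new StringBuilder)(_ append "-Xms64M -Xmx768M")
    println(sb.toString)
  }
}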
diff --git a/src/partest-alternative/scala/tools/partest/antlib.xml b/src/partest-alternative/scala/tools/partest/antlib.xml
new file mode 100644
index 0000000000..af36f11368
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/antlib.xml
@@ -0,0 +1,3 @@
+<antlib>
+ <taskdef name="partest" classname="scala.tools.partest.ant.JavaTask"/>
+</antlib>
diff --git a/src/partest-alternative/scala/tools/partest/category/AllCategories.scala b/src/partest-alternative/scala/tools/partest/category/AllCategories.scala
new file mode 100644
index 0000000000..953f80324b
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/category/AllCategories.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+package category
+
+trait AllCategories extends Compiler with Analysis with Runner {
+ self: Universe =>
+
+ object Pos extends DirBasedCategory("pos") { lazy val testSequence: TestSequence = List(compile) }
+ object Neg extends DirBasedCategory("neg") { lazy val testSequence: TestSequence = List(checkFileRequired, not(compile), diff) }
+ object Run extends DirBasedCategory("run") { lazy val testSequence: TestSequence = List(compile, run, diff) }
+ object Jvm extends DirBasedCategory("jvm") { lazy val testSequence: TestSequence = List(compile, run, diff) }
+}
diff --git a/src/partest-alternative/scala/tools/partest/category/Analysis.scala b/src/partest-alternative/scala/tools/partest/category/Analysis.scala
new file mode 100644
index 0000000000..2c6c208ee5
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/category/Analysis.scala
@@ -0,0 +1,64 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+package category
+
+import java.lang.{ ClassLoader => JavaClassLoader }
+import java.net.URL
+import nsc.util.ScalaClassLoader
+import nsc.io._
+
+class PartestClassLoader(urls: Array[URL], parent: JavaClassLoader) extends ScalaClassLoader.URLClassLoader(urls, parent) {
+ def this(urls: Array[URL]) = this(urls, null)
+ def bytes(path: String) = findBytesForClassName(path)
+ def singleton(path: String) = tryToInitializeClass(path).get getField "MODULE$" get null
+
+ /** Calls a method in an object via reflection.
+ */
+ def apply[T](className: String, methodName: String)(args: Any*): T = {
+ def fail = error("Reflection failed on %s.%s".format(className, methodName))
+ val clazz = tryToLoadClass(className) getOrElse fail
+ val obj = singleton(className)
+ val m = clazz.getMethods find (x => x.getName == methodName && x.getParameterTypes.size == args.size) getOrElse fail
+
+ m.invoke(obj, args map (_.asInstanceOf[AnyRef]): _*).asInstanceOf[T]
+ }
+}
+
+trait Analysis {
+ self: Universe =>
+
+ object Scalap extends DirBasedCategory("scalap") {
+ val testSequence: TestSequence = List(checkFileRequired, compile, run, diff)
+ override def denotesTest(p: Path) = p.isDirectory && (p.toDirectory.files exists (_.name == "result.test"))
+ override def createTest(location: Path) = new ScalapTest(location)
+
+ class ScalapTest(val location: Path) extends TestEntity {
+ val category = Scalap
+ val scalapMain = "scala.tools.scalap.Main$"
+ val scalapMethod = "decompileScala"
+
+ override def classpathPaths = super.classpathPaths :+ build.scalap
+ override def checkFile = File(location / "result.test")
+
+ private def runnerURLs = build.classpathPaths ::: classpathPaths map (_.toURL)
+ private def createClassLoader = new PartestClassLoader(runnerURLs.toArray, this.getClass.getClassLoader)
+
+ val isPackageObject = containsString("package object")
+ val suffix = if (isPackageObject) ".package" else ""
+ val className = location.name.capitalize + suffix
+
+ override def run() = loggingResult {
+ def loader = createClassLoader
+ def bytes = loader.bytes(className)
+
+ trace("scalap %s".format(className))
+ if (isDryRun) ""
+ else loader[String](scalapMain, scalapMethod)(bytes, isPackageObject)
+ }
+ }
+ }
+}
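singleton above relies on the convention that every Scala object compiles to a class whose static MODULE$ field holds the singleton instance; a standalone illustration of that convention (scala.Predef is chosen only as a well-known object, ModuleFieldSketch is an illustrative name):

object ModuleFieldSketch {
  def main(args: Array[String]): Unit = {
    // Load the class generated for a Scala object and read its MODULE$ field,
    // the same trick PartestClassLoader.singleton uses above.
    val clazz    = Class.forName("scala.Predef$")
    val instance = clazz.getField("MODULE$").get(null)
    println(instance.getClass.getName)   // scala.Predef$
  }
}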
diff --git a/src/partest-alternative/scala/tools/partest/category/Compiler.scala b/src/partest-alternative/scala/tools/partest/category/Compiler.scala
new file mode 100644
index 0000000000..49775d5031
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/category/Compiler.scala
@@ -0,0 +1,140 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+package category
+
+import nsc.io._
+import nsc.reporters._
+import nsc.{ Settings, CompilerCommand }
+import scala.tools.nsc.interactive.RefinedBuildManager
+import util.copyPath
+
+trait Compiler {
+ self: Universe =>
+
+ /** Resident Compiler.
+ * $SCALAC -d dir.obj -Xresident -sourcepath . "$@"
+ */
+ object Res extends DirBasedCategory("res") {
+ lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff)
+
+ override def denotesTest(p: Path) = p.isDirectory && resFile(p).isFile
+ override def createTest(location: Path) = new ResidentTest(location.toDirectory)
+
+ override def createSettings(entity: TestEntity): TestSettings =
+ returning(super.createSettings(entity)) { settings =>
+ settings.resident.value = true
+ settings.sourcepath.value = entity.sourcesDir.path
+ }
+
+ class ResidentTest(val location: Directory) extends TestEntity {
+ val category = Res
+ override def sourcesDir = categoryDir
+
+ override def acknowledges(p: Path) =
+ super.acknowledges(p) || (resFile(location) isSame p)
+
+ private def residentCompilerCommands = safeLines(resFile(location))
+ private def compileResident(global: PartestGlobal, lines: List[String]) = {
+ def printPrompt = global inform "nsc> "
+ val results =
+ lines map { line =>
+ printPrompt
+ trace("compile " + line)
+ isDryRun || global.partestCompile(toArgs(line) map (categoryDir / _ path), false)
+ }
+
+ printPrompt
+
+ /** Note - some res tests are really "neg" style tests, so we can't
+ * use the return value of the compile. The diff catches failures.
+ */
+ true // results forall (_ == true)
+ }
+
+ override def compile() = compileResident(newGlobal(Nil)._1, residentCompilerCommands)
+ }
+ private[Res] def resFile(p: Path) = p.toFile addExtension "res"
+ }
+
+ object BuildManager extends DirBasedCategory("buildmanager") {
+ lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff)
+ override def denotesTest(p: Path) = p.isDirectory && testFile(p).isFile
+ override def createTest(location: Path) = new BuildManagerTest(location.toDirectory)
+
+ override def createSettings(entity: TestEntity): TestSettings =
+ returning[TestSettings](super.createSettings(entity)) { settings =>
+ settings.Ybuildmanagerdebug.value = true
+ settings.sourcepath.value = entity.sourcesDir.path
+ }
+
+ class PartestBuildManager(settings: Settings, val reporter: ConsoleReporter) extends RefinedBuildManager(settings) {
+ def errorFn(msg: String) = Console println msg
+
+ override protected def newCompiler(newSettings: Settings) =
+ new BuilderGlobal(newSettings, reporter)
+
+ private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
+ fs flatMap (s => Option(AbstractFile getFile (Path(settings.sourcepath.value) / s path))) toSet
+
+ def buildManagerCompile(line: String): Boolean = {
+ val prompt = "builder > "
+ reporter printMessage (prompt + line)
+ val command = new CompilerCommand(toArgs(line), settings)
+ val files = filesToSet(settings.sourcepath.value, command.files)
+
+ update(files, Set.empty)
+ true
+ }
+ }
+
+ private[BuildManager] def testFile(p: Path) = (p / p.name addExtension "test").toFile
+
+ class BuildManagerTest(val location: Directory) extends TestEntity {
+ val category = BuildManager
+
+ override def sourcesDir = outDir
+ override def sourceFiles = Path onlyFiles (location walkFilter (_ != changesDir) filter isJavaOrScala toList)
+ override def checkFile = File(location / location.name addExtension "check")
+
+ override def acknowledges(p: Path) = super.acknowledges(p) || (p isSame testFile(location))
+
+ def buildManagerCommands = safeLines(testFile(location))
+ def changesDir = Directory(location / (location.name + ".changes"))
+
+ override def compile() = {
+ val settings = createSettings(this)
+ val pbm = new PartestBuildManager(settings, newReporter(settings))
+
+ // copy files
+ for (source <- sourceFiles) {
+ val target = outDir / (location.normalize relativize source)
+ copyPath(source, target.toFile)
+ }
+
+ def runUpdate(line: String) = {
+ val Array(srcName, replacement) = line split "=>"
+ copyPath(File(changesDir / replacement), File(outDir / srcName))
+ }
+
+ def sendCommand(line: String): Boolean = {
+ val compileRegex = """^>>compile (.*)$""".r
+ val updateRegex = """^>>update\s+(.*)""".r
+ trace("send: " + (line drop 2))
+
+ isDryRun || (line match {
+ case compileRegex(xs) => pbm.buildManagerCompile(xs)
+ case updateRegex(line) => runUpdate(line)
+ })
+ }
+
+ // send each line to the build manager
+ buildManagerCommands forall sendCommand
+ }
+ }
+ }
+}
+
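sendCommand above recognizes exactly two command shapes through its regexes; a small sketch of that dispatch with sample lines (BuildManagerCommandSketch and classify are illustrative names; the real command lines come from the category's .test files):

object BuildManagerCommandSketch {
  val compileRegex = """^>>compile (.*)$""".r
  val updateRegex  = """^>>update\s+(.*)""".r

  def classify(line: String): String = line match {
    case compileRegex(files) => "compile: " + files
    case updateRegex(change) => "update: " + change
    case other               => "unrecognized: " + other
  }

  def main(args: Array[String]): Unit = {
    println(classify(">>compile A.scala B.scala"))
    println(classify(">>update A.scala=>A1.scala"))
  }
}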
diff --git a/src/partest-alternative/scala/tools/partest/category/Runner.scala b/src/partest-alternative/scala/tools/partest/category/Runner.scala
new file mode 100644
index 0000000000..10bf5794a9
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/category/Runner.scala
@@ -0,0 +1,108 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+package category
+
+import nsc.io._
+
+trait Runner {
+ self: Universe =>
+
+ /** Shootout.
+ */
+ object Shootout extends DirBasedCategory("shootout") {
+ lazy val testSequence: TestSequence = List(compile, run, diff)
+
+ override def denotesTest(p: Path) = isScala(p) && runner(p).isFile
+ override def createTest(location: Path) = new ShootoutTest(location.toFile)
+
+ class ShootoutTest(val location: File) extends TestEntity {
+ val category = Shootout
+ // The files in shootout are very free form, so acknowledge anything close.
+ override def acknowledges(p: Path) =
+ (p.parent.normalize isSame Shootout.root) && (p.name startsWith label)
+
+ private def generated = File(outDir / "test.scala")
+ private def runnerFile = runner(location)
+ override def sourceFiles = List(generated)
+
+ override def compile() = {
+ trace("generate %s from %s, %s".format(tracePath(generated), tracePath(location), tracePath(runnerFile)))
+ // generate source file (even on dry run, we need the path)
+ generated.writeAll(location.slurp(), runnerFile.slurp())
+
+ // compile generated file
+ super.compile()
+ }
+ }
+
+ private[Shootout] def runner(p: Path) = p addExtension "runner" toFile
+ }
+
+ object Scalacheck extends DirBasedCategory("scalacheck") {
+ lazy val testSequence: TestSequence = List(compile, run)
+ override def createTest(location: Path) = new ScalacheckTest(location)
+
+ class ScalacheckTest(val location: Path) extends TestEntity {
+ val category = Scalacheck
+
+ import build.{ scalacheck, forkjoin }
+ import org.scalacheck.Properties
+ import org.scalacheck.Test.{ checkProperties, defaultParams, Result }
+
+ override def classpathPaths = super.classpathPaths ::: List(scalacheck, forkjoin)
+ private def arrayURLs = Array(scalacheck, outDir) map (_.toURL)
+
+ /** For reasons I'm not entirely clear on, I've written all this
+ * to avoid a source dependency on scalacheck.
+ */
+ class ScalacheckClassLoader extends PartestClassLoader(arrayURLs, this.getClass.getClassLoader) {
+ type ScalacheckResult = { def passed: Boolean }
+
+ def propCallback(name: String, passed: Int, discarded: Int): Unit = ()
+ def testCallback(name: String, result: AnyRef): Unit = ()
+
+ val test = singleton("Test$")
+ val params = apply[AnyRef]("org.scalacheck.Test$", "defaultParams")()
+ val result = apply[Seq[(String, AnyRef)]]("org.scalacheck.Test$", "checkProperties")(test, params, propCallback _, testCallback _)
+
+ def allResults() =
+ for ((prop, res) <- result) yield {
+ ScalacheckTest.this.trace("%s: %s".format(prop, res))
+ res.asInstanceOf[ScalacheckResult].passed
+ }
+
+ def check() = allResults forall (_ == true)
+ }
+
+ override def run() = {
+ trace("scalacheck runs via classloader with: %s".format(arrayURLs mkString ", "))
+ isDryRun || (new ScalacheckClassLoader check)
+ }
+ }
+ }
+
+ object Script extends DirBasedCategory("script") {
+ val testSequence: TestSequence = List(exec, diff)
+ override def createTest(location: Path) = new ScriptTest(location)
+
+ class ScriptTest(val location: Path) extends TestEntity {
+ val category = Script
+ val scriptFile = if (location.isDirectory) location / (label + ".scala") else location
+ val argsFile = withExtension("args").toFile
+ def batFile = scriptFile changeExtension "bat"
+ def script = if (Properties.isWin) batFile else scriptFile
+
+ override def acknowledges(p: Path) = super.acknowledges(p) || (List(argsFile, batFile) exists (_ isSame p))
+ override def execCwd = Some(sourcesDir)
+ override def argumentsToExec = script.path :: safeArgs(argsFile)
+ }
+ }
+} \ No newline at end of file
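ScalacheckResult above is a structural type, so .passed is invoked reflectively on whatever object comes back from the foreign classloader; a self-contained sketch of that mechanism (HasPassed and FakeResult are stand-ins, not scalacheck types):

object StructuralSketch {
  type HasPassed = { def passed: Boolean }

  class FakeResult { def passed = true }

  // The cast is unchecked; the call to .passed goes through reflection,
  // which is how the scalacheck results are read above.
  def report(r: AnyRef): Boolean = r.asInstanceOf[HasPassed].passed

  def main(args: Array[String]): Unit =
    println(report(new FakeResult))
}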
diff --git a/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala b/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala
new file mode 100644
index 0000000000..0ddcd97a5f
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala
@@ -0,0 +1,58 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+package io
+
+import java.io.{ Writer, PrintWriter, OutputStream, OutputStreamWriter }
+
+object ANSIWriter {
+ val NONE = 0
+ val SOME = 1
+ val MANY = 2
+
+ def apply(isAnsi: Boolean) = if (isAnsi) MANY else NONE
+}
+import ANSIWriter._
+
+class ANSIWriter(writer: Writer) extends PrintWriter(writer, true) {
+ def this(out: OutputStream) = this(new OutputStreamWriter(out))
+ def colorful: Int = NONE
+
+ protected val manyColors = List(
+ Console.BOLD + Console.BLACK,
+ Console.BOLD + Console.GREEN,
+ Console.BOLD + Console.RED,
+ Console.BOLD + Console.YELLOW,
+ Console.RESET
+ )
+ protected val someColors = List(
+ Console.BOLD + Console.BLACK,
+ Console.RESET,
+ Console.BOLD + Console.BLACK,
+ Console.BOLD + Console.BLACK,
+ Console.RESET
+ )
+ protected val noColors = List("", "", "", "", "")
+
+ lazy val List(_outline, _success, _failure, _warning, _default) = colorful match {
+ case NONE => noColors
+ case SOME => someColors
+ case MANY => manyColors
+ case _ => noColors
+ }
+
+ private def wrprint(msg: String): Unit = synchronized {
+ print(msg)
+ flush()
+ }
+
+ def outline(msg: String) = wrprint(_outline + msg + _default)
+ def success(msg: String) = wrprint(_success + msg + _default)
+ def failure(msg: String) = wrprint(_failure + msg + _default)
+ def warning(msg: String) = wrprint(_warning + msg + _default)
+ def normal(msg: String) = wrprint(_default + msg)
+}
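colorful defaults to NONE, so a concrete writer opts into color by overriding it; a hypothetical usage sketch, assuming ANSIWriter is imported or on the same package path (AnsiSketch is an illustrative name):

object AnsiSketch {
  def main(args: Array[String]): Unit = {
    // Console.out is an OutputStream, so the auxiliary constructor applies.
    val out = new ANSIWriter(Console.out) { override def colorful = ANSIWriter.MANY }
    out.success("a passing line\n")
    out.failure("a failing line\n")
    out.normal("back to normal\n")
  }
}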
diff --git a/src/partest-alternative/scala/tools/partest/io/Diff.java b/src/partest-alternative/scala/tools/partest/io/Diff.java
new file mode 100644
index 0000000000..69428d7e7a
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/io/Diff.java
@@ -0,0 +1,873 @@
+
+package scala.tools.partest.io;
+
+import java.util.Hashtable;
+
+/** A class to compare IndexedSeqs of objects. The result of comparison
+ is a list of <code>change</code> objects which form an
+ edit script. The objects compared are traditionally lines
+ of text from two files. Comparison options such as "ignore
+ whitespace" are implemented by modifying the <code>equals</code>
+ and <code>hashCode</code> methods for the objects compared.
+<p>
+ The basic algorithm is described in: <br>
+ "An O(ND) Difference Algorithm and its Variations", Eugene Myers,
+ Algorithmica Vol. 1 No. 2, 1986, p 251.
+<p>
+ This class outputs different results from GNU diff 1.15 on some
+ inputs. Our results are actually better (smaller change list, smaller
+ total size of changes), but it would be nice to know why. Perhaps
+ there is a memory overwrite bug in GNU diff 1.15.
+
+ @author Stuart D. Gathman, translated from GNU diff 1.15
+ Copyright (C) 2000 Business Management Systems, Inc.
+<p>
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 1, or (at your option)
+ any later version.
+<p>
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+<p>
+ You should have received a copy of the <a href=COPYING.txt>
+ GNU General Public License</a>
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+ */
+
+public class Diff {
+
+ /** Prepare to find differences between two arrays. Each element of
+ the arrays is translated to an "equivalence number" based on
+ the result of <code>equals</code>. The original Object arrays
+ are no longer needed for computing the differences. They will
+ be needed again later to print the results of the comparison as
+ an edit script, if desired.
+ */
+ public Diff(Object[] a,Object[] b) {
+ Hashtable h = new Hashtable(a.length + b.length);
+ filevec[0] = new file_data(a,h);
+ filevec[1] = new file_data(b,h);
+ }
+
+ /** 1 more than the maximum equivalence value used for this or its
+ sibling file. */
+ private int equiv_max = 1;
+
+ /** When set to true, the comparison uses a heuristic to speed it up.
+ With this heuristic, for files with a constant small density
+ of changes, the algorithm is linear in the file size. */
+ public boolean heuristic = false;
+
+ /** When set to true, the algorithm returns a guaranteed minimal
+ set of changes. This makes things slower, sometimes much slower. */
+ public boolean no_discards = false;
+
+ private int[] xvec, yvec; /* IndexedSeqs being compared. */
+ private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing
+ the X coordinate of the point furthest
+ along the given diagonal in the forward
+ search of the edit matrix. */
+ private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing
+ the X coordinate of the point furthest
+ along the given diagonal in the backward
+ search of the edit matrix. */
+ private int fdiagoff, bdiagoff;
+ private final file_data[] filevec = new file_data[2];
+ private int cost;
+
+ /** Find the midpoint of the shortest edit script for a specified
+ portion of the two files.
+
+ We scan from the beginnings of the files, and simultaneously from the ends,
+ doing a breadth-first search through the space of edit-sequence.
+ When the two searches meet, we have found the midpoint of the shortest
+ edit sequence.
+
+ The value returned is the number of the diagonal on which the midpoint lies.
+ The diagonal number equals the number of inserted lines minus the number
+ of deleted lines (counting only lines before the midpoint).
+ The edit cost is stored into COST; this is the total number of
+ lines inserted or deleted (counting only lines before the midpoint).
+
+ This function assumes that the first lines of the specified portions
+ of the two files do not match, and likewise that the last lines do not
+ match. The caller must trim matching lines from the beginning and end
+ of the portions it is going to specify.
+
+ Note that if we return the "wrong" diagonal value, or if
+ the value of bdiag at that diagonal is "wrong",
+ the worst this can do is cause suboptimal diff output.
+ It cannot cause incorrect diff output. */
+
+ private int diag (int xoff, int xlim, int yoff, int ylim) {
+ final int[] fd = fdiag; // Give the compiler a chance.
+ final int[] bd = bdiag; // Additional help for the compiler.
+ final int[] xv = xvec; // Still more help for the compiler.
+ final int[] yv = yvec; // And more and more . . .
+ final int dmin = xoff - ylim; // Minimum valid diagonal.
+ final int dmax = xlim - yoff; // Maximum valid diagonal.
+ final int fmid = xoff - yoff; // Center diagonal of top-down search.
+ final int bmid = xlim - ylim; // Center diagonal of bottom-up search.
+ int fmin = fmid, fmax = fmid; // Limits of top-down search.
+ int bmin = bmid, bmax = bmid; // Limits of bottom-up search.
+ /* True if southeast corner is on an odd
+ diagonal with respect to the northwest. */
+ final boolean odd = (fmid - bmid & 1) != 0;
+
+ fd[fdiagoff + fmid] = xoff;
+ bd[bdiagoff + bmid] = xlim;
+
+ for (int c = 1;; ++c)
+ {
+ int d; /* Active diagonal. */
+ boolean big_snake = false;
+
+ /* Extend the top-down search by an edit step in each diagonal. */
+ if (fmin > dmin)
+ fd[fdiagoff + --fmin - 1] = -1;
+ else
+ ++fmin;
+ if (fmax < dmax)
+ fd[fdiagoff + ++fmax + 1] = -1;
+ else
+ --fmax;
+ for (d = fmax; d >= fmin; d -= 2)
+ {
+ int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1];
+
+ if (tlo >= thi)
+ x = tlo + 1;
+ else
+ x = thi;
+ oldx = x;
+ y = x - d;
+ while (x < xlim && y < ylim && xv[x] == yv[y]) {
+ ++x; ++y;
+ }
+ if (x - oldx > 20)
+ big_snake = true;
+ fd[fdiagoff + d] = x;
+ if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
+ {
+ cost = 2 * c - 1;
+ return d;
+ }
+ }
+
+ /* Similarly, extend the bottom-up search. */
+ if (bmin > dmin)
+ bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE;
+ else
+ ++bmin;
+ if (bmax < dmax)
+ bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE;
+ else
+ --bmax;
+ for (d = bmax; d >= bmin; d -= 2)
+ {
+ int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1];
+
+ if (tlo < thi)
+ x = tlo;
+ else
+ x = thi - 1;
+ oldx = x;
+ y = x - d;
+ while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) {
+ --x; --y;
+ }
+ if (oldx - x > 20)
+ big_snake = true;
+ bd[bdiagoff + d] = x;
+ if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
+ {
+ cost = 2 * c;
+ return d;
+ }
+ }
+
+ /* Heuristic: check occasionally for a diagonal that has made
+ lots of progress compared with the edit distance.
+ If we have any such, find the one that has made the most
+ progress and return it as if it had succeeded.
+
+ With this heuristic, for files with a constant small density
+ of changes, the algorithm is linear in the file size. */
+
+ if (c > 200 && big_snake && heuristic)
+ {
+ int best = 0;
+ int bestpos = -1;
+
+ for (d = fmax; d >= fmin; d -= 2)
+ {
+ int dd = d - fmid;
+ if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd)))
+ {
+ if (fd[fdiagoff + d] * 2 - dd > best
+ && fd[fdiagoff + d] - xoff > 20
+ && fd[fdiagoff + d] - d - yoff > 20)
+ {
+ int k;
+ int x = fd[fdiagoff + d];
+
+ /* We have a good enough best diagonal;
+ now insist that it end with a significant snake. */
+ for (k = 1; k <= 20; k++)
+ if (xvec[x - k] != yvec[x - d - k])
+ break;
+
+ if (k == 21)
+ {
+ best = fd[fdiagoff + d] * 2 - dd;
+ bestpos = d;
+ }
+ }
+ }
+ }
+ if (best > 0)
+ {
+ cost = 2 * c - 1;
+ return bestpos;
+ }
+
+ best = 0;
+ for (d = bmax; d >= bmin; d -= 2)
+ {
+ int dd = d - bmid;
+ if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd)))
+ {
+ if ((xlim - bd[bdiagoff + d]) * 2 + dd > best
+ && xlim - bd[bdiagoff + d] > 20
+ && ylim - (bd[bdiagoff + d] - d) > 20)
+ {
+ /* We have a good enough best diagonal;
+ now insist that it end with a significant snake. */
+ int k;
+ int x = bd[bdiagoff + d];
+
+ for (k = 0; k < 20; k++)
+ if (xvec[x + k] != yvec[x - d + k])
+ break;
+ if (k == 20)
+ {
+ best = (xlim - bd[bdiagoff + d]) * 2 + dd;
+ bestpos = d;
+ }
+ }
+ }
+ }
+ if (best > 0)
+ {
+ cost = 2 * c - 1;
+ return bestpos;
+ }
+ }
+ }
+ }
+
+ /** Compare in detail contiguous subsequences of the two files
+ which are known, as a whole, to match each other.
+
+ The results are recorded in the IndexedSeqs filevec[N].changed_flag, by
+ storing a 1 in the element for each line that is an insertion or deletion.
+
+ The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1.
+
+ Note that XLIM, YLIM are exclusive bounds.
+ All line numbers are origin-0 and discarded lines are not counted. */
+
+ private void compareseq (int xoff, int xlim, int yoff, int ylim) {
+ /* Slide down the bottom initial diagonal. */
+ while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) {
+ ++xoff; ++yoff;
+ }
+ /* Slide up the top initial diagonal. */
+ while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) {
+ --xlim; --ylim;
+ }
+
+ /* Handle simple cases. */
+ if (xoff == xlim)
+ while (yoff < ylim)
+ filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true;
+ else if (yoff == ylim)
+ while (xoff < xlim)
+ filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true;
+ else
+ {
+ /* Find a point of correspondence in the middle of the files. */
+
+ int d = diag (xoff, xlim, yoff, ylim);
+ int c = cost;
+ int f = fdiag[fdiagoff + d];
+ int b = bdiag[bdiagoff + d];
+
+ if (c == 1)
+ {
+ /* This should be impossible, because it implies that
+ one of the two subsequences is empty,
+ and that case was handled above without calling `diag'.
+ Let's verify that this is true. */
+ throw new IllegalArgumentException("Empty subsequence");
+ }
+ else
+ {
+ /* Use that point to split this problem into two subproblems. */
+ compareseq (xoff, b, yoff, b - d);
+ /* This used to use f instead of b,
+ but that is incorrect!
+ It is not necessarily the case that diagonal d
+ has a snake from b to f. */
+ compareseq (b, xlim, b - d, ylim);
+ }
+ }
+ }
+
+ /** Discard lines from one file that have no matches in the other file.
+ */
+
+ private void discard_confusing_lines() {
+ filevec[0].discard_confusing_lines(filevec[1]);
+ filevec[1].discard_confusing_lines(filevec[0]);
+ }
+
+ private boolean inhibit = false;
+
+ /** Adjust inserts/deletes of blank lines to join changes
+ as much as possible.
+ */
+
+ private void shift_boundaries() {
+ if (inhibit)
+ return;
+ filevec[0].shift_boundaries(filevec[1]);
+ filevec[1].shift_boundaries(filevec[0]);
+ }
+
+ public interface ScriptBuilder {
+ /** Scan the tables of which lines are inserted and deleted,
+ producing an edit script.
+ @param changed0 true for lines in first file which do not match 2nd
+ @param len0 number of lines in first file
+ @param changed1 true for lines in 2nd file which do not match 1st
+ @param len1 number of lines in 2nd file
+ @return a linked list of changes - or null
+ */
+ public change build_script(
+ boolean[] changed0,int len0,
+ boolean[] changed1,int len1
+ );
+ }
+
+ /** Scan the tables of which lines are inserted and deleted,
+ producing an edit script in reverse order. */
+
+ static class ReverseScript implements ScriptBuilder {
+ public change build_script(
+ final boolean[] changed0,int len0,
+ final boolean[] changed1,int len1)
+ {
+ change script = null;
+ int i0 = 0, i1 = 0;
+ while (i0 < len0 || i1 < len1) {
+ if (changed0[1+i0] || changed1[1+i1]) {
+ int line0 = i0, line1 = i1;
+
+ /* Find # lines changed here in each file. */
+ while (changed0[1+i0]) ++i0;
+ while (changed1[1+i1]) ++i1;
+
+ /* Record this change. */
+ script = new change(line0, line1, i0 - line0, i1 - line1, script);
+ }
+
+ /* We have reached lines in the two files that match each other. */
+ i0++; i1++;
+ }
+
+ return script;
+ }
+ }
+
+ static class ForwardScript implements ScriptBuilder {
+ /** Scan the tables of which lines are inserted and deleted,
+ producing an edit script in forward order. */
+ public change build_script(
+ final boolean[] changed0,int len0,
+ final boolean[] changed1,int len1)
+ {
+ change script = null;
+ int i0 = len0, i1 = len1;
+
+ while (i0 >= 0 || i1 >= 0)
+ {
+ if (changed0[i0] || changed1[i1])
+ {
+ int line0 = i0, line1 = i1;
+
+ /* Find # lines changed here in each file. */
+ while (changed0[i0]) --i0;
+ while (changed1[i1]) --i1;
+
+ /* Record this change. */
+ script = new change(i0, i1, line0 - i0, line1 - i1, script);
+ }
+
+ /* We have reached lines in the two files that match each other. */
+ i0--; i1--;
+ }
+
+ return script;
+ }
+ }
+
+ /** Standard ScriptBuilders. */
+ public final static ScriptBuilder
+ forwardScript = new ForwardScript(),
+ reverseScript = new ReverseScript();
+
+ /* Report the differences of two files as an edit script,
+ built in forward or reverse order. */
+ public final change diff_2(final boolean reverse) {
+ return diff(reverse ? reverseScript : forwardScript);
+ }
+
+ /** Get the results of comparison as an edit script. The script
+ is described by a list of changes. The standard ScriptBuilder
+ implementations provide for forward and reverse edit scripts.
+ Alternate implementations could, for instance, list common elements
+ instead of differences.
+ @param bld an object to build the script from change flags
+ @return the head of a list of changes
+ */
+ public change diff(final ScriptBuilder bld) {
+
+ /* Some lines are obviously insertions or deletions
+ because they don't match anything. Detect them now,
+ and avoid even thinking about them in the main comparison algorithm. */
+
+ discard_confusing_lines ();
+
+ /* Now do the main comparison algorithm, considering just the
+ undiscarded lines. */
+
+ xvec = filevec[0].undiscarded;
+ yvec = filevec[1].undiscarded;
+
+ int diags =
+ filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3;
+ fdiag = new int[diags];
+ fdiagoff = filevec[1].nondiscarded_lines + 1;
+ bdiag = new int[diags];
+ bdiagoff = filevec[1].nondiscarded_lines + 1;
+
+ compareseq (0, filevec[0].nondiscarded_lines,
+ 0, filevec[1].nondiscarded_lines);
+ fdiag = null;
+ bdiag = null;
+
+ /* Modify the results slightly to make them prettier
+ in cases where that can validly be done. */
+
+ shift_boundaries ();
+
+ /* Get the results of comparison in the form of a chain
+ of `struct change's -- an edit script. */
+ return bld.build_script(
+ filevec[0].changed_flag,
+ filevec[0].buffered_lines,
+ filevec[1].changed_flag,
+ filevec[1].buffered_lines
+ );
+
+ }
+
+ /** The result of comparison is an "edit script": a chain of change objects.
+ Each change represents one place where some lines are deleted
+ and some are inserted.
+
+ LINE0 and LINE1 are the first affected lines in the two files (origin 0).
+ DELETED is the number of lines deleted here from file 0.
+ INSERTED is the number of lines inserted here in file 1.
+
+ If DELETED is 0 then LINE0 is the number of the line before
+ which the insertion was done; vice versa for INSERTED and LINE1. */
+
+ public static class change {
+ /** Previous or next edit command. */
+ public change link;
+ /** # lines of file 1 changed here. */
+ public final int inserted;
+ /** # lines of file 0 changed here. */
+ public final int deleted;
+ /** Line number of 1st deleted line. */
+ public final int line0;
+ /** Line number of 1st inserted line. */
+ public final int line1;
+
+ /** Cons an additional entry onto the front of an edit script OLD.
+ LINE0 and LINE1 are the first affected lines in the two files (origin 0).
+ DELETED is the number of lines deleted here from file 0.
+ INSERTED is the number of lines inserted here in file 1.
+
+ If DELETED is 0 then LINE0 is the number of the line before
+ which the insertion was done; vice versa for INSERTED and LINE1. */
+ public change(int line0, int line1, int deleted, int inserted, change old) {
+ this.line0 = line0;
+ this.line1 = line1;
+ this.inserted = inserted;
+ this.deleted = deleted;
+ this.link = old;
+ //System.err.println(line0+","+line1+","+inserted+","+deleted);
+ }
+ }
+
+ /** Data on one input file being compared.
+ */
+
+ class file_data {
+
+ /** Allocate changed array for the results of comparison. */
+ void clear() {
+ /* Allocate a flag for each line of each file, saying whether that line
+ is an insertion or deletion.
+ Allocate an extra element, always zero, at each end of each IndexedSeq.
+ */
+ changed_flag = new boolean[buffered_lines + 2];
+ }
+
+ /** Return equiv_count[I] as the number of lines in this file
+ that fall in equivalence class I.
+ @return the array of equivalence class counts.
+ */
+ int[] equivCount() {
+ int[] equiv_count = new int[equiv_max];
+ for (int i = 0; i < buffered_lines; ++i)
+ ++equiv_count[equivs[i]];
+ return equiv_count;
+ }
+
+ /** Discard lines that have no matches in another file.
+
+ A line which is discarded will not be considered by the actual
+ comparison algorithm; it will be as if that line were not in the file.
+ The file's `realindexes' table maps virtual line numbers
+ (which don't count the discarded lines) into real line numbers;
+ this is how the actual comparison algorithm produces results
+ that are comprehensible when the discarded lines are counted.
+<p>
+ When we discard a line, we also mark it as a deletion or insertion
+ so that it will be printed in the output.
+ @param f the other file
+ */
+ void discard_confusing_lines(file_data f) {
+ clear();
+ /* Set up table of which lines are going to be discarded. */
+ final byte[] discarded = discardable(f.equivCount());
+
+ /* Don't really discard the provisional lines except when they occur
+ in a run of discardables, with nonprovisionals at the beginning
+ and end. */
+ filterDiscards(discarded);
+
+ /* Actually discard the lines. */
+ discard(discarded);
+ }
+
+ /** Mark to be discarded each line that matches no line of another file.
+ If a line matches many lines, mark it as provisionally discardable.
+ @see equivCount()
+ @param counts The count of each equivalence number for the other file.
+ @return 0=nondiscardable, 1=discardable or 2=provisionally discardable
+ for each line
+ */
+
+ private byte[] discardable(final int[] counts) {
+ final int end = buffered_lines;
+ final byte[] discards = new byte[end];
+ final int[] equivs = this.equivs;
+ int many = 5;
+ int tem = end / 64;
+
+ /* Multiply MANY by approximate square root of number of lines.
+ That is the threshold for provisionally discardable lines. */
+ while ((tem = tem >> 2) > 0)
+ many *= 2;
+
+ for (int i = 0; i < end; i++)
+ {
+ int nmatch;
+ if (equivs[i] == 0)
+ continue;
+ nmatch = counts[equivs[i]];
+ if (nmatch == 0)
+ discards[i] = 1;
+ else if (nmatch > many)
+ discards[i] = 2;
+ }
+ return discards;
+ }
+
+ /** Don't really discard the provisional lines except when they occur
+ in a run of discardables, with nonprovisionals at the beginning
+ and end. */
+
+ private void filterDiscards(final byte[] discards) {
+ final int end = buffered_lines;
+
+ for (int i = 0; i < end; i++)
+ {
+ /* Cancel provisional discards not in middle of run of discards. */
+ if (discards[i] == 2)
+ discards[i] = 0;
+ else if (discards[i] != 0)
+ {
+ /* We have found a nonprovisional discard. */
+ int j;
+ int length;
+ int provisional = 0;
+
+ /* Find end of this run of discardable lines.
+ Count how many are provisionally discardable. */
+ for (j = i; j < end; j++)
+ {
+ if (discards[j] == 0)
+ break;
+ if (discards[j] == 2)
+ ++provisional;
+ }
+
+ /* Cancel provisional discards at end, and shrink the run. */
+ while (j > i && discards[j - 1] == 2) {
+ discards[--j] = 0; --provisional;
+ }
+
+ /* Now we have the length of a run of discardable lines
+ whose first and last are not provisional. */
+ length = j - i;
+
+ /* If 1/4 of the lines in the run are provisional,
+ cancel discarding of all provisional lines in the run. */
+ if (provisional * 4 > length)
+ {
+ while (j > i)
+ if (discards[--j] == 2)
+ discards[j] = 0;
+ }
+ else
+ {
+ int consec;
+ int minimum = 1;
+ int tem = length / 4;
+
+ /* MINIMUM is approximate square root of LENGTH/4.
+ A subrun of two or more provisionals can stand
+ when LENGTH is at least 16.
+ A subrun of 4 or more can stand when LENGTH >= 64. */
+ while ((tem = tem >> 2) > 0)
+ minimum *= 2;
+ minimum++;
+
+ /* Cancel any subrun of MINIMUM or more provisionals
+ within the larger run. */
+ for (j = 0, consec = 0; j < length; j++)
+ if (discards[i + j] != 2)
+ consec = 0;
+ else if (minimum == ++consec)
+ /* Back up to start of subrun, to cancel it all. */
+ j -= consec;
+ else if (minimum < consec)
+ discards[i + j] = 0;
+
+ /* Scan from beginning of run
+ until we find 3 or more nonprovisionals in a row
+ or until the first nonprovisional at least 8 lines in.
+ Until that point, cancel any provisionals. */
+ for (j = 0, consec = 0; j < length; j++)
+ {
+ if (j >= 8 && discards[i + j] == 1)
+ break;
+ if (discards[i + j] == 2) {
+ consec = 0; discards[i + j] = 0;
+ }
+ else if (discards[i + j] == 0)
+ consec = 0;
+ else
+ consec++;
+ if (consec == 3)
+ break;
+ }
+
+ /* I advances to the last line of the run. */
+ i += length - 1;
+
+ /* Same thing, from end. */
+ for (j = 0, consec = 0; j < length; j++)
+ {
+ if (j >= 8 && discards[i - j] == 1)
+ break;
+ if (discards[i - j] == 2) {
+ consec = 0; discards[i - j] = 0;
+ }
+ else if (discards[i - j] == 0)
+ consec = 0;
+ else
+ consec++;
+ if (consec == 3)
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ /** Actually discard the lines.
+ @param discards flags lines to be discarded
+ */
+ private void discard(final byte[] discards) {
+ final int end = buffered_lines;
+ int j = 0;
+ for (int i = 0; i < end; ++i)
+ if (no_discards || discards[i] == 0)
+ {
+ undiscarded[j] = equivs[i];
+ realindexes[j++] = i;
+ }
+ else
+ changed_flag[1+i] = true;
+ nondiscarded_lines = j;
+ }
+
+ file_data(Object[] data,Hashtable h) {
+ buffered_lines = data.length;
+
+ equivs = new int[buffered_lines];
+ undiscarded = new int[buffered_lines];
+ realindexes = new int[buffered_lines];
+
+ for (int i = 0; i < data.length; ++i) {
+ Integer ir = (Integer)h.get(data[i]);
+ if (ir == null)
+ h.put(data[i],new Integer(equivs[i] = equiv_max++));
+ else
+ equivs[i] = ir.intValue();
+ }
+ }
+
+ /** Adjust inserts/deletes of blank lines to join changes
+ as much as possible.
+
+ We do something when a run of changed lines includes a blank
+ line at one end and has an excluded blank line at the other.
+ We are free to choose which blank line is included.
+ `compareseq' always chooses the one at the beginning,
+ but usually it is cleaner to consider the following blank line
+ to be the "change". The only exception is if the preceding blank line
+ would join this change to other changes.
+ @param f the file being compared against
+ */
+
+ void shift_boundaries(file_data f) {
+ final boolean[] changed = changed_flag;
+ final boolean[] other_changed = f.changed_flag;
+ int i = 0;
+ int j = 0;
+ int i_end = buffered_lines;
+ int preceding = -1;
+ int other_preceding = -1;
+
+ for (;;)
+ {
+ int start, end, other_start;
+
+ /* Scan forwards to find beginning of another run of changes.
+ Also keep track of the corresponding point in the other file. */
+
+ while (i < i_end && !changed[1+i])
+ {
+ while (other_changed[1+j++])
+ /* Non-corresponding lines in the other file
+ will count as the preceding batch of changes. */
+ other_preceding = j;
+ i++;
+ }
+
+ if (i == i_end)
+ break;
+
+ start = i;
+ other_start = j;
+
+ for (;;)
+ {
+ /* Now find the end of this run of changes. */
+
+ while (i < i_end && changed[1+i]) i++;
+ end = i;
+
+ /* If the first changed line matches the following unchanged one,
+ and this run does not follow right after a previous run,
+ and there are no lines deleted from the other file here,
+ then classify the first changed line as unchanged
+ and the following line as changed in its place. */
+
+ /* You might ask, how could this run follow right after another?
+ Only because the previous run was shifted here. */
+
+ if (end != i_end
+ && equivs[start] == equivs[end]
+ && !other_changed[1+j]
+ && end != i_end
+ && !((preceding >= 0 && start == preceding)
+ || (other_preceding >= 0
+ && other_start == other_preceding)))
+ {
+ changed[1+end++] = true;
+ changed[1+start++] = false;
+ ++i;
+ /* Since one line-that-matches is now before this run
+ instead of after, we must advance in the other file
+ to keep in synch. */
+ ++j;
+ }
+ else
+ break;
+ }
+
+ preceding = i;
+ other_preceding = j;
+ }
+ }
+
+ /** Number of elements (lines) in this file. */
+ final int buffered_lines;
+
+ /** IndexedSeq, indexed by line number, containing an equivalence code for
+ each line. It is this IndexedSeq that is actually compared with that
+ of another file to generate differences. */
+ private final int[] equivs;
+
+ /** IndexedSeq, like the previous one except that
+ the elements for discarded lines have been squeezed out. */
+ final int[] undiscarded;
+
+ /** IndexedSeq mapping virtual line numbers (not counting discarded lines)
+ to real ones (counting those lines). Both are origin-0. */
+ final int[] realindexes;
+
+ /** Total number of nondiscarded lines. */
+ int nondiscarded_lines;
+
+ /** Array, indexed by real origin-1 line number,
+ containing true for a line that is an insertion or a deletion.
+ The results of comparison are stored here. */
+ boolean[] changed_flag;
+
+ }
+}
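A hypothetical Scala-side sketch of driving the port above: build a Diff over two line arrays and walk the resulting chain of change objects, the edit script that DiffPrint (below) knows how to format (DiffSketch is an illustrative name; same-package access to Diff is assumed):

object DiffSketch {
  def main(args: Array[String]): Unit = {
    val a = Array[AnyRef]("a", "b", "c", "d")
    val b = Array[AnyRef]("a", "x", "c", "d")
    // diff_2(false) builds the forward edit script.
    var hunk = new Diff(a, b).diff_2(false)
    while (hunk != null) {
      println("line0=%d line1=%d deleted=%d inserted=%d".format(
        hunk.line0, hunk.line1, hunk.deleted, hunk.inserted))
      hunk = hunk.link
    }
  }
}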
diff --git a/src/partest-alternative/scala/tools/partest/io/DiffPrint.java b/src/partest-alternative/scala/tools/partest/io/DiffPrint.java
new file mode 100644
index 0000000000..273b6cba52
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/io/DiffPrint.java
@@ -0,0 +1,606 @@
+
+package scala.tools.partest.io;
+
+import java.io.*;
+import java.util.Vector;
+import java.util.Date;
+//import com.objectspace.jgl.predicates.UnaryPredicate;
+
+interface UnaryPredicate {
+ boolean execute(Object obj);
+}
+
+/** A simple framework for printing change lists produced by <code>Diff</code>.
+ @see bmsi.util.Diff
+ @author Stuart D. Gathman
+ Copyright (C) 2000 Business Management Systems, Inc.
+<p>
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 1, or (at your option)
+ any later version.
+<p>
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+<p>
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ */
+public class DiffPrint {
+ /** A Base class for printing edit scripts produced by Diff.
+ This class divides the change list into "hunks", and calls
+ <code>print_hunk</code> for each hunk. Various utility methods
+ are provided as well.
+ */
+ public static abstract class Base {
+ protected Base(Object[] a,Object[] b, Writer w) {
+ outfile = new PrintWriter(w);
+ file0 = a;
+ file1 = b;
+ }
+ /** Set to ignore certain kinds of lines when printing
+ an edit script. For example, ignoring blank lines or comments.
+ */
+ protected UnaryPredicate ignore = null;
+
+ /** Set to the lines of the files being compared.
+ */
+ protected Object[] file0, file1;
+
+ /** Divide SCRIPT into pieces by calling HUNKFUN and
+ print each piece with PRINTFUN.
+ Both functions take one arg, an edit script.
+
+ PRINTFUN takes a piece of the script whose changes belong together
+ (terminated by a null link) and prints it. */
+ public void print_script(Diff.change script) {
+ Diff.change next = script;
+
+ while (next != null)
+ {
+ Diff.change t, end;
+
+ /* Find a set of changes that belong together. */
+ t = next;
+ end = hunkfun(next);
+
+ /* Disconnect them from the rest of the changes,
+ making them a hunk, and remember the rest for next iteration. */
+ next = end.link;
+ end.link = null;
+ //if (DEBUG)
+ // debug_script(t);
+
+ /* Print this hunk. */
+ print_hunk(t);
+
+ /* Reconnect the script so it will all be freed properly. */
+ end.link = next;
+ }
+ outfile.flush();
+ }
+
+ /** Called with the tail of the script
+ and returns the last link that belongs together with the start
+ of the tail. */
+
+ protected Diff.change hunkfun(Diff.change hunk) {
+ return hunk;
+ }
+
+ protected int first0, last0, first1, last1, deletes, inserts;
+ protected PrintWriter outfile;
+
+ /** Look at a hunk of edit script and report the range of lines in each file
+ that it applies to. HUNK is the start of the hunk, which is a chain
+ of `struct change'. The first and last line numbers of file 0 are stored
+ in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1.
+ Note that these are internal line numbers that count from 0.
+
+ If no lines from file 0 are deleted, then FIRST0 is LAST0+1.
+
+ Also set *DELETES nonzero if any lines of file 0 are deleted
+ and set *INSERTS nonzero if any lines of file 1 are inserted.
+ If only ignorable lines are inserted or deleted, both are
+ set to 0. */
+
+ protected void analyze_hunk(Diff.change hunk) {
+ int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0;
+ int i;
+ Diff.change next;
+ boolean nontrivial = (ignore == null);
+
+ show_from = show_to = 0;
+
+ f0 = hunk.line0;
+ f1 = hunk.line1;
+
+ for (next = hunk; next != null; next = next.link)
+ {
+ l0 = next.line0 + next.deleted - 1;
+ l1 = next.line1 + next.inserted - 1;
+ show_from += next.deleted;
+ show_to += next.inserted;
+ for (i = next.line0; i <= l0 && ! nontrivial; i++)
+ if (!ignore.execute(file0[i]))
+ nontrivial = true;
+ for (i = next.line1; i <= l1 && ! nontrivial; i++)
+ if (!ignore.execute(file1[i]))
+ nontrivial = true;
+ }
+
+ first0 = f0;
+ last0 = l0;
+ first1 = f1;
+ last1 = l1;
+
+ /* If all inserted or deleted lines are ignorable,
+ tell the caller to ignore this hunk. */
+
+ if (!nontrivial)
+ show_from = show_to = 0;
+
+ deletes = show_from;
+ inserts = show_to;
+ }
+
+ /** Print the script header which identifies the files compared. */
+ protected void print_header(String filea, String fileb) { }
+
+ protected abstract void print_hunk(Diff.change hunk);
+
+ protected void print_1_line(String pre,Object linbuf) {
+ outfile.println(pre + linbuf.toString());
+ }
+
+ /** Print a pair of line numbers with SEPCHAR, translated for file FILE.
+ If the two numbers are identical, print just one number.
+
+ Args A and B are internal line numbers.
+ We print the translated (real) line numbers. */
+
+ protected void print_number_range (char sepchar, int a, int b) {
+ /* Note: we can have B < A in the case of a range of no lines.
+ In this case, we should print the line number before the range,
+ which is B. */
+ if (++b > ++a)
+ outfile.print("" + a + sepchar + b);
+ else
+ outfile.print(b);
+ }
+
+ public static char change_letter(int inserts, int deletes) {
+ if (inserts == 0)
+ return 'd';
+ else if (deletes == 0)
+ return 'a';
+ else
+ return 'c';
+ }
+ }
+
+ /** Print a change list in the standard diff format.
+ */
+ public static class NormalPrint extends Base {
+
+ public NormalPrint(Object[] a,Object[] b, Writer w) {
+ super(a,b,w);
+ }
+
+ /** Print a hunk of a normal diff.
+ This is a contiguous portion of a complete edit script,
+ describing changes in consecutive lines. */
+
+ protected void print_hunk (Diff.change hunk) {
+
+ /* Determine range of line numbers involved in each file. */
+ analyze_hunk(hunk);
+ if (deletes == 0 && inserts == 0)
+ return;
+
+ /* Print out the line number header for this hunk */
+ print_number_range (',', first0, last0);
+ outfile.print(change_letter(inserts, deletes));
+ print_number_range (',', first1, last1);
+ outfile.println();
+
+ /* Print the lines that the first file has. */
+ if (deletes != 0)
+ for (int i = first0; i <= last0; i++)
+ print_1_line ("< ", file0[i]);
+
+ if (inserts != 0 && deletes != 0)
+ outfile.println("---");
+
+ /* Print the lines that the second file has. */
+ if (inserts != 0)
+ for (int i = first1; i <= last1; i++)
+ print_1_line ("> ", file1[i]);
+ }
+ }
+
+ /** Prints an edit script in a format suitable for input to <code>ed</code>.
+ The edit script must be generated with the reverse option to
+ be useful as actual <code>ed</code> input.
+ */
+ public static class EdPrint extends Base {
+
+ public EdPrint(Object[] a,Object[] b, Writer w) {
+ super(a,b,w);
+ }
+
+ /** Print a hunk of an ed diff */
+ protected void print_hunk(Diff.change hunk) {
+
+ /* Determine range of line numbers involved in each file. */
+ analyze_hunk (hunk);
+ if (deletes == 0 && inserts == 0)
+ return;
+
+ /* Print out the line number header for this hunk */
+ print_number_range (',', first0, last0);
+ outfile.println(change_letter(inserts, deletes));
+
+ /* Print new/changed lines from second file, if needed */
+ if (inserts != 0)
+ {
+ boolean inserting = true;
+ for (int i = first1; i <= last1; i++)
+ {
+ /* Resume the insert, if we stopped. */
+ if (! inserting)
+ outfile.println(i - first1 + first0 + "a");
+ inserting = true;
+
+ /* If the file's line is just a dot, it would confuse `ed'.
+ So output it with a double dot, and set the flag LEADING_DOT
+ so that we will output another ed-command later
+ to change the double dot into a single dot. */
+
+ if (".".equals(file1[i]))
+ {
+ outfile.println("..");
+ outfile.println(".");
+ /* Now change that double dot to the desired single dot. */
+ outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././");
+ inserting = false;
+ }
+ else
+ /* Line is not `.', so output it unmodified. */
+ print_1_line ("", file1[i]);
+ }
+
+ /* End insert mode, if we are still in it. */
+ if (inserting)
+ outfile.println(".");
+ }
+ }
+ }
+
+ /** Prints an edit script in context diff format. This and its
+ 'unified' variation are used for source code patches.
+ */
+ public static class ContextPrint extends Base {
+
+ protected int context = 3;
+
+ public ContextPrint(Object[] a,Object[] b, Writer w) {
+ super(a,b,w);
+ }
+
+ protected void print_context_label (String mark, File inf, String label) {
+ if (label != null)
+ outfile.println(mark + ' ' + label);
+ else if (inf.lastModified() > 0)
+ // FIXME: use DateFormat to get precise format needed.
+ outfile.println(
+ mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified())
+ );
+ else
+ /* Don't pretend that standard input is ancient. */
+ outfile.println(mark + ' ' + inf.getPath());
+ }
+
+ public void print_header(String filea,String fileb) {
+ print_context_label ("***", new File(filea), filea);
+ print_context_label ("---", new File(fileb), fileb);
+ }
+
+ /** If function_regexp defined, search for start of function. */
+ private String find_function(Object[] lines, int start) {
+ return null;
+ }
+
+ protected void print_function(Object[] file,int start) {
+ String function = find_function (file0, first0);
+ if (function != null) {
+ outfile.print(" ");
+ outfile.print(
+ (function.length() < 40) ? function : function.substring(0,40)
+ );
+ }
+ }
+
+ protected void print_hunk(Diff.change hunk) {
+
+ /* Determine range of line numbers involved in each file. */
+
+ analyze_hunk (hunk);
+
+ if (deletes == 0 && inserts == 0)
+ return;
+
+ /* Include a context's width before and after. */
+
+ first0 = Math.max(first0 - context, 0);
+ first1 = Math.max(first1 - context, 0);
+ last0 = Math.min(last0 + context, file0.length - 1);
+ last1 = Math.min(last1 + context, file1.length - 1);
+
+
+ outfile.print("***************");
+
+ /* If we looked for and found a function this is part of,
+ include its name in the header of the diff section. */
+ print_function (file0, first0);
+
+ outfile.println();
+ outfile.print("*** ");
+ print_number_range (',', first0, last0);
+ outfile.println(" ****");
+
+ if (deletes != 0) {
+ Diff.change next = hunk;
+
+ for (int i = first0; i <= last0; i++) {
+ /* Skip past changes that apply (in file 0)
+ only to lines before line I. */
+
+ while (next != null && next.line0 + next.deleted <= i)
+ next = next.link;
+
+ /* Compute the marking for line I. */
+
+ String prefix = " ";
+ if (next != null && next.line0 <= i)
+ /* The change NEXT covers this line.
+ If lines were inserted here in file 1, this is "changed".
+ Otherwise it is "deleted". */
+ prefix = (next.inserted > 0) ? "!" : "-";
+
+ print_1_line (prefix, file0[i]);
+ }
+ }
+
+ outfile.print("--- ");
+ print_number_range (',', first1, last1);
+ outfile.println(" ----");
+
+ if (inserts != 0) {
+ Diff.change next = hunk;
+
+ for (int i = first1; i <= last1; i++) {
+ /* Skip past changes that apply (in file 1)
+ only to lines before line I. */
+
+ while (next != null && next.line1 + next.inserted <= i)
+ next = next.link;
+
+ /* Compute the marking for line I. */
+
+ String prefix = " ";
+ if (next != null && next.line1 <= i)
+ /* The change NEXT covers this line.
+ If lines were deleted here in file 0, this is "changed".
+ Otherwise it is "inserted". */
+ prefix = (next.deleted > 0) ? "!" : "+";
+
+ print_1_line (prefix, file1[i]);
+ }
+ }
+ }
+ }
+
+  /** Prints an edit script in unified diff format. This and the
+     'context' variation are used for source code patches.
+ */
+ public static class UnifiedPrint extends ContextPrint {
+
+ public UnifiedPrint(Object[] a,Object[] b, Writer w) {
+ super(a,b,w);
+ }
+
+ public void print_header(String filea,String fileb) {
+ print_context_label ("---", new File(filea), filea);
+ print_context_label ("+++", new File(fileb), fileb);
+ }
+
+ private void print_number_range (int a, int b) {
+ //translate_range (file, a, b, &trans_a, &trans_b);
+
+ /* Note: we can have B < A in the case of a range of no lines.
+ In this case, we should print the line number before the range,
+ which is B. */
+ if (b < a)
+ outfile.print(b + ",0");
+ else
+ super.print_number_range(',',a,b);
+ }
+
+ protected void print_hunk(Diff.change hunk) {
+ /* Determine range of line numbers involved in each file. */
+ analyze_hunk (hunk);
+
+ if (deletes == 0 && inserts == 0)
+ return;
+
+ /* Include a context's width before and after. */
+
+ first0 = Math.max(first0 - context, 0);
+ first1 = Math.max(first1 - context, 0);
+ last0 = Math.min(last0 + context, file0.length - 1);
+ last1 = Math.min(last1 + context, file1.length - 1);
+
+
+
+ outfile.print("@@ -");
+ print_number_range (first0, last0);
+ outfile.print(" +");
+ print_number_range (first1, last1);
+ outfile.print(" @@");
+
+ /* If we looked for and found a function this is part of,
+ include its name in the header of the diff section. */
+ print_function(file0,first0);
+
+ outfile.println();
+
+ Diff.change next = hunk;
+ int i = first0;
+ int j = first1;
+
+ while (i <= last0 || j <= last1) {
+
+ /* If the line isn't a difference, output the context from file 0. */
+
+ if (next == null || i < next.line0) {
+ outfile.print(' ');
+ print_1_line ("", file0[i++]);
+ j++;
+ }
+ else {
+ /* For each difference, first output the deleted part. */
+
+ int k = next.deleted;
+ while (k-- > 0) {
+ outfile.print('-');
+ print_1_line ("", file0[i++]);
+ }
+
+ /* Then output the inserted part. */
+
+ k = next.inserted;
+ while (k-- > 0) {
+ outfile.print('+');
+ print_1_line ("", file1[j++]);
+ }
+
+ /* We're done with this hunk, so on to the next! */
+
+ next = next.link;
+ }
+ }
+ }
+ }
+
+
+ /** Read a text file into an array of String. This provides basic diff
+ functionality. A more advanced diff utility will use specialized
+ objects to represent the text lines, with options to, for example,
+ convert sequences of whitespace to a single space for comparison
+ purposes.
+ */
+ static String[] slurp(String file) throws IOException {
+ BufferedReader rdr = new BufferedReader(new FileReader(file));
+ Vector s = new Vector();
+ for (;;) {
+ String line = rdr.readLine();
+ if (line == null) break;
+ s.addElement(line);
+ }
+ String[] a = new String[s.size()];
+ s.copyInto(a);
+ return a;
+ }
+
+ public static void main(String[] argv) throws IOException {
+ String filea = argv[argv.length - 2];
+ String fileb = argv[argv.length - 1];
+ String[] a = slurp(filea);
+ String[] b = slurp(fileb);
+ Diff d = new Diff(a,b);
+ char style = 'n';
+ for (int i = 0; i < argv.length - 2; ++i) {
+ String f = argv[i];
+ if (f.startsWith("-")) {
+ for (int j = 1; j < f.length(); ++j) {
+ switch (f.charAt(j)) {
+ case 'e': // Ed style
+ style = 'e'; break;
+ case 'c': // Context diff
+ style = 'c'; break;
+ case 'u':
+ style = 'u'; break;
+ }
+ }
+ }
+ }
+ boolean reverse = style == 'e';
+ Diff.change script = d.diff_2(reverse);
+ if (script == null)
+ System.err.println("No differences");
+ else {
+ Base p;
+ Writer w = new OutputStreamWriter(System.out);
+ switch (style) {
+ case 'e':
+ p = new EdPrint(a,b,w); break;
+ case 'c':
+ p = new ContextPrint(a,b,w); break;
+ case 'u':
+ p = new UnifiedPrint(a,b,w); break;
+ default:
+ p = new NormalPrint(a,b,w);
+ }
+ p.print_header(filea,fileb);
+ p.print_script(script);
+ }
+ }
+
+ public static void doDiff(String[] argv, Writer w) throws IOException {
+ String filea = argv[argv.length - 2];
+ String fileb = argv[argv.length - 1];
+ String[] a = slurp(filea);
+ String[] b = slurp(fileb);
+ Diff d = new Diff(a,b);
+ char style = 'n';
+ for (int i = 0; i < argv.length - 2; ++i) {
+ String f = argv[i];
+ if (f.startsWith("-")) {
+ for (int j = 1; j < f.length(); ++j) {
+ switch (f.charAt(j)) {
+ case 'e': // Ed style
+ style = 'e'; break;
+ case 'c': // Context diff
+ style = 'c'; break;
+ case 'u':
+ style = 'u'; break;
+ }
+ }
+ }
+ }
+ boolean reverse = style == 'e';
+ Diff.change script = d.diff_2(reverse);
+ if (script == null)
+ w.write("No differences\n");
+ else {
+ Base p;
+ switch (style) {
+ case 'e':
+ p = new EdPrint(a,b,w); break;
+ case 'c':
+ p = new ContextPrint(a,b,w); break;
+ case 'u':
+ p = new UnifiedPrint(a,b,w); break;
+ default:
+ p = new NormalPrint(a,b,w);
+ }
+ p.print_header(filea,fileb);
+ p.print_script(script);
+ }
+ }
+
+}
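
A minimal sketch of driving the printers above from Scala (it assumes the class lives at scala.tools.partest.io.DiffPrint, as the diffFiles helper further below expects; the paths are hypothetical). The "-u" flag selects UnifiedPrint, "-c" ContextPrint, "-e" EdPrint, and NormalPrint is the default:

  import java.io.StringWriter

  object DiffPrintExample {
    // Produce a unified diff of two files as a String.
    def unifiedDiff(path1: String, path2: String): String = {
      val out = new StringWriter
      scala.tools.partest.io.DiffPrint.doDiff(Array("-u", path1, path2), out)
      out.toString   // "No differences\n" when the files match
    }
  }
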
diff --git a/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala b/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala
new file mode 100644
index 0000000000..63ae200020
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala
@@ -0,0 +1,38 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+package io
+
+/** This is disabled for the moment but I can fix it up if anyone
+ * is using it.
+ */
+class JUnitReport {
+ // create JUnit Report xml files if directory was specified
+ // def junitReport(dir: Directory) = {
+ // dir.mkdir()
+ // val report = testReport(set.kind, results, succs, fails)
+ // XML.save("%s/%s.xml".format(d.toAbsolute.path, set.kind), report)
+ // }
+
+ // def oneResult(res: (TestEntity, Int)) =
+ // <testcase name={res._1.path}>{
+ // res._2 match {
+ // case 0 => scala.xml.NodeSeq.Empty
+ // case 1 => <failure message="Test failed"/>
+ // case 2 => <failure message="Test timed out"/>
+ // }
+ // }</testcase>
+ //
+ // def testReport(kind: String, results: Iterable[(TestEntity, Int)], succs: Int, fails: Int) = {
+ // <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
+ // <properties/>
+ // {
+ // results.map(oneResult(_))
+ // }
+ // </testsuite>
+ // }
+ //
+} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/io/Logging.scala b/src/partest-alternative/scala/tools/partest/io/Logging.scala
new file mode 100644
index 0000000000..52239ffb2c
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/io/Logging.scala
@@ -0,0 +1,137 @@
+package scala.tools
+package partest
+package io
+
+import java.io.{ StringWriter, PrintWriter, Writer }
+import scala.tools.nsc.io._
+import scala.util.control.ControlThrowable
+
+trait Logging {
+ universe: Universe =>
+
+ class PartestANSIWriter extends ANSIWriter(Console.out) {
+ override def colorful: Int = ANSIWriter(universe.isAnsi)
+ private def printIf(cond: Boolean, msg: String) =
+ if (cond) { outline("debug: ") ; println(msg) }
+
+ val verbose = printIf(isVerbose || isDebug, _: String)
+ val debug = printIf(isDebug, _: String)
+ }
+
+ lazy val NestUI = new PartestANSIWriter()
+
+ import NestUI.{ _outline, _success, _failure, _warning, _default }
+
+ def markOutline(msg: String) = _outline + msg + _default
+ def markSuccess(msg: String) = _success + msg + _default
+ def markFailure(msg: String) = _failure + msg + _default
+ def markWarning(msg: String) = _warning + msg + _default
+ def markNormal(msg: String) = _default + msg
+
+ def outline(msg: String) = NestUI outline msg
+ def success(msg: String) = NestUI success msg
+ def failure(msg: String) = NestUI failure msg
+ def warning(msg: String) = NestUI warning msg
+ def normal(msg: String) = NestUI normal msg
+
+ def verbose(msg: String) = NestUI verbose msg
+ def debug(msg: String) = NestUI debug msg
+
+ trait EntityLogging {
+ self: TestEntity =>
+
+ lazy val logWriter = new LogWriter(logFile)
+
+ /** Redirect stdout and stderr to logFile, run body, return result.
+ */
+ def loggingOutAndErr[T](body: => T): T = {
+ val log = logFile.printStream(append = true)
+
+ try Console.withOut(log) {
+ Console.withErr(log) {
+ body
+ }
+ }
+ finally log.close()
+ }
+
+ /** What to print in a failure summary.
+ */
+ def failureMessage() = if (diffOutput != "") diffOutput else safeSlurp(logFile)
+
+ /** For tracing. Outputs a line describing the next action. tracePath
+ * is a path wrapper which prints name or full path depending on verbosity.
+ */
+ def trace(msg: String) = if (isTrace || isDryRun) System.err.println(">> [%s] %s".format(label, msg))
+
+ def tracePath(path: Path): String = if (isVerbose) path.path else path.name
+ def tracePath(path: String): String = tracePath(Path(path))
+
+ /** v == verbose.
+ */
+ def vtrace(msg: String) = if (isVerbose) trace(msg)
+
+    /** Runs body and writes its result to logFile. Any throwable is
+     *  caught, stringified, and written to the log.
+     */
+ def loggingResult(body: => String) =
+ try returning(true)(_ => logFile writeAll body)
+ catch {
+ case x: ControlThrowable => throw x
+ case x: InterruptedException => debug(this + " received interrupt, failing.\n") ; false
+ case x: Throwable => logException(x)
+ }
+
+ def throwableToString(x: Throwable): String = {
+ val w = new StringWriter
+ x.printStackTrace(new PrintWriter(w))
+ w.toString
+ }
+
+ def warnAndLog(str: String) = {
+ warning(toStringTrunc(str, 800))
+ logWriter append str
+ }
+
+ def warnAndLogException(msg: String, ex: Throwable) =
+ warnAndLog(msg + throwableToString(ex))
+
+ def deleteLog(force: Boolean = false) =
+ if (universe.isNoCleanup && !force) debug("Not cleaning up " + logFile)
+ else logFile.deleteIfExists()
+
+ def onException(x: Throwable) { logException(x) }
+ def logException(x: Throwable) = {
+ val msg = throwableToString(x)
+ if (!isTerse)
+ normal(msg)
+
+ logWriter append msg
+ false
+ }
+ }
+
+ /** A writer which doesn't create the file until a write comes in.
+ */
+ class LazilyCreatedWriter(log: File) extends Writer {
+ @volatile private var isCreated = false
+ private lazy val underlying = {
+ isCreated = true
+ log.bufferedWriter()
+ }
+
+ def flush() = if (isCreated) underlying.flush()
+ def close() = if (isCreated) underlying.close()
+ def write(chars: Array[Char], off: Int, len: Int) = {
+ underlying.write(chars, off, len)
+ underlying.flush()
+ }
+ }
+
+ class LogWriter(log: File) extends PrintWriter(new LazilyCreatedWriter(log), true) {
+ override def print(s: String) = {
+ super.print(s)
+ flush()
+ }
+ }
+} \ No newline at end of file
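
The LazilyCreatedWriter/LogWriter pair above means a test's log file is only created once something is actually printed. A minimal sketch, written as if inside a Universe that mixes in Logging; the path is hypothetical:

  import scala.tools.nsc.io.Path

  val log = new LogWriter(Path("/tmp/example-test.log").toFile)
  // nothing exists on disk yet: the underlying buffered writer is created lazily
  log.print("compiling Example.scala\n")   // first write creates the file and flushes
  log.close()
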
diff --git a/src/partest-alternative/scala/tools/partest/nest/StreamAppender.scala b/src/partest-alternative/scala/tools/partest/nest/StreamAppender.scala
new file mode 100644
index 0000000000..8cebcf1685
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/nest/StreamAppender.scala
@@ -0,0 +1,94 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+// $Id$
+
+package scala.tools.partest
+package nest
+
+import java.io._
+
+object StreamAppender {
+ def wrapIn(in: InputStream): BufferedReader = new BufferedReader(new InputStreamReader(in))
+ def wrapIn(reader: Reader): BufferedReader = new BufferedReader(reader)
+ def wrapIn(str: String): BufferedReader = new BufferedReader(new StringReader(str))
+
+ def wrapOut(out: OutputStream): PrintWriter = new PrintWriter(new OutputStreamWriter(out), true)
+ def wrapOut(writer: Writer): PrintWriter = new PrintWriter(writer, true)
+ def wrapOut(): PrintWriter = wrapOut(new StringWriter)
+
+ def apply(reader: BufferedReader, writer: Writer): StreamAppender =
+ new StreamAppender(reader, wrapOut(writer))
+
+ def apply(reader: Reader, writer: Writer): StreamAppender =
+ apply(wrapIn(reader), writer)
+
+ def apply(in: InputStream, writer: Writer): StreamAppender =
+ apply(wrapIn(in), writer)
+
+ def apply(str: String, writer: Writer): StreamAppender =
+ apply(wrapIn(str), writer)
+
+ def apply(in: File, out: File): StreamAppender =
+ apply(new FileReader(in), new FileWriter(out))
+
+ def appendToString(in1: InputStream, in2: InputStream): String = {
+ val swriter1 = new StringWriter
+ val swriter2 = new StringWriter
+ val app1 = StreamAppender(wrapIn(in1), swriter1)
+ val app2 = StreamAppender(wrapIn(in2), swriter2)
+
+ val async = new Thread(app2)
+ async.start()
+ app1.run()
+ async.join()
+ swriter1.toString + swriter2.toString
+ }
+/*
+ private def inParallel(t1: Runnable, t2: Runnable, t3: Runnable) {
+ val thr1 = new Thread(t1)
+ val thr2 = new Thread(t2)
+ thr1.start()
+ thr2.start()
+ t3.run()
+ thr1.join()
+ thr2.join()
+ }
+*/
+ private def inParallel(t1: Runnable, t2: Runnable) {
+ val thr = new Thread(t2)
+ thr.start()
+ t1.run()
+ thr.join()
+ }
+
+ def concat(in: InputStream, err: InputStream, out: OutputStream) = new Runnable {
+ override def run() {
+ val outWriter = wrapOut(out)
+ val inApp = StreamAppender(in, outWriter)
+
+ val errStringWriter = new StringWriter
+ val errApp = StreamAppender(wrapIn(err), errStringWriter)
+
+ inParallel(inApp, errApp)
+
+ // append error string to out
+ StreamAppender(errStringWriter.toString, outWriter).run()
+ }
+ }
+}
+
+class StreamAppender(reader: BufferedReader, writer: PrintWriter) extends Runnable {
+ override def run() = runAndMap(identity)
+ private def lines() = Iterator continually reader.readLine() takeWhile (_ != null)
+ def closeAll() = {
+ reader.close()
+ writer.close()
+ }
+
+ def runAndMap(f: String => String) =
+ try lines() map f foreach (writer println _)
+ catch { case e: IOException => e.printStackTrace() }
+}
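
A short sketch of the appender above: it pumps a reader into a writer line by line, which is how partest merges a child process's output streams. Here the input is just a literal string:

  import java.io.StringWriter

  val out = new StringWriter
  StreamAppender("line one\nline two", out).run()
  // out.toString now holds both lines, each re-terminated by println
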
diff --git a/src/partest-alternative/scala/tools/partest/package.scala b/src/partest-alternative/scala/tools/partest/package.scala
new file mode 100644
index 0000000000..f6d216e379
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/package.scala
@@ -0,0 +1,45 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+
+import nsc.io.{ File, Path, Process, Directory }
+import java.nio.charset.CharacterCodingException
+
+package object partest {
+  /** CharacterCodingExceptions are thrown, at least on Windows, when trying
+   *  to read a file like script/utf-8.scala.
+   */
+ private[partest] def safeSlurp(f: File) =
+ try if (f.exists) f.slurp() else ""
+ catch { case _: CharacterCodingException => "" }
+
+ private[partest] def safeLines(f: File) = safeSlurp(f) split """\r\n|\r|\n""" toList
+ private[partest] def safeArgs(f: File) = toArgs(safeSlurp(f))
+ private[partest] def isJava(f: Path) = f.isFile && (f hasExtension "java")
+ private[partest] def isScala(f: Path) = f.isFile && (f hasExtension "scala")
+ private[partest] def isJavaOrScala(f: Path) = isJava(f) || isScala(f)
+
+ private[partest] def toArgs(line: String) = cmd toArgs line
+ private[partest] def fromArgs(args: List[String]) = cmd fromArgs args
+
+ /** Strings, argument lists, etc. */
+
+ private[partest] def fromAnyArgs(args: List[Any]) = args mkString " " // separate to avoid accidents
+ private[partest] def toStringTrunc(x: Any, max: Int = 240) = {
+ val s = x.toString
+ if (s.length < max) s
+ else (s take max) + " [...]"
+ }
+ private[partest] def setProp(k: String, v: String) = scala.util.Properties.setProp(k, v)
+
+  /** Print the message and exit with error code 1. */
+ def printAndExit(msg: String): Unit = {
+ println(msg)
+ exit(1)
+ }
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+} \ No newline at end of file
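
A small sketch of the two general-purpose helpers above, assuming package-internal use since toStringTrunc is private[partest]; the values in the comments follow directly from the definitions:

  returning(new StringBuilder)(_ append "partest")   // the same StringBuilder, with "partest" appended
  toStringTrunc("x" * 500)                           // first 240 characters followed by " [...]"
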
diff --git a/src/partest-alternative/scala/tools/partest/util/package.scala b/src/partest-alternative/scala/tools/partest/util/package.scala
new file mode 100644
index 0000000000..bc5470ba5d
--- /dev/null
+++ b/src/partest-alternative/scala/tools/partest/util/package.scala
@@ -0,0 +1,61 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import java.util.{ Timer, TimerTask }
+import java.io.StringWriter
+import nsc.io._
+
+/** Misc code still looking for a good home.
+ */
+package object util {
+
+ def allPropertiesString() = javaHashtableToString(System.getProperties)
+
+ private def javaHashtableToString(table: java.util.Hashtable[_,_]) = {
+ import collection.JavaConversions._
+ (table.toList map { case (k, v) => "%s -> %s\n".format(k, v) }).sorted mkString
+ }
+
+ def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
+ fs flatMap (x => Option(AbstractFile getFile (Path(pre) / x).path)) toSet
+
+ /** Copies one Path to another Path, trying to be sensible when one or the
+ * other is a Directory. Returns true if it believes it succeeded.
+ */
+ def copyPath(from: Path, to: Path): Boolean = {
+ if (!to.parent.isDirectory)
+ to.parent.createDirectory(force = true)
+
+ def copyDir = {
+ val sub = to / from.name createDirectory true
+ from.toDirectory.list forall (x => copyPath(x, sub))
+ }
+ (from.isDirectory, to.isDirectory) match {
+ case (true, true) => copyDir
+ case (true, false) => false
+ case (false, true) => from.toFile copyTo (to / from.name)
+ case (false, false) => from.toFile copyTo to
+ }
+ }
+
+ /**
+ * Compares two files using a Java implementation of the GNU diff
+ * available at http://www.bmsi.com/java/#diff.
+ *
+ * @param f1 the first file to be compared
+ * @param f2 the second file to be compared
+ * @return the text difference between the compared files
+ */
+ def diffFiles(f1: File, f2: File): String = {
+ val diffWriter = new StringWriter
+ val args = Array(f1.toAbsolute.path, f2.toAbsolute.path)
+
+ io.DiffPrint.doDiff(args, diffWriter)
+ val result = diffWriter.toString
+ if (result == "No differences") "" else result
+ }
+}
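
A short sketch of copyPath's case analysis, using hypothetical paths from within scala.tools.partest.util; note that a directory source with a plain-file or missing destination is reported as a failure rather than created:

  import scala.tools.nsc.io.Path

  copyPath(Path("build/pack/lib/scala-library.jar"), Path("dists/latest/lib"))
  //   file -> existing directory: the jar is copied into it, true on success
  copyPath(Path("test/files"), Path("no-such-dir"))
  //   directory -> non-directory destination: returns false
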
diff --git a/src/partest/README b/src/partest/README
index 430a2987f8..81876fc810 100644
--- a/src/partest/README
+++ b/src/partest/README
@@ -1,11 +1,11 @@
 How partest chooses the compiler / library:
- * ''-Dscalatest.build=build/four-pack'' -> will search for libraries in
+ * ''-Dpartest.build=build/four-pack'' -> will search for libraries in
''lib'' directory of given path
- * ''--pack'' -> will set ''scalatest.build=build/pack'', and run all tests.
+ * ''--pack'' -> will set ''partest.build=build/pack'', and run all tests.
add ''--[kind]'' to run a selected set of tests.
* auto detection:
- - scalatest.build property -> ''bin'' / ''lib'' directories
+ - partest.build property -> ''bin'' / ''lib'' directories
- distribution (''dists/latest'')
- supersabbus pack (''build/pack'')
- sabbus quick (''build/quick'')
@@ -24,8 +24,9 @@ Other arguments:
* --run next files test the interpreter and all backends
* --jvm next files test the JVM backend
* --res next files test the resident compiler
+ * --buildmanager next files test the build manager
* --shootout next files are shootout tests
* --script next files test the script runner
- * ''-Dscalatest.scalac_opts=...'' -> add compiler options
+ * ''-Dpartest.scalac_opts=...'' -> add compiler options
* ''--verbose'' -> print verbose messages
* ''-Dpartest.debug=true'' -> print debug messages
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
new file mode 100644
index 0000000000..139c54dedd
--- /dev/null
+++ b/src/partest/scala/tools/partest/PartestDefaults.scala
@@ -0,0 +1,30 @@
+package scala.tools
+package partest
+
+import nsc.io.{ File, Path, Process, Directory }
+import util.{ PathResolver }
+import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
+
+object PartestDefaults {
+ import nsc.Properties._
+ private def wrapAccessControl[T](body: => Option[T]): Option[T] =
+ try body catch { case _: java.security.AccessControlException => None }
+
+ def testRootName = propOrNone("partest.root")
+ def srcDirName = propOrElse("partest.srcdir", "files")
+ def testRootDir = testRootName map (x => Directory(x))
+
+ def classPath = PathResolver.Environment.javaUserClassPath // XXX
+
+ def javaCmd = propOrElse("partest.javacmd", "java")
+ def javacCmd = propOrElse("partest.javac_cmd", "javac")
+ def javaOpts = propOrElse("partest.java_opts", "")
+ def scalacOpts = propOrElse("partest.scalac_opts", "-deprecation")
+
+ def testBuild = propOrNone("partest.build")
+ def errorCount = propOrElse("partest.errors", "0").toInt
+ def numActors = propOrElse("partest.actors", "8").toInt
+ def poolSize = wrapAccessControl(propOrNone("actors.corePoolSize"))
+
+ def timeout = "1200000"
+}
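
The defaults above are plain system-property lookups evaluated on each call, so they can be overridden before partest reads them, either with -D on the command line or programmatically; a minimal sketch:

  scala.util.Properties.setProp("partest.srcdir", "pending")
  PartestDefaults.srcDirName   // now "pending" instead of the default "files"
  PartestDefaults.numActors    // 8 unless -Dpartest.actors overrides it
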
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 28dd00b408..230a6f73ec 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,17 +8,25 @@
// $Id$
-package scala.tools.partest
+package scala.tools
+package partest
import scala.actors.Actor._
+import scala.util.Properties.setProp
+import scala.tools.nsc.io.{ Directory, Path => SPath }
+import nsc.Settings
+import nsc.util.ClassPath
+import util.PathResolver
+import scala.tools.ant.sabbus.CompilationPathProperty
import java.io.File
import java.net.URLClassLoader
+import java.lang.reflect.Method
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference, FileSet}
-class PartestTask extends Task {
+class PartestTask extends Task with CompilationPathProperty {
def addConfiguredPosTests(input: FileSet) {
posFiles = Some(input)
@@ -40,6 +48,14 @@ class PartestTask extends Task {
residentFiles = Some(input)
}
+ def addConfiguredBuildManagerTests(input: FileSet) {
+ buildManagerFiles = Some(input)
+ }
+
+ def addConfiguredScalacheckTests(input: FileSet) {
+ scalacheckFiles = Some(input)
+ }
+
def addConfiguredScriptTests(input: FileSet) {
scriptFiles = Some(input)
}
@@ -52,6 +68,10 @@ class PartestTask extends Task {
scalapFiles = Some(input)
}
+ def setSrcDir(input: String) {
+ srcDir = Some(input)
+ }
+
def setClasspath(input: Path) {
if (classpath.isEmpty)
classpath = Some(input)
@@ -100,7 +120,12 @@ class PartestTask extends Task {
debug = input
}
+ def setJUnitReportDir(input: File) {
+ jUnitReportDir = Some(input)
+ }
+
private var classpath: Option[Path] = None
+ private var srcDir: Option[String] = None
private var javacmd: Option[File] = None
private var javaccmd: Option[File] = None
private var showDiff: Boolean = false
@@ -111,107 +136,82 @@ class PartestTask extends Task {
private var runFiles: Option[FileSet] = None
private var jvmFiles: Option[FileSet] = None
private var residentFiles: Option[FileSet] = None
+ private var buildManagerFiles: Option[FileSet] = None
+ private var scalacheckFiles: Option[FileSet] = None
private var scriptFiles: Option[FileSet] = None
private var shootoutFiles: Option[FileSet] = None
private var scalapFiles: Option[FileSet] = None
private var errorOnFailed: Boolean = false
private var scalacOpts: Option[String] = None
private var timeout: Option[String] = None
+ private var jUnitReportDir: Option[File] = None
private var debug = false
- private def getFiles(fileSet: Option[FileSet]): Array[File] =
- if (fileSet.isEmpty) Array()
- else {
- val files = fileSet.get
- files.getDirectoryScanner(getProject).getIncludedFiles map {
- fs => new File(files.getDir(getProject), fs)
- }
- }
+ def fileSetToDir(fs: FileSet) = Directory(fs getDir getProject)
+ def fileSetToArray(fs: FileSet): Array[SPath] = {
+ val root = fileSetToDir(fs)
+ (fs getDirectoryScanner getProject).getIncludedFiles map (root / _)
+ }
- private def getFilesAndDirs(fileSet: Option[FileSet]): Array[File] =
- if (!fileSet.isEmpty) {
- val files = fileSet.get
- val fileTests = getFiles(fileSet)
- val dir = files.getDir(getProject)
- val dirTests = dir.listFiles(new java.io.FileFilter {
- def accept(file: File) =
- file.isDirectory &&
- (!file.getName().equals(".svn")) &&
- (!file.getName().endsWith(".obj"))
- })
- (dirTests ++ fileTests).toArray
- }
- else
- Array()
+ private def getFiles(fileSet: Option[FileSet]): Array[File] = fileSet match {
+ case None => Array()
+ case Some(fs) => fileSetToArray(fs) filterNot (_ hasExtension "log") map (_.jfile)
+ }
+
+ private def getFilesAndDirs(fileSet: Option[FileSet]): Array[File] = fileSet match {
+ case None => Array()
+ case Some(fs) =>
+ def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".")
+
+ val fileTests = getFiles(Some(fs)) filterNot (x => shouldExclude(x.getName))
+ val dirTests: Iterator[SPath] = fileSetToDir(fs).dirs filterNot (x => shouldExclude(x.name))
+ val dirResult = dirTests.toList.toArray map (_.jfile)
+
+ dirResult ++ fileTests
+ }
- private def getPosFiles = getFilesAndDirs(posFiles)
- private def getNegFiles = getFiles(negFiles)
- private def getRunFiles = getFiles(runFiles)
- private def getJvmFiles = getFilesAndDirs(jvmFiles)
- private def getResidentFiles = getFiles(residentFiles)
- private def getScriptFiles = getFiles(scriptFiles)
- private def getShootoutFiles = getFiles(shootoutFiles)
- private def getScalapFiles = getFiles(scalapFiles)
+ private def getPosFiles = getFilesAndDirs(posFiles)
+ private def getNegFiles = getFilesAndDirs(negFiles)
+ private def getRunFiles = getFiles(runFiles)
+ private def getJvmFiles = getFilesAndDirs(jvmFiles)
+ private def getResidentFiles = getFiles(residentFiles)
+ private def getBuildManagerFiles = getFilesAndDirs(buildManagerFiles)
+ private def getScalacheckFiles = getFiles(scalacheckFiles)
+ private def getScriptFiles = getFiles(scriptFiles)
+ private def getShootoutFiles = getFiles(shootoutFiles)
+ private def getScalapFiles = getFiles(scalapFiles)
override def execute() {
- if (debug)
- System.setProperty("partest.debug", "true")
+ if (isPartestDebug)
+ setProp("partest.debug", "true")
- if (classpath.isEmpty)
- error("Mandatory attribute 'classpath' is not set.")
+ srcDir foreach (x => setProp("partest.srcdir", x))
+
+ val classpath = this.compilationPath getOrElse error("Mandatory attribute 'compilationPath' is not set.")
- val scalaLibrary =
- (classpath.get.list map { fs => new File(fs) }) find { f =>
+ val scalaLibrary = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
f.getName match {
case "scala-library.jar" => true
case "library" if (f.getParentFile.getName == "classes") => true
case _ => false
}
}
+ } getOrElse error("Provided classpath does not contain a Scala library.")
- if (scalaLibrary.isEmpty)
- error("Provided classpath does not contain a Scala library.")
+ val antRunner = new scala.tools.partest.nest.AntRunner
+ val antFileManager = antRunner.fileManager
- val classloader = this.getClass.getClassLoader
+ antFileManager.showDiff = showDiff
+ antFileManager.showLog = showLog
+ antFileManager.failed = runFailed
+ antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
+ antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
- val antRunner: AnyRef =
- classloader.loadClass("scala.tools.partest.nest.AntRunner").newInstance().asInstanceOf[AnyRef]
- val antFileManager: AnyRef =
- antRunner.getClass.getMethod("fileManager", Array[Class[_]](): _*).invoke(antRunner, Array[Object](): _*)
-
- val runMethod =
- antRunner.getClass.getMethod("reflectiveRunTestsForFiles", Array(classOf[Array[File]], classOf[String]): _*)
-
- def runTestsForFiles(kindFiles: Array[File], kind: String): (Int, Int) = {
- val result = runMethod.invoke(antRunner, Array(kindFiles, kind): _*).asInstanceOf[Int]
- (result >> 16, result & 0x00FF)
- }
-
- def setFileManagerBooleanProperty(name: String, value: Boolean) {
- val setMethod =
- antFileManager.getClass.getMethod(name+"_$eq", Array(classOf[Boolean]): _*)
- setMethod.invoke(antFileManager, Array(java.lang.Boolean.valueOf(value)).asInstanceOf[Array[Object]]: _*)
- }
-
- def setFileManagerStringProperty(name: String, value: String) {
- val setMethod =
- antFileManager.getClass.getMethod(name+"_$eq", Array(classOf[String]): _*)
- setMethod.invoke(antFileManager, Array(value).asInstanceOf[Array[Object]]: _*)
- }
-
- setFileManagerBooleanProperty("showDiff", showDiff)
- setFileManagerBooleanProperty("showLog", showLog)
- setFileManagerBooleanProperty("failed", runFailed)
- if (!javacmd.isEmpty)
- setFileManagerStringProperty("JAVACMD", javacmd.get.getAbsolutePath)
- if (!javaccmd.isEmpty)
- setFileManagerStringProperty("JAVAC_CMD", javaccmd.get.getAbsolutePath)
- setFileManagerStringProperty("CLASSPATH", classpath.get.list.mkString(File.pathSeparator))
- setFileManagerStringProperty("LATEST_LIB", scalaLibrary.get.getAbsolutePath)
- if (!scalacOpts.isEmpty)
- setFileManagerStringProperty("SCALAC_OPTS", scalacOpts.get)
- if (!timeout.isEmpty)
- setFileManagerStringProperty("timeout", timeout.get)
+ javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
+ javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
+ scalacOpts foreach (antFileManager.SCALAC_OPTS = _)
+ timeout foreach (antFileManager.timeout = _)
type TFSet = (Array[File], String, String)
val testFileSets = List(
@@ -220,30 +220,68 @@ class PartestTask extends Task {
(getRunFiles, "run", "Compiling and running files"),
(getJvmFiles, "jvm", "Compiling and running files"),
(getResidentFiles, "res", "Running resident compiler scenarii"),
+ (getBuildManagerFiles, "buildmanager", "Running Build Manager scenarii"),
+ (getScalacheckFiles, "scalacheck", "Running scalacheck tests"),
(getScriptFiles, "script", "Running script files"),
(getShootoutFiles, "shootout", "Running shootout tests"),
(getScalapFiles, "scalap", "Running scalap tests")
)
- def runSet(set: TFSet): (Int, Int) = {
+ def runSet(set: TFSet): (Int, Int, Iterable[String]) = {
val (files, name, msg) = set
- if (files.isEmpty) (0, 0)
+ if (files.isEmpty) (0, 0, List())
else {
log(msg)
- runTestsForFiles(files, name)
+ val results: Iterable[(String, Int)] = antRunner.reflectiveRunTestsForFiles(files, name)
+ val (succs, fails) = resultsToStatistics(results)
+
+ val failed: Iterable[String] = results collect {
+ case (path, 1) => path + " [FAILED]"
+        case (path, 2) => path + " [TIMEOUT]"
+ }
+
+ // create JUnit Report xml files if directory was specified
+ jUnitReportDir foreach { d =>
+ d.mkdir
+
+ val report = testReport(name, results, succs, fails)
+ scala.xml.XML.save(d.getAbsolutePath+"/"+name+".xml", report)
+ }
+
+ (succs, fails, failed)
}
}
- val (allSuccesses, allFailures): (Int, Int) =
- (testFileSets map runSet).foldLeft((0, 0))((sums, x) => (sums._1 + x._1, sums._2 + x._2))
+ val _results = testFileSets map runSet
+ val allSuccesses = _results map (_._1) sum
+ val allFailures = _results map (_._2) sum
+ val allFailedPaths = _results flatMap (_._3)
def f = if (errorOnFailed && allFailures > 0) error(_) else log(_: String)
def s = if (allFailures > 1) "s" else ""
val msg =
- if (allFailures > 0) "Test suite finished with %d case%s failing.".format(allFailures, s)
+ if (allFailures > 0)
+ "Test suite finished with %d case%s failing:\n".format(allFailures, s)+
+ allFailedPaths.mkString("\n")
else if (allSuccesses == 0) "There were no tests to run."
else "Test suite finished with no failures."
f(msg)
}
+ def oneResult(res: (String, Int)) =
+ <testcase name={res._1}>{
+ res._2 match {
+ case 0 => scala.xml.NodeSeq.Empty
+ case 1 => <failure message="Test failed"/>
+ case 2 => <failure message="Test timed out"/>
+ }
+ }</testcase>
+
+ def testReport(kind: String, results: Iterable[(String, Int)], succs: Int, fails: Int) =
+ <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
+ <properties/>
+ {
+ results.map(oneResult(_))
+ }
+ </testsuite>
}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index 62bc4af75f..6c3dbb876e 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -1,17 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.partest
package nest
import java.io.File
+import scala.tools.nsc.io.{ Directory }
class AntRunner extends DirectRunner {
@@ -19,14 +19,11 @@ class AntRunner extends DirectRunner {
var JAVACMD: String = "java"
var JAVAC_CMD: String = "javac"
var CLASSPATH: String = _
- var EXT_CLASSPATH: String = _
var LATEST_LIB: String = _
- val TESTROOT: String = ""
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): Int = {
- val (succs, fails) = runTestsForFiles(kindFiles.toList, kind)
- succs << 16 | fails
+ val testRootPath: String = "test"
+ val testRootDir: Directory = Directory(testRootPath)
}
+ def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String) =
+ runTestsForFiles(kindFiles.toList, kind)
}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index 993771bcdc..22568ad2d0 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,170 +8,149 @@
package scala.tools.partest
package nest
-import scala.tools.nsc.{Global, Settings, CompilerCommand, FatalError}
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
+import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
+import scala.tools.nsc.util.ClassPath
+import scala.tools.util.PathResolver
+import io.Path
-import java.io.{File, BufferedReader, PrintWriter, FileReader, FileWriter, StringWriter}
+import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
+import File.pathSeparator
-class ExtConsoleReporter(override val settings: Settings, reader: BufferedReader, var writer: PrintWriter) extends ConsoleReporter(settings, reader, writer) {
- def this(settings: Settings) = {
- this(settings, Console.in, new PrintWriter(new FileWriter("/dev/null")))
- }
- def hasWarnings: Boolean = WARNING.count != 0
+class ExtConsoleReporter(override val settings: Settings, reader: BufferedReader, var writer: PrintWriter)
+extends ConsoleReporter(settings, reader, writer) {
+ def this(settings: Settings) = this(settings, Console.in, new PrintWriter(new FileWriter("/dev/null")))
}
abstract class SimpleCompiler {
def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean
}
-class TestSettings(fileMan: FileManager) extends {
- override val bootclasspathDefault =
- System.getProperty("sun.boot.class.path", "") + File.pathSeparator +
- fileMan.LATEST_LIB
- override val extdirsDefault =
- System.getProperty("java.ext.dirs", "")
-} with Settings(x => ())
+class TestSettings(fileMan: FileManager) extends Settings(_ => ()) { }
class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
def newGlobal(settings: Settings, reporter: Reporter): Global =
new Global(settings, reporter)
def newGlobal(settings: Settings, logWriter: FileWriter): Global = {
- val rep = new ExtConsoleReporter(settings,
- Console.in,
- new PrintWriter(logWriter))
+ val rep = newReporter(settings, logWriter)
rep.shortname = true
newGlobal(settings, rep)
}
- def newSettings = {
+ def newSettings(out: Option[String]) = {
val settings = new TestSettings(fileManager)
+ settings.usejavacp.value = true
settings.deprecation.value = true
settings.nowarnings.value = false
- settings.encoding.value = "iso-8859-1"
+ settings.encoding.value = "ISO-8859-1" // XXX why?
+
+ val classpathElements = settings.classpath.value :: fileManager.LATEST_LIB :: out.toList
+ settings.classpath.value = ClassPath.join(classpathElements: _*)
+ out foreach (settings.outdir.value = _)
+
settings
}
- def newReporter(sett: Settings) = new ExtConsoleReporter(sett,
- Console.in,
- new PrintWriter(new StringWriter))
+ def newReporter(sett: Settings, writer: Writer = new StringWriter) =
+ new ExtConsoleReporter(sett, Console.in, new PrintWriter(writer))
private def updatePluginPath(options: String): String = {
- val (opt1, opt2) =
- (options split "\\s").toList partition (_ startsWith "-Xplugin:")
- (opt2 mkString " ")+(
- if (opt1.isEmpty) ""
- else {
- def absolutize(path: String): List[String] = {
- val args = (path substring 9 split File.pathSeparator).toList
- val plugins = args map (arg =>
- if (new File(arg).isAbsolute) arg
- else fileManager.TESTROOT+File.separator+arg
- )
- plugins
- }
- " -Xplugin:"+((opt1 flatMap absolutize) mkString File.pathSeparator)
- }
- )
+ val dir = fileManager.testRootDir
+ def absolutize(path: String) = Path(path) match {
+ case x if x.isAbsolute => x.path
+ case x => (fileManager.testRootDir / x).toAbsolute.path
+ }
+
+ val (opt1, opt2) = (options split "\\s").toList partition (_ startsWith "-Xplugin:")
+ val plugins = opt1 map (_ stripPrefix "-Xplugin:") flatMap (_ split pathSeparator) map absolutize
+ val pluginOption = if (opt1.isEmpty) Nil else List("-Xplugin:" + (plugins mkString pathSeparator))
+
+ (opt2 ::: pluginOption) mkString " "
}
def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
- val testSettings = newSettings
+ val testSettings = newSettings(out map (_.getAbsolutePath))
val logWriter = new FileWriter(log)
// check whether there is a ".flags" file
- val testBase = {
- val logBase = fileManager.basename(log.getName)
- logBase.substring(0, logBase.length-4)
- }
- val argsFile = new File(log.getParentFile, testBase+".flags")
- val argString = if (argsFile.exists) {
- val fileReader = new FileReader(argsFile)
- val reader = new BufferedReader(fileReader)
- val options = updatePluginPath(reader.readLine())
- reader.close()
- options
- } else ""
+ val flagsFileName = "%s.flags" format (basename(log.getName) dropRight 4) // 4 is "-run" or similar
+ val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
val allOpts = fileManager.SCALAC_OPTS+" "+argString
- NestUI.verbose("scalac options: "+allOpts)
val args = (allOpts split "\\s").toList
- val command = new CompilerCommand(args, testSettings, x => {}, false)
+
+ NestUI.verbose("scalac options: "+allOpts)
+
+ val command = new CompilerCommand(args, testSettings)
val global = newGlobal(command.settings, logWriter)
val testRep: ExtConsoleReporter = global.reporter.asInstanceOf[ExtConsoleReporter]
- val test: TestFile = kind match {
- case "pos" => PosTestFile(files(0), fileManager, out.isEmpty)
- case "neg" => NegTestFile(files(0), fileManager, out.isEmpty)
- case "run" => RunTestFile(files(0), fileManager, out.isEmpty)
- case "jvm" => JvmTestFile(files(0), fileManager, out.isEmpty)
- case "shootout" => ShootoutTestFile(files(0), fileManager, out.isEmpty)
- case "scalap" => ScalapTestFile(files(0), fileManager, out.isEmpty)
- case "scalacheck" =>
- ScalaCheckTestFile(files(0), fileManager, out.isEmpty)
- }
- test.defineSettings(command.settings)
- out match {
- case Some(outDir) =>
- command.settings.outdir.value = outDir.getAbsolutePath
- command.settings.classpath.value = command.settings.classpath.value+
- File.pathSeparator+outDir.getAbsolutePath
- case None =>
- // do nothing
+ val testFileFn: (File, FileManager) => TestFile = kind match {
+ case "pos" => PosTestFile.apply
+ case "neg" => NegTestFile.apply
+ case "run" => RunTestFile.apply
+ case "jvm" => JvmTestFile.apply
+ case "shootout" => ShootoutTestFile.apply
+ case "scalap" => ScalapTestFile.apply
+ case "scalacheck" => ScalaCheckTestFile.apply
}
+ val test: TestFile = testFileFn(files.head, fileManager)
+ test.defineSettings(command.settings, out.isEmpty)
+ val toCompile = files map (_.getPath)
- val toCompile = files.map(_.getPath)
try {
NestUI.verbose("compiling "+toCompile)
- try {
- (new global.Run) compile toCompile
- } catch {
+ try new global.Run compile toCompile
+ catch {
case FatalError(msg) =>
testRep.error(null, "fatal error: " + msg)
}
+
testRep.printSummary
testRep.writer.flush
testRep.writer.close
- } catch {
+ }
+ catch {
case e =>
e.printStackTrace()
return false
- } finally {
- logWriter.close()
}
+ finally logWriter.close()
+
!testRep.hasErrors
}
}
-class ReflectiveCompiler(val fileManager: ConsoleFileManager) extends SimpleCompiler {
- import fileManager.{latestCompFile, latestPartestFile}
-
- val sepUrls = Array(latestCompFile.toURL, latestPartestFile.toURL)
- //NestUI.verbose("constructing URLClassLoader from URLs "+latestCompFile+" and "+latestPartestFile)
-
- val sepLoader = new java.net.URLClassLoader(sepUrls, null)
-
- val sepCompilerClass =
- sepLoader.loadClass("scala.tools.partest.nest.DirectCompiler")
- val sepCompiler = sepCompilerClass.newInstance()
-
- // needed for reflective invocation
- val fileClass = Class.forName("java.io.File")
- val stringClass = Class.forName("java.lang.String")
- val sepCompileMethod =
- sepCompilerClass.getMethod("compile", fileClass, stringClass)
- val sepCompileMethod2 =
- sepCompilerClass.getMethod("compile", fileClass, stringClass, fileClass)
-
- /* This method throws java.lang.reflect.InvocationTargetException
- * if the compiler crashes.
- * This exception is handled in the shouldCompile and shouldFailCompile
- * methods of class CompileManager.
- */
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
- val res = sepCompileMethod2.invoke(sepCompiler, out, files, kind, log).asInstanceOf[java.lang.Boolean]
- res.booleanValue()
- }
-}
+// class ReflectiveCompiler(val fileManager: ConsoleFileManager) extends SimpleCompiler {
+// import fileManager.{latestCompFile, latestPartestFile}
+//
+// val sepUrls = Array(latestCompFile.toURI.toURL, latestPartestFile.toURI.toURL)
+// //NestUI.verbose("constructing URLClassLoader from URLs "+latestCompFile+" and "+latestPartestFile)
+//
+// val sepLoader = new java.net.URLClassLoader(sepUrls, null)
+//
+// val sepCompilerClass =
+// sepLoader.loadClass("scala.tools.partest.nest.DirectCompiler")
+// val sepCompiler = sepCompilerClass.newInstance()
+//
+// // needed for reflective invocation
+// val fileClass = Class.forName("java.io.File")
+// val stringClass = Class.forName("java.lang.String")
+// val sepCompileMethod =
+// sepCompilerClass.getMethod("compile", fileClass, stringClass)
+// val sepCompileMethod2 =
+// sepCompilerClass.getMethod("compile", fileClass, stringClass, fileClass)
+//
+// /* This method throws java.lang.reflect.InvocationTargetException
+// * if the compiler crashes.
+// * This exception is handled in the shouldCompile and shouldFailCompile
+// * methods of class CompileManager.
+// */
+// def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
+// val res = sepCompileMethod2.invoke(sepCompiler, out, files, kind, log).asInstanceOf[java.lang.Boolean]
+// res.booleanValue()
+// }
+// }
class CompileManager(val fileManager: FileManager) {
var compiler: SimpleCompiler = new /*ReflectiveCompiler*/ DirectCompiler(fileManager)
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 33bcb11b70..58d16a3f45 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,24 +8,32 @@
package scala.tools.partest
package nest
-import java.io.{File, FilenameFilter, IOException, StringWriter}
+import java.io.{ File, FilenameFilter, IOException, StringWriter }
import java.net.URI
+import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd }
+import scala.tools.util.PathResolver
+import scala.tools.nsc.{ Settings }
+import scala.tools.nsc.{ io, util }
+import util.{ ClassPath }
+import io.{ Path, Directory }
+import File.pathSeparator
+import ClassPath.{ join }
+import PathResolver.{ Environment, Defaults }
+import RunnerUtils._
+
class ConsoleFileManager extends FileManager {
+ var testBuild: Option[String] = PartestDefaults.testBuild
+ def testBuildFile = testBuild map (testParent / _)
- var testBuild = System.getProperty("scalatest.build")
var testClasses: Option[String] = None
- val debug: Boolean =
- (System.getProperty("partest.debug", "false") equals "true") ||
- (System.getProperty("scalatest.debug", "false") equals "true")
-
def this(buildPath: String, rawClasses: Boolean) = {
this()
if (rawClasses)
testClasses = Some(buildPath)
else
- testBuild = buildPath
+ testBuild = Some(buildPath)
// re-run because initialization of default
// constructor must be updated
findLatest()
@@ -40,120 +48,59 @@ class ConsoleFileManager extends FileManager {
SCALAC_OPTS = SCALAC_OPTS+" "+moreOpts
}
- var CLASSPATH = System.getProperty("java.class.path", ".")
- NestUI.verbose("CLASSPATH: "+CLASSPATH)
+ lazy val srcDir = PathSettings.srcDir
+ lazy val testRootDir = PathSettings.testRoot
+ lazy val testRootPath = testRootDir.toAbsolute.path
+ def testParent = testRootDir.parent
- var JAVACMD = System.getProperty("scalatest.javacmd", "java")
- var JAVAC_CMD = System.getProperty("scalatest.javac_cmd", "javac")
+ var CLASSPATH = PartestDefaults.classPath
+ var JAVACMD = PartestDefaults.javaCmd
+ var JAVAC_CMD = PartestDefaults.javacCmd
- val prefixFile = {
- val cwd = System.getProperty("user.dir")
- if (cwd != null)
- (new File(cwd)).getCanonicalFile
- else
- error("user.dir property not set")
- }
- val PREFIX = prefixFile.getAbsolutePath
-
-/*
-if [ -d "$PREFIX/test" ]; then
- TESTROOT="$PREFIX/test";
-elif [ -d "$PREFIX/misc/scala-test" ]; then
- TESTROOT="$PREFIX/misc/scala-test";
-else
- abort "Test directory not found";
-*/
-
- val testRootFile = {
- val testRootProp = System.getProperty("scalatest.root")
- val testroot =
- if (testRootProp != null)
- new File(testRootProp)
- else {
- // case 1: cwd is `test`
- if (prefixFile.getName == "test" && (new File(prefixFile, "files")).exists)
- prefixFile
- else {
- // case 2: cwd is `test/..`
- val test = new File(prefixFile, "test")
- val scalaTest = new File(new File(prefixFile, "misc"), "scala-test")
- if (test.isDirectory)
- test
- else if (scalaTest.isDirectory)
- scalaTest
- else
- error("Test directory not found")
- }
- }
- testroot.getCanonicalFile
- }
- val TESTROOT = testRootFile.getAbsolutePath
-
- var srcDirName: String = ""
-
- val srcDir: File = {
- val srcDirProp = System.getProperty("partest.srcdir")
- val src =
- if (srcDirProp != null) {
- srcDirName = srcDirProp
- new File(testRootFile, srcDirName)
- } else {
- srcDirName = "files"
- new File(testRootFile, srcDirName)
- }
- if (src.isDirectory)
- src.getCanonicalFile
- else {
- val path = TESTROOT + File.separator + "files"
- NestUI.failure("Source directory \"" + path + "\" not found")
- exit(1)
- }
+
+ NestUI.verbose("CLASSPATH: "+CLASSPATH)
+
+ if (!srcDir.isDirectory) {
+ NestUI.failure("Source directory \"" + srcDir.path + "\" not found")
+ exit(1)
}
- LIB_DIR = (new File(testRootFile.getParentFile, "lib")).getCanonicalFile.getAbsolutePath
+ CLASSPATH = {
+ val libs = (srcDir / Directory("lib")).files filter (_ hasExtension "jar") map (_.normalize.path)
- CLASSPATH = CLASSPATH + File.pathSeparator + {
- val libs = new File(srcDir, "lib")
// add all jars in libs
- (libs.listFiles(new FilenameFilter {
- def accept(dir: File, name: String) = name endsWith ".jar"
- }) map {file => file.getCanonicalFile.getAbsolutePath}).mkString(""+File.pathSeparator)
+ (CLASSPATH :: libs.toList) mkString pathSeparator
}
def findLatest() {
- val testParent = testRootFile.getParentFile
NestUI.verbose("test parent: "+testParent)
- def prefixFileWith(parent: File, relPath: String): File =
- (new File(parent, relPath)).getCanonicalFile
-
- def prefixFile(relPath: String): File =
- prefixFileWith(testParent, relPath)
+ def prefixFileWith(parent: File, relPath: String) = (io.File(parent) / relPath).normalize
+ def prefixFile(relPath: String) = (testParent / relPath).normalize
if (!testClasses.isEmpty) {
- testClassesFile = (new File(testClasses.get)).getCanonicalFile
- NestUI.verbose("Running with classes in "+testClassesFile)
- latestFile = prefixFileWith(testClassesFile.getParentFile, "bin")
- latestLibFile = prefixFileWith(testClassesFile, "library")
- latestActFile = prefixFileWith(testClassesFile, "library")
- latestCompFile = prefixFileWith(testClassesFile, "compiler")
- latestPartestFile = prefixFileWith(testClassesFile, "partest")
- latestFjbgFile = prefixFile("lib/fjbg.jar")
+ testClassesDir = Path(testClasses.get).normalize.toDirectory
+ NestUI.verbose("Running with classes in "+testClassesDir)
+
+ latestFile = testClassesDir.parent / "bin"
+ latestLibFile = testClassesDir / "library"
+ latestCompFile = testClassesDir / "compiler"
+ latestPartestFile = testClassesDir / "partest"
+ latestFjbgFile = testParent / "lib" / "fjbg.jar"
}
- else if (testBuild != null) {
- testBuildFile = prefixFile(testBuild)
- NestUI.verbose("Running on "+testBuild)
- latestFile = prefixFile(testBuild+"/bin")
- latestLibFile = prefixFile(testBuild+"/lib/scala-library.jar")
- latestActFile = prefixFile(testBuild+"/lib/scala-library.jar")
- latestCompFile = prefixFile(testBuild+"/lib/scala-compiler.jar")
- latestPartestFile = prefixFile(testBuild+"/lib/scala-partest.jar")
- } else {
+ else if (testBuild.isDefined) {
+ val dir = Path(testBuild.get)
+ NestUI.verbose("Running on "+dir)
+ latestFile = dir / "bin"
+ latestLibFile = dir / "lib/scala-library.jar"
+ latestCompFile = dir / "lib/scala-compiler.jar"
+ latestPartestFile = dir / "lib/scala-partest.jar"
+ }
+ else {
def setupQuick() {
NestUI.verbose("Running build/quick")
latestFile = prefixFile("build/quick/bin")
latestLibFile = prefixFile("build/quick/classes/library")
- latestActFile = prefixFile("build/quick/classes/library")
latestCompFile = prefixFile("build/quick/classes/compiler")
latestPartestFile = prefixFile("build/quick/classes/partest")
}
@@ -163,7 +110,6 @@ else
val p = testParent.getParentFile
latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
- latestActFile = prefixFileWith(p, "lib/scala-library.jar")
latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
}
@@ -172,7 +118,6 @@ else
NestUI.verbose("Running dists/latest")
latestFile = prefixFile("dists/latest/bin")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
- latestActFile = prefixFile("dists/latest/lib/scala-library.jar")
latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
}
@@ -181,134 +126,65 @@ else
NestUI.verbose("Running build/pack")
latestFile = prefixFile("build/pack/bin")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
- latestActFile = prefixFile("build/pack/lib/scala-library.jar")
latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
- def max(a: Long, b: Long) = if (a > b) a else b
+ val dists = testParent / "dists"
+ val build = testParent / "build"
+ // in case of an installed dist, testRootDir is one level deeper
+ val bin = testParent.parent / "bin"
- val dists = new File(testParent, "dists")
- val build = new File(testParent, "build")
- // in case of an installed dist, testRootFile is one level deeper
- val bin = new File(testParent.getParentFile, "bin")
+ def mostRecentOf(base: String, names: String*) =
+ names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
// detect most recent build
- val quickTime =
- max(prefixFile("build/quick/classes/compiler/compiler.properties").lastModified,
- prefixFile("build/quick/classes/library/library.properties").lastModified)
- val packTime =
- max(prefixFile("build/pack/lib/scala-compiler.jar").lastModified,
- prefixFile("build/pack/lib/scala-library.jar").lastModified)
- val distTime =
- max(prefixFile("dists/latest/lib/scala-compiler.jar").lastModified,
- prefixFile("dists/latest/lib/scala-library.jar").lastModified)
- val instTime = {
- val p = testParent.getParentFile
- max(prefixFileWith(p, "lib/scala-compiler.jar").lastModified,
- prefixFileWith(p, "lib/scala-library.jar").lastModified)
- }
+ val quickTime = mostRecentOf("build/quick/classes", "compiler/compiler.properties", "library/library.properties")
+ val packTime = mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-library.jar")
+ val distTime = mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-library.jar")
+ val instTime = mostRecentOf("lib", "scala-compiler.jar", "scala-library.jar")
+
+ val pairs = Map(
+ (quickTime, () => setupQuick()),
+ (packTime, () => setupPack()),
+ (distTime, () => setupDist()),
+ (instTime, () => setupInst())
+ )
+
+ // run setup based on most recent time
+ pairs(pairs.keys max)()
- if (quickTime > packTime) { // pack ruled out
- if (quickTime > distTime) { // dist ruled out
- if (quickTime > instTime) // inst ruled out
- setupQuick()
- else
- setupInst()
- } else { // quick ruled out
- if (distTime > instTime) // inst ruled out
- setupDist()
- else
- setupInst()
- }
- } else { // quick ruled out
- if (packTime > distTime) { // dist ruled out
- if (packTime > instTime) // inst ruled out
- setupPack()
- else
- setupInst()
- } else { // pack ruled out
- if (distTime > instTime) // inst ruled out
- setupDist()
- else
- setupInst()
- }
- }
latestFjbgFile = prefixFile("lib/fjbg.jar")
}
- BIN_DIR = latestFile.getAbsolutePath
LATEST_LIB = latestLibFile.getAbsolutePath
- LATEST_COMP = latestCompFile.getAbsolutePath
- LATEST_PARTEST = latestPartestFile.getAbsolutePath
-
- // detect whether we are running on Windows
- val osName = System.getProperty("os.name")
- NestUI.verbose("OS: "+osName)
-
- val scalaCommand = if (osName startsWith "Windows")
- "scala.bat" else "scala"
- val scalacCommand = if (osName startsWith "Windows")
- "scalac.bat" else "scalac"
-
- SCALA = (new File(latestFile, scalaCommand)).getAbsolutePath
- SCALAC_CMD = (new File(latestFile, scalacCommand)).getAbsolutePath
}
- var BIN_DIR: String = ""
var LATEST_LIB: String = ""
- var LATEST_COMP: String = ""
- var LATEST_PARTEST: String = ""
- var SCALA: String = ""
- var SCALAC_CMD: String = ""
var latestFile: File = _
var latestLibFile: File = _
- var latestActFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
var latestFjbgFile: File = _
- var testBuildFile: File = _
- var testClassesFile: File = _
+ var testClassesDir: Directory = _
// initialize above fields
findLatest()
- var testFiles: List[File] = List()
-
- def getFiles(kind: String, doCheck: Boolean, filter: Option[(String, Boolean)]): List[File] = {
- val dir = new File(srcDir, kind)
- NestUI.verbose("look in "+dir+" for tests")
- val files = if (dir.isDirectory) {
- if (!testFiles.isEmpty) {
- val dirpath = dir.getAbsolutePath
- testFiles filter { _.getParentFile.getAbsolutePath == dirpath }
- } else if (doCheck) filter match {
- case Some((ending, enableDirs)) =>
- val filter = new FilenameFilter {
- def accept(dir: File, name: String) =
- name.endsWith(ending) ||
- (enableDirs && (name != ".svn") && (!name.endsWith(".obj")) &&
- (new File(dir, name)).isDirectory)
- }
- dir.listFiles(filter).toList
- case None =>
- val filter = new FilenameFilter {
- def accept(dir: File, name: String) = name != ".svn"
- }
- dir.listFiles(filter).toList
- } else // skip
- Nil
- } else {
- NestUI.failure("Directory \"" + dir.getPath + "\" not found")
- Nil
- }
- if (failed)
- files filter { logFileExists(_, kind) }
- else
- files
- }
+ var testFiles: List[io.Path] = Nil
+
+ def getFiles(kind: String, cond: Path => Boolean): List[File] = {
+ def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
- def getFiles(kind: String, doCheck: Boolean): List[File] =
- getFiles(kind, doCheck, Some((".scala", true)))
+ val dir = Directory(srcDir / kind)
+ if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir)
+ else NestUI.failure("Directory '%s' not found" format dir)
+
+ val files =
+ if (testFiles.nonEmpty) testFiles filter (_.parent isSame dir)
+ else dir.list filterNot ignoreDir filter cond toList
+
+ ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
+ }
}
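
Note: the hunk above replaces a nested if/else cascade with a timestamp comparison: each candidate build (quick, pack, dist, inst) gets the newest modification time of its key artifacts, and the freshest one wins. A minimal standalone sketch of the same idea, using only java.io.File and placeholder paths (not the partest code itself):

  import java.io.File

  object DetectLatestBuild {
    // newest modification time among the named artifacts under base
    // (File.lastModified returns 0L for missing files, so absent builds lose)
    def mostRecentOf(base: String, names: String*): Long =
      names.map(n => new File(base, n).lastModified).max

    def main(args: Array[String]): Unit = {
      val candidates = List(
        "quick" -> mostRecentOf("build/quick/classes", "compiler/compiler.properties", "library/library.properties"),
        "pack"  -> mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-library.jar"),
        "dist"  -> mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-library.jar")
      )
      // maxBy keeps each label paired with its timestamp
      val (name, time) = candidates.maxBy(_._2)
      println("most recent build: " + name + " (modified " + time + ")")
    }
  }

Using maxBy keeps the label attached to its timestamp; a Map keyed by timestamp, as in the patch, additionally relies on the four timestamps being distinct.
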
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index 1bce0fe792..33f575c0a0 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -11,179 +11,177 @@ package nest
import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
InputStreamReader, StringWriter, PrintWriter}
import utils.Properties._
+import RunnerUtils._
+import scala.tools.nsc.Properties.{ versionMsg, setProp }
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.io
+import io.{ Path, Process }
-class ConsoleRunner extends DirectRunner with RunnerUtils {
+class ConsoleRunner extends DirectRunner {
+ import PathSettings.{ srcDir, testRoot }
- case class TestSet(loc: String,
- filter: Option[(String, Boolean)],
- kind: String,
- msg: String)
+ case class TestSet(kind: String, filter: Path => Boolean, msg: String)
val testSets = {
- val fileFilter = Some((".scala", true))
+ val pathFilter: Path => Boolean = _ hasExtension "scala"
+
List(
- TestSet("pos", fileFilter, "pos",
- "Testing compiler (on files whose compilation should succeed)"),
- TestSet("neg", fileFilter, "neg",
- "Testing compiler (on files whose compilation should fail)"),
- TestSet("run", fileFilter, "run", "Testing JVM backend"),
- TestSet("jvm", fileFilter, "jvm", "Testing JVM backend"),
- TestSet("res", Some((".res", false)), "res",
- "Testing resident compiler"),
- TestSet("shootout", fileFilter, "shootout", "Testing shootout tests"),
- TestSet("script", fileFilter, "script", "Testing script tests"),
- TestSet("scalacheck", fileFilter, "scalacheck", "Testing ScalaCheck tests"))
+ TestSet("pos", pathFilter, "Testing compiler (on files whose compilation should succeed)"),
+ TestSet("neg", pathFilter, "Testing compiler (on files whose compilation should fail)"),
+ TestSet("run", pathFilter, "Testing JVM backend"),
+ TestSet("jvm", pathFilter, "Testing JVM backend"),
+ TestSet("res", x => x.isFile && (x hasExtension "res"), "Testing resident compiler"),
+ TestSet("buildmanager", _.isDirectory, "Testing Build Manager"),
+ TestSet("shootout", pathFilter, "Testing shootout tests"),
+ TestSet("script", pathFilter, "Testing script tests"),
+ TestSet("scalacheck", pathFilter, "Testing ScalaCheck tests"),
+ TestSet("scalap", _.isDirectory, "Run scalap decompiler tests")
+ )
}
var fileManager: ConsoleFileManager = _
- private val isJava5 = javaVersion matches "1.[5|6|7].*"
- private var runAll = false
private var testFiles: List[File] = List()
- private val errors =
- Integer.parseInt(System.getProperty("scalatest.errors", "0"))
+ private val errors = PartestDefaults.errorCount
+ private val testSetKinds = testSets map (_.kind)
+ private val testSetArgs = testSets map ("--" + _.kind)
+ private val testSetArgMap = testSetArgs zip testSets toMap
- def denotesTestSet(arg: String) =
- testSets exists { set => arg == "--" + set.loc }
+ def denotesTestSet(arg: String) = testSetArgs contains arg
+ def denotesTestFile(arg: String) = (arg endsWith ".scala") || (arg endsWith ".res")
+ def denotesTestDir(arg: String) = Path(arg).isDirectory
+ def denotesTestPath(arg: String) = denotesTestDir(arg) || denotesTestFile(arg)
- def denotesTestFile(arg: String) =
- arg.endsWith(".scala") || arg.endsWith(".res")
+ private def printVersion { NestUI outline (versionMsg + "\n") }
- def denotesTestDir(arg: String) =
- (new File(arg)).isDirectory
+ private val unaryArgs = List(
+ "--pack", "--all", "--verbose", "--show-diff", "--show-log",
+ "--failed", "--version", "--ansi", "--debug"
+ ) ::: testSetArgs
- private def printVersion { NestUI outline (versionMsg + "\n") }
+ private val binaryArgs = List(
+ "--grep", "--srcpath", "--buildpath", "--classpath"
+ )
def main(argstr: String) {
- // tokenize args. filter: "".split("\\s") yields Array("")
- var args = (argstr split "\\s").toList.filterNot(_ == "")
-
- if (args.length == 0)
- NestUI.usage()
- else {
- // find out which build to test
- val (buildPath, args1) = searchAndRemovePath("--buildpath", args)
- val (classPath, args2) = searchAndRemovePath("--classpath", args1)
- val (srcPath, args3) = searchAndRemovePath("--srcpath", args2)
- args = args3
-
- if (!srcPath.isEmpty)
- System.setProperty("partest.srcdir", srcPath.get)
-
- fileManager =
- if (!buildPath.isEmpty)
- new ConsoleFileManager(buildPath.get)
- else if (!classPath.isEmpty)
- new ConsoleFileManager(classPath.get, true)
- else if (args contains "--pack") {
- args = args.filterNot(_ == "--pack") // will create a result file '--pack' otherwise
- new ConsoleFileManager("build/pack")
- } else // auto detection, see ConsoleFileManager.findLatest
- new ConsoleFileManager
-
- if (!args.exists(denotesTestSet(_)) &&
- !args.exists(denotesTestFile(_)) &&
- !args.exists(denotesTestDir(_)))
- runAll = true
-
- var enabled = List[TestSet]()
- var readTimeout = false
- for (arg <- args) {
- (testSets find { set => arg == "--" + set.loc }) match {
- case Some(set) => enabled = set :: enabled
- case None => arg match {
- case "--all" => runAll = true
- case "--verbose" => NestUI._verbose = true
- case "--show-diff" => fileManager.showDiff = true
- case "--show-log" => fileManager.showLog = true
- case "--failed" => fileManager.failed = true
- case "--version" => printVersion; return
- case "--ansi" => NestUI.initialize(NestUI.MANY)
- case "--timeout" => readTimeout = true
- case s: String if readTimeout =>
- fileManager.timeout = s
- readTimeout = false
- case _ =>
- if (denotesTestFile(arg) || denotesTestDir(arg)) {
- val file = new File(arg)
- if (file.exists) {
- NestUI.verbose("adding test file "+file)
- testFiles = file :: testFiles
- } else {
- NestUI.failure("File \"" + arg + "\" not found\n")
- System.exit(1)
- }
- } else {
- NestUI.failure("Invalid option \""+arg+"\"\n")
- NestUI.usage()
- }
- }
- }
+ val parsed = CommandLineParser(argstr) withUnaryArgs unaryArgs withBinaryArgs binaryArgs
+ val args = parsed.residualArgs
+
+ /** Early return on no args, version, or invalid args */
+ if (argstr == "") return NestUI.usage()
+ if (parsed isSet "--version") return printVersion
+ if (args exists (x => !denotesTestPath(x))) {
+ val invalid = (args filterNot denotesTestPath).head
+ NestUI.failure("Invalid argument '%s'\n" format invalid)
+ return NestUI.usage()
+ }
+
+ parsed get "--srcpath" foreach (x => setProp("partest.srcdir", x))
+
+ fileManager =
+ if (parsed isSet "--buildpath") new ConsoleFileManager(parsed("--buildpath"))
+ else if (parsed isSet "--classpath") new ConsoleFileManager(parsed("--classpath"), true)
+ else if (parsed isSet "--pack") new ConsoleFileManager("build/pack")
+ else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
+
+ def argNarrowsTests(x: String) = denotesTestSet(x) || denotesTestFile(x) || denotesTestDir(x)
+
+ NestUI._verbose = parsed isSet "--verbose"
+ fileManager.showDiff = parsed isSet "--show-diff"
+ fileManager.showLog = parsed isSet "--show-log"
+ fileManager.failed = parsed isSet "--failed"
+
+ if (parsed isSet "--ansi") NestUI initialize NestUI.MANY
+ if (parsed isSet "--timeout") fileManager.timeout = parsed("--timeout")
+ if (parsed isSet "--debug") setProp("partest.debug", "true")
+
+ def addTestFile(file: File) = {
+ if (!file.exists)
+ NestUI.failure("Test file '%s' not found, skipping.\n" format file)
+ else {
+ NestUI.verbose("adding test file " + file)
+ testFiles +:= file
}
- NestUI.verbose("enabled test sets: "+enabled)
- NestUI.verbose("runAll: "+runAll)
-
- val dir =
- if (!fileManager.testClasses.isEmpty)
- fileManager.testClassesFile
- else if (fileManager.testBuild != null)
- fileManager.testBuildFile
- else
- fileManager.latestCompFile.getParentFile.getParentFile.getCanonicalFile
- NestUI.outline("Scala compiler classes in: "+dir+"\n")
-
- NestUI.outline("Scala version is: "+scala.tools.nsc.Properties.versionMsg+"\n")
- NestUI.outline("Scalac options are: "+fileManager.SCALAC_OPTS+"\n")
-
- val vmBin = javaHome + File.separator + "bin"
- val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
- val vmOpts = fileManager.JAVA_OPTS
-
- NestUI.outline("Java binaries in: "+vmBin+"\n")
- NestUI.outline("Java runtime is: "+vmName+"\n")
- NestUI.outline("Java options are: "+vmOpts+"\n")
- NestUI.outline("Source directory is: "+fileManager.srcDir.getAbsolutePath+"\n")
-
- val start = System.currentTimeMillis
-
- val (successes, failures) = testCheckAll(enabled)
-
- val end = System.currentTimeMillis
- val total = successes + failures
-
- val elapsedSecs = (end - start)/1000
- val elapsedMins = elapsedSecs/60
- val elapsedHrs = elapsedMins/60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
- val dispElapsed = {
- def form(num: Long) = if (num < 10) "0"+num else ""+num
- form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs)
+ }
+
+ // If --grep is given, pull in every file whose source contains the expression.
+ parsed get "--grep" foreach { expr =>
+ val allFiles = srcDir.deepList() filter (_ hasExtension "scala") map (_.toFile) toList
+ val files = allFiles filter (_.slurp() contains expr)
+
+ if (files.isEmpty) NestUI.failure("--grep string '%s' matched no files." format expr)
+ else NestUI.verbose("--grep string '%s' matched %d file(s)".format(expr, files.size))
+
+ files foreach (x => addTestFile(x.jfile))
+ }
+ args foreach (x => addTestFile(new File(x)))
+
+ // If no file arguments were given, we assume --all
+ val enabledTestSets: List[TestSet] = {
+ val enabledArgs = testSetArgs filter parsed.isSet
+
+ if (args.isEmpty && !(parsed isSet "--grep") && (enabledArgs.isEmpty || (parsed isSet "--all"))) testSets
+ else enabledArgs map testSetArgMap
+ }
+
+ val dir =
+ if (fileManager.testClasses.isDefined) fileManager.testClassesDir
+ else fileManager.testBuildFile getOrElse {
+ fileManager.latestCompFile.getParentFile.getParentFile.getCanonicalFile
}
- println
- if (failures == 0)
- NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n")
- else
- NestUI.failure(failures+" of "+total+" tests failed (elapsed time: "+dispElapsed+")\n")
+ val vmBin = javaHome + File.separator + "bin"
+ val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
+ val vmOpts = fileManager.JAVA_OPTS
- if (failures == errors)
- System.exit(0)
- else
- System.exit(1)
+ NestUI.verbose("enabled test sets: " + (enabledTestSets map (_.kind) mkString " "))
+
+ List(
+ "Scala compiler classes in: " + dir,
+ "Scala version is: " + versionMsg,
+ "Scalac options are: " + fileManager.SCALAC_OPTS,
+ "Java binaries in: " + vmBin,
+ "Java runtime is: " + vmName,
+ "Java options are: " + vmOpts,
+ "Source directory is: " + srcDir,
+ ""
+ ) foreach (x => NestUI outline (x + "\n"))
+
+ val start = System.currentTimeMillis
+ val (successes, failures) = testCheckAll(enabledTestSets)
+ val end = System.currentTimeMillis
+
+ val total = successes + failures
+
+ val elapsedSecs = (end - start)/1000
+ val elapsedMins = elapsedSecs/60
+ val elapsedHrs = elapsedMins/60
+ val dispMins = elapsedMins - elapsedHrs * 60
+ val dispSecs = elapsedSecs - elapsedMins * 60
+
+ val dispElapsed = {
+ def form(num: Long) = if (num < 10) "0"+num else ""+num
+ form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs)
}
+
+ println
+ if (failures == 0)
+ NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n")
+ else
+ NestUI.failure(failures+" of "+total+" tests failed (elapsed time: "+dispElapsed+")\n")
+
+ System exit ( if (failures == errors) 0 else 1 )
}
def runTests(testSet: TestSet): (Int, Int) = {
- val TestSet(loc, filter, kind, msg) = testSet
- val files = fileManager.getFiles(loc, true, filter)
- if (!files.isEmpty) {
- NestUI.verbose("test files: "+files)
- NestUI.outline("\n"+msg+"\n")
- runTestsForFiles(files, kind)
- } else {
- NestUI.verbose("test dir empty\n")
- (0, 0)
+ val TestSet(kind, filter, msg) = testSet
+
+ fileManager.getFiles(kind, filter) match {
+ case Nil => NestUI.verbose("test dir empty\n") ; (0, 0)
+ case files =>
+ NestUI.verbose("test files: "+files)
+ NestUI.outline("\n"+msg+"\n")
+ resultsToStatistics(runTestsForFiles(files, kind))
}
}
@@ -191,46 +189,20 @@ class ConsoleRunner extends DirectRunner with RunnerUtils {
* @return (success count, failure count)
*/
def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = {
- def runTestsFiles = if (!testFiles.isEmpty) {
- def absName(f: File): String = f.getAbsoluteFile.getCanonicalPath
-
- def kindOf(f: File): String = {
- val firstName = absName(f)
- val len = fileManager.srcDirName.length
- val filesPos = firstName.indexOf(fileManager.srcDirName)
- if (filesPos == -1) {
- NestUI.failure("invalid test file: "+firstName+"\n")
- Predef.exit(1)
- } else {
- val short = firstName.substring(filesPos+len+1, filesPos+len+1+3)
- val shortKinds = List("pos", "neg", "run", "jvm", "res")
- if (shortKinds contains short) short
- else short match {
- case "sho" => "shootout"
- case "scr" => "script"
- case "sca" => "scalacheck"
- }
- }
- }
+ def kindOf(f: File) = (srcDir relativize Path(f).normalize).segments.head
+
+ val (valid, invalid) = testFiles partition (x => testSetKinds contains kindOf(x))
+ invalid foreach (x => NestUI.failure("Invalid test file '%s', skipping.\n" format x))
- val fstKind = kindOf(testFiles.head)
- NestUI.verbose("all test files expected to have kind "+fstKind)
- if (!testFiles.forall(kindOf(_) equals fstKind)) {
- NestUI.failure("test files have different kinds\n")
- Predef.exit(1)
- } else {
+ val runTestsFileLists =
+ for ((kind, files) <- valid groupBy kindOf toList) yield {
NestUI.outline("\nTesting individual files\n")
- runTestsForFiles(testFiles, fstKind)
+ resultsToStatistics(runTestsForFiles(files, kind))
}
- } else (0, 0)
- val runSets =
- if (runAll) testSets // run all test sets
- else enabledSets
- NestUI.verbose("run sets: "+runSets)
+ NestUI.verbose("Run sets: "+enabledSets)
+ val results = runTestsFileLists ::: (enabledSets map runTests)
- val results = List(runTestsFiles) ::: (runSets map runTests)
- results reduceLeft { (p: (Int, Int), q: (Int, Int)) =>
- (p._1+q._1, p._2+q._2) }
+ (results map (_._1) sum, results map (_._2) sum)
}
}
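
Note: when individual test files are passed on the command line, the rewritten ConsoleRunner derives each file's kind from its first path segment under the source directory, groups the files by kind, and sums the per-kind (successes, failures) pairs. A self-contained sketch of that grouping and folding, with invented paths and placeholder results:

  object GroupByKind {
    // kind = first path segment below the source root, e.g. "pos", "run"
    def kindOf(srcDir: String, path: String): String =
      path.stripPrefix(srcDir).stripPrefix("/").split('/').head

    def main(args: Array[String]): Unit = {
      val srcDir = "test/files"
      val files  = List("test/files/pos/A.scala", "test/files/pos/B.scala", "test/files/run/C.scala")
      val byKind = files.groupBy(f => kindOf(srcDir, f))

      // pretend each kind produced a (successes, failures) pair, then fold them
      val perKind = byKind.toList.map { case (kind, fs) => (fs.size, 0) }
      val totals  = (perKind.map(_._1).sum, perKind.map(_._2).sum)
      println("kinds: " + byKind.keys.mkString(", ") + "  totals: " + totals)
    }
  }
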
diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/nest/Diff.java
index abd09d0293..f69fc6858b 100644
--- a/src/partest/scala/tools/partest/nest/Diff.java
+++ b/src/partest/scala/tools/partest/nest/Diff.java
@@ -1,4 +1,3 @@
-// $Id$
package scala.tools.partest.nest;
@@ -50,7 +49,7 @@ public class Diff {
an edit script, if desired.
*/
public Diff(Object[] a,Object[] b) {
- Hashtable h = new Hashtable(a.length + b.length);
+ Hashtable<Object, Integer> h = new Hashtable<Object, Integer>(a.length + b.length);
filevec[0] = new file_data(a,h);
filevec[1] = new file_data(b,h);
}
@@ -745,7 +744,7 @@ public class Diff {
nondiscarded_lines = j;
}
- file_data(Object[] data,Hashtable h) {
+ file_data(Object[] data, Hashtable<Object, Integer> h) {
buffered_lines = data.length;
equivs = new int[buffered_lines];
@@ -753,9 +752,9 @@ public class Diff {
realindexes = new int[buffered_lines];
for (int i = 0; i < data.length; ++i) {
- Integer ir = (Integer)h.get(data[i]);
+ Integer ir = h.get(data[i]);
if (ir == null)
- h.put(data[i],new Integer(equivs[i] = equiv_max++));
+ h.put(data[i], new Integer(equivs[i] = equiv_max++));
else
equivs[i] = ir.intValue();
}
diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/nest/DiffPrint.java
index 494bc06e4a..31f9a1bc79 100644
--- a/src/partest/scala/tools/partest/nest/DiffPrint.java
+++ b/src/partest/scala/tools/partest/nest/DiffPrint.java
@@ -1,4 +1,3 @@
-// $Id$
package scala.tools.partest.nest;
@@ -506,7 +505,7 @@ public class DiffPrint {
*/
static String[] slurp(String file) throws IOException {
BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector s = new Vector();
+ Vector<String> s = new Vector<String>();
for (;;) {
String line = rdr.readLine();
if (line == null) break;
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index c087b58c69..f774320f4e 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -11,6 +11,7 @@ package nest
import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
InputStreamReader, StringWriter, PrintWriter}
import java.util.StringTokenizer
+import scala.util.Properties.{ setProp }
import scala.tools.nsc.io.Directory
import scala.actors.Actor._
@@ -20,24 +21,17 @@ trait DirectRunner {
def fileManager: FileManager
- private val numActors = Integer.parseInt(System.getProperty("scalatest.actors", "8"))
+ import PartestDefaults.numActors
- if ((System.getProperty("partest.debug", "false") equals "true") ||
- (System.getProperty("scalatest.debug", "false") equals "true"))
+ if (isPartestDebug)
scala.actors.Debug.level = 3
- private val coreProp = try {
- System.getProperty("actors.corePoolSize")
- } catch {
- case ace: java.security.AccessControlException =>
- null
- }
- if (coreProp == null) {
+ if (PartestDefaults.poolSize.isEmpty) {
scala.actors.Debug.info("actors.corePoolSize not defined")
- System.setProperty("actors.corePoolSize", "16")
+ setProp("actors.corePoolSize", "16")
}
- def runTestsForFiles(kindFiles: List[File], kind: String): (Int, Int) = {
+ def runTestsForFiles(kindFiles: List[File], kind: String): scala.collection.immutable.Map[String, Int] = {
val len = kindFiles.length
val (testsEach, lastFrag) = (len/numActors, len%numActors)
val last = numActors-1
@@ -51,28 +45,34 @@ trait DirectRunner {
worker ! RunTests(kind, toTest)
worker
}
- var succs = 0; var fails = 0
+
var logsToDelete: List[File] = List()
var outdirsToDelete: List[File] = List()
+ var results = new scala.collection.immutable.HashMap[String, Int]
workers foreach { w =>
receiveWithin(3600 * 1000) {
- case Results(s, f, logs, outdirs) =>
- logsToDelete = logsToDelete ::: logs.filter(_.toDelete)
- outdirsToDelete = outdirsToDelete ::: outdirs
- succs += s
- fails += f
+ case Results(res, logs, outdirs) =>
+ logsToDelete :::= logs filter (_.toDelete)
+ outdirsToDelete :::= outdirs
+ results ++= res
case TIMEOUT =>
// add at least one failure
NestUI.verbose("worker timed out; adding failed test")
- fails += 1
+ results += ("worker timed out; adding failed test" -> 2)
}
}
- for (x <- logsToDelete ::: outdirsToDelete) {
- NestUI.verbose("deleting "+x)
- Directory(x).deleteRecursively()
+
+ if (isPartestDebug)
+ fileManager.showTestTimings()
+
+ if (!isPartestDebug) {
+ for (x <- logsToDelete ::: outdirsToDelete) {
+ NestUI.verbose("deleting "+x)
+ Directory(x).deleteRecursively()
+ }
}
- (succs, fails)
+ results
}
}
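
Note: DirectRunner now collects a map of test name to status code from each worker and merges those maps, instead of keeping two running counters. A sketch of that aggregation without the actor machinery; the status codes here are illustrative (the patch records 2 for a timed-out worker):

  object MergeResults {
    def main(args: Array[String]): Unit = {
      // per-worker maps of test name -> status (0 = passed, anything else = failed)
      val worker1 = Map("pos/A.scala" -> 0, "run/B.scala" -> 1)
      val worker2 = Map("jvm/C.scala" -> 0)

      val merged    = worker1 ++ worker2           // later entries win on clashes
      val failures  = merged.values.count(_ != 0)
      val successes = merged.size - failures
      println(successes + " succeeded, " + failures + " failed")
    }
  }
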
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 238681bfc6..bdbb34b3c4 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,19 +8,14 @@
package scala.tools.partest
package nest
-import java.io.{File, FilenameFilter, IOException, StringWriter}
+import java.io.{File, FilenameFilter, IOException, StringWriter,
+ FileInputStream, FileOutputStream, BufferedReader,
+ FileReader, PrintWriter, FileWriter}
import java.net.URI
-import scala.tools.nsc.io.Directory
+import scala.tools.nsc.io.{ Path, Directory }
+import scala.collection.mutable.HashMap
trait FileManager {
-
- def basename(name: String): String = {
- val inx = name.lastIndexOf(".")
- if (inx < 0) name else name.substring(0, inx)
- }
-
- def deleteRecursive(dir: File) { Directory(dir).deleteRecursively() }
-
/**
* Compares two files using a Java implementation of the GNU diff
* available at http://www.bmsi.com/java/#diff.
@@ -30,38 +25,38 @@ trait FileManager {
* @return the text difference between the compared files
*/
def compareFiles(f1: File, f2: File): String = {
- var res = ""
- try {
- val diffWriter = new StringWriter
- val args = Array(f1.getCanonicalPath(), f2.getCanonicalPath())
- DiffPrint.doDiff(args, diffWriter)
- res = diffWriter.toString
- if (res startsWith "No")
- res = ""
- } catch {
- case e: IOException =>
- e.printStackTrace()
- }
- res
+ val diffWriter = new StringWriter
+ val args = Array(f1.getCanonicalPath(), f2.getCanonicalPath())
+
+ DiffPrint.doDiff(args, diffWriter)
+ val res = diffWriter.toString
+ if (res startsWith "No") "" else res
}
+ def testRootDir: Directory
+ def testRootPath: String
+
var JAVACMD: String
var JAVAC_CMD: String
var CLASSPATH: String
var LATEST_LIB: String
- var LIB_DIR: String = ""
-
- val TESTROOT: String
var showDiff = false
var showLog = false
var failed = false
- var SCALAC_OPTS = System.getProperty("scalatest.scalac_opts", "-deprecation")
- var JAVA_OPTS = System.getProperty("scalatest.java_opts", "")
+ var SCALAC_OPTS = PartestDefaults.scalacOpts
+ var JAVA_OPTS = PartestDefaults.javaOpts
+ var timeout = PartestDefaults.timeout
- var timeout = "1200000"
+ /** Only when --debug is given. */
+ lazy val testTimings = new HashMap[String, Long]
+ def recordTestTiming(name: String, milliseconds: Long) =
+ synchronized { testTimings(name) = milliseconds }
+ def showTestTimings() {
+ testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) }
+ }
def getLogFile(dir: File, fileBase: String, kind: String): LogFile =
new LogFile(dir, fileBase + "-" + kind + ".log")
@@ -72,8 +67,44 @@ trait FileManager {
getLogFile(dir, fileBase, kind)
}
- def logFileExists(file: File, kind: String): Boolean = {
- val logFile = getLogFile(file, kind)
- logFile.exists && logFile.canRead
+ def logFileExists(file: File, kind: String) =
+ getLogFile(file, kind).canRead
+
+ def overwriteFileWith(dest: File, file: File) =
+ dest.isFile && copyFile(file, dest)
+
+
+ def copyFile(from: File, dest: File): Boolean = {
+ def copyFile0(from: File, to: File): Boolean =
+ try {
+ val appender = StreamAppender(from, to)
+ appender.run()
+ appender.closeAll()
+ true
+ } catch {
+ case _: IOException => false
+ }
+
+ if (from.isDirectory) {
+ assert(dest.isDirectory, "cannot copy directory to file")
+ val subDir:Directory = Path(dest) / Directory(from.getName)
+ subDir.createDirectory()
+ from.listFiles.toList.forall(copyFile(_, subDir))
+ } else
+ copyFile0(from, if (dest.isDirectory) new File(dest, from.getName) else dest)
+ }
+
+ def mapFile(file: File, suffix: String, dir: File, replace: String => String) {
+ val tmpFile = File.createTempFile("tmp", suffix, dir) // prefix required by API
+
+ val appender = StreamAppender(file, tmpFile)
+ appender.runAndMap(replace)
+ appender.closeAll()
+
+ val appender2 = StreamAppender(tmpFile, file)
+ appender2.run()
+ appender2.closeAll()
+
+ tmpFile.delete()
}
}
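
Note: the new timing support in FileManager is a synchronized mutable map written by the workers and dumped slowest-first when --debug is active. A compact sketch of the same bookkeeping with placeholder test names:

  import scala.collection.mutable.HashMap

  object Timings {
    private val timings = new HashMap[String, Long]

    // workers call this concurrently, hence the lock
    def record(name: String, millis: Long): Unit =
      synchronized { timings(name) = millis }

    // slowest tests first
    def show(): Unit =
      timings.toList.sortBy(x => -x._2) foreach { case (name, ms) => println(name + ": " + ms + " ms") }

    def main(args: Array[String]): Unit = {
      record("pos/A.scala", 120)
      record("run/B.scala", 940)
      show()
    }
  }
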
diff --git a/src/partest/scala/tools/partest/nest/NestRunner.scala b/src/partest/scala/tools/partest/nest/NestRunner.scala
index 9275c1c668..158521875e 100644
--- a/src/partest/scala/tools/partest/nest/NestRunner.scala
+++ b/src/partest/scala/tools/partest/nest/NestRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
index 115582f7b9..efff4e8375 100644
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ b/src/partest/scala/tools/partest/nest/NestUI.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -70,14 +70,17 @@ object NestUI {
println(" <options>:")
println
println(" Test categories:")
- println(" --all run all tests")
- println(" --pos run compilation tests (success)")
- println(" --neg run compilation tests (failure)")
- println(" --run run interpreter and backend tests")
- println(" --jvm run JVM backend tests")
- println(" --res run resident compiler tests")
- println(" --script run script runner tests")
- println(" --shootout run shootout tests")
+ println(" --all run all tests")
+ println(" --pos run compilation tests (success)")
+ println(" --neg run compilation tests (failure)")
+ println(" --run run interpreter and backend tests")
+ println(" --jvm run JVM backend tests")
+ println(" --res run resident compiler tests")
+ println(" --buildmanager run Build Manager tests")
+ println(" --scalacheck run ScalaCheck tests")
+ println(" --script run script runner tests")
+ println(" --shootout run shootout tests")
+ println(" --grep <expr> run all tests whose source file contains <expr>")
println
println(" Other options:")
println(" --pack pick compiler/library in build/pack, and run all tests")
@@ -90,6 +93,7 @@ object NestUI {
println(" --classpath set (absolute) path to build classes")
println(" --srcpath set (relative) path to test source files")
println(" ex.: --srcpath pending")
+ println(" --debug enable debugging output")
println
println(utils.Properties.versionString)
println("maintained by Philipp Haller (EPFL)")
@@ -97,6 +101,7 @@ object NestUI {
}
var _verbose = false
+ var _debug = false
def verbose(msg: String) {
if (_verbose) {
@@ -104,5 +109,10 @@ object NestUI {
println(msg)
}
}
-
+ def debug(msg: String) {
+ if (isPartestDebug) {
+ outline("debug: ")
+ println(msg)
+ }
+ }
}
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
new file mode 100644
index 0000000000..41bba5782e
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -0,0 +1,41 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools.partest
+package nest
+
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty, propOrNone, propOrElse }
+import scala.tools.nsc.util.ClassPath
+import scala.tools.nsc.io
+import io.{ Path, File, Directory }
+import RunnerUtils._
+import java.net.URLClassLoader
+
+object PathSettings {
+ import PartestDefaults.{ testRootDir, srcDirName }
+
+ private def cwd = Directory.Current getOrElse error("user.dir property not set")
+ private def isPartestDir(d: Directory) = (d.name == "test") && (d / srcDirName isDirectory)
+
+ // Directory <root>/test
+ lazy val testRoot: Directory = testRootDir getOrElse {
+ val candidates: List[Directory] = (cwd :: cwd.parents) flatMap (d => List(d, Directory(d / "test")))
+
+ candidates find isPartestDir getOrElse error("Directory 'test' not found.")
+ }
+
+ // Directory <root>/test/files
+ lazy val srcDir = Directory(testRoot / srcDirName normalize)
+
+ // Directory <root>/test/files/lib
+ lazy val srcLibDir = Directory(srcDir / "lib")
+
+ lazy val scalaCheck = srcLibDir.files find (_.name startsWith "scalacheck") getOrElse {
+ error("No scalacheck jar found in '%s'" format srcLibDir)
+ }
+}
+
+class PathSettings() {
+ // def classpathAsURLs: List[URL]
+}
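
Note: PathSettings locates the test root by walking from the current working directory upward and probing each directory, and its "test" child, for the expected source subdirectory. A rough equivalent using only java.io.File; the real code uses the compiler's io library and the configured srcDirName, assumed here to be "files":

  import java.io.File

  object FindTestRoot {
    // all ancestors of d, nearest first
    def parents(d: File): List[File] =
      Option(d.getParentFile).map(p => p :: parents(p)).getOrElse(Nil)

    def isTestRoot(d: File) =
      d.getName == "test" && new File(d, "files").isDirectory

    def findTestRoot(cwd: File): Option[File] = {
      val candidates = (cwd :: parents(cwd)).flatMap(d => List(d, new File(d, "test")))
      candidates.find(isTestRoot)
    }

    def main(args: Array[String]): Unit =
      println(findTestRoot(new File(".").getCanonicalFile))
  }
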
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 58aba34f87..b3f199a3d6 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,6 +8,13 @@
package scala.tools.partest
package nest
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
+import scala.tools.nsc.util.ClassPath
+import scala.tools.nsc.io
+import io.Path
+import RunnerUtils._
+import java.net.URLClassLoader
+
/* This class is used to load an instance of DirectRunner using
* a custom class loader.
* The purpose is to "auto-detect" a good classpath for the
@@ -15,20 +22,18 @@ package nest
* the main NestRunner can be started merely by putting its
* class on the classpath (ideally).
*/
-class ReflectiveRunner extends RunnerUtils {
+class ReflectiveRunner {
// TODO: we might also use fileManager.CLASSPATH
// to use the same classes as used by `scala` that
// was used to start the runner.
-
- import java.net.URLClassLoader
- import java.io.File.pathSeparator
- import utils.Properties.{ sysprop, syspropset }
-
val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
def main(args: String) {
val argList = (args.split("\\s")).toList
+ if (isPartestDebug)
+ showAllJVMInfo
+
// find out which build to test
val buildPath = searchPath("--buildpath", argList)
val classPath = searchPath("--classpath", argList)
@@ -43,25 +48,28 @@ class ReflectiveRunner extends RunnerUtils {
new ConsoleFileManager
import fileManager.
- { latestCompFile, latestLibFile, latestActFile, latestPartestFile, latestFjbgFile }
+ { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile }
val files =
- Array(latestCompFile, latestLibFile, latestActFile, latestPartestFile, latestFjbgFile)
+ Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile) map (x => io.File(x))
- val sepUrls = files map { _.toURL }
+ val sepUrls = files map (_.toURL)
val sepLoader = new URLClassLoader(sepUrls, null)
- if (fileManager.debug)
+ if (isPartestDebug)
println("Loading classes from:\n" + sepUrls.mkString("\n"))
- val paths = (if (classPath.isEmpty) files.slice(0, 4) else files) map { _.getPath }
- val newClasspath = paths mkString pathSeparator
+ val paths = classPath match {
+ case Some(cp) => Nil
+ case _ => files.toList map (_.path)
+ }
+ val newClasspath = ClassPath.join(paths: _*)
- syspropset("java.class.path", newClasspath)
- syspropset("scala.home", "")
+ setProp("java.class.path", newClasspath)
+ setProp("scala.home", "")
- if (fileManager.debug)
+ if (isPartestDebug)
for (prop <- List("java.class.path", "sun.boot.class.path", "java.ext.dirs"))
- println(prop + ": " + sysprop(prop))
+ println(prop + ": " + propOrEmpty(prop))
try {
val sepRunnerClass = sepLoader loadClass sepRunnerClassName
diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
index 2743da7bc5..24445bb545 100644
--- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,35 +8,22 @@
package scala.tools.partest
package nest
-trait RunnerUtils {
+object RunnerUtils {
+ def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList
- def searchPath(option: String, as: List[String]): Option[String] = {
- val Option = option
- as match {
- case Option :: r :: rs => Some(r)
- case other :: rest => searchPath(option, rest)
- case List() => None
- }
+ def searchPath(option: String, as: List[String]): Option[String] = as match {
+ case `option` :: r :: _ => Some(r)
+ case _ :: rest => searchPath(option, rest)
+ case Nil => None
}
- def searchAndRemovePath(option: String, as: List[String]): (Option[String], List[String]) = {
- val Option = option
- def search(before: List[String], after: List[String]): (Option[String], List[String]) = after match {
- case Option :: r :: rs => (Some(r), before ::: rs)
- case other :: rest => search(before ::: List(other), rest)
- case List() => (None, before)
- }
- search(List(), as)
+ def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match {
+ case -1 => (None, as)
+ case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2)))
}
- def searchAndRemoveOption(option: String, as: List[String]): (Boolean, List[String]) = {
- val Option = option
- def search(before: List[String], after: List[String]): (Boolean, List[String]) = after match {
- case Option :: rest => (true, before ::: rest)
- case other :: rest => search(before ::: List(other), rest)
- case List() => (false, before)
- }
- search(List(), as)
+ def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match {
+ case -1 => (false, as)
+ case idx => (true, (as take idx) ::: (as drop (idx + 1)))
}
-
}
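
Note: the RunnerUtils rewrite replaces the recursive pattern matches with a single indexOf scan. A standalone sketch of searchAndRemovePath in that style; like the original it assumes the flag, when present, is followed by a value:

  object OptScan {
    // return the value following `option`, plus the argument list without both tokens
    def searchAndRemovePath(option: String, as: List[String]): (Option[String], List[String]) =
      as.indexOf(option) match {
        case -1  => (None, as)
        case idx => (Some(as(idx + 1)), as.take(idx) ::: as.drop(idx + 2))
      }

    def main(args: Array[String]): Unit = {
      val (value, rest) = searchAndRemovePath("--srcpath", List("--srcpath", "pending", "--all"))
      println(value + " " + rest)   // Some(pending) List(--all)
    }
  }
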
diff --git a/src/partest/scala/tools/partest/nest/StreamAppender.scala b/src/partest/scala/tools/partest/nest/StreamAppender.scala
index 1dc4ec1dd2..8cebcf1685 100644
--- a/src/partest/scala/tools/partest/nest/StreamAppender.scala
+++ b/src/partest/scala/tools/partest/nest/StreamAppender.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,27 +8,37 @@
package scala.tools.partest
package nest
-import java.io.{Writer, PrintWriter, Reader, BufferedReader,
- IOException, InputStream, StringWriter, InputStreamReader,
- OutputStreamWriter, StringReader, OutputStream}
+import java.io._
object StreamAppender {
+ def wrapIn(in: InputStream): BufferedReader = new BufferedReader(new InputStreamReader(in))
+ def wrapIn(reader: Reader): BufferedReader = new BufferedReader(reader)
+ def wrapIn(str: String): BufferedReader = new BufferedReader(new StringReader(str))
- def apply(reader: BufferedReader, writer: Writer): StreamAppender = {
- val pwriter = new PrintWriter(writer, true)
- new StreamAppender(reader, pwriter)
- }
+ def wrapOut(out: OutputStream): PrintWriter = new PrintWriter(new OutputStreamWriter(out), true)
+ def wrapOut(writer: Writer): PrintWriter = new PrintWriter(writer, true)
+ def wrapOut(): PrintWriter = wrapOut(new StringWriter)
+
+ def apply(reader: BufferedReader, writer: Writer): StreamAppender =
+ new StreamAppender(reader, wrapOut(writer))
def apply(reader: Reader, writer: Writer): StreamAppender =
- apply(new BufferedReader(reader), writer)
+ apply(wrapIn(reader), writer)
+
+ def apply(in: InputStream, writer: Writer): StreamAppender =
+ apply(wrapIn(in), writer)
+
+ def apply(str: String, writer: Writer): StreamAppender =
+ apply(wrapIn(str), writer)
+
+ def apply(in: File, out: File): StreamAppender =
+ apply(new FileReader(in), new FileWriter(out))
def appendToString(in1: InputStream, in2: InputStream): String = {
val swriter1 = new StringWriter
val swriter2 = new StringWriter
- val reader1 = new BufferedReader(new InputStreamReader(in1))
- val reader2 = new BufferedReader(new InputStreamReader(in2))
- val app1 = StreamAppender(reader1, swriter1)
- val app2 = StreamAppender(reader2, swriter2)
+ val app1 = StreamAppender(wrapIn(in1), swriter1)
+ val app2 = StreamAppender(wrapIn(in2), swriter2)
val async = new Thread(app2)
async.start()
@@ -56,35 +66,29 @@ object StreamAppender {
def concat(in: InputStream, err: InputStream, out: OutputStream) = new Runnable {
override def run() {
- val outWriter = new PrintWriter(new OutputStreamWriter(out), true)
- val inApp = new StreamAppender(new BufferedReader(new InputStreamReader(in)),
- outWriter)
+ val outWriter = wrapOut(out)
+ val inApp = StreamAppender(in, outWriter)
+
val errStringWriter = new StringWriter
- val errApp = StreamAppender(new BufferedReader(new InputStreamReader(err)),
- errStringWriter)
+ val errApp = StreamAppender(wrapIn(err), errStringWriter)
+
inParallel(inApp, errApp)
// append error string to out
- val errStrApp = new StreamAppender(new BufferedReader(new StringReader(errStringWriter.toString)),
- outWriter)
- errStrApp.run()
+ StreamAppender(errStringWriter.toString, outWriter).run()
}
}
}
class StreamAppender(reader: BufferedReader, writer: PrintWriter) extends Runnable {
override def run() = runAndMap(identity)
-
- def runAndMap(f: String => String) {
- try {
- var line = reader.readLine()
- while (line != null) {
- writer.println(f(line))
- line = reader.readLine()
- }
- } catch {
- case e: IOException =>
- e.printStackTrace()
- }
+ private def lines() = Iterator continually reader.readLine() takeWhile (_ != null)
+ def closeAll() = {
+ reader.close()
+ writer.close()
}
+
+ def runAndMap(f: String => String) =
+ try lines() map f foreach (writer println _)
+ catch { case e: IOException => e.printStackTrace() }
}
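
Note: StreamAppender's copy loop is now an iterator pipeline: read lines until readLine returns null, map each one, print it. A minimal sketch of that pattern over in-memory streams:

  import java.io.{ BufferedReader, PrintWriter, StringReader, StringWriter }

  object Pump {
    // drain a reader into a writer, mapping each line, until readLine yields null
    def pump(reader: BufferedReader, writer: PrintWriter, f: String => String): Unit =
      Iterator.continually(reader.readLine()).takeWhile(_ != null).map(f).foreach(line => writer.println(line))

    def main(args: Array[String]): Unit = {
      val out = new StringWriter
      pump(new BufferedReader(new StringReader("a\nb")), new PrintWriter(out, true), _.toUpperCase)
      print(out)   // A, then B
    }
  }
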
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
index b8223a9202..741556fdd5 100644
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ b/src/partest/scala/tools/partest/nest/TestFile.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,102 +8,42 @@
package scala.tools.partest
package nest
-import java.io.{File, BufferedReader, FileReader}
+import java.io.{ File => JFile }
import scala.tools.nsc.Settings
+import scala.tools.nsc.io._
-class TestFile(kind: String, val file: File, val fileManager: FileManager, createOutDir: Boolean) {
- val dir = file.getParentFile
- val dirpath = dir.getAbsolutePath
- val fileBase: String = basename(file.getName)
+abstract class TestFile(kind: String) {
+ def file: JFile
+ def fileManager: FileManager
- // @mutates settings
- protected def baseSettings(settings: Settings) {
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+dirpath
- if (createOutDir)
- settings.outdir.value = {
- val outDir = new File(dir, fileBase + "-" + kind + ".obj")
- if (!outDir.exists)
- outDir.mkdir()
- outDir.toString
- }
+ val dir = file.toAbsolute.parent
+ val fileBase = file.stripExtension
+ lazy val objectDir = dir / "%s-%s.obj".format(fileBase, kind) createDirectory true
+ val flags: Option[String] = dir / "%s.flags".format(fileBase) ifFile { _.slurp().trim }
- // add additional flags found in 'testname.flags'
- val flagsFile = new File(dir, fileBase + ".flags")
- if (flagsFile.exists) {
- val reader = new BufferedReader(new java.io.FileReader(flagsFile))
- val flags = reader.readLine
- if (flags ne null)
- settings.parseParams(flags)
- }
- }
-
- def defineSettings(settings: Settings) {
- baseSettings(settings)
- }
-
- private def basename(name: String): String = {
- val inx = name.lastIndexOf(".")
- if (inx < 0) name else name.substring(0, inx)
- }
-
- override def toString(): String = kind+" "+file
-}
-
-case class PosTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("pos", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
+ def setOutDirTo = objectDir
-case class NegTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("neg", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
-
-case class RunTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("run", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
+ def defineSettings(settings: Settings, setOutDir: Boolean) = {
+ settings.classpath append dir.path
+ if (setOutDir)
+ settings.outdir.value = setOutDirTo.path
-case class ScalaCheckTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("scalacheck", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
+ flags foreach (settings processArgumentString _)
+ settings.classpath append fileManager.CLASSPATH
}
-}
-case class JvmTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("jvm", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
+ override def toString(): String = "%s %s".format(kind, file)
}
-case class ShootoutTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("shootout", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- settings.outdir.value = file.getParent
- }
+case class PosTestFile(file: JFile, fileManager: FileManager) extends TestFile("pos")
+case class NegTestFile(file: JFile, fileManager: FileManager) extends TestFile("neg")
+case class RunTestFile(file: JFile, fileManager: FileManager) extends TestFile("run")
+case class BuildManagerTestFile(file: JFile, fileManager: FileManager) extends TestFile("bm")
+case class ScalaCheckTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalacheck")
+case class JvmTestFile(file: JFile, fileManager: FileManager) extends TestFile("jvm")
+case class ShootoutTestFile(file: JFile, fileManager: FileManager) extends TestFile("shootout") {
+ override def setOutDirTo = file.parent
}
-
-case class ScalapTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("scalap", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- settings.outdir.value = file.getParent
- }
+case class ScalapTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalap") {
+ override def setOutDirTo = file.parent
}
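
Note: in the new TestFile, optional compiler flags come from a sibling "<base>.flags" file whose trimmed contents are passed to the settings. A sketch of that lookup using plain java.io and scala.io.Source rather than the compiler's Path/File wrappers; the layout convention is partest's, the helper name is made up:

  import java.io.File
  import scala.io.Source

  object TestFlags {
    // "<dir>/<base>.flags" next to the test source, if it exists
    def flagsFor(testSource: File): Option[String] = {
      val base  = testSource.getName.replaceAll("\\.[^.]*$", "")
      val flags = new File(testSource.getParentFile, base + ".flags")
      if (flags.isFile) Some(Source.fromFile(flags).mkString.trim) else None
    }

    def main(args: Array[String]): Unit =
      println(flagsFor(new File("test/files/pos/A.scala")))
  }
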
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 2e2049ffbe..931bc5cc13 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2009 LAMP/EPFL
+ * Copyright 2007-2010 LAMP/EPFL
* @author Philipp Haller
*/
@@ -9,20 +9,28 @@ package scala.tools.partest
package nest
import java.io._
-import java.net.{URLClassLoader, URL}
-import java.util.{Timer, TimerTask}
+import java.net.{ URLClassLoader, URL }
+import java.util.{ Timer, TimerTask }
-import scala.tools.nsc.{ObjectRunner, GenericRunnerCommand}
-import scala.tools.nsc.io
+import scala.util.Properties.{ isWin }
+import scala.tools.nsc.{ ObjectRunner, Settings, CompilerCommand, Global }
+import scala.tools.nsc.io.{ AbstractFile, PlainFile, Path, Directory, File => SFile }
+import scala.tools.nsc.reporters.ConsoleReporter
+import scala.tools.nsc.util.{ ClassPath, FakePos }
+import ClassPath.{ join, split }
-import scala.actors.{Actor, Exit, TIMEOUT}
+import scala.actors.{ Actor, Exit, TIMEOUT }
import scala.actors.Actor._
import scala.tools.scalap.scalax.rules.scalasig.{ByteCode, ClassFileParser, ScalaSigAttributeParsers}
-import scala.collection.mutable.HashMap
+import scala.collection.immutable.{ HashMap, Map => ImmMap }
+import scala.collection.Map
+
+import scala.tools.nsc.interactive.{BuildManager, RefinedBuildManager}
case class RunTests(kind: String, files: List[File])
-case class Results(succ: Int, fail: Int, logs: List[LogFile], outdirs: List[File])
+case class Results(results: ImmMap[String, Int], logs: List[LogFile], outdirs: List[File])
+
case class LogContext(file: LogFile, writers: Option[(StringWriter, PrintWriter)])
abstract class TestResult {
@@ -37,34 +45,26 @@ class LogFile(parent: File, child: String) extends File(parent, child) {
class Worker(val fileManager: FileManager) extends Actor {
import fileManager._
- import scala.tools.nsc.{Settings, CompilerCommand, Global}
- import scala.tools.nsc.reporters.ConsoleReporter
- import scala.tools.nsc.util.FakePos
var reporter: ConsoleReporter = _
val timer = new Timer
- def error(msg: String) {
- reporter.error(FakePos("scalac"),
- msg + "\n scalac -help gives more information")
- }
+ def error(msg: String): Unit = reporter.error(
+ FakePos("scalac"),
+ msg + "\n scalac -help gives more information"
+ )
def act() {
react {
case RunTests(kind, files) =>
- NestUI.verbose("received "+files.length+" to test")
+ // NestUI.verbose("received "+files.length+" to test")
val master = sender
- runTests(kind, files, (succ: Int, fail: Int) => {
- master ! Results(succ, fail, createdLogFiles, createdOutputDirs)
- })
+ runTests(kind, files) { results =>
+ master ! Results(results, createdLogFiles, createdOutputDirs)
+ }
}
}
- private def basename(name: String): String = {
- val inx = name.lastIndexOf(".")
- if (inx < 0) name else name.substring(0, inx)
- }
-
def printInfoStart(file: File, printer: PrintWriter) {
NestUI.outline("testing: ", printer)
val filesdir = file.getAbsoluteFile.getParentFile.getParentFile
@@ -82,7 +82,7 @@ class Worker(val fileManager: FileManager) extends Actor {
file.getAbsolutePath.substring(filesPathLen)
}
}
- NestUI.normal("[...]"+name+List.toString(List.fill(totalWidth-name.length)(' ')), printer)
+ NestUI.normal("[...]%s%s".format(name, " " * (totalWidth - name.length)), printer)
}
def printInfoEnd(success: Boolean, printer: PrintWriter) {
@@ -99,85 +99,65 @@ class Worker(val fileManager: FileManager) extends Actor {
}
var log = ""
- var createdLogFiles: List[LogFile] = List()
- var createdOutputDirs: List[File] = List()
+ var createdLogFiles: List[LogFile] = Nil
+ var createdOutputDirs: List[File] = Nil
def createLogFile(file: File, kind: String): LogFile = {
val logFile = fileManager.getLogFile(file, kind)
- createdLogFiles = logFile :: createdLogFiles
+ createdLogFiles ::= logFile
logFile
}
def createOutputDir(dir: File, fileBase: String, kind: String): File = {
- val outDir = new File(dir, fileBase + "-" + kind + ".obj")
- if (!outDir.exists)
- outDir.mkdir()
- createdOutputDirs = outDir :: createdOutputDirs
- outDir
+ val outDir = Path(dir) / Directory("%s-%s.obj".format(fileBase, kind))
+ outDir.createDirectory()
+ createdOutputDirs ::= outDir.jfile
+ outDir.jfile
}
/* Note: not yet used/tested. */
- def execTestObjectRunner(file: File, outDir: File, logFile: File) {
- val consFM = new ConsoleFileManager
- import consFM.{latestCompFile, latestLibFile, latestActFile,
- latestPartestFile}
-
- val classpath: List[URL] =
- outDir.toURL ::
- //List(file.getParentFile.toURL) :::
- List(latestCompFile.toURL, latestLibFile.toURL,
- latestActFile.toURL, latestPartestFile.toURL) :::
- (List.fromString(CLASSPATH, File.pathSeparatorChar) map { x =>
- (new File(x)).toURL })
- NestUI.verbose("ObjectRunner classpath: "+classpath)
-
- try {
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut)
-
- // grab global lock
- fileManager.synchronized {
-
- val oldStdOut = System.out
- val oldStdErr = System.err
- System.setOut(logWriter)
- System.setErr(logWriter)
-
- /*
- " -Djava.library.path="+logFile.getParentFile.getAbsolutePath+
- " -Dscalatest.output="+outDir.getAbsolutePath+
- " -Dscalatest.lib="+LATEST_LIB+
- " -Dscalatest.cwd="+outDir.getParent+
- " -Djavacmd="+JAVACMD+
- */
-
- System.setProperty("java.library.path", logFile.getParentFile.getCanonicalFile.getAbsolutePath)
- System.setProperty("scalatest.output", outDir.getCanonicalFile.getAbsolutePath)
- System.setProperty("scalatest.lib", LATEST_LIB)
- System.setProperty("scalatest.cwd", outDir.getParent)
-
- ObjectRunner.run(classpath, "Test", List("jvm"))
-
- logWriter.flush()
- logWriter.close()
-
- System.setOut(oldStdOut)
- System.setErr(oldStdErr)
- }
-
- /*val out = new FileOutputStream(logFile, true)
- Console.withOut(new PrintStream(out)) {
- ObjectRunner.run(classpath, "Test", List("jvm"))
- }
- out.flush
- out.close*/
- } catch {
- case e: Exception =>
- NestUI.verbose(e+" ("+file.getPath+")")
- e.printStackTrace()
- }
- }
+ // def execTestObjectRunner(file: File, outDir: File, logFile: File) {
+ // val consFM = new ConsoleFileManager
+ //
+ // val classpath: List[URL] = {
+ // import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
+ // val units = (
+ // List(outDir, latestCompFile, latestLibFile, latestPartestFile) :::
+ // ((CLASSPATH split File.pathSeparatorChar).toList map (x => new File(x)))
+ // )
+ // units map (_.toURI.toURL)
+ // }
+ //
+ // NestUI.verbose("ObjectRunner classpath: "+classpath)
+ //
+ // try {
+ // // configure input/output files
+ // val logOut = new FileOutputStream(logFile)
+ // val logWriter = new PrintStream(logOut)
+ //
+ // // grab global lock
+ // fileManager.synchronized {
+ // withOutputRedirected(logWriter) {
+ // System.setProperty("java.library.path", logFile.getParentFile.getCanonicalFile.getAbsolutePath)
+ // System.setProperty("partest.output", outDir.getCanonicalFile.getAbsolutePath)
+ // System.setProperty("partest.lib", LATEST_LIB)
+ // System.setProperty("partest.cwd", outDir.getParent)
+ // ObjectRunner.run(classpath, "Test", List("jvm"))
+ // }
+ // }
+ //
+ // /*val out = new FileOutputStream(logFile, true)
+ // Console.withOut(new PrintStream(out)) {
+ // ObjectRunner.run(classpath, "Test", List("jvm"))
+ // }
+ // out.flush
+ // out.close*/
+ // } catch {
+ // case e: Exception =>
+ // NestUI.verbose(e+" ("+file.getPath+")")
+ // e.printStackTrace()
+ // }
+ // }
def javac(outDir: File, files: List[File], output: File): Boolean = {
// compile using command-line javac compiler
@@ -190,7 +170,7 @@ class Worker(val fileManager: FileManager) extends Actor {
val cmd = javacCmd+
" -d "+outDir.getAbsolutePath+
- " -classpath "+outDir+File.pathSeparator+CLASSPATH+
+ " -classpath "+ join(outDir.toString, CLASSPATH) +
" "+files.mkString(" ")
val (success, msg) = try {
@@ -223,20 +203,16 @@ class Worker(val fileManager: FileManager) extends Actor {
val in = proc.getInputStream
val err = proc.getErrorStream
val writer = new PrintWriter(new FileWriter(output), true)
- val inApp = new StreamAppender(new BufferedReader(new InputStreamReader(in)),
- writer)
- val errApp = new StreamAppender(new BufferedReader(new InputStreamReader(err)),
- writer)
+ val inApp = StreamAppender(in, writer)
+ val errApp = StreamAppender(err, writer)
val async = new Thread(errApp)
async.start()
inApp.run()
async.join()
writer.close()
- try {
- proc.exitValue()
- } catch {
- case e: IllegalThreadStateException => 0
- }
+
+ try proc.exitValue()
+ catch { case _: IllegalThreadStateException => 0 }
}
def execTest(outDir: File, logFile: File, fileBase: String) {
@@ -252,33 +228,40 @@ class Worker(val fileManager: FileManager) extends Actor {
options
} else ""
- val cp = System.getProperty("java.class.path", ".")
- NestUI.verbose("java.class.path: "+cp)
-
def quote(path: String) = "\""+path+"\""
// Note! As this currently functions, JAVA_OPTS must precede argString
// because when an option is repeated to java only the last one wins.
// That means until now all the .javaopts files were being ignored because
// they all attempt to change options which are also defined in
- // scalatest.java_opts, leading to debug output like:
+ // partest.java_opts, leading to debug output like:
//
// debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
// debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val cmd =
- JAVACMD+
- " "+JAVA_OPTS+
- " "+argString+
- " -classpath "+outDir+File.pathSeparator+CLASSPATH+
- " -Djava.library.path="+logFile.getParentFile.getAbsolutePath+
- " -Dscalatest.output="+outDir.getAbsolutePath+
- " -Dscalatest.lib="+LATEST_LIB+
- " -Dscalatest.cwd="+outDir.getParent+
- " -Djavacmd="+JAVACMD+
- " -Duser.language=en -Duser.country=US"+
- " scala.tools.nsc.MainGenericRunner"+
- " Test jvm"
- NestUI.verbose(cmd)
+ val propertyOptions = List(
+ "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
+ "-Dpartest.output="+outDir.getAbsolutePath,
+ "-Dpartest.lib="+LATEST_LIB,
+ "-Dpartest.cwd="+outDir.getParent,
+ "-Djavacmd="+JAVACMD,
+ "-Duser.language=en -Duser.country=US"
+ ) ::: (
+ if (isPartestDebug) List("-Dpartest.debug=true") else Nil
+ )
+
+ val cmd = (
+ List(
+ JAVACMD,
+ JAVA_OPTS,
+ argString,
+ "-classpath " + join(outDir.toString, CLASSPATH)
+ ) ::: propertyOptions ::: List(
+ "scala.tools.nsc.MainGenericRunner",
+ "-usejavacp",
+ "Test",
+ "jvm"
+ )
+ ) mkString " "
runCommand(cmd, logFile)
@@ -293,59 +276,52 @@ class Worker(val fileManager: FileManager) extends Actor {
}
}
- def existsCheckFile(dir: File, fileBase: String, kind: String) = {
- val checkFile = {
- val chkFile = new File(dir, fileBase + ".check")
- if (chkFile.isFile)
- chkFile
- else
- new File(dir, fileBase + "-" + kind + ".check")
- }
- checkFile.exists && checkFile.canRead
+ def getCheckFile(dir: File, fileBase: String, kind: String) = {
+ def chkFile(s: String) = Directory(dir) / "%s%s.check".format(fileBase, s)
+ val checkFile = if (chkFile("").isFile) chkFile("") else chkFile("-" + kind)
+
+ if (checkFile.canRead) Some(checkFile) else None
}
- def compareOutput(dir: File, fileBase: String, kind: String, logFile: File): String = {
+ def existsCheckFile(dir: File, fileBase: String, kind: String) =
+ getCheckFile(dir, fileBase, kind).isDefined
+
+ def compareOutput(dir: File, fileBase: String, kind: String, logFile: File): String =
// if check file exists, compare with log file
- val checkFile = {
- val chkFile = new File(dir, fileBase + ".check")
- if (chkFile.isFile)
- chkFile
- else
- new File(dir, fileBase + "-" + kind + ".check")
- }
- if (!checkFile.exists || !checkFile.canRead) {
- val reader = new BufferedReader(new FileReader(logFile))
- val swriter = new StringWriter
- val pwriter = new PrintWriter(swriter, true)
- val appender = new StreamAppender(reader, pwriter)
- appender.run()
- swriter.toString
+ getCheckFile(dir, fileBase, kind) match {
+ case Some(f) => fileManager.compareFiles(logFile, f.jfile)
+ case _ => file2String(logFile)
}
- else fileManager.compareFiles(logFile, checkFile)
- }
- def file2String(logFile: File) = {
- val logReader = new BufferedReader(new FileReader(logFile))
- val strWriter = new StringWriter
- val logWriter = new PrintWriter(strWriter, true)
- val logAppender = new StreamAppender(logReader, logWriter)
- logAppender.run()
- logReader.close()
- strWriter.toString
- }
+ def file2String(logFile: File) = SFile(logFile).slurp()
+ def isJava(f: File) = SFile(f) hasExtension "java"
+ def isScala(f: File) = SFile(f) hasExtension "scala"
+ def isJavaOrScala(f: File) = isJava(f) || isScala(f)
/** Runs a list of tests.
*
* @param kind The test kind (pos, neg, run, etc.)
* @param files The list of test files
*/
- def runTests(kind: String, files: List[File], topcont: (Int, Int) => Unit) {
+ def runTests(kind: String, files: List[File])(topcont: ImmMap[String, Int] => Unit) {
val compileMgr = new CompileManager(fileManager)
var errors = 0
var succeeded = true
var diff = ""
var log = ""
+ def fail(what: Any) {
+ NestUI.verbose("scalac: compilation of "+what+" failed\n")
+ succeeded = false
+ }
+ def diffCheck(latestDiff: String) = {
+ diff = latestDiff
+ if (latestDiff != "") {
+ NestUI.verbose("output differs from log file\n")
+ succeeded = false
+ }
+ }
+
/** 1. Creates log file and output directory.
* 2. Runs <code>script</code> function, providing log file and
* output directory as arguments.
@@ -355,7 +331,7 @@ class Worker(val fileManager: FileManager) extends Actor {
// execute test only if log file is present
// (which means it failed before)
val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
+ if (!fileManager.failed || logFile.canRead) {
val swr = new StringWriter
val wr = new PrintWriter(swr)
succeeded = true
@@ -371,7 +347,15 @@ class Worker(val fileManager: FileManager) extends Actor {
// run test-specific code
try {
- script(logFile, outDir)
+ if (isPartestDebug) {
+ val t1 = System.currentTimeMillis
+ script(logFile, outDir)
+ val t2 = System.currentTimeMillis
+ fileManager.recordTestTiming(file.getPath, t2 - t1)
+ }
+ else {
+ script(logFile, outDir)
+ }
} catch {
case e: Exception =>
val writer = new PrintWriter(new FileWriter(logFile), true)
@@ -386,171 +370,260 @@ class Worker(val fileManager: FileManager) extends Actor {
}
def compileFilesIn(dir: File, kind: String, logFile: File, outDir: File) {
- val testFiles = dir.listFiles.toList
+ val testFiles = dir.listFiles.toList filter isJavaOrScala
- val groups = for (i <- 0 to 9) yield testFiles filter { f =>
- f.getName.endsWith("_"+i+".java") ||
- f.getName.endsWith("_"+i+".scala") }
-
- val noSuffix = testFiles filter { f =>
- !groups.exists(_ contains f) && (
- f.getName.endsWith(".java") ||
- f.getName.endsWith(".scala")) }
+ def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
+ val groups = (0 to 9).toList map (num => testFiles filter (f => isInGroup(f, num)))
+ val noGroupSuffix = testFiles filterNot (groups.flatten contains)
def compileGroup(g: List[File]) {
- val scalaFiles = g.filter(_.getName.endsWith(".scala"))
- val javaFiles = g.filter(_.getName.endsWith(".java"))
-
- if (!scalaFiles.isEmpty &&
- !compileMgr.shouldCompile(outDir,
- javaFiles ::: scalaFiles,
- kind, logFile)) {
- NestUI.verbose("scalac: compilation of "+g+" failed\n")
- succeeded = false
+ val (scalaFiles, javaFiles) = g partition isScala
+
+ if (scalaFiles.nonEmpty) {
+ if (!compileMgr.shouldCompile(outDir, javaFiles ::: scalaFiles, kind, logFile))
+ fail(g)
}
- if (succeeded && !javaFiles.isEmpty) {
+ if (succeeded && javaFiles.nonEmpty) {
succeeded = javac(outDir, javaFiles, logFile)
- if (succeeded && !scalaFiles.isEmpty
- && !compileMgr.shouldCompile(outDir,
- scalaFiles,
- kind, logFile)) {
- NestUI.verbose("scalac: compilation of "+scalaFiles+" failed\n")
- succeeded = false
- }
+ if (succeeded && scalaFiles.nonEmpty && !compileMgr.shouldCompile(outDir, scalaFiles, kind, logFile))
+ fail(scalaFiles)
}
}
- if (!noSuffix.isEmpty)
- compileGroup(noSuffix)
- for (grp <- groups) {
- if (succeeded)
- compileGroup(grp)
- }
+ if (noGroupSuffix.nonEmpty)
+ compileGroup(noGroupSuffix)
+
+ groups foreach (grp => if (succeeded) compileGroup(grp))
}
def failCompileFilesIn(dir: File, kind: String, logFile: File, outDir: File) {
- val testFiles = dir.listFiles.toList
- val javaFiles = testFiles.filter(_.getName.endsWith(".java"))
- val scalaFiles = testFiles.filter(_.getName.endsWith(".scala"))
- if (!(scalaFiles.isEmpty && javaFiles.isEmpty) &&
- !compileMgr.shouldFailCompile(outDir, javaFiles ::: scalaFiles, kind, logFile)) {
- NestUI.verbose("compilation of "+scalaFiles+" failed\n")
- succeeded = false
+ val testFiles = dir.listFiles.toList
+ val sourceFiles = testFiles filter isJavaOrScala
+
+ if (sourceFiles.nonEmpty) {
+ if (!compileMgr.shouldFailCompile(outDir, sourceFiles, kind, logFile))
+ fail(testFiles filter isScala)
}
}
- def runJvmTest(file: File, kind: String): LogContext =
+ def runTestCommon(file: File, kind: String, expectFailure: Boolean)(onSuccess: (File, File) => Unit): LogContext =
runInContext(file, kind, (logFile: File, outDir: File) => {
+
if (file.isDirectory) {
- compileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldCompile(List(file), kind, logFile)) {
- NestUI.verbose("compilation of "+file+" failed\n")
- succeeded = false
+ val f = if (expectFailure) failCompileFilesIn _ else compileFilesIn _
+ f(file, kind, logFile, outDir)
}
- if (succeeded) { // run test
- val fileBase = basename(file.getName)
- val dir = file.getParentFile
+ else {
+ val f: (List[File], String, File) => Boolean =
+ if (expectFailure) compileMgr.shouldFailCompile _
+ else compileMgr.shouldCompile _
- //TODO: detect whether we have to use Runtime.exec
- val useRuntime = true
+ if (!f(List(file), kind, logFile))
+ fail(file)
+ }
- if (useRuntime)
- execTest(outDir, logFile, fileBase)
- else
- execTestObjectRunner(file, outDir, logFile)
- NestUI.verbose(this+" finished running "+fileBase)
+ if (succeeded) // run test
+ onSuccess(logFile, outDir)
+ })
- diff = compareOutput(dir, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
- }
+ def runJvmTest(file: File, kind: String): LogContext =
+ runTestCommon(file, kind, expectFailure = false)((logFile, outDir) => {
+ val fileBase = basename(file.getName)
+ val dir = file.getParentFile
+
+ //TODO: detect whether we have to use Runtime.exec
+ // val useRuntime = true
+ //
+ // if (useRuntime)
+ // execTest(outDir, logFile, fileBase)
+ // else
+ // execTestObjectRunner(file, outDir, logFile)
+ // // NestUI.verbose(this+" finished running "+fileBase)
+ execTest(outDir, logFile, fileBase)
+
+ diffCheck(compareOutput(dir, fileBase, kind, logFile))
})
def processSingleFile(file: File): LogContext = kind match {
case "scalacheck" =>
- runInContext(file, kind, (logFile: File, outDir: File) => {
- if (file.isDirectory) {
- compileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldCompile(List(file), kind, logFile)) {
- NestUI.verbose("compilation of "+file+" failed\n")
- succeeded = false
- }
- if (succeeded) {
- val consFM = new ConsoleFileManager
- import consFM.{latestCompFile, latestLibFile, latestActFile,
- latestPartestFile}
+ runTestCommon(file, kind, expectFailure = false)((logFile, outDir) => {
+ val consFM = new ConsoleFileManager
+ import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
- NestUI.verbose("compilation of "+file+" succeeded\n")
+ NestUI.verbose("compilation of "+file+" succeeded\n")
- val libs = new File(fileManager.LIB_DIR)
- val scalacheckURL = new File(libs, "ScalaCheck.jar") toURL
- val outURL = outDir.getCanonicalFile.toURL
- val classpath: List[URL] =
- List(outURL, scalacheckURL, latestCompFile.toURL, latestLibFile.toURL,
- latestActFile.toURL, latestPartestFile.toURL).removeDuplicates
+ val scalacheckURL = PathSettings.scalaCheck.toURL
+ val outURL = outDir.getCanonicalFile.toURI.toURL
+ val classpath: List[URL] =
+ List(outURL, scalacheckURL, latestCompFile.toURI.toURL, latestLibFile.toURI.toURL, latestPartestFile.toURI.toURL).distinct
- // XXX this is a big cut-and-paste mess, but the revamp is coming
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut)
- val oldStdOut = System.out
- val oldStdErr = System.err
- System.setOut(logWriter)
- System.setErr(logWriter)
-
- ObjectRunner.run(classpath, "Test", Nil)
+ NestUI.debug("scalacheck urls")
+ classpath foreach (x => NestUI.debug(x.toString))
- logWriter.flush()
- logWriter.close()
- System.setOut(oldStdOut)
- System.setErr(oldStdErr)
+ val logWriter = new PrintStream(new FileOutputStream(logFile))
- NestUI.verbose(io.File(logFile).slurp())
- // obviously this must be improved upon
- succeeded = io.File(logFile).lines() forall (_ contains " OK")
+ withOutputRedirected(logWriter) {
+ ObjectRunner.run(classpath, "Test", Nil)
}
- })
+
+ NestUI.verbose(SFile(logFile).slurp())
+ // obviously this must be improved upon
+ succeeded = SFile(logFile).lines() forall (_ contains " OK")
+ })
case "pos" =>
- runInContext(file, kind, (logFile: File, outDir: File) => {
- if (file.isDirectory) {
- compileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldCompile(List(file), kind, logFile)) {
- NestUI.verbose("compilation of "+file+" failed\n")
- succeeded = false
- }
- })
+ runTestCommon(file, kind, expectFailure = false)((_, _) => ())
case "neg" =>
- runInContext(file, kind, (logFile: File, outDir: File) => {
- if (file.isDirectory) {
- failCompileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldFailCompile(List(file), kind, logFile)) {
+ runTestCommon(file, kind, expectFailure = true)((logFile, outDir) => {
+ // compare log file to check file
+ val fileBase = basename(file.getName)
+ val dir = file.getParentFile
+
+ diffCheck(
+ // diff is contents of logFile
+ if (!existsCheckFile(dir, fileBase, kind)) file2String(logFile)
+ else compareOutput(dir, fileBase, kind, logFile)
+ )
+ })
+
+ case "run" | "jvm" =>
+ runJvmTest(file, kind)
+
+ case "buildmanager" =>
+ val logFile = createLogFile(file, kind)
+ if (!fileManager.failed || logFile.canRead) {
+ val swr = new StringWriter
+ val wr = new PrintWriter(swr)
+ succeeded = true; diff = ""
+ printInfoStart(file, wr)
+ val (outDir, testFile, changesDir, fileBase) =
+
+ if (!file.isDirectory) {
succeeded = false
- }
- if (succeeded) { // compare log file to check file
- val fileBase = basename(file.getName)
- val dir = file.getParentFile
- if (!existsCheckFile(dir, fileBase, kind)) {
- // diff is contents of logFile
- diff = file2String(logFile)
- } else
- diff = compareOutput(dir, fileBase, kind, logFile)
-
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
+ (null, null, null, null)
+ } else {
+ val fileBase: String = basename(file.getName)
+ NestUI.verbose(this+" running test "+fileBase)
+ val outDir = createOutputDir(file, fileBase, kind)
+ if (!outDir.exists) outDir.mkdir()
+ val testFile = new File(file, fileBase + ".test")
+ val changesDir = new File(file, fileBase + ".changes")
+ if (changesDir.isFile || !testFile.isFile) {
+ // if changes exists then it has to be a dir
+ if (!testFile.isFile) NestUI.verbose("invalid build manager test file")
+ if (changesDir.isFile) NestUI.verbose("invalid build manager changes directory")
succeeded = false
+ (null, null, null, null)
+ } else {
+ copyTestFiles(file, outDir)
+ NestUI.verbose("outDir: "+outDir)
+ NestUI.verbose("logFile: "+logFile)
+ (outDir, testFile, changesDir, fileBase)
}
}
- })
- case "run" =>
- runJvmTest(file, kind)
+ if (succeeded) {
+ // Pre-conditions satisfied
- case "jvm" =>
- runJvmTest(file, kind)
+ try {
+ val sourcepath = outDir.getAbsolutePath+File.separator
+
+ // configure input/output files
+ val logWriter = new PrintStream(new FileOutputStream(logFile))
+ val testReader = new BufferedReader(new FileReader(testFile))
+ val logConsoleWriter = new PrintWriter(logWriter)
+
+ // create proper settings for the compiler
+ val settings = new Settings(error)
+ settings.outdir.value = outDir.getCanonicalFile.getAbsolutePath
+ settings.sourcepath.value = sourcepath
+ settings.classpath.value = fileManager.CLASSPATH
+ settings.Ybuildmanagerdebug.value = true
+
+ // simulate Build Manager loop
+ val prompt = "builder > "
+ reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
+ val bM: BuildManager =
+ new RefinedBuildManager(settings) {
+ override protected def newCompiler(settings: Settings) =
+ new BuilderGlobal(settings, reporter)
+ }
+
+ val testCompile = (line: String) => {
+ NestUI.verbose("compiling " + line)
+ val args = (line split ' ').toList
+ val command = new CompilerCommand(args, settings)
+ bM.update(filesToSet(settings.sourcepath.value, command.files), Set.empty)
+ !reporter.hasErrors
+ }
+
+ val updateFiles = (line: String) => {
+ NestUI.verbose("updating " + line)
+ val res =
+ ((line split ' ').toList).forall(u => {
+ (u split "=>").toList match {
+ case origFileName::(newFileName::Nil) =>
+ val newFile = new File(changesDir, newFileName)
+ if (newFile.isFile) {
+ val v = overwriteFileWith(new File(outDir, origFileName), newFile)
+ if (!v)
+ NestUI.verbose("'update' operation on " + u + " failed")
+ v
+ } else {
+ NestUI.verbose("File " + newFile + " is invalid")
+ false
+ }
+ case a =>
+ NestUI.verbose("Other =: " + a)
+ false
+ }
+ })
+ if (!res)
+ NestUI.verbose("updating failed")
+ else
+ NestUI.verbose("updating succeeded")
+ res
+ }
+
+ def loop() {
+ val command = testReader.readLine()
+ if ((command ne null) && command.length() > 0) {
+ val commandResult = command match {
+ case s if (s.startsWith(">>update ")) =>
+ updateFiles(s.stripPrefix(">>update "))
+ case s if (s.startsWith(">>compile ")) =>
+ val files = s.stripPrefix(">>compile ")
+ logWriter.println(prompt + files)
+ testCompile(files) // In the end, it can finish with an error
+ case _ =>
+ NestUI.verbose("wrong command in test file: " + command)
+ false
+ }
+
+ if (commandResult) loop()
+
+ } else {
+ NestUI.verbose("finished")
+ succeeded = true
+ }
+ }
+
+ withOutputRedirected(logWriter) {
+ loop()
+ testReader.close()
+ }
+ fileManager.mapFile(logFile, "tmp", file, _.replace(sourcepath, "").
+ replaceAll(java.util.regex.Matcher.quoteReplacement("\\"), "/"))
+
+ diffCheck(compareOutput(file, fileBase, kind, logFile))
+ }
+ LogContext(logFile, Some((swr, wr)))
+ } else
+ LogContext(logFile, None)
+ } else
+ LogContext(logFile, None)
case "res" => {
// when option "--failed" is provided
@@ -559,7 +632,7 @@ class Worker(val fileManager: FileManager) extends Actor {
//val (logFileOut, logFileErr) = createLogFiles(file, kind)
val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
+ if (!fileManager.failed || logFile.canRead) {
val swr = new StringWriter
val wr = new PrintWriter(swr)
succeeded = true; diff = ""; log = ""
@@ -589,7 +662,7 @@ class Worker(val fileManager: FileManager) extends Actor {
"-d "+outDir.getCanonicalFile.getAbsolutePath+
" -Xresident"+
" -sourcepath "+sourcepath
- val argList = List.fromString(argString, ' ')
+ val argList = argString split ' ' toList
// configure input/output files
val logOut = new FileOutputStream(logFile)
@@ -602,7 +675,7 @@ class Worker(val fileManager: FileManager) extends Actor {
settings.sourcepath.value = sourcepath
settings.classpath.value = fileManager.CLASSPATH
reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings, error, false)
+ val command = new CompilerCommand(argList, settings)
object compiler extends Global(command.settings, reporter)
// simulate resident compiler loop
@@ -610,11 +683,11 @@ class Worker(val fileManager: FileManager) extends Actor {
val resCompile = (line: String) => {
NestUI.verbose("compiling "+line)
- val cmdArgs = List.fromString(line, ' ') map { fs => new File(dir, fs).getAbsolutePath }
+ val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
NestUI.verbose("cmdArgs: "+cmdArgs)
val sett = new Settings(error)
sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett, error, true)
+ val command = new CompilerCommand(cmdArgs, sett)
(new compiler.Run) compile command.files
}
@@ -649,55 +722,26 @@ class Worker(val fileManager: FileManager) extends Actor {
loop(action)
}
}
- val oldStdOut = System.out
- val oldStdErr = System.err
- System.setOut(logWriter)
- System.setErr(logWriter)
- loop(resCompile)
- resReader.close()
- logWriter.flush()
- logWriter.close()
-
- System.setOut(oldStdOut)
- System.setErr(oldStdErr)
-
- val tempLogFile = new File(dir, fileBase+".temp.log")
- val logFileReader = new BufferedReader(new FileReader(logFile))
- val tempLogFilePrinter = new PrintWriter(new FileWriter(tempLogFile))
- val appender =
- new StreamAppender(logFileReader, tempLogFilePrinter)
-
- // function that removes a given string from another string
- def removeFrom(line: String, path: String): String = {
- // find `path` in `line`
- val index = line.indexOf(path)
- if (index != -1) {
- line.substring(0, index) + line.substring(index + path.length, line.length)
- } else line
+
+ withOutputRedirected(logWriter) {
+ loop(resCompile)
+ resReader.close()
}
- appender.runAndMap({ s =>
- val woPath = removeFrom(s, dir.getAbsolutePath/*.replace(File.separatorChar,'/')*/+File.separator)
- // now replace single '\' with '/'
- woPath.replace('\\', '/')
- })
- logFileReader.close()
- tempLogFilePrinter.close()
-
- val tempLogFileReader = new BufferedReader(new FileReader(tempLogFile))
- val logFilePrinter= new PrintWriter(new FileWriter(logFile), true)
- (new StreamAppender(tempLogFileReader, logFilePrinter)).run
- tempLogFileReader.close()
- logFilePrinter.close()
-
- tempLogFile.delete()
-
- diff = compareOutput(dir, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
+ def replaceSlashes(s: String): String = {
+ val path = dir.getAbsolutePath+File.separator
+ // find `path` in `line`
+ val index = s.indexOf(path)
+ val line =
+ if (index != -1)
+ s.substring(0, index) + s.substring(index + path.length, s.length)
+ else s
+ line.replace('\\', '/')
}
+ fileManager.mapFile(logFile, "tmp", dir, replaceSlashes)
+ diffCheck(compareOutput(dir, fileBase, kind, logFile))
+
} catch {
case e: Exception =>
e.printStackTrace()
@@ -714,7 +758,7 @@ class Worker(val fileManager: FileManager) extends Actor {
// execute test only if log file is present
// (which means it failed before)
val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
+ if (!fileManager.failed || logFile.canRead) {
val swr = new StringWriter
val wr = new PrintWriter(swr)
succeeded = true; diff = ""; log = ""
@@ -750,12 +794,15 @@ class Worker(val fileManager: FileManager) extends Actor {
// -------- run test --------
//TODO: detect whether we have to use Runtime.exec
- val useRuntime = true
+ // val useRuntime = true
+ //
+ // if (useRuntime)
+ // execTest(outDir, logFile, fileBase)
+ // else
+ // execTestObjectRunner(file, outDir, logFile)
+
+ execTest(outDir, logFile, fileBase)
- if (useRuntime)
- execTest(outDir, logFile, fileBase)
- else
- execTestObjectRunner(file, outDir, logFile)
NestUI.verbose(this+" finished running "+fileBase)
} // successful compile
} catch { // *catch-all*
@@ -764,11 +811,7 @@ class Worker(val fileManager: FileManager) extends Actor {
succeeded = false
}
- diff = compareOutput(dir, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
+ diffCheck(compareOutput(dir, fileBase, kind, logFile))
LogContext(logFile, Some((swr, wr)))
} else
@@ -777,14 +820,6 @@ class Worker(val fileManager: FileManager) extends Actor {
case "scalap" => {
- def decompileFile(clazz: Class[_], packObj: Boolean) = {
- val byteCode = ByteCode.forClass(clazz)
- val classFile = ClassFileParser.parse(byteCode)
- val Some(sig) = classFile.attribute("ScalaSig").map(_.byteCode).map(ScalaSigAttributeParsers.parse)
- import scala.tools.scalap.Main._
- parseScalaSignature(sig, packObj)
- }
-
runInContext(file, kind, (logFile: File, outDir: File) => {
val sourceDir = file.getParentFile
val sourceDirName = sourceDir.getName
@@ -811,7 +846,8 @@ class Worker(val fileManager: FileManager) extends Actor {
val loader = new URLClassLoader(Array(url), getClass.getClassLoader)
val clazz = loader.loadClass(className)
- val result = decompileFile(clazz, isPackageObject)
+ val byteCode = ByteCode.forClass(clazz)
+ val result = scala.tools.scalap.Main.decompileScala(byteCode.bytes, isPackageObject)
try {
val fstream = new FileWriter(logFile);
@@ -822,23 +858,18 @@ class Worker(val fileManager: FileManager) extends Actor {
case e: IOException => NestUI.verbose(e.getMessage()); succeeded = false
}
- val diff = fileManager.compareFiles(logFile, resFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
+ diffCheck(fileManager.compareFiles(logFile, resFile))
}
}
})
}
case "script" => {
- val osName = System.getProperty("os.name", "")
// when option "--failed" is provided
// execute test only if log file is present
// (which means it failed before)
val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
+ if (!fileManager.failed || logFile.canRead) {
val swr = new StringWriter
val wr = new PrintWriter(swr)
succeeded = true; diff = ""; log = ""
@@ -860,7 +891,7 @@ class Worker(val fileManager: FileManager) extends Actor {
try {
val cmdString =
- if (osName startsWith "Windows") {
+ if (isWin) {
val batchFile = new File(file.getParentFile, fileBase+".bat")
NestUI.verbose("batchFile: "+batchFile)
batchFile.getAbsolutePath
@@ -881,11 +912,7 @@ class Worker(val fileManager: FileManager) extends Actor {
writer.close()
- diff = compareOutput(file.getParentFile, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
+ diffCheck(compareOutput(file.getParentFile, fileBase, kind, logFile))
} catch { // *catch-all*
case e: Exception =>
NestUI.verbose("caught "+e)
@@ -898,37 +925,40 @@ class Worker(val fileManager: FileManager) extends Actor {
}
}
- def reportAll(cont: (Int, Int) => Unit) {
- NestUI.verbose("finished testing "+kind+" with "+errors+" errors")
- NestUI.verbose("created "+compileMgr.numSeparateCompilers+" separate compilers")
+ def reportAll(results: ImmMap[String, Int], cont: ImmMap[String, Int] => Unit) {
+ // NestUI.verbose("finished testing "+kind+" with "+errors+" errors")
+ // NestUI.verbose("created "+compileMgr.numSeparateCompilers+" separate compilers")
timer.cancel()
- cont(files.length-errors, errors)
+ cont(results)
}
- def reportResult(logs: Option[LogContext]) {
- if (!succeeded) {
+ def reportResult(state: Int, logFile: Option[LogFile], writers: Option[(StringWriter, PrintWriter)]) {
+ val good = (state == 0)
+ if (!good) {
errors += 1
NestUI.verbose("incremented errors: "+errors)
}
try {
// delete log file only if test was successful
- if (succeeded && !logs.isEmpty)
- logs.get.file.toDelete = true
-
- if (!logs.isEmpty)
- logs.get.writers match {
- case Some((swr, wr)) =>
- printInfoEnd(succeeded, wr)
- wr.flush()
- swr.flush()
- NestUI.normal(swr.toString)
- if (!succeeded && fileManager.showDiff && diff != "")
- NestUI.normal(diff)
- if (!succeeded && fileManager.showLog)
- showLog(logs.get.file)
- case None =>
- }
+ if (good && !logFile.isEmpty && !isPartestDebug)
+ logFile.get.toDelete = true
+
+ writers match {
+ case Some((swr, wr)) =>
+ if (state == 2)
+ printInfoTimeout(wr)
+ else
+ printInfoEnd(good, wr)
+ wr.flush()
+ swr.flush()
+ NestUI.normal(swr.toString)
+ if (state == 1 && fileManager.showDiff && diff != "")
+ NestUI.normal(diff)
+ if (state == 1 && fileManager.showLog)
+ showLog(logFile.get)
+ case None =>
+ }
} catch {
case npe: NullPointerException =>
}
@@ -936,10 +966,10 @@ class Worker(val fileManager: FileManager) extends Actor {
val numFiles = files.size
if (numFiles == 0)
- reportAll(topcont)
+ reportAll(ImmMap(), topcont)
// maps canonical file names to the test result (0: OK, 1: FAILED, 2: TIMEOUT)
- val status = new HashMap[String, Int]
+ var status = new HashMap[String, Int]
var fileCnt = 1
Actor.loopWhile(fileCnt <= numFiles) {
@@ -972,33 +1002,57 @@ class Worker(val fileManager: FileManager) extends Actor {
val path = res.file.getCanonicalPath
status.get(path) match {
case Some(stat) => // ignore message
- case None => res match {
- case Timeout(_) =>
- status += (path -> 2)
- val swr = new StringWriter
- val wr = new PrintWriter(swr)
- printInfoStart(files(fileCnt-1), wr)
- printInfoTimeout(wr)
- wr.flush()
- swr.flush()
- NestUI.normal(swr.toString)
- succeeded = false
- reportResult(None)
- if (fileCnt == numFiles)
- reportAll(topcont)
- fileCnt += 1
- case Result(_, logs) =>
- status += (path -> (if (succeeded) 0 else 1))
- reportResult(if (logs != null) Some(logs) else None)
- if (fileCnt == numFiles)
- reportAll(topcont)
- fileCnt += 1
- }
+ case None =>
+ res match {
+ case Timeout(_) =>
+ status = status + (path -> 2)
+ val swr = new StringWriter
+ val wr = new PrintWriter(swr)
+ printInfoStart(res.file, wr)
+ succeeded = false
+ reportResult(2, None, Some((swr, wr)))
+ case Result(_, logs) =>
+ status = status + (path -> (if (succeeded) 0 else 1))
+ reportResult(
+ if (succeeded) 0 else 1,
+ if (logs != null) Some(logs.file) else None,
+ if (logs != null) logs.writers else None)
+ }
+ if (fileCnt == numFiles)
+ reportAll(status, topcont)
+ fileCnt += 1
}
}
}
}
+ private def withOutputRedirected(out: PrintStream)(func: => Unit) {
+ val oldStdOut = System.out
+ val oldStdErr = System.err
+
+ try {
+ System.setOut(out)
+ System.setErr(out)
+ func
+ out.flush()
+ out.close()
+ } finally {
+ System.setOut(oldStdOut)
+ System.setErr(oldStdErr)
+ }
+ }
+
+ private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
+ fs flatMap (s => Option(AbstractFile getFile (pre + s))) toSet
+
+ private def copyTestFiles(testDir: File, destDir: File) {
+ val invalidExts = List("changes", "svn", "obj")
+ testDir.listFiles.toList filter (
+ f => (isJavaOrScala(f) && f.isFile) ||
+ (f.isDirectory && !(invalidExts.contains(SFile(f).extension)))) foreach
+ { f => fileManager.copyFile(f, destDir) }
+ }
+
def showLog(logFile: File) {
try {
val logReader = new BufferedReader(new FileReader(logFile))
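The new withOutputRedirected helper above replaces the repeated save/set/restore dance around System.out and System.err that the old scalacheck, buildmanager and res branches each spelled out by hand. A small stand-alone sketch of the same pattern (names are illustrative only, not part of the patch):

import java.io.{ ByteArrayOutputStream, PrintStream }

object RedirectDemo {
  // Point stdout/stderr at `out` while `body` runs, then restore the originals.
  def withOutputRedirected(out: PrintStream)(body: => Unit) {
    val oldOut = System.out
    val oldErr = System.err
    try {
      System.setOut(out)
      System.setErr(out)
      body
      out.flush()
    } finally {
      System.setOut(oldOut)
      System.setErr(oldErr)
    }
  }

  def main(args: Array[String]) {
    val buf = new ByteArrayOutputStream
    withOutputRedirected(new PrintStream(buf)) {
      println("captured")            // lands in buf, not on the console
    }
    print(buf.toString)              // printed after stdout is restored
  }
}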
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
new file mode 100644
index 0000000000..e9eda6fb75
--- /dev/null
+++ b/src/partest/scala/tools/partest/package.scala
@@ -0,0 +1,40 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+
+import java.io.{ File => JFile }
+import nsc.io.{ Path, Process, Directory }
+import util.{ PathResolver }
+import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
+
+package object partest {
+ import nest.NestUI
+
+ implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
+ implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
+
+ def basename(name: String): String = Path(name).stripExtension
+ def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) = {
+ val (files, failures) = results map (_._2 == 0) partition (_ == true)
+ (files.size, failures.size)
+ }
+
+ def vmArgString = {
+ val str = Process.javaVmArguments mkString " "
+ "Java VM started with arguments: '%s'" format str
+ }
+
+ def allPropertiesString = {
+ import collection.JavaConversions._
+ System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString
+ }
+
+ def showAllJVMInfo {
+ NestUI.verbose(vmArgString)
+ NestUI.verbose(allPropertiesString)
+ }
+
+ def isPartestDebug = propOrEmpty("partest.debug") == "true"
+} \ No newline at end of file
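resultsToStatistics in the new package object collapses the per-test status map (0 = OK, 1 = failed, 2 = timeout) into a (passed, failed) pair. A hedged usage sketch, written here with partition applied directly to the pairs rather than the map-then-partition form used in the patch:

object StatsDemo {
  // Count tests with status 0 as passed, everything else as failed.
  def resultsToStatistics(results: Iterable[(String, Int)]): (Int, Int) = {
    val (passed, failed) = results partition (_._2 == 0)
    (passed.size, failed.size)
  }

  def main(args: Array[String]) {
    val status = Map("t1.scala" -> 0, "t2.scala" -> 1, "t3.scala" -> 0, "t4.scala" -> 2)
    println(resultsToStatistics(status))   // (2,2)
  }
}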
diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala
index 2927605160..10533130f1 100644
--- a/src/partest/scala/tools/partest/utils/PrintMgr.scala
+++ b/src/partest/scala/tools/partest/utils/PrintMgr.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala
index b2c9ce3408..bc72995f8b 100644
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ b/src/partest/scala/tools/partest/utils/Properties.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.tools.partest
package utils
diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties
index 2aeb55400c..f8e296a6fc 100644
--- a/src/scalap/decoder.properties
+++ b/src/scalap/decoder.properties
@@ -1,2 +1,2 @@
version.number=2.0.1
-copyright.string=(c) 2002-2009 LAMP/EPFL
+copyright.string=(c) 2002-2010 LAMP/EPFL
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index 724032bea2..39f74cbfac 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index 2fde050782..8b3d5ff5b1 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
@@ -45,23 +44,22 @@ class ByteArrayReader(content: Array[Byte]) {
*/
def nextChar: Char = {
bp += 2
- (((buf(bp - 2) & 0xff) << 8) + (buf(bp - 1) & 0xff)).asInstanceOf[Char]
+ getChar(bp - 2)
}
/** read an integer
*/
def nextInt: Int = {
bp += 4
- ((buf(bp - 4) & 0xff) << 24) +
- ((buf(bp - 3) & 0xff) << 16) +
- ((buf(bp - 2) & 0xff) << 8) +
- (buf(bp - 1) & 0xff)
+ getInt(bp - 4)
}
/** read a long
*/
- def nextLong: Long =
- (nextInt.toLong << 32) + (nextInt.toLong & 0xffffffffL)
+ def nextLong: Long = {
+ bp += 8
+ getLong(bp - 8)
+ }
/** read a float
*/
@@ -71,55 +69,12 @@ class ByteArrayReader(content: Array[Byte]) {
*/
def nextDouble: Double = java.lang.Double.longBitsToDouble(nextLong)
- /** read the next integer number
- */
- def nextNat: Int = {
- var x = 0
- var b: Byte = 0
- do {
- b = buf(bp)
- bp += 1
- x = (x << 7) + (b & 0x7f)
- } while ((b & 0x80) != 0)
- x
- }
-
- /** read the next signed number in big endian format
- */
- def nextNum(n: Int): Long = {
- var x: Long = 0
- var i: Int = 0
- while (i < n) {
- x = (x << 8) + (nextByte & 0xff)
- i += 1
- }
- val leading: Int = 64 - (n * 8)
- x << leading >> leading
- }
-
/** read an UTF8 encoded string
*/
def nextUTF8(len: Int): String = {
- val cs: Array[Char] = new Array(len)
- var i = bp
- var j = 0
+ val cs = scala.io.Codec.toUTF8(buf.slice(bp, bp + len))
bp += len
- while (i < bp) {
- var b: Int = buf(i) & 0xFF
- i += 1
- if (b >= 0xE0) {
- b = ((b & 0x0F) << 12) | (buf(i) & 0x3F) << 6
- i += 1
- b = b | (buf(i) & 0x3F)
- i += 1
- } else if (b >= 0xC0) {
- b = ((b & 0x1F) << 6) | (buf(i) & 0x3F)
- i += 1
- }
- cs(j) = b.asInstanceOf[Char]
- j += 1
- }
- new String(cs, 0, j)
+ new String(cs)
}
/** extract a character at position bp from buf
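The hand-written UTF-8 decoder in nextUTF8 is replaced by scala.io.Codec.toUTF8 over a slice of the buffer. For reference, a plain-JDK equivalent of decoding a length-delimited UTF-8 chunk out of a byte buffer looks like this (illustrative helper, not part of the patch):

object Utf8SliceDemo {
  // Decode `len` bytes starting at `offset` as UTF-8.
  def utf8Slice(buf: Array[Byte], offset: Int, len: Int): String =
    new String(buf, offset, len, "UTF-8")

  def main(args: Array[String]) {
    val bytes = "constant pool".getBytes("UTF-8")
    println(utf8Slice(bytes, 0, bytes.length))
  }
}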
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index dd7873f6b0..6526d90473 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2006, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/
**
*/
-// $Id$
package scala.tools.scalap
@@ -13,10 +12,12 @@ package scala.tools.scalap
class Classfile(in: ByteArrayReader) {
import Classfiles._
+ type UTF8 = Pool#UTF8
+
assert(in.nextInt == JAVA_MAGIC)
val minorVersion = in.nextChar
val majorVersion = in.nextChar
- val pool = readPool
+ val pool = new Pool()
val flags = in.nextChar
val classname = in.nextChar
val superclass = in.nextChar
@@ -24,6 +25,7 @@ class Classfile(in: ByteArrayReader) {
val fields = readMembers(true)
val methods = readMembers(false)
val attribs = readAttribs
+ def scalaSigAttribute = attribs find (_.toString == Main.SCALA_SIG)
def readAttribs = {
val n = in.nextChar
@@ -58,69 +60,70 @@ class Classfile(in: ByteArrayReader) {
intfs
}
- def readPool = {
- val pool = new Array[PoolEntry](in.nextChar)
- var i = 1
- while (i < pool.length) {
- val tag: Int = in.nextByte
- tag match {
- case CONSTANT_UTF8 =>
- pool(i) = UTF8(in.nextUTF8(in.nextChar))
- case CONSTANT_UNICODE =>
- in.skip(in.nextChar)
- pool(i) = Empty()
- case CONSTANT_CLASS =>
- pool(i) = ClassRef(in.nextChar)
- case CONSTANT_STRING =>
- pool(i) = StringConst(in.nextChar)
- case CONSTANT_FIELDREF =>
- pool(i) = FieldRef(in.nextChar, in.nextChar)
- case CONSTANT_METHODREF =>
- pool(i) = MethodRef(in.nextChar, in.nextChar)
- case CONSTANT_INTFMETHODREF =>
- pool(i) = IntfMethodRef(in.nextChar, in.nextChar)
- case CONSTANT_NAMEANDTYPE =>
- pool(i) = NameAndType(in.nextChar, in.nextChar)
- case CONSTANT_INTEGER =>
- pool(i) = IntegerConst(in.nextInt)
- case CONSTANT_FLOAT =>
- pool(i) = FloatConst(in.nextFloat)
- case CONSTANT_LONG =>
- pool(i) = LongConst(in.nextLong)
- i = i + 1
- pool(i) = Empty()
- case CONSTANT_DOUBLE =>
- pool(i) = DoubleConst(in.nextDouble)
+ class Pool() {
+ sealed abstract class PoolEntry(val tag: Int) {
+ def typeString = constantTagToString(tag)
+ }
+ case class UTF8(str: String) extends PoolEntry(CONSTANT_UTF8) { override def toString = "\"" + str + "\"" }
+ case class ClassRef(classId: Int) extends PoolEntry(CONSTANT_CLASS) { override def toString = "Class(%s)".format(entries(classId)) }
+ case class FieldRef(classId: Int, memberId: Int) extends PoolEntry(CONSTANT_FIELDREF)
+ case class MethodRef(classId: Int, memberId: Int) extends PoolEntry(CONSTANT_METHODREF) {
+ // //Method java/lang/Object."<init>":()V
+ override def toString() = "Method %s.\"%s\"".format(entries(classId), entries(memberId))
+ }
+ case class IntfMethodRef(classId: Int, memberId: Int) extends PoolEntry(CONSTANT_INTFMETHODREF)
+ case class StringConst(strId: Int) extends PoolEntry(CONSTANT_STRING)
+ case class IntegerConst(x: Int) extends PoolEntry(CONSTANT_INTEGER)
+ case class FloatConst(x: Float) extends PoolEntry(CONSTANT_FLOAT)
+ case class LongConst(x: Long) extends PoolEntry(CONSTANT_LONG)
+ case class DoubleConst(x: Double) extends PoolEntry(CONSTANT_DOUBLE)
+ case class NameAndType(nameId: Int, typeId: Int) extends PoolEntry(CONSTANT_NAMEANDTYPE)
+ case object Empty extends PoolEntry(0) { }
+
+ val entries = {
+ val pool = new Array[PoolEntry](in.nextChar)
+ var i = 1
+ while (i < pool.length) {
+ val tag = in.nextByte
+ // Double sized entry
+ if (tag == CONSTANT_LONG || tag == CONSTANT_DOUBLE) {
+ pool(i) = if (tag == CONSTANT_LONG) LongConst(in.nextLong) else DoubleConst(in.nextDouble)
i = i + 1
- pool(i) = Empty()
+ pool(i) = Empty
+ }
+ else pool(i) = tag match {
+ case CONSTANT_UTF8 => UTF8(in.nextUTF8(in.nextChar))
+ case CONSTANT_UNICODE => in.skip(in.nextChar) ; Empty
+ case CONSTANT_CLASS => ClassRef(in.nextChar)
+ case CONSTANT_STRING => StringConst(in.nextChar)
+ case CONSTANT_FIELDREF => FieldRef(in.nextChar, in.nextChar)
+ case CONSTANT_METHODREF => MethodRef(in.nextChar, in.nextChar)
+ case CONSTANT_INTFMETHODREF => IntfMethodRef(in.nextChar, in.nextChar)
+ case CONSTANT_NAMEANDTYPE => NameAndType(in.nextChar, in.nextChar)
+ case CONSTANT_INTEGER => IntegerConst(in.nextInt)
+ case CONSTANT_FLOAT => FloatConst(in.nextFloat)
+ }
+
+ i += 1
}
- i = i + 1
+ pool
}
- pool
- }
- class PoolEntry
- case class UTF8(str: String) extends PoolEntry
- case class ClassRef(classId: Int) extends PoolEntry
- case class FieldRef(classId: Int, memberId: Int) extends PoolEntry
- case class MethodRef(classId: Int, memberId: Int) extends PoolEntry
- case class IntfMethodRef(classId: Int, memberId: Int) extends PoolEntry
- case class StringConst(strId: Int) extends PoolEntry
- case class IntegerConst(x: Int) extends PoolEntry
- case class FloatConst(x: Float) extends PoolEntry
- case class LongConst(x: Long) extends PoolEntry
- case class DoubleConst(x: Double) extends PoolEntry
- case class NameAndType(nameId: Int, typeId: Int) extends PoolEntry
- case class Empty() extends PoolEntry
+ lazy val length = entries.length
+ def apply(x: Int) = entries(x)
+ def stringOf(x: Int) = apply(x).toString
+ override def toString = (
+ for ((x, i) <- entries.zipWithIndex ; if x != null) yield
+ "const #%d = %s\t%s\n".format(i + 1, x.typeString, x)
+ ).mkString
+ }
+ /** **/
case class Member(field: Boolean, flags: Int, name: Int, tpe: Int, attribs: List[Attribute])
case class Attribute(name: Int, data: Array[Byte]) {
-
- override def toString(): String = pool(name) match {
- case UTF8(str: String) => str
+ override def toString = (pool(name): @unchecked) match {
+ case pool.UTF8(s) => s
}
-
def reader: ByteArrayReader = new ByteArrayReader(data)
}
-
}
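The rewritten Pool keeps the classfile rule that CONSTANT_Long and CONSTANT_Double entries occupy two constant-pool slots, filling the second slot with Empty. A stripped-down sketch of just that indexing rule (hypothetical entry types, not the patch's Pool):

object PoolIndexDemo {
  sealed trait Entry
  case class Utf8(s: String) extends Entry
  case class LongConst(x: Long) extends Entry
  case object Empty extends Entry

  // Wide constants consume slot i and i+1, exactly as in the classfile format.
  def layout(raw: List[Entry]): Vector[Entry] = {
    var pool = Vector[Entry](Empty)          // slot 0 is unused in real pools
    raw foreach {
      case l: LongConst => pool = pool :+ l :+ Empty
      case e            => pool = pool :+ e
    }
    pool
  }

  def main(args: Array[String]) {
    val pool = layout(List(Utf8("Hello"), LongConst(42L), Utf8("World")))
    for (i <- 1 until pool.length) println("const #" + i + " = " + pool(i))
  }
}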
diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala
index e21ae76fb0..2919ba5a0f 100644
--- a/src/scalap/scala/tools/scalap/Classfiles.scala
+++ b/src/scalap/scala/tools/scalap/Classfiles.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
@@ -28,6 +27,21 @@ object Classfiles {
final val CONSTANT_INTFMETHODREF = 11
final val CONSTANT_NAMEANDTYPE = 12
+ final val constantTagToString = Map(
+ CONSTANT_UTF8 -> "UTF8",
+ CONSTANT_UNICODE -> "Unicode",
+ CONSTANT_INTEGER -> "Int",
+ CONSTANT_FLOAT -> "Float",
+ CONSTANT_LONG -> "Long",
+ CONSTANT_DOUBLE -> "Double",
+ CONSTANT_CLASS -> "class",
+ CONSTANT_STRING -> "Asciz",
+ CONSTANT_FIELDREF -> "Field",
+ CONSTANT_METHODREF -> "Method",
+ CONSTANT_INTFMETHODREF -> "InterfaceMethod",
+ CONSTANT_NAMEANDTYPE -> "NameAndType"
+ )
+
final val BAD_ATTR = 0x00000
final val SOURCEFILE_ATTR = 0x00001
final val SYNTHETIC_ATTR = 0x00002
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index 738e13b9fb..aee7e84e97 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala
new file mode 100644
index 0000000000..b289e476aa
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/Decode.scala
@@ -0,0 +1,101 @@
+/* ___ ____ ___ __ ___ ___
+** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
+** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
+**
+*/
+
+// $Id$
+
+package scala.tools.scalap
+
+import scala.tools.scalap.scalax.rules.scalasig._
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.util.ScalaClassLoader.getSystemLoader
+import scala.reflect.generic.ByteCodecs
+
+import ClassFileParser.{ ConstValueIndex, Annotation }
+import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
+
+/** Temporary decoder. This would be better off in the scala.tools.nsc
+ * but right now the compiler won't acknowledge scala.tools.scalap
+ * when it's bootstrapping, so the reference has to go from here to there.
+ */
+object Decode {
+ private def getAliasSymbol(t: Type): Symbol = t match {
+ case TypeRefType(_, s, _) => s
+ case PolyType(typeRef, _) => getAliasSymbol(typeRef)
+ case _ => NoSymbol
+ }
+
+ /** Return the classfile bytes representing the scala sig classfile attribute.
+ * This has been obsoleted by the switch to annotations.
+ */
+ def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, getSystemLoader())
+ def scalaSigBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
+ val bytes = classLoader.findBytesForClassName(name)
+ val reader = new ByteArrayReader(bytes)
+ val cf = new Classfile(reader)
+ cf.scalaSigAttribute map (_.data)
+ }
+
+ /** Return the bytes representing the annotation
+ */
+ def scalaSigAnnotationBytes(name: String): Option[Array[Byte]] = scalaSigAnnotationBytes(name, getSystemLoader())
+ def scalaSigAnnotationBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
+ val bytes = classLoader.findBytesForClassName(name)
+ val byteCode = ByteCode(bytes)
+ val classFile = ClassFileParser.parse(byteCode)
+ import classFile._
+
+ classFile annotation SCALA_SIG_ANNOTATION map { case Annotation(_, els) =>
+ val bytesElem = els find (x => constant(x.elementNameIndex) == BYTES_VALUE) get
+ val _bytes = bytesElem.elementValue match { case ConstValueIndex(x) => constantWrapped(x) }
+ val bytes = _bytes.asInstanceOf[StringBytesPair].bytes
+ val length = ByteCodecs.decode(bytes)
+
+ bytes take length
+ }
+ }
+
+ /** private[scala] so nobody gets the idea this is a supported interface.
+ */
+ private[scala] def caseParamNames(path: String): Option[List[String]] = {
+ val (outer, inner) = (path indexOf '$') match {
+ case -1 => (path, "")
+ case x => (path take x, path drop (x + 1))
+ }
+
+ for {
+ clazz <- getSystemLoader.tryToLoadClass[AnyRef](outer)
+ ssig <- ScalaSigParser.parse(clazz)
+ }
+ yield {
+ val f: PartialFunction[Symbol, List[String]] =
+ if (inner == "") {
+ case x: MethodSymbol if x.isCaseAccessor && (x.name endsWith " ") => List(x.name dropRight 1)
+ }
+ else {
+ case x: ClassSymbol if x.name == inner =>
+ val xs = x.children filter (child => child.isCaseAccessor && (child.name endsWith " "))
+ xs.toList map (_.name dropRight 1)
+ }
+
+ (ssig.symbols collect f).flatten toList
+ }
+ }
+
+ /** Returns a map of Alias -> Type for the given package.
+ */
+ private[scala] def typeAliases(pkg: String) = {
+ for {
+ clazz <- getSystemLoader.tryToLoadClass[AnyRef](pkg + ".package")
+ ssig <- ScalaSigParser.parse(clazz)
+ }
+ yield {
+ val typeAliases = ssig.symbols collect { case x: AliasSymbol => x }
+ Map(typeAliases map (x => (x.name, getAliasSymbol(x.infoType).path)): _*)
+ }
+ }
+}
+
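scalaSigAnnotationBytes above leans on ByteCodecs.decode, which decodes the ScalaSignature payload in place and returns the number of valid leading bytes. A minimal sketch of that call pattern, assuming the same scala.reflect.generic.ByteCodecs API the patch imports:

import scala.reflect.generic.ByteCodecs

object SigBytesDemo {
  // decode() rewrites the array in place and reports how many bytes are meaningful.
  def decodedSignature(raw: Array[Byte]): Array[Byte] = {
    val work   = raw.clone                  // leave the caller's array untouched
    val length = ByteCodecs.decode(work)
    work take length
  }
}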
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index 06fec1a8fb..b539dd5fda 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
@@ -92,11 +91,15 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
}
}
- def getName(n: Int): String = cf.pool(n) match {
- case cf.UTF8(str) => str
- case cf.StringConst(m) => getName(m)
- case cf.ClassRef(m) => getName(m)
- case x => "<error>"
+ def getName(n: Int): String = {
+ import cf.pool._
+
+ cf.pool(n) match {
+ case UTF8(str) => str
+ case StringConst(m) => getName(m)
+ case ClassRef(m) => getName(m)
+ case _ => "<error>"
+ }
}
def getClassName(n: Int): String = nameToClass(getName(n))
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 9b57b9cb18..c2b9324ba9 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -1,19 +1,19 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
-
-import java.io.{File, PrintStream, OutputStreamWriter, ByteArrayOutputStream}
+import java.io.{PrintStream, OutputStreamWriter, ByteArrayOutputStream}
import scalax.rules.scalasig._
-import tools.nsc.io.AbstractFile
-import tools.nsc.util.{ClassPath, JavaClassPath}
+import tools.nsc.util.{ ClassPath }
+import tools.util.PathResolver
+import ClassPath.DefaultJavaContext
+import tools.nsc.io.{PlainFile, AbstractFile}
/**The main object used to execute scalap on the command-line.
*
@@ -21,6 +21,9 @@ import tools.nsc.util.{ClassPath, JavaClassPath}
*/
object Main {
val SCALA_SIG = "ScalaSig"
+ val SCALA_SIG_ANNOTATION = "Lscala/reflect/ScalaSignature;"
+ val BYTES_VALUE = "bytes"
+
val versionMsg = "Scala classfile decoder " +
Properties.versionString + " -- " +
Properties.copyrightString + "\n"
@@ -34,7 +37,8 @@ object Main {
*/
def usage {
Console.println("usage: scalap {<option>} <name>")
- Console.println("where <option> is")
+ Console.println("where <name> is fully-qualified class name or <package_name>.package for package objects")
+ Console.println("and <option> is")
Console.println(" -private print private definitions")
Console.println(" -verbose print out additional information")
Console.println(" -version print out the version number of scalap")
@@ -46,7 +50,7 @@ object Main {
def isScalaFile(bytes: Array[Byte]): Boolean = {
val byteCode = ByteCode(bytes)
val classFile = ClassFileParser.parse(byteCode)
- classFile.attribute("ScalaSig") match {case Some(_) => true; case None => false}
+ classFile.attribute("ScalaSig").isDefined
}
/**Processes the given Java class file.
@@ -95,17 +99,16 @@ object Main {
baos.toString
}
-
- def decompileScala(bytes: Array[Byte], isPackageObject: Boolean) = {
+ def decompileScala(bytes: Array[Byte], isPackageObject: Boolean): String = {
val byteCode = ByteCode(bytes)
val classFile = ClassFileParser.parse(byteCode)
- classFile.attribute(SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse) match {
- case Some(scalaSig) => Console.println(parseScalaSignature(scalaSig, isPackageObject))
- case None => //Do nothing
+
+ ScalaSigParser.parse(classFile) match {
+ case Some(scalaSig) => parseScalaSignature(scalaSig, isPackageObject)
+ case None => ""
}
}
-
/**Executes scalap with the given arguments and classpath for the
* class denoted by <code>classname</code>.
*
@@ -126,7 +129,7 @@ object Main {
}
val bytes = cfile.toByteArray
if (isScalaFile(bytes)) {
- decompileScala(bytes, isPackageObjectFile(encName))
+ Console.println(decompileScala(bytes, isPackageObjectFile(encName)))
} else {
// construct a reader for the classfile content
val reader = new ByteArrayReader(cfile.toByteArray)
@@ -134,18 +137,6 @@ object Main {
val clazz = new Classfile(reader)
processJavaClassFile(clazz)
}
- // if the class corresponds to the artificial class scala.All.
- // (to be removed after update of the STARR libraries)
- } else if (classname == "scala.All") {
- Console.println("package scala")
- Console.println("/* Deprecated. Use scala.Nothing instead. */")
- Console.println("sealed abstract class All")
- // if the class corresponds to the artificial class scala.AllRef.
- // (to be removed after update of the STARR libraries)
- } else if (classname == "scala.AllRef") {
- Console.println("package scala")
- Console.println("/* Deprecated. Use scala.Null instead. */")
- Console.println("sealed abstract class AllRef")
// if the class corresponds to the artificial class scala.Any.
// (see member list in class scala.tool.nsc.symtab.Definitions)
} else if (classname == "scala.Any") {
@@ -263,13 +254,8 @@ object Main {
verbose = arguments contains "-verbose"
printPrivates = arguments contains "-private"
// construct a custom class path
- val path = arguments.getArgument("-classpath") match {
- case None => arguments.getArgument("-cp") match {
- case None => EmptyClasspath
- case Some(path) => new JavaClassPath("", "", path, "", "")
- }
- case Some(path) => new JavaClassPath("", "", path, "", "")
- }
+ def cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
+ val path = cparg map (PathResolver fromPathString _) getOrElse EmptyClasspath
// print the classpath if output is verbose
if (verbose) {
Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path)
@@ -280,13 +266,15 @@ object Main {
}
object EmptyClasspath extends ClassPath[AbstractFile] {
- import tools.nsc.util.ClassRep
/**
* The short name of the package (without prefix)
*/
def name: String = ""
- def classes: List[ClassRep[AbstractFile]] = Nil
- def packages: List[ClassPath[AbstractFile]] = Nil
- def sourcepaths: List[AbstractFile] = Nil
+ def asURLs = Nil
+ def asClasspathString = ""
+ val context = DefaultJavaContext
+ val classes: List[ClassRep] = Nil
+ val packages: List[ClassPath[AbstractFile]] = Nil
+ val sourcepaths: List[AbstractFile] = Nil
}
}
diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala
index 9313b10889..2b75d97349 100644
--- a/src/scalap/scala/tools/scalap/MetaParser.scala
+++ b/src/scalap/scala/tools/scalap/MetaParser.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
diff --git a/src/scalap/scala/tools/scalap/Names.scala b/src/scalap/scala/tools/scalap/Names.scala
index d0197ddd77..a916ea1220 100644
--- a/src/scalap/scala/tools/scalap/Names.scala
+++ b/src/scalap/scala/tools/scalap/Names.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
diff --git a/src/scalap/scala/tools/scalap/Properties.scala b/src/scalap/scala/tools/scalap/Properties.scala
index 9027bc3d03..4cdbe69d01 100644
--- a/src/scalap/scala/tools/scalap/Properties.scala
+++ b/src/scalap/scala/tools/scalap/Properties.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
@@ -14,5 +13,4 @@ object Properties extends scala.util.PropertiesTrait
{
protected def propCategory = "decoder"
protected def pickJarBasedOn = classOf[Classfile]
- val cmdName = scala.tools.nsc.Properties.cmdName
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala b/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
index aa95b48d44..aa852c1e63 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
@@ -60,7 +60,7 @@ trait Functors {
}
}
-/** One of the 'unit' definitions must be overriden in concrete subclasses */
+/** One of the 'unit' definitions must be overridden in concrete subclasses */
trait UnitFunctors extends Units with Functors {
def unit : M[Unit] = unit(())
def unit[A](a : => A) : M[A] = unit map { Unit => a }
@@ -73,7 +73,7 @@ trait Monoidals extends UnitFunctors {
implicit def app[A, B](fab : M[A => B]) = (fa : M[A]) => fa applyTo fab
implicit def appUnit[A, B](a2b : A => B) = app(unit(a2b))
- /** One of 'and' and 'applyTo' definitions must be overriden in concrete subclasses */
+ /** One of 'and' and 'applyTo' definitions must be overridden in concrete subclasses */
trait Monoidal[+A] extends Functor[A] { self : M[A] =>
def and[B](fb : => M[B]) : M[(A, B)] = ((a : A) => (b : B) => (a, b))(this)(fb)
def applyTo[B](fab : M[A => B]) : M[B] = fab and this map { case (f, a) => f(a) }
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
index 1324ea695a..827c2dfff7 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
@@ -44,7 +44,7 @@ trait DefaultMemoisable extends Memoisable {
map.getOrElseUpdate(key, compute(key, a)).asInstanceOf[A]
}
- protected def compute[A](key : AnyRef, a : => A) = a match {
+ protected def compute[A](key : AnyRef, a : => A): Any = a match {
case success : Success[_, _] => onSuccess(key, success); success
case other =>
if(DefaultMemoisable.debug) println(key + " -> " + other)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
index 6befbb83c8..17ad4bd053 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
@@ -42,11 +42,11 @@ case class Success[+Out, +A](out : Out, value : A) extends Result[Out, A, Nothin
def toOption = Some(value)
- def map[B](f : A => B) = Success(out, f(value))
- def mapOut[Out2](f : Out => Out2) = Success(f(out), value)
- def map[Out2, B](f : (Out, A) => (Out2, B)) = f(out, value) match { case (out2, b) => Success(out2, b) }
- def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) = f(out, value)
- def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) = this
+ def map[B](f : A => B) : Result[Out, B, Nothing] = Success(out, f(value))
+ def mapOut[Out2](f : Out => Out2) : Result[Out2, A, Nothing] = Success(f(out), value)
+ def map[Out2, B](f : (Out, A) => (Out2, B)) : Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
+ def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing]= f(out, value)
+ def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing] = this
}
sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
index 4e8ddc8dbe..43f9c20b1d 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
@@ -98,7 +98,7 @@ trait StateRules {
def nil = unit(Nil)
def none = unit(None)
- /** Create a rule that suceeds if f(in) is true. */
+  /** Create a rule that succeeds if f(in) is true. */
def cond(f : S => Boolean) = get filter f
/** Create a rule that succeeds if all of the given rules succeed.
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
index 54f2c70bdc..34f52a1e19 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
@@ -24,7 +24,7 @@ class InRule[In, +Out, +A, +X](rule : Rule[In, Out, A, X]) {
in : In => f(rule(in))(in)
}
- /** Creates a rule that suceeds only if the original rule would fail on the given context. */
+ /** Creates a rule that succeeds only if the original rule would fail on the given context. */
def unary_! : Rule[In, In, Unit, Nothing] = mapRule {
case Success(_, _) => in : In => Failure
case _ => in : In => Success(in, ())
@@ -82,7 +82,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
/** Repeats this rule num times */
def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
- val result = new collection.mutable.GenericArray[A](num)
+ val result = new collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index 182054c01b..01652a50b9 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -9,8 +9,6 @@ import java.io.IOException
import scala._
import scala.Predef._
-import scalax.rules.Error
-
object ByteCode {
def apply(bytes : Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
@@ -62,31 +60,23 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
def toInt = fold(0) { (x, b) => (x << 8) + (b & 0xFF)}
def toLong = fold(0L) { (x, b) => (x << 8) + (b & 0xFF)}
- // NOTE the UTF8 decoder in the Scala compiler is broken for pos > 0
- // TODO figure out patch and submit
- def toUTF8String = {
- val sb = new StringBuilder(length)
- var i = pos
- val end = pos + length
- while (i < end) {
- var b = bytes(i) & 0xFF
- i += 1
- if (b >= 0xE0) {
- b = ((b & 0x0F) << 12) | (bytes(i) & 0x3F) << 6
- b = b | (bytes(i+1) & 0x3F)
- i += 2
- } else if (b >= 0xC0) {
- b = ((b & 0x1F) << 6) | (bytes(i) & 0x3F)
- i += 1
- }
- sb += b.toChar
- }
- sb.toString
+ /**
+ * Transforms an array subsequence of the current buffer into a UTF-8 String and
+ * stores an array of bytes for the decompiler
+ */
+ def toUTF8StringAndBytes = {
+ val chunk: Array[Byte] = bytes drop pos take length
+ StringBytesPair(io.Codec.toUTF8(chunk).mkString, chunk)
}
def byte(i : Int) = bytes(pos) & 0xFF
}
+/**
+ * A wrapper holding the decoded UTF-8 string together with its raw bytes
+ */
+case class StringBytesPair(string: String, bytes: Array[Byte])
+
/** Provides rules for parsing byte-code.
*/
trait ByteCodeReader extends RulesWithState {
@@ -100,13 +90,11 @@ trait ByteCodeReader extends RulesWithState {
val u4 = bytes(4) ^^ (_ toInt) // should map to Long??
def bytes(n : Int) = apply(_ next n)
-
-
-
}
object ClassFileParser extends ByteCodeReader {
def parse(byteCode : ByteCode) = expect(classFile)(byteCode)
+ def parseAnnotations(byteCode: ByteCode) = expect(annotations)(byteCode)
val magicNumber = (u4 filter (_ == 0xCAFEBABE)) | error("Not a valid class file")
val version = u2 ~ u2 ^^ { case minor ~ major => (major, minor) }
@@ -114,7 +102,7 @@ object ClassFileParser extends ByteCodeReader {
// NOTE currently most constants just evaluate to a string description
// TODO evaluate to useful values
- val utf8String = (u2 >> bytes) ^^ add1 { raw => pool => raw.toUTF8String }
+ val utf8String = (u2 >> bytes) ^^ add1 { raw => pool => raw.toUTF8StringAndBytes }
val intConstant = u4 ^^ add1 { x => pool => x }
val floatConstant = bytes(4) ^^ add1 { raw => pool => "Float: TODO" }
val longConstant = bytes(8) ^^ add2 { raw => pool => raw.toLong }
@@ -142,9 +130,32 @@ object ClassFileParser extends ByteCodeReader {
val interfaces = u2 >> u2.times
+ // bytes are parametrized by the length, declared in the u4 section
val attribute = u2 ~ (u4 >> bytes) ^~^ Attribute
+ // parse attributes u2 times
val attributes = u2 >> attribute.times
+ // parse runtime-visible annotations
+ abstract class ElementValue
+ case class AnnotationElement(elementNameIndex: Int, elementValue: ElementValue)
+ case class ConstValueIndex(index: Int) extends ElementValue
+ case class EnumConstValue(typeNameIndex: Int, constNameIndex: Int) extends ElementValue
+ case class ClassInfoIndex(index: Int) extends ElementValue
+ case class Annotation(typeIndex: Int, elementValuePairs: Seq[AnnotationElement]) extends ElementValue
+ case class ArrayValue(values: Seq[ElementValue]) extends ElementValue
+
+ def element_value: Parser[ElementValue] = u1 >> {
+ case 'B'|'C'|'D'|'F'|'I'|'J'|'S'|'Z'|'s' => u2 ^^ ConstValueIndex
+ case 'e' => u2 ~ u2 ^~^ EnumConstValue
+ case 'c' => u2 ^^ ClassInfoIndex
+ case '@' => annotation //nested annotation
+ case '[' => u2 >> element_value.times ^^ ArrayValue
+ }
+
+ val element_value_pair = u2 ~ element_value ^~^ AnnotationElement
+ val annotation: Parser[Annotation] = u2 ~ (u2 >> element_value_pair.times) ^~^ Annotation
+ val annotations = u2 >> annotation.times
+
val field = u2 ~ u2 ~ u2 ~ attributes ^~~~^ Field
val fields = u2 >> field.times
@@ -176,9 +187,20 @@ case class ClassFile(
def superClass = constant(header.superClassIndex)
def interfaces = header.interfaces.map(constant)
- def constant(index : Int) = header.constants(index)
+ def constant(index : Int) = header.constants(index) match {
+ case StringBytesPair(str, _) => str
+ case z => z
+ }
+
+ def constantWrapped(index: Int) = header.constants(index)
+
+ def attribute(name : String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
+
+ val RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations"
+ def annotations = (attributes.find(attr => constant(attr.nameIndex) == RUNTIME_VISIBLE_ANNOTATIONS)
+ .map(attr => ClassFileParser.parseAnnotations(attr.byteCode)))
- def attribute(name : String) = attributes.find { attrib => constant(attrib.nameIndex) == name }
+ def annotation(name: String) = annotations.flatMap(seq => seq.find(annot => constant(annot.typeIndex) == name))
}
case class Attribute(nameIndex : Int, byteCode : ByteCode)
@@ -217,7 +239,7 @@ case class ConstantPool(len : Int) {
}
def add(f : ConstantPool => Any) = {
- buffer + f
+ buffer += f
this
}
}
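The parsers added above follow the class-file layout for RuntimeVisibleAnnotations: a u2 count, then per annotation a u2 type index and u2 name/value pairs, each element_value dispatched on a one-byte tag. A hypothetical, hand-built value showing the shape the parser produces (the constant-pool indices are invented for illustration):

    // Hypothetical value; mirrors the case classes added inside object ClassFileParser.
    import ClassFileParser.{ Annotation, AnnotationElement, ConstValueIndex }

    val sigAnnotation = Annotation(
      14,                                           // typeIndex, e.g. "Lscala/reflect/ScalaSignature;"
      List(AnnotationElement(15,                    // elementNameIndex, e.g. "bytes"
                             ConstValueIndex(16)))) // index of the UTF-8 constant holding the pickle bytes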
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index c78ed83a33..4614d27727 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -1,38 +1,55 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
package scalax
package rules
package scalasig
+import ClassFileParser.{ ConstValueIndex, Annotation }
+import scala.reflect.generic.ByteCodecs
+
object ScalaSigParser {
+ import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
- def getScalaSig(clazz : Class[_]) : Option[ByteCode] = {
- val byteCode = ByteCode.forClass(clazz)
- val classFile = ClassFileParser.parse(byteCode)
+ def scalaSigFromAnnotation(classFile: ClassFile): Option[ScalaSig] = {
+ import classFile._
+
+ classFile.annotation(SCALA_SIG_ANNOTATION) map {
+ case Annotation(_, elements) =>
+ val bytesElem = elements.find(elem => constant(elem.elementNameIndex) == BYTES_VALUE).get
+ val bytes = ((bytesElem.elementValue match {case ConstValueIndex(index) => constantWrapped(index)})
+ .asInstanceOf[StringBytesPair].bytes)
+ val length = ByteCodecs.decode(bytes)
+
+ ScalaSigAttributeParsers.parse(ByteCode(bytes.take(length)))
+ }
+ }
- /*
- println("ClassFile version: " + classFile.majorVersion + "." + classFile.minorVersion)
- println("Class: " + classFile.className)
- println("Superclass: " + classFile.superClass)
- println("Interfaces: " + classFile.interfaces.mkString(", "))
- println("Constant pool:")
- val constantPool = classFile.header.constants
- for (i <- 1 to constantPool.size) println(i + "\t" + constantPool(i))
- */
-
- classFile.attribute("ScalaSig").map(_.byteCode)
+ def scalaSigFromAttribute(classFile: ClassFile) : Option[ScalaSig] =
+ classFile.attribute(SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse)
+
+ def parse(classFile: ClassFile): Option[ScalaSig] = {
+ val scalaSig = scalaSigFromAttribute(classFile)
+
+ scalaSig match {
+ // No entries in ScalaSig attribute implies that the signature is stored in the annotation
+ case Some(ScalaSig(_, _, entries)) if entries.length == 0 =>
+ scalaSigFromAnnotation(classFile)
+ case x => x
+ }
}
- def parse(clazz : Class[_]) : Option[ScalaSig] = {
- getScalaSig(clazz).map(ScalaSigAttributeParsers.parse)
+ def parse(clazz : Class[_]): Option[ScalaSig] = {
+ val byteCode = ByteCode.forClass(clazz)
+ val classFile = ClassFileParser.parse(byteCode)
+
+ parse(classFile)
}
}
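With this change, parse first tries the classic ScalaSig attribute; if it decodes to zero entries (the pickle now travels in a ScalaSignature annotation), it falls back to scalaSigFromAnnotation, which unwraps the bytes element via StringBytesPair and decodes it in place with ByteCodecs.decode. A hypothetical caller, assuming any Scala-compiled class on the classpath:

    // Hypothetical usage sketch; scala.math.BigInt stands in for an arbitrary Scala-compiled class.
    ScalaSigParser.parse(classOf[scala.math.BigInt]) foreach {
      case ScalaSig(major, minor, entries) =>
        println("pickle format " + major + "." + minor + ", " + entries.length + " entries")
    }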
@@ -55,7 +72,7 @@ object ScalaSigAttributeParsers extends ByteCodeReader {
val symtab = nat >> entry.times
val scalaSig = nat ~ nat ~ symtab ^~~^ ScalaSig
- val utf8 = read(_ toUTF8String)
+ val utf8 = read(x => x.toUTF8StringAndBytes.string)
val longValue = read(_ toLong)
}
@@ -164,21 +181,21 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
* | 5 ALIASsym len_Nat SymbolInfo
* | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
* | 7 MODULEsym len_Nat SymbolInfo
- * | 8 VALsym len_Nat [defaultGetter_Ref] SymbolInfo [alias_Ref]
+ * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
* | 9 EXTref len_Nat name_Ref [owner_Ref]
* | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
* | 11 NOtpe len_Nat
* | 12 NOPREFIXtpe len_Nat
* | 13 THIStpe len_Nat sym_Ref
* | 14 SINGLEtpe len_Nat type_Ref sym_Ref
- * | 15 CONSTANTtpe len_Nat type_Ref constant_Ref
+ * | 15 CONSTANTtpe len_Nat constant_Ref
* | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
* | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
* | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
* | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
* | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
* | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
- * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {tpe_Ref}
+ * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
* | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
* | 24 LITERALunit len_Nat
* | 25 LITERALboolean len_Nat value_Long
@@ -195,13 +212,12 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
* | 36 LITERALenum len_Nat sym_Ref
* | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
* | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
- * | 42 ANNOTATEDtpe len_Nat [sym_Ref] tpe_Ref {annotinfo_Ref}
+ * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
* | 43 ANNOTINFO len_Nat AnnotInfoBody
* | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
* | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
* | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
*/
-
val noSymbol = 3 -^ NoSymbol
val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol"
val aliasSymbol = symbolEntry(5) ^^ AliasSymbol as "alias"
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index abff45fca5..915087a256 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -1,11 +1,10 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2009, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2010, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
-// $Id$
package scala.tools.scalap
package scalax
@@ -16,6 +15,8 @@ import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
import scala.tools.scalap.scalax.util.StringUtil
+import reflect.NameTransformer
+import java.lang.String
class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
import stream._
@@ -24,13 +25,24 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
case class TypeFlags(printRep: Boolean)
- def printSymbol(symbol: Symbol) { printSymbol(0, symbol) }
+ def printSymbol(symbol: Symbol) {printSymbol(0, symbol)}
+
+ def printSymbolAttributes(s: Symbol, onNewLine: Boolean, indent: => Unit) = s match {
+ case t: SymbolInfoSymbol => {
+ for (a <- t.attributes) {
+ indent; print(toString(a))
+ if (onNewLine) print("\n") else print(" ")
+ }
+ }
+ case _ =>
+ }
def printSymbol(level: Int, symbol: Symbol) {
if (!symbol.isLocal &&
- !(symbol.isPrivate && !printPrivates)) {
+ !(symbol.isPrivate && !printPrivates)) {
def indent() {for (i <- 1 to level) print(" ")}
+ printSymbolAttributes(symbol, true, indent)
symbol match {
case o: ObjectSymbol =>
if (!isCaseClassObject(o)) {
@@ -50,8 +62,9 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
case a: AliasSymbol =>
indent
printAlias(level, a)
- case t: TypeSymbol =>
- ()
+ case t: TypeSymbol if !t.isParam && !t.name.matches("_\\$\\d+")=>
+ indent
+ printTypeSymbol(level, t)
case s =>
}
}
@@ -82,11 +95,20 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
}
def printModifiers(symbol: Symbol) {
+ // print private access modifier
+ if (symbol.isPrivate) print("private ")
+ else if (symbol.isProtected) print("protected ")
+ else symbol match {
+ case sym: SymbolInfoSymbol => sym.symbolInfo.privateWithin match {
+ case Some(t: Symbol) => print("private[" + t.name +"] ")
+ case _ =>
+ }
+ case _ =>
+ }
+
if (symbol.isSealed) print("sealed ")
if (symbol.isImplicit) print("implicit ")
if (symbol.isFinal && !symbol.isInstanceOf[ObjectSymbol]) print("final ")
- if (symbol.isPrivate) print("private ")
- else if (symbol.isProtected) print("protected ")
if (symbol.isOverride) print("override ")
if (symbol.isAbstract) symbol match {
case c@(_: ClassSymbol | _: ObjectSymbol) if !c.isTrait => print("abstract ")
@@ -98,37 +120,42 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
private def refinementClass(c: ClassSymbol) = c.name == "<refinement>"
def printClass(level: Int, c: ClassSymbol) {
- printModifiers(c)
- val defaultConstructor = if (c.isCase) getPrinterByConstructor(c) else ""
- if (c.isTrait) print("trait ") else print("class ")
- print(processName(c.name))
- val it = c.infoType
- val classType = it match {
- case PolyType(typeRef, symbols) => PolyTypeWithCons(typeRef, symbols, defaultConstructor)
- case _ => it
- }
- printType(classType)
- print(" {")
- //Print class selftype
- c.selfType match {
- case Some(t: Type) => print("\n"); print(" this : " + toString(t) + " =>")
- case None =>
+ if (c.name == "<local child>" /*scala.tools.nsc.symtab.StdNames.LOCALCHILD.toString()*/ ) {
+ print("\n")
+ } else {
+ printModifiers(c)
+ val defaultConstructor = if (c.isCase) getPrinterByConstructor(c) else ""
+ if (c.isTrait) print("trait ") else print("class ")
+ print(processName(c.name))
+ val it = c.infoType
+ val classType = it match {
+ case PolyType(typeRef, symbols) => PolyTypeWithCons(typeRef, symbols, defaultConstructor)
+ case ClassInfoType(a, b) if c.isCase => ClassInfoTypeWithCons(a, b, defaultConstructor)
+ case _ => it
+ }
+ printType(classType)
+ print(" {")
+ //Print class selftype
+ c.selfType match {
+ case Some(t: Type) => print("\n"); print(" this : " + toString(t) + " =>")
+ case None =>
+ }
+ print("\n")
+ printChildren(level, c)
+ printWithIndent(level, "}\n")
}
- print("\n")
- printChildren(level, c)
- printWithIndent(level, "}\n")
}
def getPrinterByConstructor(c: ClassSymbol) = {
- c.children.find{
- case m : MethodSymbol if m.name == CONSTRUCTOR_NAME => true
+ c.children.find {
+ case m: MethodSymbol if m.name == CONSTRUCTOR_NAME => true
case _ => false
} match {
case Some(m: MethodSymbol) =>
val baos = new ByteArrayOutputStream
val stream = new PrintStream(baos)
val printer = new ScalaSigPrinter(stream, printPrivates)
-// printer.printMethodType(m.infoType, false)
+ printer.printMethodType(m.infoType, false)(())
baos.toString
case None =>
""
@@ -170,7 +197,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase
})
- def printMethodType(t: Type, printResult: Boolean)(implicit cont : => Unit): Unit = {
+ def printMethodType(t: Type, printResult: Boolean)(cont: => Unit): Unit = {
def _pmt(mt: Type {def resultType: Type; def paramSymbols: Seq[Symbol]}) = {
@@ -179,9 +206,9 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
case _ => "^___^"
})
- // Printe parameter clauses
+ // Print parameter clauses
print(paramEntries.mkString(
- "(" + (mt match {case _ : ImplicitMethodType => "implicit "; case _ => ""})
+ "(" + (mt match {case _: ImplicitMethodType => "implicit "; case _ => ""})
, ", ", ")"))
// Print result type
@@ -215,13 +242,14 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val n = m.name
if (underCaseClass(m) && n == CONSTRUCTOR_NAME) return
+ if (n.matches(".+\\$default\\$\\d+")) return // skip default function parameters
if (n.startsWith("super$")) return // do not print auxiliary qualified super accessors
if (m.isAccessor && n.endsWith("_$eq")) return
indent()
printModifiers(m)
if (m.isAccessor) {
val indexOfSetter = m.parent.get.children.indexWhere(x => x.isInstanceOf[MethodSymbol] &&
- x.asInstanceOf[MethodSymbol].name == n + "_$eq")
+ x.asInstanceOf[MethodSymbol].name == n + "_$eq")
print(if (indexOfSetter > 0) "var " else "val ")
} else {
print("def ")
@@ -234,7 +262,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val nn = processName(name)
print(nn)
printMethodType(m.infoType, true)(
- {if (!m.isDeferred) print(" = { /* compiled code */ }" /* Print body only for non-abstract metods */ )}
+ {if (!m.isDeferred) print(" = { /* compiled code */ }" /* Print body only for non-abstract methods */ )}
)
}
print("\n")
@@ -248,35 +276,48 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
printChildren(level, a)
}
- def printAttributes(sym: SymbolInfoSymbol) {
- for (attrib <- sym.attributes) printAttribute(attrib)
+ def printTypeSymbol(level: Int, t: TypeSymbol) {
+ print("type ")
+ print(processName(t.name))
+ printType(t.infoType)
+ print("\n")
}
- def printAttribute(attrib: AttributeInfo) {
- printType(attrib.typeRef, "@")
+ def toString(attrib: AttributeInfo): String = {
+ val buffer = new StringBuffer
+ buffer.append(toString(attrib.typeRef, "@"))
if (attrib.value.isDefined) {
- print("(")
- printValue(attrib.value.get)
- print(")")
+ buffer.append("(")
+ val value = attrib.value.get
+ val stringVal = value.isInstanceOf[String]
+ if (stringVal) buffer.append("\"")
+ val stringValue = valueToString(value)
+ val isMultiline = stringVal && (stringValue.contains("\n")
+ || stringValue.contains("\r"))
+ if (isMultiline) buffer.append("\"\"")
+ buffer.append(valueToString(value))
+ if (isMultiline) buffer.append("\"\"")
+ if (stringVal) buffer.append("\"")
+ buffer.append(")")
}
if (!attrib.values.isEmpty) {
- print(" {")
+ buffer.append(" {")
for (name ~ value <- attrib.values) {
- print(" val ")
- print(processName(name))
- print(" = ")
- printValue(value)
+ buffer.append(" val ")
+ buffer.append(processName(name))
+ buffer.append(" = ")
+ buffer.append(valueToString(value))
}
- printValue(attrib.value)
- print(" }")
+ buffer.append(valueToString(attrib.value))
+ buffer.append(" }")
}
- print(" ")
+ buffer.toString
}
- def printValue(value: Any): Unit = value match {
- case t: Type => printType(t)
+ def valueToString(value: Any): String = value match {
+ case t: Type => toString(t)
// TODO string, char, float, etc.
- case _ => print(value)
+ case _ => value.toString
}
implicit object _tf extends TypeFlags(false)
@@ -289,57 +330,74 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
def toString(t: Type)(implicit flags: TypeFlags): String = toString(t, "")(flags)
- def toString(t: Type, sep: String)(implicit flags: TypeFlags): String = t match {
- case ThisType(symbol) => sep + symbol.path + ".type"
- case SingleType(typeRef, symbol) => sep + symbol.path + ".type"
- case ConstantType(constant) => sep + (constant match {
- case null => "scala.Null"
- case _: Unit => "scala.Unit"
- case _: Boolean => "scala.Boolean"
- case _: Byte => "scala.Byte"
- case _: Char => "scala.Char"
- case _: Short => "scala.Short"
- case _: Int => "scala.Int"
- case _: Long => "scala.Long"
- case _: Float => "scala.Float"
- case _: Double => "scala.Double"
- case _: String => "java.lang.String"
- case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]"
- })
- case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match {
- case "scala.<repeated>" => flags match {
- case TypeFlags(true) => toString(typeArgs.first) + "*"
- case _ => "scala.Seq" + typeArgString(typeArgs)
+ def toString(t: Type, sep: String)(implicit flags: TypeFlags): String = {
+ // print type itself
+ t match {
+ case ThisType(symbol) => sep + processName(symbol.path) + ".type"
+ case SingleType(typeRef, symbol) => sep + processName(symbol.path) + ".type"
+ case ConstantType(constant) => sep + (constant match {
+ case null => "scala.Null"
+ case _: Unit => "scala.Unit"
+ case _: Boolean => "scala.Boolean"
+ case _: Byte => "scala.Byte"
+ case _: Char => "scala.Char"
+ case _: Short => "scala.Short"
+ case _: Int => "scala.Int"
+ case _: Long => "scala.Long"
+ case _: Float => "scala.Float"
+ case _: Double => "scala.Double"
+ case _: String => "java.lang.String"
+ case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]"
+ })
+ case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match {
+ case "scala.<repeated>" => flags match {
+ case TypeFlags(true) => toString(typeArgs.head) + "*"
+ case _ => "scala.Seq" + typeArgString(typeArgs)
+ }
+ case "scala.<byname>" => "=> " + toString(typeArgs.head)
+ case _ => {
+ val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference
+ StringUtil.trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
+ }
+ })
+ case TypeBoundsType(lower, upper) => {
+ val lb = toString(lower)
+ val ub = toString(upper)
+ val lbs = if (!lb.equals("scala.Nothing")) " >: " + lb else ""
+ val ubs = if (!ub.equals("scala.Any")) " <: " + ub else ""
+ lbs + ubs
}
- case "scala.<byname>" => "=> " + toString(typeArgs.first)
- case _ => {
- val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference
- StringUtil.trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
+ case RefinedType(classSym, typeRefs) => sep + typeRefs.map(toString).mkString("", " with ", "")
+ case ClassInfoType(symbol, typeRefs) => sep + typeRefs.map(toString).mkString(" extends ", " with ", "")
+ case ClassInfoTypeWithCons(symbol, typeRefs, cons) => sep + typeRefs.map(toString).
+ mkString(cons + " extends ", " with ", "")
+
+ case ImplicitMethodType(resultType, _) => toString(resultType, sep)
+ case MethodType(resultType, _) => toString(resultType, sep)
+
+ case PolyType(typeRef, symbols) => typeParamString(symbols) + toString(typeRef, sep)
+ case PolyTypeWithCons(typeRef, symbols, cons) => typeParamString(symbols) + processName(cons) + toString(typeRef, sep)
+ case AnnotatedType(typeRef, attribTreeRefs) => {
+ toString(typeRef, sep)
}
- })
- case TypeBoundsType(lower, upper) => " >: " + toString(lower) + " <: " + toString(upper)
- case RefinedType(classSym, typeRefs) => sep + typeRefs.map(toString).mkString("", " with ", "")
- case ClassInfoType(symbol, typeRefs) => sep + typeRefs.map(toString).mkString(" extends ", " with ", "")
-
- case ImplicitMethodType(resultType, _) => toString(resultType, sep)
- case MethodType(resultType, _) => toString(resultType, sep)
-
- case PolyType(typeRef, symbols) => typeParamString(symbols) + toString(typeRef, sep)
- case PolyTypeWithCons(typeRef, symbols, cons) => typeParamString(symbols) + cons + toString(typeRef, sep)
- case AnnotatedType(typeRef, attribTreeRefs) => toString(typeRef, sep)
- case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep)
- //case DeBruijnIndexType(typeLevel, typeIndex) =>
- case ExistentialType(typeRef, symbols) => {
- val refs = symbols.map(toString _).filter(!_.startsWith("_ ")).map("type " + _)
- toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", "; ", "}") else "")
+ case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep)
+ //case DeBruijnIndexType(typeLevel, typeIndex) =>
+ case ExistentialType(typeRef, symbols) => {
+ val refs = symbols.map(toString _).filter(!_.startsWith("_")).map("type " + _)
+ toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", "; ", "}") else "")
+ }
+ case _ => sep + t.toString
}
- case _ => sep + t.toString
}
def getVariance(t: TypeSymbol) = if (t.isCovariant) "+" else if (t.isContravariant) "-" else ""
def toString(symbol: Symbol): String = symbol match {
- case symbol: TypeSymbol => getVariance(symbol) + processName(symbol.name) + toString(symbol.infoType)
+ case symbol: TypeSymbol => {
+ val attrs = (for (a <- symbol.attributes) yield toString(a)).mkString(" ")
+ val atrs = if (attrs.length > 0) attrs.trim + " " else ""
+ atrs + getVariance(symbol) + processName(symbol.name) + toString(symbol.infoType)
+ }
case s => symbol.toString
}
@@ -356,19 +414,27 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
"\\$minus" -> "-", "\\$eq" -> "=", "\\$less" -> "<",
"\\$times" -> "*", "\\$div" -> "/", "\\$bslash" -> "\\\\",
"\\$greater" -> ">", "\\$qmark" -> "?", "\\$percent" -> "%",
- "\\$amp" -> "&", "\\$colon" -> ":", "\\$u2192" -> "→")
+ "\\$amp" -> "&", "\\$colon" -> ":", "\\$u2192" -> "→",
+ "\\$hash" -> "#")
val pattern = Pattern.compile(_syms.keys.foldLeft("")((x, y) => if (x == "") y else x + "|" + y))
val placeholderPattern = "_\\$(\\d)+"
+ private def stripPrivatePrefix(name: String) = {
+ val i = name.lastIndexOf("$$")
+ if (i > 0) name.substring(i + 2) else name
+ }
+
def processName(name: String) = {
- val m = pattern.matcher(name)
- var temp = name
+ val stripped = stripPrivatePrefix(name)
+ val m = pattern.matcher(stripped)
+ var temp = stripped
while (m.find) {
val key = m.group
val re = "\\" + key
temp = temp.replaceAll(re, _syms(re))
}
- temp.replaceAll(placeholderPattern, "_")
+ val result = temp.replaceAll(placeholderPattern, "_")
+ NameTransformer.decode(result)
}
}
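processName now strips the qualifier$$ prefix of private members, rewrites the $op encodings (including the new $hash), collapses _$N placeholders, and finally applies NameTransformer.decode. Hypothetical before/after pairs, assuming a printer instance:

    // Hypothetical examples; the trailing comments show the expected clean-up.
    val printer = new ScalaSigPrinter(System.out, false)
    printer.processName("$colon$colon")                         // -> "::"
    printer.processName("$hash$hash")                           // -> "##"
    printer.processName("scala$collection$immutable$List$$hd")  // -> "hd" (private prefix stripped)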
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
index e224525d06..c991df6c09 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
@@ -15,6 +15,7 @@ case class TypeRefType(prefix : Type, symbol : Symbol, typeArgs : Seq[Type]) ext
case class TypeBoundsType(lower : Type, upper : Type) extends Type
case class RefinedType(classSym : Symbol, typeRefs : List[Type]) extends Type
case class ClassInfoType(symbol : Symbol, typeRefs : Seq[Type]) extends Type
+case class ClassInfoTypeWithCons(symbol : Symbol, typeRefs : Seq[Type], cons: String) extends Type
case class MethodType(resultType : Type, paramSymbols : Seq[Symbol]) extends Type
case class PolyType(typeRef : Type, symbols : Seq[TypeSymbol]) extends Type
case class PolyTypeWithCons(typeRef : Type, symbols : Seq[TypeSymbol], cons: String) extends Type
diff --git a/src/swing/scala/swing/AbstractButton.scala b/src/swing/scala/swing/AbstractButton.scala
index 243a274792..ee26a6da31 100644
--- a/src/swing/scala/swing/AbstractButton.scala
+++ b/src/swing/scala/swing/AbstractButton.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -20,7 +19,7 @@ import javax.swing.{AbstractButton => JAbstractButton, Icon}
*
* @see javax.swing.AbstractButton
*/
-abstract class AbstractButton extends Component with Action.Trigger with Publisher {
+abstract class AbstractButton extends Component with Action.Trigger.Wrapper with Publisher {
override lazy val peer: JAbstractButton = new JAbstractButton with SuperMixin {}
def text: String = peer.getText
@@ -41,14 +40,6 @@ abstract class AbstractButton extends Component with Action.Trigger with Publish
def rolloverSelectedIcon: Icon = peer.getRolloverSelectedIcon
def rolloverSelectedIcon_=(b: Icon) = peer.setRolloverSelectedIcon(b)
- // TODO: we need an action cache
- private var _action: Action = Action.NoAction
- def action: Action = _action
- def action_=(a: Action) { _action = a; peer.setAction(a.peer) }
-
- //1.6: def hideActionText: Boolean = peer.getHideActionText
- //def hideActionText_=(b: Boolean) = peer.setHideActionText(b)
-
peer.addActionListener(Swing.ActionListener { e =>
publish(ButtonClicked(AbstractButton.this))
})
diff --git a/src/swing/scala/swing/Action.scala b/src/swing/scala/swing/Action.scala
index d2f48d4b48..4c3e92bff7 100644
--- a/src/swing/scala/swing/Action.scala
+++ b/src/swing/scala/swing/Action.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -29,9 +28,17 @@ object Action {
def peer: javax.swing.JComponent {
def addActionListener(a: ActionListener)
def removeActionListener(a: ActionListener)
- def setAction(a: Action): javax.swing.Action
- def getAction: javax.swing.Action
+ def setAction(a: javax.swing.Action)
+ def getAction(): javax.swing.Action
}
+
+ // TODO: we need an action cache
+ private var _action: Action = Action.NoAction
+ def action: Action = _action
+ def action_=(a: Action) { _action = a; peer.setAction(a.peer) }
+
+ //1.6: def hideActionText: Boolean = peer.getHideActionText
+ //def hideActionText_=(b: Boolean) = peer.setHideActionText(b)
}
}
@@ -128,7 +135,7 @@ abstract class Action(title0: String) {
def accelerator: Option[KeyStroke] =
toOption(peer.getValue(javax.swing.Action.ACCELERATOR_KEY))
def accelerator_=(k: Option[KeyStroke]) {
- peer.putValue(javax.swing.Action.ACCELERATOR_KEY, toNull(k))
+ peer.putValue(javax.swing.Action.ACCELERATOR_KEY, k orNull)
}
/**
@@ -140,7 +147,7 @@ abstract class Action(title0: String) {
/*/**
* Only honored if not <code>None</code>. For various buttons.
*/
- 1.6: def selected: Option[Boolean] = toOption(peer.getValue(javax.swing.Action.SELECTED_KEY))
+ 1.6: def selected: Option[Boolean] = Option(peer.getValue(javax.swing.Action.SELECTED_KEY))
def selected_=(b: Option[Boolean]) {
peer.putValue(javax.swing.Action.SELECTED_KEY,
if (b == None) null else new java.lang.Boolean(b.get))
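The action property and its cache move out of AbstractButton into Action.Trigger.Wrapper, which AbstractButton now mixes in, so every trigger component obtains action/action_= from one place. A hypothetical usage sketch:

    // Hypothetical usage; Button extends AbstractButton, which now mixes in Action.Trigger.Wrapper.
    import scala.swing._

    val button = new Button
    button.action = Action("Say hi") { println("hi") }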
diff --git a/src/swing/scala/swing/Alignment.scala b/src/swing/scala/swing/Alignment.scala
index 32ae4e08d1..a864afe91b 100644
--- a/src/swing/scala/swing/Alignment.scala
+++ b/src/swing/scala/swing/Alignment.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/Applet.scala b/src/swing/scala/swing/Applet.scala
index a3569c2a71..ab6bb50e0a 100644
--- a/src/swing/scala/swing/Applet.scala
+++ b/src/swing/scala/swing/Applet.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/BorderPanel.scala b/src/swing/scala/swing/BorderPanel.scala
index 42f6f1b40e..392c205376 100644
--- a/src/swing/scala/swing/BorderPanel.scala
+++ b/src/swing/scala/swing/BorderPanel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -51,5 +50,11 @@ class BorderPanel extends Panel with LayoutContainer {
wrapPosition(layoutManager.getConstraints(comp.peer).asInstanceOf[String])
protected def areValid(c: Constraints): (Boolean, String) = (true, "")
- protected def add(c: Component, l: Constraints) { peer.add(c.peer, l.toString) }
+ protected def add(c: Component, l: Constraints) {
+ // we need to remove previous components with the same constraints as the new one,
+ // otherwise the layout manager loses track of the old one
+ val old = layoutManager.getLayoutComponent(l.toString)
+ if(old != null) peer.remove(old)
+ peer.add(c.peer, l.toString)
+ }
}
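The overridden add above evicts whatever component already occupies the requested BorderLayout slot, so reassigning a position through the layout map behaves like a replacement instead of leaving a stale entry in the layout manager. A hypothetical usage sketch:

    // Hypothetical usage; the second assignment removes the first label before adding the new one.
    import scala.swing._
    import scala.swing.BorderPanel.Position._

    val panel = new BorderPanel
    panel.layout(new Label("first"))  = Center
    panel.layout(new Label("second")) = Center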
diff --git a/src/swing/scala/swing/BoxPanel.scala b/src/swing/scala/swing/BoxPanel.scala
index 8c5ce9b098..f976813e59 100644
--- a/src/swing/scala/swing/BoxPanel.scala
+++ b/src/swing/scala/swing/BoxPanel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/BufferWrapper.scala b/src/swing/scala/swing/BufferWrapper.scala
index 7b5d9f0ea5..75b86aa2a7 100644
--- a/src/swing/scala/swing/BufferWrapper.scala
+++ b/src/swing/scala/swing/BufferWrapper.scala
@@ -1,18 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
import scala.collection.mutable.Buffer
-import scala.collection.IndexedSeq
/**
* Default partial implementation for buffer adapters.
diff --git a/src/swing/scala/swing/Button.scala b/src/swing/scala/swing/Button.scala
index e3387ef271..45dc703c68 100644
--- a/src/swing/scala/swing/Button.scala
+++ b/src/swing/scala/swing/Button.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/ButtonGroup.scala b/src/swing/scala/swing/ButtonGroup.scala
index 8af67cdb3c..477f7a1372 100644
--- a/src/swing/scala/swing/ButtonGroup.scala
+++ b/src/swing/scala/swing/ButtonGroup.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/CheckBox.scala b/src/swing/scala/swing/CheckBox.scala
index 8c909b34d1..d528fa1428 100644
--- a/src/swing/scala/swing/CheckBox.scala
+++ b/src/swing/scala/swing/CheckBox.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala
index c1d793cecd..a9800693d4 100644
--- a/src/swing/scala/swing/ComboBox.scala
+++ b/src/swing/scala/swing/ComboBox.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -15,7 +14,6 @@ import event._
import javax.swing.{JList, JComponent, JComboBox, JTextField, ComboBoxModel, AbstractListModel, ListCellRenderer}
import java.awt.event.ActionListener
-
object ComboBox {
/**
* An editor for a combo box. Lets you edit the currently selected item.
@@ -205,8 +203,8 @@ class ComboBox[A](items: Seq[A]) extends Component with Publisher {
peer.setEditor(editor(this).comboBoxPeer)
}
- def prototypeDisplayValue: Option[A] = Swing.toOption(peer.getPrototypeDisplayValue)
+ def prototypeDisplayValue: Option[A] = toOption[A](peer.getPrototypeDisplayValue)
def prototypeDisplayValue_=(v: Option[A]) {
- peer.setPrototypeDisplayValue(Swing.toNull(v.map(_.asInstanceOf[AnyRef])))
+ peer.setPrototypeDisplayValue(v map toAnyRef orNull)
}
}
diff --git a/src/swing/scala/swing/Component.scala b/src/swing/scala/swing/Component.scala
index 598e78abf4..62344bdb45 100644
--- a/src/swing/scala/swing/Component.scala
+++ b/src/swing/scala/swing/Component.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -79,6 +78,9 @@ abstract class Component extends UIElement {
}
}
+ def name: String = peer.getName
+ def name_=(s: String) = peer.setName(s)
+
/**
* Used by certain layout managers, e.g., BoxLayout or OverlayLayout to
* align components relative to each other.
@@ -100,12 +102,12 @@ abstract class Component extends UIElement {
def tooltip: String = peer.getToolTipText
def tooltip_=(t: String) = peer.setToolTipText(t)
- def inputVerifier: this.type => Boolean = { a =>
- peer.getInputVerifier().verify(a.peer)
+ def inputVerifier: Component => Boolean = { a =>
+ peer.getInputVerifier.verify(a.peer)
}
- def inputVerifier_=(v: this.type => Boolean) {
+ def inputVerifier_=(v: Component => Boolean) {
peer.setInputVerifier(new javax.swing.InputVerifier {
- def verify(c: javax.swing.JComponent) = v(UIElement.cachedWrapper(c))
+ def verify(c: javax.swing.JComponent) = v(UIElement.cachedWrapper[Component](c))
})
}
@@ -114,7 +116,7 @@ abstract class Component extends UIElement {
}
def verifyOnTraversal_=(v: (Component, Component) => Boolean) {
peer.setInputVerifier(new javax.swing.InputVerifier {
- def verify(c: javax.swing.JComponent) = v(UIElement.cachedWrapper(c))
+ def verify(c: javax.swing.JComponent) = v(UIElement.cachedWrapper[Component](c))
})
}*/
@@ -198,7 +200,7 @@ abstract class Component extends UIElement {
def requestFocusInWindow() = peer.requestFocusInWindow()
def hasFocus: Boolean = peer.isFocusOwner
- override def onFirstSubscribe {
+ protected override def onFirstSubscribe {
super.onFirstSubscribe
// TODO: deprecated, remove after 2.8
peer.addComponentListener(new java.awt.event.ComponentListener {
@@ -218,7 +220,7 @@ abstract class Component extends UIElement {
peer.addFocusListener(new java.awt.event.FocusListener {
def other(e: java.awt.event.FocusEvent) = e.getOppositeComponent match {
- case c: JComponent => Some(UIElement.cachedWrapper(c))
+ case c: JComponent => Some(UIElement.cachedWrapper[Component](c))
case _ => None
}
@@ -234,8 +236,8 @@ abstract class Component extends UIElement {
def propertyChange(e: java.beans.PropertyChangeEvent) {
e.getPropertyName match {
case "font" => publish(FontChanged(Component.this))
- case "background" => publish(ForegroundChanged(Component.this))
- case "foreground" => publish(BackgroundChanged(Component.this))
+ case "background" => publish(BackgroundChanged(Component.this))
+ case "foreground" => publish(ForegroundChanged(Component.this))
case _ =>
/*case "focusable" =>
case "focusTraversalKeysEnabled" =>
diff --git a/src/swing/scala/swing/Container.scala b/src/swing/scala/swing/Container.scala
index fba1ab87a3..33201eba51 100644
--- a/src/swing/scala/swing/Container.scala
+++ b/src/swing/scala/swing/Container.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -30,7 +29,7 @@ object Container {
override def remove(n: Int): Component = {
val c = peer.getComponent(n)
peer.remove(n)
- UIElement.cachedWrapper(c)
+ UIElement.cachedWrapper[Component](c)
}
protected def insertAt(n: Int, c: Component) { peer.add(c.peer, n) }
def +=(c: Component): this.type = { peer.add(c.peer) ; this }
@@ -41,11 +40,11 @@ object Container {
peer.addContainerListener(new java.awt.event.ContainerListener {
def componentAdded(e: java.awt.event.ContainerEvent) {
publish(ComponentAdded(Wrapper.this,
- UIElement.cachedWrapper(e.getChild.asInstanceOf[javax.swing.JComponent])))
+ UIElement.cachedWrapper[Component](e.getChild.asInstanceOf[javax.swing.JComponent])))
}
def componentRemoved(e: java.awt.event.ContainerEvent) {
publish(ComponentRemoved(Wrapper.this,
- UIElement.cachedWrapper(e.getChild.asInstanceOf[javax.swing.JComponent])))
+ UIElement.cachedWrapper[Component](e.getChild.asInstanceOf[javax.swing.JComponent])))
}
})
}
diff --git a/src/swing/scala/swing/EditorPane.scala b/src/swing/scala/swing/EditorPane.scala
index 0175d6140a..9302650d50 100644
--- a/src/swing/scala/swing/EditorPane.scala
+++ b/src/swing/scala/swing/EditorPane.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -21,7 +20,7 @@ import java.awt.event._
* @see javax.swing.JEditorPane
*/
class EditorPane(contentType0: String, text0: String) extends TextComponent {
- override lazy val peer: JEditorPane = new JEditorPane(contentType0, text0) with SuperMixin {}
+ override lazy val peer: JEditorPane = new JEditorPane(contentType0, text0) with SuperMixin
def this() = this("text/plain", "")
def contentType: String = peer.getContentType
diff --git a/src/swing/scala/swing/FileChooser.scala b/src/swing/scala/swing/FileChooser.scala
index 44d6e65ae5..f9eda4a182 100644
--- a/src/swing/scala/swing/FileChooser.scala
+++ b/src/swing/scala/swing/FileChooser.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -59,7 +58,7 @@ class FileChooser(dir: File) {
def title: String = peer.getDialogTitle
def title_=(t: String) { peer.setDialogTitle(t) }
- def accessory: Component = UIElement.cachedWrapper(peer.getAccessory)
+ def accessory: Component = UIElement.cachedWrapper[Component](peer.getAccessory)
def accessory_=(c: Component) { peer.setAccessory(c.peer) }
def fileHidingEnabled: Boolean = peer.isFileHidingEnabled
diff --git a/src/swing/scala/swing/FlowPanel.scala b/src/swing/scala/swing/FlowPanel.scala
index 5047a622f0..25f128d1ee 100644
--- a/src/swing/scala/swing/FlowPanel.scala
+++ b/src/swing/scala/swing/FlowPanel.scala
@@ -1,17 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
import java.awt.FlowLayout
+import javax.swing.JPanel
object FlowPanel {
object Alignment extends Enumeration {
@@ -30,7 +30,8 @@ object FlowPanel {
* @see java.awt.FlowLayout
*/
class FlowPanel(alignment: FlowPanel.Alignment.Value)(contents0: Component*) extends Panel with SequentialContainer.Wrapper {
- override lazy val peer: javax.swing.JPanel = new javax.swing.JPanel(new java.awt.FlowLayout(alignment.id))
+ override lazy val peer: JPanel =
+ new JPanel(new java.awt.FlowLayout(alignment.id)) with SuperMixin
def this(contents0: Component*) = this(FlowPanel.Alignment.Center)(contents0: _*)
def this() = this(FlowPanel.Alignment.Center)()
diff --git a/src/swing/scala/swing/Font.scala b/src/swing/scala/swing/Font.scala
new file mode 100644
index 0000000000..a58c8967d7
--- /dev/null
+++ b/src/swing/scala/swing/Font.scala
@@ -0,0 +1,70 @@
+package scala.swing
+
+/*object Font {
+ def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
+ def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
+ def decode(str: String) = java.awt.Font.decode(str)
+
+ /* TODO: finish implementation
+ /**
+ * See [java.awt.Font.getFont].
+ */
+ def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
+ java.awt.Font.getFont(ImmutableMapWrapper(attributes))
+
+ import java.{util => ju}
+ private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
+ self =>
+ override def size = underlying.size
+
+ override def put(k : A, v : B) =
+ throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
+ override def remove(k : AnyRef) =
+ throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
+
+ override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
+ def size = self.size
+
+ def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
+ val ui = underlying.iterator
+ var prev : Option[A] = None
+
+ def hasNext = ui.hasNext
+
+ def next = {
+ val (k, v) = ui.next
+ prev = Some(k)
+ new ju.Map.Entry[A, B] {
+ def getKey = k
+ def getValue = v
+ def setValue(v1 : B) = self.put(k, v1)
+ override def equals(other : Any) = other match {
+ case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
+ case _ => false
+ }
+ }
+ }
+
+ def remove = prev match {
+ case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
+ case _ => throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+ }
+ */
+
+ /**
+ * See [java.awt.Font.getFont].
+ */
+ def get(nm: String) = java.awt.Font.getFont(nm)
+ /**
+ * See [java.awt.Font.getFont].
+ */
+ def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
+
+ def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
+ def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
+ def Point(x: Int, y: Int) = new Point(x, y)
+ def Dimension(x: Int, y: Int) = new Dimension(x, y)
+}*/ \ No newline at end of file
diff --git a/src/swing/scala/swing/FormattedTextField.scala b/src/swing/scala/swing/FormattedTextField.scala
index 623764dc76..0253750134 100644
--- a/src/swing/scala/swing/FormattedTextField.scala
+++ b/src/swing/scala/swing/FormattedTextField.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -33,7 +32,7 @@ object FormattedTextField {
* @see javax.swing.JFormattedTextField
*/
class FormattedTextField(format: java.text.Format) extends TextComponent {
- override lazy val peer: JFormattedTextField = new JFormattedTextField(format)
+ override lazy val peer: JFormattedTextField = new JFormattedTextField(format) with SuperMixin
import FormattedTextField._
diff --git a/src/swing/scala/swing/GUIApplication.scala b/src/swing/scala/swing/GUIApplication.scala
index 07b06e1b38..95cc8cbfea 100644
--- a/src/swing/scala/swing/GUIApplication.scala
+++ b/src/swing/scala/swing/GUIApplication.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/GridBagPanel.scala b/src/swing/scala/swing/GridBagPanel.scala
index 15f506350d..586d94158f 100644
--- a/src/swing/scala/swing/GridBagPanel.scala
+++ b/src/swing/scala/swing/GridBagPanel.scala
@@ -1,17 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
-import java.awt.{GridBagConstraints}
+import java.awt.{GridBagConstraints, GridBagLayout}
object GridBagPanel {
@@ -50,10 +49,10 @@ object GridBagPanel {
* @see java.awt.GridBagLayout
*/
class GridBagPanel extends Panel with LayoutContainer {
- override lazy val peer = new javax.swing.JPanel(new java.awt.GridBagLayout)
+ override lazy val peer = new javax.swing.JPanel(new GridBagLayout) with SuperMixin
import GridBagPanel._
- private def layoutManager = peer.getLayout.asInstanceOf[java.awt.GridBagLayout]
+ private def layoutManager = peer.getLayout.asInstanceOf[GridBagLayout]
/**
* Convenient conversion from xy-coords given as pairs to
diff --git a/src/swing/scala/swing/GridPanel.scala b/src/swing/scala/swing/GridPanel.scala
index fbb680b864..acba29e753 100644
--- a/src/swing/scala/swing/GridPanel.scala
+++ b/src/swing/scala/swing/GridPanel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -21,7 +20,8 @@ object GridPanel {
* @see java.awt.GridLayout
*/
class GridPanel(rows0: Int, cols0: Int) extends Panel with SequentialContainer.Wrapper {
- override lazy val peer = new javax.swing.JPanel(new java.awt.GridLayout(rows0, cols0))
+ override lazy val peer =
+ new javax.swing.JPanel(new java.awt.GridLayout(rows0, cols0)) with SuperMixin
/*type Constraints = (Int, Int)
diff --git a/src/swing/scala/swing/Label.scala b/src/swing/scala/swing/Label.scala
index 5bc9989ffb..00b1b2898a 100644
--- a/src/swing/scala/swing/Label.scala
+++ b/src/swing/scala/swing/Label.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -20,7 +19,8 @@ import scala.swing.Swing._
* @see javax.swing.JLabel
*/
class Label(text0: String, icon0: Icon, align: Alignment.Value) extends Component {
- override lazy val peer: JLabel = new JLabel(text0, toNullIcon(icon0), align.id) with SuperMixin
+ override lazy val peer: JLabel =
+ new JLabel(text0, toNullIcon(icon0), align.id) with SuperMixin
def this() = this("", EmptyIcon, Alignment.Center)
def this(s: String) = this(s, EmptyIcon, Alignment.Center)
diff --git a/src/swing/scala/swing/LayoutContainer.scala b/src/swing/scala/swing/LayoutContainer.scala
index d3587d638a..1b16ba9466 100644
--- a/src/swing/scala/swing/LayoutContainer.scala
+++ b/src/swing/scala/swing/LayoutContainer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -42,7 +41,10 @@ trait LayoutContainer extends Container.Wrapper {
protected def areValid(c: Constraints): (Boolean, String)
/**
* Adds a component with the given constraints to the underlying layout
- * manager and the component peer.
+ * manager and the component peer. This method needs to interact properly
+ * with method `constraintsFor`, i.e., it might need to remove previously
+ * held components in order to maintain layout consistency. See `BorderPanel`
+ * for an example.
*/
protected def add(comp: Component, c: Constraints)
@@ -53,7 +55,7 @@ trait LayoutContainer extends Container.Wrapper {
*
* layout(myComponent) = myConstraints
*
- * also ensures that myComponent is properly add to this container.
+ * also ensures that myComponent is properly added to this container.
*/
def layout: Map[Component, Constraints] = new Map[Component, Constraints] {
def -= (c: Component): this.type = { _contents -= c; this }
@@ -64,7 +66,7 @@ trait LayoutContainer extends Container.Wrapper {
add(c, l)
this
}
- def get(c: Component) = Swing.toOption(constraintsFor(c))
+ def get(c: Component) = Option(constraintsFor(c))
override def size = peer.getComponentCount
def iterator: Iterator[(Component, Constraints)] =
peer.getComponents.iterator.map { c =>
diff --git a/src/swing/scala/swing/ListView.scala b/src/swing/scala/swing/ListView.scala
index 4841cb3ee7..7a94ff112f 100644
--- a/src/swing/scala/swing/ListView.scala
+++ b/src/swing/scala/swing/ListView.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -143,7 +142,7 @@ object ListView {
*/
class ListView[A] extends Component {
import ListView._
- override lazy val peer: JList = new JList
+ override lazy val peer: JList = new JList with SuperMixin
def this(items: Seq[A]) = {
this()
@@ -157,12 +156,12 @@ class ListView[A] extends Component {
def listData: Seq[A] = peer.getModel match {
case model: ModelWrapper => model.items
- case model @ _ => new Seq[A] {
+ case model @ _ => new Seq[A] { selfSeq =>
def length = model.getSize
def iterator = new Iterator[A] {
var idx = 0
def next = { idx += 1; apply(idx-1) }
- def hasNext = idx < length
+ def hasNext = idx < selfSeq.length
}
def apply(n: Int) = model.getElementAt(n).asInstanceOf[A]
}
diff --git a/src/swing/scala/swing/MainFrame.scala b/src/swing/scala/swing/MainFrame.scala
index 86b0eb5901..4bc855d1c3 100644
--- a/src/swing/scala/swing/MainFrame.scala
+++ b/src/swing/scala/swing/MainFrame.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -18,5 +17,5 @@ import event._
* framework and quits the application when closed.
*/
class MainFrame extends Frame {
- override def closeOperation { System.exit(0); }
+ override def closeOperation { System.exit(0) }
}
diff --git a/src/swing/scala/swing/Menu.scala b/src/swing/scala/swing/Menu.scala
index 1879725821..21d1d7ae12 100644
--- a/src/swing/scala/swing/Menu.scala
+++ b/src/swing/scala/swing/Menu.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -24,7 +23,7 @@ object MenuBar {
* @see javax.swing.JMenuBar
*/
class MenuBar extends Component with SequentialContainer.Wrapper {
- override lazy val peer: JMenuBar = new JMenuBar
+ override lazy val peer: JMenuBar = new JMenuBar with SuperMixin
def menus: Seq[Menu] = contents.filter(_.isInstanceOf[Menu]).map(_.asInstanceOf[Menu])
diff --git a/src/swing/scala/swing/Orientable.scala b/src/swing/scala/swing/Orientable.scala
index fb89e47e29..8aac30f339 100644
--- a/src/swing/scala/swing/Orientable.scala
+++ b/src/swing/scala/swing/Orientable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/Orientation.scala b/src/swing/scala/swing/Orientation.scala
index 7f10bbee65..e017b2374f 100644
--- a/src/swing/scala/swing/Orientation.scala
+++ b/src/swing/scala/swing/Orientation.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/Oriented.scala b/src/swing/scala/swing/Oriented.scala
index d25d796050..b19a5f898d 100644
--- a/src/swing/scala/swing/Oriented.scala
+++ b/src/swing/scala/swing/Oriented.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/Panel.scala b/src/swing/scala/swing/Panel.scala
index bce28c97a1..1671915074 100644
--- a/src/swing/scala/swing/Panel.scala
+++ b/src/swing/scala/swing/Panel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/PasswordField.scala b/src/swing/scala/swing/PasswordField.scala
index 4fcc761be5..aac9c393b9 100644
--- a/src/swing/scala/swing/PasswordField.scala
+++ b/src/swing/scala/swing/PasswordField.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -21,7 +20,7 @@ import java.awt.event._
* @see javax.swing.JPasswordField
*/
class PasswordField(text0: String, columns0: Int) extends TextField(text0, columns0) {
- override lazy val peer: JPasswordField = new JPasswordField(text0, columns0)
+ override lazy val peer: JPasswordField = new JPasswordField(text0, columns0) with SuperMixin
def this(text: String) = this(text, 0)
def this(columns: Int) = this("", columns)
def this() = this("")
diff --git a/src/swing/scala/swing/ProgressBar.scala b/src/swing/scala/swing/ProgressBar.scala
index cdc4262bfd..557d7b89c3 100644
--- a/src/swing/scala/swing/ProgressBar.scala
+++ b/src/swing/scala/swing/ProgressBar.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -22,7 +21,7 @@ import event._
*/
class ProgressBar extends Component with Orientable.Wrapper {
override lazy val peer: javax.swing.JProgressBar =
- new javax.swing.JProgressBar
+ new javax.swing.JProgressBar with SuperMixin
def min: Int = peer.getMinimum
def min_=(v: Int) { peer.setMinimum(v) }
diff --git a/src/swing/scala/swing/Publisher.scala b/src/swing/scala/swing/Publisher.scala
index 4603f3e62b..3fa9fc7e56 100644
--- a/src/swing/scala/swing/Publisher.scala
+++ b/src/swing/scala/swing/Publisher.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -52,11 +51,15 @@ trait Publisher extends Reactor {
listenTo(this)
}
+/**
+ * A publisher that subscribes itself to an underlying event source only once the first
+ * reaction is installed, and can unsubscribe itself again when the last reaction is uninstalled.
+ */
private[swing] trait LazyPublisher extends Publisher {
import Reactions._
- def onFirstSubscribe()
- def onLastUnsubscribe()
+ protected def onFirstSubscribe()
+ protected def onLastUnsubscribe()
override def subscribe(listener: Reaction) {
if(listeners.size == 1) onFirstSubscribe()
@@ -77,7 +80,7 @@ private[swing] trait SingleRefCollection[+A <: AnyRef] extends Iterable[A] { sel
trait Ref[+A <: AnyRef] extends Reference[A] {
override def hashCode() = {
val v = get
- if (v == None) 0 else v.get.hashCode
+ if (v == None) 0 else v.get.##
}
override def equals(that: Any) = that match {
case that: ReferenceWrapper[_] =>
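
The switch from `hashCode` to `##` above uses Scala's boxing-consistent hash, which keeps hashing in line with universal equality across boxed numeric types. A small illustration (not from this patch):

    val i: Any = 1
    val d: Any = 1.0
    i == d                    // true: Scala's == compares boxed numerics by value
    i.## == d.##              // true: ## hashes consistently with that equality
    i.hashCode == d.hashCode  // false: java.lang.Integer and java.lang.Double hash differently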
diff --git a/src/swing/scala/swing/RadioButton.scala b/src/swing/scala/swing/RadioButton.scala
index f15ec81a4e..c030b3c92e 100644
--- a/src/swing/scala/swing/RadioButton.scala
+++ b/src/swing/scala/swing/RadioButton.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -21,6 +20,6 @@ import javax.swing._
* @see javax.swing.JRadioButton
*/
class RadioButton(text0: String) extends ToggleButton {
- override lazy val peer: JRadioButton = new JRadioButton(text0)
+ override lazy val peer: JRadioButton = new JRadioButton(text0) with SuperMixin
def this() = this("")
}
diff --git a/src/swing/scala/swing/Reactions.scala b/src/swing/scala/swing/Reactions.scala
index dc7cb2d2f1..a30cb9f5a4 100644
--- a/src/swing/scala/swing/Reactions.scala
+++ b/src/swing/scala/swing/Reactions.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/Reactor.scala b/src/swing/scala/swing/Reactor.scala
index 8a3fbd2f00..8fdd6cf911 100644
--- a/src/swing/scala/swing/Reactor.scala
+++ b/src/swing/scala/swing/Reactor.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/RichWindow.scala b/src/swing/scala/swing/RichWindow.scala
index c4a72b3d20..10e396247e 100644
--- a/src/swing/scala/swing/RichWindow.scala
+++ b/src/swing/scala/swing/RichWindow.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -53,7 +52,7 @@ sealed trait RichWindow extends Window {
* The menu bar of this frame or `NoMenuBar` if no menu bar is set.
*/
def menuBar: MenuBar = {
- val m = UIElement.cachedWrapper(peer.getJMenuBar)
+ val m = UIElement.cachedWrapper[MenuBar](peer.getJMenuBar)
if (m != null) m else MenuBar.NoMenuBar
}
/**
@@ -156,7 +155,7 @@ object Dialog {
initial: Int): Result.Value = {
val r = JOptionPane.showOptionDialog(nullPeer(parent), message, title,
optionType.id, messageType.id, Swing.wrapIcon(icon),
- entries.map(_.asInstanceOf[AnyRef]).toArray, entries(initial))
+ entries map toAnyRef toArray, entries(initial))
Result(r)
}
@@ -168,11 +167,12 @@ object Dialog {
entries: Seq[A] = Nil,
initial: A): Option[A] = {
val e = if (entries.isEmpty) null
- else entries.map(_.asInstanceOf[AnyRef]).toArray
+ else entries map toAnyRef toArray
val r = JOptionPane.showInputDialog(nullPeer(parent), message, title,
messageType.id, Swing.wrapIcon(icon),
e, initial)
- Swing.toOption(r)
+
+ toOption[A](r)
}
def showMessage(parent: Component = null,
message: Any,
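
A usage sketch of the reworked input dialog (parameter names as in the signature above; the entries and strings are made up): the result now comes back as an Option rather than a possibly-null reference.

    import scala.swing._

    val picked: Option[String] =
      Dialog.showInput(message = "Favourite colour?", title = "Survey",
                       entries = Seq("red", "green", "blue"), initial = "red")
    picked foreach (c => println("picked " + c))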
diff --git a/src/swing/scala/swing/RootPanel.scala b/src/swing/scala/swing/RootPanel.scala
index 40d0860d4c..86bfd090d9 100644
--- a/src/swing/scala/swing/RootPanel.scala
+++ b/src/swing/scala/swing/RootPanel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -22,11 +21,13 @@ trait RootPanel extends Container {
/**
* At most one component.
*/
- def contents: Seq[Component] = {
- Swing.toOption[Any](peer.getContentPane.getComponent(0)).map { c =>
- UIElement.cachedWrapper(c.asInstanceOf[javax.swing.JComponent])
- }.toList
- }
+ def contents: Seq[Component] =
+ if (peer.getContentPane.getComponentCount == 0) Nil
+ else {
+ val c = peer.getContentPane.getComponent(0).asInstanceOf[javax.swing.JComponent]
+ List(UIElement.cachedWrapper[Component](c))
+ }
+
def contents_=(c: Component) {
if (peer.getContentPane.getComponentCount > 0) {
val old = peer.getContentPane.getComponent(0)
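
With the change above, asking an empty frame for its contents yields Nil instead of failing on `getComponent(0)`. A quick sketch (assumes the usual Frame API):

    import scala.swing._

    val f = new Frame
    println(f.contents.isEmpty)   // true: nothing set yet; previously this threw
    f.contents = new Label("hi")
    println(f.contents.length)    // 1: at most one component, as documented above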
diff --git a/src/swing/scala/swing/ScrollBar.scala b/src/swing/scala/swing/ScrollBar.scala
index 8a19d7293c..cd2fbdc973 100644
--- a/src/swing/scala/swing/ScrollBar.scala
+++ b/src/swing/scala/swing/ScrollBar.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -23,7 +22,7 @@ object ScrollBar {
}
class ScrollBar extends Component with Orientable.Wrapper with Adjustable.Wrapper {
- override lazy val peer = new JScrollBar
+ override lazy val peer: JScrollBar = new JScrollBar with SuperMixin
def valueIsAjusting = peer.getValueIsAdjusting
def valueIsAjusting_=(b : Boolean) = peer.setValueIsAdjusting(b)
diff --git a/src/swing/scala/swing/ScrollPane.scala b/src/swing/scala/swing/ScrollPane.scala
index 8cfe9336e9..a840bf2751 100644
--- a/src/swing/scala/swing/ScrollPane.scala
+++ b/src/swing/scala/swing/ScrollPane.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -43,13 +42,13 @@ object ScrollPane {
class ScrollPane extends Component with Container {
import ScrollPane._
- override lazy val peer: JScrollPane = new JScrollPane
+ override lazy val peer: JScrollPane = new JScrollPane with SuperMixin
def this(c: Component) = {
this()
contents = c
}
def contents: Seq[Component] =
- List(UIElement.cachedWrapper(peer.getViewport.getView.asInstanceOf[javax.swing.JComponent]))
+ List(UIElement.cachedWrapper[Component](peer.getViewport.getView.asInstanceOf[javax.swing.JComponent]))
/**
* Sets the single child.
@@ -63,17 +62,20 @@ class ScrollPane extends Component with Container {
* want to let the row header be a list view with the same row height as
* the viewport component.
*/
- def rowHeaderView: Option[Component] = Swing.toOption(peer.getRowHeader.getView).map(UIElement.cachedWrapper(_))
+ def rowHeaderView: Option[Component] =
+ Option(peer.getRowHeader.getView) map UIElement.cachedWrapper[Component]
def rowHeaderView_=(c: Component) = peer.setRowHeaderView(c.peer)
- def rowHeaderView_=(c: Option[Component]) = peer.setRowHeaderView(Swing.toNull(c.map(_.peer)))
+ def rowHeaderView_=(c: Option[Component]) = peer.setRowHeaderView(c map (_.peer) orNull)
- def columnHeaderView: Option[Component] = Swing.toOption(peer.getColumnHeader.getView).map(UIElement.cachedWrapper(_))
+ def columnHeaderView: Option[Component] =
+ Option(peer.getColumnHeader.getView) map UIElement.cachedWrapper[Component]
def columnHeaderView_=(c: Component) = peer.setColumnHeaderView(c.peer)
- def columnHeaderView_=(c: Option[Component]) = peer.setColumnHeaderView(Swing.toNull(c.map(_.peer)))
+ def columnHeaderView_=(c: Option[Component]) = peer.setColumnHeaderView(c map (_.peer) orNull)
- def viewportView: Option[Component] = Swing.toOption(peer.getViewport.getView).map(UIElement.cachedWrapper(_))
+ def viewportView: Option[Component] =
+ Option(peer.getViewport.getView) map UIElement.cachedWrapper[Component]
def viewportView_=(c: Component) = peer.setViewportView(c.peer)
- def viewportView_=(c: Option[Component]) = peer.setViewportView(Swing.toNull(c.map(_.peer)))
+ def viewportView_=(c: Option[Component]) = peer.setViewportView(c map (_.peer) orNull)
def verticalScrollBarPolicy = BarPolicy.wrap(peer.getVerticalScrollBarPolicy)
def verticalScrollBarPolicy_=(p: BarPolicy.Value) = peer.setVerticalScrollBarPolicy(p.verticalPeer)
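
The Swing.toOption/Swing.toNull helpers are replaced by the standard-library equivalents used above; for reference (generic Scala, not from this patch):

    val maybeProp: Option[String] = Option(System.getProperty("no.such.key"))  // None when the value is null
    val rawProp: String           = maybeProp.orNull                           // back to null when None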
diff --git a/src/swing/scala/swing/Scrollable.scala b/src/swing/scala/swing/Scrollable.scala
index f5e3116c11..34c0261ef3 100644
--- a/src/swing/scala/swing/Scrollable.scala
+++ b/src/swing/scala/swing/Scrollable.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/Separator.scala b/src/swing/scala/swing/Separator.scala
index 608df073e0..f5eaa5d43f 100644
--- a/src/swing/scala/swing/Separator.scala
+++ b/src/swing/scala/swing/Separator.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -19,6 +18,6 @@ import javax.swing._
* @see javax.swing.JSeparator
*/
class Separator(o: Orientation.Value) extends Component with Oriented.Wrapper {
- override lazy val peer: JSeparator = new JSeparator(o.id)
+ override lazy val peer: JSeparator = new JSeparator(o.id) with SuperMixin
def this() = this(Orientation.Horizontal)
}
diff --git a/src/swing/scala/swing/SequentialContainer.scala b/src/swing/scala/swing/SequentialContainer.scala
index 84a664ed9d..ba2b105b2b 100644
--- a/src/swing/scala/swing/SequentialContainer.scala
+++ b/src/swing/scala/swing/SequentialContainer.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/SimpleGUIApplication.scala b/src/swing/scala/swing/SimpleGUIApplication.scala
index e349a0b65f..fec8dbfef7 100644
--- a/src/swing/scala/swing/SimpleGUIApplication.scala
+++ b/src/swing/scala/swing/SimpleGUIApplication.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -15,12 +14,12 @@ import javax.swing._
/**
* Extend this class for most simple UI applications. Clients need to implement the
- * <code>top</code> method. Framework intialization is done by this class.
+ * <code>top</code> method. Framework initialization is done by this class.
*
* In order to conform to Swing's threading policy, never implement top or any additional
 * member that creates Swing components as a value unless component creation happens on
* the EDT (see Swing.onEDT and Swing.onEDTWait). Lazy values are okay for the same reason
- * if they are intialized on the EDT always.
+ * if they are initialized on the EDT always.
*/
@deprecated("Use SimpleSwingApplication instead") abstract class SimpleGUIApplication extends GUIApplication {
@@ -44,5 +43,5 @@ import javax.swing._
this.getClass.getResource(path)
def resourceFromUserDirectory(path: String): java.io.File =
- new java.io.File(System.getProperty("user.dir"), path)
+ new java.io.File(util.Properties.userDir, path)
}
diff --git a/src/swing/scala/swing/SimpleSwingApplication.scala b/src/swing/scala/swing/SimpleSwingApplication.scala
index 9f66cc5be5..786c7b4711 100644
--- a/src/swing/scala/swing/SimpleSwingApplication.scala
+++ b/src/swing/scala/swing/SimpleSwingApplication.scala
@@ -5,7 +5,7 @@ abstract class SimpleSwingApplication extends SwingApplication {
override def startup(args: Array[String]) {
val t = top
- t.pack()
+ if (t.size == new Dimension(0,0)) t.pack()
t.visible = true
}
@@ -13,5 +13,5 @@ abstract class SimpleSwingApplication extends SwingApplication {
this.getClass.getResource(path)
def resourceFromUserDirectory(path: String): java.io.File =
- new java.io.File(System.getProperty("user.dir"), path)
+ new java.io.File(util.Properties.userDir, path)
}
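
Given the startup change above, a top frame that sets its size explicitly is no longer packed on startup; a sketch (Dimension written out fully rather than relying on package aliases):

    import scala.swing._

    object FixedSize extends SimpleSwingApplication {
      def top = new MainFrame {
        title = "Fixed size"
        contents = new Label("400 x 300")            // setting contents packs the window once
        size = new java.awt.Dimension(400, 300)      // non-default size, so startup skips pack()
      }
    }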
diff --git a/src/swing/scala/swing/Slider.scala b/src/swing/scala/swing/Slider.scala
index 53d7efa5e3..47065afe4a 100644
--- a/src/swing/scala/swing/Slider.scala
+++ b/src/swing/scala/swing/Slider.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -24,7 +23,7 @@ import event._
* @see javax.swing.JSlider
*/
class Slider extends Component with Orientable.Wrapper with Publisher {
- override lazy val peer: JSlider = new JSlider
+ override lazy val peer: JSlider = new JSlider with SuperMixin
def min: Int = peer.getMinimum
def min_=(v: Int) { peer.setMinimum(v) }
@@ -52,10 +51,11 @@ class Slider extends Component with Orientable.Wrapper with Publisher {
def adjusting = peer.getValueIsAdjusting
- def labels: scala.collection.Map[Int, Label] =
- new scala.collection.JavaConversions.JMapWrapper[Int, JLabel](
- peer.getLabelTable.asInstanceOf[java.util.Hashtable[Int, JLabel]])
- .mapValues(v => (UIElement cachedWrapper v).asInstanceOf[Label])
+ def labels: scala.collection.Map[Int, Label] = {
+ val labelTable = peer.getLabelTable.asInstanceOf[java.util.Hashtable[Int, JLabel]]
+ new scala.collection.JavaConversions.JMapWrapper(labelTable)
+ .mapValues(v => UIElement.cachedWrapper[Label](v))
+ }
def labels_=(l: scala.collection.Map[Int, Label]) {
// TODO: do some lazy wrapping
val table = new java.util.Hashtable[Any, Any]
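
A small usage sketch for the rewritten `labels` wrapper (the values and tick positions are made up):

    import scala.swing._

    val s = new Slider { min = 0; max = 100; value = 50 }
    s.labels = Map(0 -> new Label("low"), 50 -> new Label("mid"), 100 -> new Label("high"))
    s.paintLabels = true   // the label table can be read back through the JMapWrapper shown above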
diff --git a/src/swing/scala/swing/SplitPane.scala b/src/swing/scala/swing/SplitPane.scala
index 9e5584cd19..583e480beb 100644
--- a/src/swing/scala/swing/SplitPane.scala
+++ b/src/swing/scala/swing/SplitPane.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -23,7 +22,7 @@ import Swing._
*/
class SplitPane(o: Orientation.Value, left: Component, right: Component) extends Component with Container with Orientable.Wrapper {
override lazy val peer: javax.swing.JSplitPane =
- new javax.swing.JSplitPane(o.id, left.peer, right.peer)
+ new javax.swing.JSplitPane(o.id, left.peer, right.peer) with SuperMixin
def this(o: Orientation.Value) = this(o, new Component {}, new Component {})
def this() = this(Orientation.Horizontal)
@@ -33,9 +32,11 @@ class SplitPane(o: Orientation.Value, left: Component, right: Component) extends
peer.setRightComponent(right.peer)
}
- def topComponent: Component = UIElement.cachedWrapper(peer.getTopComponent.asInstanceOf[javax.swing.JComponent])
+ def topComponent: Component =
+ UIElement.cachedWrapper[Component](peer.getTopComponent.asInstanceOf[javax.swing.JComponent])
def topComponent_=(c: Component) { peer.setTopComponent(c.peer) }
- def bottomComponent: Component = UIElement.cachedWrapper(peer.getBottomComponent.asInstanceOf[javax.swing.JComponent])
+ def bottomComponent: Component =
+ UIElement.cachedWrapper[Component](peer.getBottomComponent.asInstanceOf[javax.swing.JComponent])
def bottomComponent_=(c: Component) { peer.setBottomComponent(c.peer) }
def leftComponent: Component = topComponent
diff --git a/src/swing/scala/swing/Swing.scala b/src/swing/scala/swing/Swing.scala
index 15bf30bb44..551c1541cd 100644
--- a/src/swing/scala/swing/Swing.scala
+++ b/src/swing/scala/swing/Swing.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -20,9 +19,6 @@ import javax.swing.{JComponent, Icon, BorderFactory, SwingUtilities}
* Helpers for this package.
*/
object Swing {
- protected[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
- protected[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
- protected[swing] def toNull[A>:Null<:AnyRef](a: Option[A]): A = if(a == None) null else a.get
protected[swing] def toNoIcon(i: Icon): Icon = if(i == null) EmptyIcon else i
protected[swing] def toNullIcon(i: Icon): Icon = if(i == EmptyIcon) null else i
protected[swing] def nullPeer(c: Component) = if (c != null) c.peer else null
diff --git a/src/swing/scala/swing/SwingActor.scala b/src/swing/scala/swing/SwingActor.scala
index 2de0258b57..01df831f2e 100644
--- a/src/swing/scala/swing/SwingActor.scala
+++ b/src/swing/scala/swing/SwingActor.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/TabbedPane.scala b/src/swing/scala/swing/TabbedPane.scala
index 5183bf25ca..f87209f00d 100644
--- a/src/swing/scala/swing/TabbedPane.scala
+++ b/src/swing/scala/swing/TabbedPane.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -77,7 +76,7 @@ object TabbedPane {
* @see javax.swing.JTabbedPane
*/
class TabbedPane extends Component with Publisher {
- override lazy val peer: JTabbedPane = new JTabbedPane
+ override lazy val peer: JTabbedPane = new JTabbedPane with SuperMixin
import TabbedPane._
object pages extends BufferWrapper[Page] {
@@ -99,8 +98,8 @@ class TabbedPane extends Component with Publisher {
def +=(t: Page): this.type = { t.parent = TabbedPane.this; peer.addTab(t.title, null, t.content.peer, t.tip); this }
def length = peer.getTabCount
def apply(n: Int) = new Page(TabbedPane.this, peer.getTitleAt(n),
- UIElement.cachedWrapper(peer.getComponentAt(n).asInstanceOf[javax.swing.JComponent]),
- peer.getToolTipTextAt(n))
+ UIElement.cachedWrapper[Component](peer.getComponentAt(n).asInstanceOf[javax.swing.JComponent]),
+ peer.getToolTipTextAt(n))
}
def tabLayoutPolicy: Layout.Value = Layout(peer.getTabLayoutPolicy)
diff --git a/src/swing/scala/swing/Table.scala b/src/swing/scala/swing/Table.scala
index 47d0b43c60..0a9eb6379a 100644
--- a/src/swing/scala/swing/Table.scala
+++ b/src/swing/scala/swing/Table.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -15,7 +14,7 @@ import event._
import javax.swing._
import javax.swing.table._
import javax.swing.event._
-import scala.collection.mutable.{Set, IndexedSeq}
+import scala.collection.mutable
object Table {
object AutoResizeMode extends Enumeration {
@@ -110,7 +109,7 @@ object Table {
* @see javax.swing.JTable
*/
class Table extends Component with Scrollable.Wrapper {
- override lazy val peer: JTable = new JTable with Table.JTableMixin {
+ override lazy val peer: JTable = new JTable with Table.JTableMixin with SuperMixin {
def tableWrapper = Table.this
override def getCellRenderer(r: Int, c: Int) = new TableCellRenderer {
def getTableCellRendererComponent(table: JTable, value: AnyRef, isSelected: Boolean, hasFocus: Boolean, row: Int, column: Int) =
@@ -124,7 +123,7 @@ class Table extends Component with Scrollable.Wrapper {
// TODO: use IndexedSeq[_ <: IndexedSeq[Any]], see ticket #2005
def this(rowData: Array[Array[Any]], columnNames: Seq[_]) = {
this()
- peer.setModel(new AbstractTableModel {
+ model = new AbstractTableModel {
override def getColumnName(column: Int) = columnNames(column).toString
def getRowCount() = rowData.length
def getColumnCount() = columnNames.length
@@ -134,7 +133,7 @@ class Table extends Component with Scrollable.Wrapper {
rowData(row)(col) = value
fireTableCellUpdated(row, col)
}
- })
+ }
}
def this(rows: Int, columns: Int) = {
this()
@@ -155,6 +154,7 @@ class Table extends Component with Scrollable.Wrapper {
def model = peer.getModel()
def model_=(x: TableModel) = {
peer.setModel(x)
+ model.removeTableModelListener(modelListener)
model.addTableModelListener(modelListener)
}
@@ -173,7 +173,7 @@ class Table extends Component with Scrollable.Wrapper {
object selection extends Publisher {
// TODO: could be a sorted set
- protected abstract class SelectionSet[A](a: =>Seq[A]) extends scala.collection.mutable.Set[A] {
+ protected abstract class SelectionSet[A](a: =>Seq[A]) extends mutable.Set[A] {
def -=(n: A): this.type
def +=(n: A): this.type
def contains(n: A) = a.contains(n)
@@ -197,7 +197,7 @@ class Table extends Component with Scrollable.Wrapper {
def anchorIndex: Int = peer.getColumnModel.getSelectionModel.getAnchorSelectionIndex
}
- def cells: Set[(Int, Int)] =
+ def cells: mutable.Set[(Int, Int)] =
new SelectionSet[(Int, Int)]((for(r <- selection.rows; c <- selection.columns) yield (r,c)).toSeq) { outer =>
def -=(n: (Int, Int)) = {
peer.removeRowSelectionInterval(n._1,n._1)
@@ -299,7 +299,7 @@ class Table extends Component with Scrollable.Wrapper {
def tableChanged(e: TableModelEvent) = publish(
e.getType match {
case TableModelEvent.UPDATE =>
- if (e.getFirstRow == 0 && e.getLastRow == Math.MAX_INT && e.getColumn == TableModelEvent.ALL_COLUMNS)
+ if (e.getFirstRow == 0 && e.getLastRow == Int.MaxValue && e.getColumn == TableModelEvent.ALL_COLUMNS)
TableChanged(Table.this)
else if (e.getFirstRow == TableModelEvent.HEADER_ROW)
TableStructureChanged(Table.this)
@@ -312,5 +312,4 @@ class Table extends Component with Scrollable.Wrapper {
}
)
}
- model.addTableModelListener(modelListener)
}
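
Sketch of the listener bookkeeping above: reassigning a model (here a plain DefaultTableModel, purely illustrative) first detaches the wrapper's listener, so repeated assignment cannot accumulate duplicates.

    import javax.swing.table.DefaultTableModel
    import scala.swing._

    val table = new Table(3, 2)
    val m = new DefaultTableModel(4, 4)
    table.model = m   // change listener attached to m
    table.model = m   // still attached exactly once: removed, then re-added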
diff --git a/src/swing/scala/swing/TextArea.scala b/src/swing/scala/swing/TextArea.scala
index 4ff0a666da..fe2baa6952 100644
--- a/src/swing/scala/swing/TextArea.scala
+++ b/src/swing/scala/swing/TextArea.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -20,8 +19,9 @@ import java.awt.event._
*
* @see javax.swing.JTextArea
*/
-class TextArea(text0: String, rows0: Int, columns0: Int) extends TextComponent with TextComponent.HasColumns with TextComponent.HasRows {
- override lazy val peer: JTextArea = new JTextArea(text0, rows0, columns0)
+class TextArea(text0: String, rows0: Int, columns0: Int) extends TextComponent
+ with TextComponent.HasColumns with TextComponent.HasRows {
+ override lazy val peer: JTextArea = new JTextArea(text0, rows0, columns0) with SuperMixin
def this(text: String) = this(text, 0, 0)
def this(rows: Int, columns: Int) = this("", rows, columns)
def this() = this("", 0, 0)
diff --git a/src/swing/scala/swing/TextComponent.scala b/src/swing/scala/swing/TextComponent.scala
index 3ed495ad2f..61ff33009e 100644
--- a/src/swing/scala/swing/TextComponent.scala
+++ b/src/swing/scala/swing/TextComponent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -66,6 +65,7 @@ class TextComponent extends Component with Publisher {
def editable_=(x: Boolean) = peer.setEditable(x)
def cut() { peer.cut() }
def copy() { peer.copy() }
+ def paste() { peer.paste() }
def selected: String = peer.getSelectedText
def selectAll() { peer.selectAll() }
diff --git a/src/swing/scala/swing/TextField.scala b/src/swing/scala/swing/TextField.scala
index 7067062d68..dc5a4a326a 100644
--- a/src/swing/scala/swing/TextField.scala
+++ b/src/swing/scala/swing/TextField.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -30,7 +29,7 @@ import java.awt.event._
*
* @see javax.swing.JTextField
*/
-class TextField(text0: String, columns0: Int) extends TextComponent with TextComponent.HasColumns {
+class TextField(text0: String, columns0: Int) extends TextComponent with TextComponent.HasColumns with Action.Trigger.Wrapper {
override lazy val peer: JTextField = new JTextField(text0, columns0) with SuperMixin
def this(text: String) = this(text, 0)
def this(columns: Int) = this("", columns)
@@ -48,7 +47,7 @@ class TextField(text0: String, columns0: Int) extends TextComponent with TextCom
publish(EditDone(TextField.this))
}
- override def onFirstSubscribe {
+ protected override def onFirstSubscribe {
super.onFirstSubscribe
peer.addActionListener(actionListener)
peer.addFocusListener(new FocusAdapter {
@@ -56,7 +55,7 @@ class TextField(text0: String, columns0: Int) extends TextComponent with TextCom
})
}
- override def onLastUnsubscribe {
+ protected override def onLastUnsubscribe {
super.onLastUnsubscribe
peer.removeActionListener(actionListener)
}
@@ -64,15 +63,16 @@ class TextField(text0: String, columns0: Int) extends TextComponent with TextCom
def verifier: String => Boolean = s => peer.getInputVerifier.verify(peer)
def verifier_=(v: String => Boolean) {
peer.setInputVerifier(new InputVerifier {
+ private val old = peer.getInputVerifier
def verify(c: JComponent) = v(text)
- override def shouldYieldFocus(c: JComponent) =
- peer.getInputVerifier.shouldYieldFocus(c)
+ override def shouldYieldFocus(c: JComponent) = old.shouldYieldFocus(c)
})
}
def shouldYieldFocus: String=>Boolean = s => peer.getInputVerifier.shouldYieldFocus(peer)
def shouldYieldFocus_=(y: String=>Boolean) {
peer.setInputVerifier(new InputVerifier {
- def verify(c: JComponent) = peer.getInputVerifier.verify(c)
+ private val old = peer.getInputVerifier
+ def verify(c: JComponent) = old.verify(c)
override def shouldYieldFocus(c: JComponent) = y(text)
})
}
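
The captured `old` verifier above keeps the freshly installed InputVerifier from delegating to itself. Typical use stays the same (a sketch):

    import scala.swing._

    val amount = new TextField(10)
    amount.verifier = s => s.nonEmpty && s.forall(_.isDigit)  // keep focus until the input is numeric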
diff --git a/src/swing/scala/swing/ToggleButton.scala b/src/swing/scala/swing/ToggleButton.scala
index 608f2c02b7..45a2f8982f 100644
--- a/src/swing/scala/swing/ToggleButton.scala
+++ b/src/swing/scala/swing/ToggleButton.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -21,6 +20,6 @@ import javax.swing._
* @see javax.swing.JToggleButton
*/
class ToggleButton(text0: String) extends AbstractButton {
- override lazy val peer: JToggleButton = new JToggleButton(text0)
+ override lazy val peer: JToggleButton = new JToggleButton(text0) with SuperMixin
def this() = this("")
}
diff --git a/src/swing/scala/swing/UIElement.scala b/src/swing/scala/swing/UIElement.scala
index 599c671538..8ba9493b8c 100644
--- a/src/swing/scala/swing/UIElement.scala
+++ b/src/swing/scala/swing/UIElement.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -31,8 +30,12 @@ object UIElement {
* Java Swing peer. If this method finds one of the given type `C`,
* it will return that wrapper. Otherwise it returns `null`. This
* method never throws an exception.
+ *
+ * Clients should be extremely careful with type parameter `C` and
+ * its interaction with type inference. Better err on the side of caution
+ * and explicitly specify `C`.
*/
- private[swing] def cachedWrapper[C<:UIElement](c: java.awt.Component): C = {
+ private[swing] def cachedWrapper[C>:Null<:UIElement](c: java.awt.Component): C = {
val w = c match {
case c: javax.swing.JComponent => c.getClientProperty(ClientKey)
case _ => wrapperCache.get(c)
@@ -89,9 +92,6 @@ trait UIElement extends Proxy with LazyPublisher {
def preferredSize = peer.getPreferredSize
def preferredSize_=(x: Dimension) = peer.setPreferredSize(x)
- @deprecated("Use implicit conversion from Swing object instead")
- def preferredSize_=(xy: (Int, Int)) { peer.setPreferredSize(new Dimension(xy._1, xy._2)) }
-
def font: Font = peer.getFont
def font_=(f: Font) = peer.setFont(f)
@@ -99,10 +99,10 @@ trait UIElement extends Proxy with LazyPublisher {
def location = peer.getLocation
def bounds = peer.getBounds
def size = peer.getSize
+ @deprecated("Explicit size assignement for UIElements is not supported anymore. " +
+ "Use a layout manager or subclass Window.")
def size_=(dim: Dimension) = peer.setSize(dim)
- @deprecated("Use implicit conversion from Swing object instead")
- def size_=(xy: (Int, Int)) { peer.setSize(new Dimension(xy._1, xy._2)) }
def locale = peer.getLocale
def toolkit = peer.getToolkit
@@ -119,7 +119,7 @@ trait UIElement extends Proxy with LazyPublisher {
def ignoreRepaint: Boolean = peer.getIgnoreRepaint
def ignoreRepaint_=(b: Boolean) { peer.setIgnoreRepaint(b) }
- def onFirstSubscribe {
+ protected def onFirstSubscribe {
peer.addComponentListener(new java.awt.event.ComponentListener {
def componentHidden(e: java.awt.event.ComponentEvent) {
publish(UIElementHidden(UIElement.this))
@@ -135,5 +135,5 @@ trait UIElement extends Proxy with LazyPublisher {
}
})
}
- def onLastUnsubscribe {}
+ protected def onLastUnsubscribe {}
}
diff --git a/src/swing/scala/swing/Window.scala b/src/swing/scala/swing/Window.scala
index b435d8041c..4eeefc0329 100644
--- a/src/swing/scala/swing/Window.scala
+++ b/src/swing/scala/swing/Window.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -39,12 +38,12 @@ abstract class Window extends UIElement with RootPanel with Publisher { outer =>
peer.pack() // pack also validates, which is generally required after an add
}
def defaultButton: Option[Button] =
- Swing.toOption(peer.getRootPane.getDefaultButton).map(UIElement.cachedWrapper(_))
+ toOption(peer.getRootPane.getDefaultButton) map UIElement.cachedWrapper[Button]
def defaultButton_=(b: Button) {
peer.getRootPane.setDefaultButton(b.peer)
}
def defaultButton_=(b: Option[Button]) {
- peer.getRootPane.setDefaultButton(Swing.toNull(b.map(_.peer)))
+ peer.getRootPane.setDefaultButton(b map (_.peer) orNull)
}
def dispose() { peer.dispose() }
@@ -54,11 +53,13 @@ abstract class Window extends UIElement with RootPanel with Publisher { outer =>
def setLocationRelativeTo(c: UIElement) { peer.setLocationRelativeTo(c.peer) }
def centerOnScreen() { peer.setLocationRelativeTo(null) }
def location_=(p: Point) { peer.setLocation(p) }
+ override def size_=(size: Dimension) { peer.setSize(size) }
+ def bounds_=(rect: Rectangle) { peer.setBounds(rect) }
- def owner: Window = UIElement.cachedWrapper(peer.getOwner)
+ def owner: Window = UIElement.cachedWrapper[Window](peer.getOwner)
- def open() { peer.show() }
- def close() { peer.hide() }
+ def open() { peer setVisible true }
+ def close() { peer setVisible false }
peer.addWindowListener(new java.awt.event.WindowListener {
def windowActivated(e: java.awt.event.WindowEvent) { publish(WindowActivated(outer)) }
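
`open()` and `close()` now go through `setVisible` rather than the long-deprecated `java.awt.Window.show`/`hide`; usage is unchanged (a sketch):

    import scala.swing._

    val progress = new Frame { title = "Working"; contents = new Label("please wait...") }
    progress.open()    // peer.setVisible(true)
    // ... long-running work ...
    progress.close()   // peer.setVisible(false)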
diff --git a/src/swing/scala/swing/event/ActionEvent.scala b/src/swing/scala/swing/event/ActionEvent.scala
index 1f3f795da8..83df36ad7d 100644
--- a/src/swing/scala/swing/event/ActionEvent.scala
+++ b/src/swing/scala/swing/event/ActionEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/AdjustingEvent.scala b/src/swing/scala/swing/event/AdjustingEvent.scala
index 3867f9ed79..55e51d79f6 100644
--- a/src/swing/scala/swing/event/AdjustingEvent.scala
+++ b/src/swing/scala/swing/event/AdjustingEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/BackgroundChanged.scala b/src/swing/scala/swing/event/BackgroundChanged.scala
index 6eac04a655..3b6a71ba3a 100644
--- a/src/swing/scala/swing/event/BackgroundChanged.scala
+++ b/src/swing/scala/swing/event/BackgroundChanged.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/ButtonClicked.scala b/src/swing/scala/swing/event/ButtonClicked.scala
index 578ab41876..b5bd5ebbbc 100644
--- a/src/swing/scala/swing/event/ButtonClicked.scala
+++ b/src/swing/scala/swing/event/ButtonClicked.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/CaretUpdate.scala b/src/swing/scala/swing/event/CaretUpdate.scala
index c476049781..0560159038 100644
--- a/src/swing/scala/swing/event/CaretUpdate.scala
+++ b/src/swing/scala/swing/event/CaretUpdate.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/ComponentEvent.scala b/src/swing/scala/swing/event/ComponentEvent.scala
index 0ee06228ab..582932ae61 100644
--- a/src/swing/scala/swing/event/ComponentEvent.scala
+++ b/src/swing/scala/swing/event/ComponentEvent.scala
@@ -1,26 +1,25 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
package event
trait ComponentEvent extends UIEvent {
- override val source: Component
+ val source: Component
}
@deprecated("Use UIElementMoved instead.")
-case class ComponentMoved(override val source: Component) extends ComponentEvent
+case class ComponentMoved(source: Component) extends ComponentEvent
@deprecated("Use UIElementResized instead.")
-case class ComponentResized(override val source: Component) extends ComponentEvent
+case class ComponentResized(source: Component) extends ComponentEvent
@deprecated("Use UIElementShown instead.")
-case class ComponentShown(override val source: Component) extends ComponentEvent
+case class ComponentShown(source: Component) extends ComponentEvent
@deprecated("Use UIElementHidden instead.")
-case class ComponentHidden(override val source: Component) extends ComponentEvent
+case class ComponentHidden(source: Component) extends ComponentEvent
diff --git a/src/swing/scala/swing/event/ContainerEvent.scala b/src/swing/scala/swing/event/ContainerEvent.scala
index 9f2bb837c3..5eecf6a551 100644
--- a/src/swing/scala/swing/event/ContainerEvent.scala
+++ b/src/swing/scala/swing/event/ContainerEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/EditDone.scala b/src/swing/scala/swing/event/EditDone.scala
index 9d648214e9..250cf6f9ff 100644
--- a/src/swing/scala/swing/event/EditDone.scala
+++ b/src/swing/scala/swing/event/EditDone.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/Event.scala b/src/swing/scala/swing/event/Event.scala
index 443351cc65..3e04ae6bd2 100644
--- a/src/swing/scala/swing/event/Event.scala
+++ b/src/swing/scala/swing/event/Event.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/FocusEvent.scala b/src/swing/scala/swing/event/FocusEvent.scala
index 7c60490656..7f74000ea0 100644
--- a/src/swing/scala/swing/event/FocusEvent.scala
+++ b/src/swing/scala/swing/event/FocusEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/FontChanged.scala b/src/swing/scala/swing/event/FontChanged.scala
index b2202627a8..701d2792a4 100644
--- a/src/swing/scala/swing/event/FontChanged.scala
+++ b/src/swing/scala/swing/event/FontChanged.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/ForegroundChanged.scala b/src/swing/scala/swing/event/ForegroundChanged.scala
index 5091c0fc85..4a11de8d04 100644
--- a/src/swing/scala/swing/event/ForegroundChanged.scala
+++ b/src/swing/scala/swing/event/ForegroundChanged.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/InputEvent.scala b/src/swing/scala/swing/event/InputEvent.scala
index 8030be38cd..69cbdbaf74 100644
--- a/src/swing/scala/swing/event/InputEvent.scala
+++ b/src/swing/scala/swing/event/InputEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/Key.scala b/src/swing/scala/swing/event/Key.scala
index dfe5e89111..5bc3ad6c4a 100644
--- a/src/swing/scala/swing/event/Key.scala
+++ b/src/swing/scala/swing/event/Key.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/KeyEvent.scala b/src/swing/scala/swing/event/KeyEvent.scala
index 277731bb59..ff0da501b9 100644
--- a/src/swing/scala/swing/event/KeyEvent.scala
+++ b/src/swing/scala/swing/event/KeyEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -21,21 +20,24 @@ sealed abstract class KeyEvent extends InputEvent {
case class KeyTyped(val source: Component, char: Char, val modifiers: Key.Modifiers,
location: Key.Location.Value)
(val peer: java.awt.event.KeyEvent) extends KeyEvent {
- def this(e: java.awt.event.KeyEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getKeyChar, e.getModifiersEx,
- Key.Location(e.getKeyLocation))(e)
+ def this(e: java.awt.event.KeyEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getKeyChar, e.getModifiersEx,
+ Key.Location(e.getKeyLocation))(e)
}
case class KeyPressed(val source: Component, key: Key.Value, val modifiers: Key.Modifiers,
location: Key.Location.Value)
(val peer: java.awt.event.KeyEvent) extends KeyEvent {
- def this(e: java.awt.event.KeyEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- Key(e.getKeyCode), e.getModifiersEx, Key.Location(e.getKeyLocation))(e)
+ def this(e: java.awt.event.KeyEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ Key(e.getKeyCode), e.getModifiersEx, Key.Location(e.getKeyLocation))(e)
}
case class KeyReleased(val source: Component, key: Key.Value, val modifiers: Key.Modifiers,
location: Key.Location.Value)
(val peer: java.awt.event.KeyEvent) extends KeyEvent {
- def this(e: java.awt.event.KeyEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- Key(e.getKeyCode), e.getModifiersEx, Key.Location(e.getKeyLocation))(e)
+ def this(e: java.awt.event.KeyEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ Key(e.getKeyCode), e.getModifiersEx, Key.Location(e.getKeyLocation))(e)
}
diff --git a/src/swing/scala/swing/event/ListEvent.scala b/src/swing/scala/swing/event/ListEvent.scala
index 7a35f95011..b0dfc70318 100644
--- a/src/swing/scala/swing/event/ListEvent.scala
+++ b/src/swing/scala/swing/event/ListEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/MouseEvent.scala b/src/swing/scala/swing/event/MouseEvent.scala
index 44201c194f..8176a2a291 100644
--- a/src/swing/scala/swing/event/MouseEvent.scala
+++ b/src/swing/scala/swing/event/MouseEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -27,46 +26,54 @@ sealed abstract class MouseButtonEvent extends MouseEvent {
case class MouseClicked(val source: Component, point: Point, val modifiers: Key.Modifiers,
clicks: Int, triggersPopup: Boolean)(val peer: java.awt.event.MouseEvent)
extends MouseButtonEvent {
- def this(e: java.awt.event.MouseEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
+ def this(e: java.awt.event.MouseEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
}
case class MousePressed(val source: Component, point: Point, val modifiers: Key.Modifiers,
clicks: Int, triggersPopup: Boolean)(val peer: java.awt.event.MouseEvent)
extends MouseButtonEvent {
- def this(e: java.awt.event.MouseEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
+ def this(e: java.awt.event.MouseEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
}
case class MouseReleased(val source: Component, point: Point, val modifiers: Key.Modifiers,
clicks: Int, triggersPopup: Boolean)(val peer: java.awt.event.MouseEvent)
extends MouseButtonEvent {
- def this(e: java.awt.event.MouseEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
+ def this(e: java.awt.event.MouseEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx, e.getClickCount, e.isPopupTrigger)(e)
}
sealed abstract class MouseMotionEvent extends MouseEvent
case class MouseMoved(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
extends MouseMotionEvent {
- def this(e: java.awt.event.MouseEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx)(e)
+ def this(e: java.awt.event.MouseEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx)(e)
}
case class MouseDragged(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
extends MouseMotionEvent {
- def this(e: java.awt.event.MouseEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx)(e)
+ def this(e: java.awt.event.MouseEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx)(e)
}
case class MouseEntered(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
extends MouseMotionEvent {
- def this(e: java.awt.event.MouseEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx)(e)
+ def this(e: java.awt.event.MouseEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx)(e)
}
case class MouseExited(val source: Component, point: Point, val modifiers: Key.Modifiers)(val peer: java.awt.event.MouseEvent)
extends MouseMotionEvent {
- def this(e: java.awt.event.MouseEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx)(e)
+ def this(e: java.awt.event.MouseEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx)(e)
}
case class MouseWheelMoved(val source: Component, point: Point, val modifiers: Key.Modifiers, rotation: Int)(val peer: java.awt.event.MouseEvent)
extends MouseEvent {
- def this(e: java.awt.event.MouseWheelEvent) = this(UIElement.cachedWrapper(e.getSource.asInstanceOf[JComponent]),
- e.getPoint, e.getModifiersEx, e.getWheelRotation)(e)
+ def this(e: java.awt.event.MouseWheelEvent) =
+ this(UIElement.cachedWrapper[Component](e.getSource.asInstanceOf[JComponent]),
+ e.getPoint, e.getModifiersEx, e.getWheelRotation)(e)
}
diff --git a/src/swing/scala/swing/event/SelectionEvent.scala b/src/swing/scala/swing/event/SelectionEvent.scala
index ca0c0fabb7..898713aef8 100644
--- a/src/swing/scala/swing/event/SelectionEvent.scala
+++ b/src/swing/scala/swing/event/SelectionEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/TableEvent.scala b/src/swing/scala/swing/event/TableEvent.scala
index 101f6dd5d7..600346400e 100644
--- a/src/swing/scala/swing/event/TableEvent.scala
+++ b/src/swing/scala/swing/event/TableEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
@@ -18,7 +17,7 @@ abstract class TableChange(override val source: Table) extends TableEvent(source
/**
* The most general table change. The table might have changed completely,
- * i.e., comlumns might have been reordered, rows added or removed, etc.
+ * i.e., columns might have been reordered, rows added or removed, etc.
* No other event indicates that the structure might have changed.
*/
case class TableStructureChanged(override val source: Table) extends TableChange(source)
diff --git a/src/swing/scala/swing/event/UIEvent.scala b/src/swing/scala/swing/event/UIEvent.scala
index f4764b6c76..0373268dbc 100644
--- a/src/swing/scala/swing/event/UIEvent.scala
+++ b/src/swing/scala/swing/event/UIEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/ValueChanged.scala b/src/swing/scala/swing/event/ValueChanged.scala
index dd64615cee..5f32c0339a 100644
--- a/src/swing/scala/swing/event/ValueChanged.scala
+++ b/src/swing/scala/swing/event/ValueChanged.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/WindowActivated.scala b/src/swing/scala/swing/event/WindowActivated.scala
index e00de4dda3..3c0fae4c5e 100644
--- a/src/swing/scala/swing/event/WindowActivated.scala
+++ b/src/swing/scala/swing/event/WindowActivated.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/WindowClosing.scala b/src/swing/scala/swing/event/WindowClosing.scala
index 39347542a8..7d878f0199 100644
--- a/src/swing/scala/swing/event/WindowClosing.scala
+++ b/src/swing/scala/swing/event/WindowClosing.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/WindowDeactivated.scala b/src/swing/scala/swing/event/WindowDeactivated.scala
index cd5b03b785..08f0f63590 100644
--- a/src/swing/scala/swing/event/WindowDeactivated.scala
+++ b/src/swing/scala/swing/event/WindowDeactivated.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/WindowDeiconified.scala b/src/swing/scala/swing/event/WindowDeiconified.scala
index 5dfeb1e22b..2abeb731a4 100644
--- a/src/swing/scala/swing/event/WindowDeiconified.scala
+++ b/src/swing/scala/swing/event/WindowDeiconified.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/WindowEvent.scala b/src/swing/scala/swing/event/WindowEvent.scala
index 11eb19d238..d1be47d617 100644
--- a/src/swing/scala/swing/event/WindowEvent.scala
+++ b/src/swing/scala/swing/event/WindowEvent.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/WindowIconified.scala b/src/swing/scala/swing/event/WindowIconified.scala
index 52c80caa6a..82ec8ab1fe 100644
--- a/src/swing/scala/swing/event/WindowIconified.scala
+++ b/src/swing/scala/swing/event/WindowIconified.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/event/WindowOpened.scala b/src/swing/scala/swing/event/WindowOpened.scala
index 95029fb63d..54684c3415 100644
--- a/src/swing/scala/swing/event/WindowOpened.scala
+++ b/src/swing/scala/swing/event/WindowOpened.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
diff --git a/src/swing/scala/swing/model/Matrix.scala b/src/swing/scala/swing/model/Matrix.scala
index 1e5d13b817..f91512a284 100644
--- a/src/swing/scala/swing/model/Matrix.scala
+++ b/src/swing/scala/swing/model/Matrix.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
package scala.swing
package model
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index 8f4c281a4b..deb291ddb2 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -13,78 +13,8 @@ package object swing {
type Color = java.awt.Color
type Image = java.awt.Image
type Font = java.awt.Font
-}
-
-object Font {
- import swing._
-
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
- def decode(str: String) = java.awt.Font.decode(str)
-
- /* TODO: finish implementation
- /**
- * See [java.awt.Font.getFont].
- */
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
- java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
- import java.{util => ju}
- private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
- self =>
- override def size = underlying.size
-
- override def put(k : A, v : B) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
- override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
- def size = self.size
-
- def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
- val ui = underlying.iterator
- var prev : Option[A] = None
- def hasNext = ui.hasNext
-
- def next = {
- val (k, v) = ui.next
- prev = Some(k)
- new ju.Map.Entry[A, B] {
- def getKey = k
- def getValue = v
- def setValue(v1 : B) = self.put(k, v1)
- override def equals(other : Any) = other match {
- case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
- case _ => false
- }
- }
- }
-
- def remove = prev match {
- case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
- case _ => throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
- }
- */
-
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String) = java.awt.Font.getFont(nm)
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
-
- def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
-
-
-} \ No newline at end of file
+ protected[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
+ protected[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
+ protected[swing] def toAnyRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
+}
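Note: the swing package object now exposes three small null-bridging helpers in place of the removed Font and geometry factories. A minimal, self-contained sketch of the same idiom (hypothetical object name, not part of the patch):

    object NullBridgeSketch {
      def ifNull[A](o: Object, a: A): A = if (o eq null) a else o.asInstanceOf[A]
      def toOption[A](o: Object): Option[A] = if (o eq null) None else Some(o.asInstanceOf[A])

      def main(args: Array[String]) {
        val missing: Object = null
        println(ifNull(missing, "<default>"))   // prints <default>
        println(toOption[String](missing))      // prints None
        println(toOption[String]("value"))      // prints Some(value)
      }
    }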
diff --git a/src/swing/scala/swing/test/SimpleApplet.scala b/src/swing/scala/swing/test/SimpleApplet.scala
index 090f4cde8d..d5f17f8a40 100644
--- a/src/swing/scala/swing/test/SimpleApplet.scala
+++ b/src/swing/scala/swing/test/SimpleApplet.scala
@@ -7,8 +7,7 @@ class SimpleApplet extends Applet {
object ui extends UI with Reactor {
def init() = {
val button = new Button("Press here!")
- val text = new TextArea("Java Version: " +
- System.getProperty("java.version")+"\n")
+ val text = new TextArea("Java Version: " + util.Properties.javaVersion + "\n")
listenTo(button)
reactions += {
case ButtonClicked(_) => text.text += "Button Pressed!\n"
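Note: scala.util.Properties.javaVersion reads the same JVM property as System.getProperty("java.version") but shields the caller from a null result. A tiny hedged sketch of the equivalence (hypothetical object name):

    object JavaVersionSketch {
      def main(args: Array[String]) {
        // Both lines print the running JVM's version; the library accessor never yields null.
        println("Java Version: " + System.getProperty("java.version"))
        println("Java Version: " + scala.util.Properties.javaVersion)
      }
    }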
diff --git a/src/swing/scala/swing/test/UIDemo.scala b/src/swing/scala/swing/test/UIDemo.scala
index ce40fe2056..9207c82948 100644
--- a/src/swing/scala/swing/test/UIDemo.scala
+++ b/src/swing/scala/swing/test/UIDemo.scala
@@ -73,11 +73,7 @@ object UIDemo extends SimpleSwingApplication {
reactLive = live.selected
}
}
- contents += new Button("Center Frame") {
- reactions += {
- case ButtonClicked(_) => centerOnScreen()
- }
- }
+ contents += new Button(Action("Center Frame") { centerOnScreen() })
}
pages += new Page("Buttons", buttons)
pages += new Page("GridBag", GridBagDemo.ui)
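Note: scala.swing.Button has a constructor taking an Action, so the listenTo/reactions boilerplate collapses into one expression. A minimal sketch of the same pattern outside the demo (hypothetical app, not part of UIDemo):

    import scala.swing._

    object ActionButtonSketch extends SimpleSwingApplication {
      def top = new MainFrame {
        title = "Action button"
        // The Action carries both the label and the click handler.
        contents = new Button(Action("Say hi") { println("hi") })
      }
    }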
diff --git a/test/files/cli/test1/Main.check.j9vm5 b/test/attic/files/cli/test1/Main.check.j9vm5
index de454ef478..de454ef478 100644
--- a/test/files/cli/test1/Main.check.j9vm5
+++ b/test/attic/files/cli/test1/Main.check.j9vm5
diff --git a/test/files/cli/test1/Main.check.java b/test/attic/files/cli/test1/Main.check.java
index 64410de98f..64410de98f 100644
--- a/test/files/cli/test1/Main.check.java
+++ b/test/attic/files/cli/test1/Main.check.java
diff --git a/test/files/cli/test1/Main.check.java5 b/test/attic/files/cli/test1/Main.check.java5
index 64410de98f..64410de98f 100644
--- a/test/files/cli/test1/Main.check.java5
+++ b/test/attic/files/cli/test1/Main.check.java5
diff --git a/test/files/cli/test1/Main.check.java5_api b/test/attic/files/cli/test1/Main.check.java5_api
index 8693a5d92f..8693a5d92f 100644
--- a/test/files/cli/test1/Main.check.java5_api
+++ b/test/attic/files/cli/test1/Main.check.java5_api
diff --git a/test/files/cli/test1/Main.check.java5_j9 b/test/attic/files/cli/test1/Main.check.java5_j9
index de454ef478..de454ef478 100644
--- a/test/files/cli/test1/Main.check.java5_j9
+++ b/test/attic/files/cli/test1/Main.check.java5_j9
diff --git a/test/files/cli/test1/Main.check.javac b/test/attic/files/cli/test1/Main.check.javac
index ba25d9b6ca..ba25d9b6ca 100644
--- a/test/files/cli/test1/Main.check.javac
+++ b/test/attic/files/cli/test1/Main.check.javac
diff --git a/test/files/cli/test1/Main.check.javac5 b/test/attic/files/cli/test1/Main.check.javac5
index 0cb29d31ff..0cb29d31ff 100644
--- a/test/files/cli/test1/Main.check.javac5
+++ b/test/attic/files/cli/test1/Main.check.javac5
diff --git a/test/files/cli/test1/Main.check.javac6 b/test/attic/files/cli/test1/Main.check.javac6
index 8f37a05bcb..8f37a05bcb 100644
--- a/test/files/cli/test1/Main.check.javac6
+++ b/test/attic/files/cli/test1/Main.check.javac6
diff --git a/test/files/cli/test1/Main.check.jikes b/test/attic/files/cli/test1/Main.check.jikes
index cd891689db..cd891689db 100644
--- a/test/files/cli/test1/Main.check.jikes
+++ b/test/attic/files/cli/test1/Main.check.jikes
diff --git a/test/files/cli/test1/Main.check.jikes5 b/test/attic/files/cli/test1/Main.check.jikes5
index cd891689db..cd891689db 100644
--- a/test/files/cli/test1/Main.check.jikes5
+++ b/test/attic/files/cli/test1/Main.check.jikes5
diff --git a/test/files/cli/test1/Main.check.scala b/test/attic/files/cli/test1/Main.check.scala
index 43b200ae02..43b200ae02 100644
--- a/test/files/cli/test1/Main.check.scala
+++ b/test/attic/files/cli/test1/Main.check.scala
diff --git a/test/files/cli/test1/Main.check.scala_api b/test/attic/files/cli/test1/Main.check.scala_api
index 6fac39d3f0..6fac39d3f0 100644
--- a/test/files/cli/test1/Main.check.scala_api
+++ b/test/attic/files/cli/test1/Main.check.scala_api
diff --git a/test/files/cli/test1/Main.check.scala_j9 b/test/attic/files/cli/test1/Main.check.scala_j9
index 65d5ddaac4..65d5ddaac4 100644
--- a/test/files/cli/test1/Main.check.scala_j9
+++ b/test/attic/files/cli/test1/Main.check.scala_j9
diff --git a/test/files/cli/test1/Main.check.scalac b/test/attic/files/cli/test1/Main.check.scalac
index 8465810d0b..8465810d0b 100644
--- a/test/files/cli/test1/Main.check.scalac
+++ b/test/attic/files/cli/test1/Main.check.scalac
diff --git a/test/files/cli/test1/Main.check.scalaint b/test/attic/files/cli/test1/Main.check.scalaint
index 88345d1874..88345d1874 100644
--- a/test/files/cli/test1/Main.check.scalaint
+++ b/test/attic/files/cli/test1/Main.check.scalaint
diff --git a/test/files/cli/test1/Main.java b/test/attic/files/cli/test1/Main.java
index 8850b87517..8850b87517 100644
--- a/test/files/cli/test1/Main.java
+++ b/test/attic/files/cli/test1/Main.java
diff --git a/test/files/cli/test1/Main.scala b/test/attic/files/cli/test1/Main.scala
index f7dd8a0a36..f7dd8a0a36 100644
--- a/test/files/cli/test1/Main.scala
+++ b/test/attic/files/cli/test1/Main.scala
diff --git a/test/files/cli/test2/Main.check.j9vm5 b/test/attic/files/cli/test2/Main.check.j9vm5
index 8f4fdf8aa1..8f4fdf8aa1 100644
--- a/test/files/cli/test2/Main.check.j9vm5
+++ b/test/attic/files/cli/test2/Main.check.j9vm5
diff --git a/test/files/cli/test2/Main.check.java b/test/attic/files/cli/test2/Main.check.java
index aca383de3e..aca383de3e 100644
--- a/test/files/cli/test2/Main.check.java
+++ b/test/attic/files/cli/test2/Main.check.java
diff --git a/test/files/cli/test2/Main.check.java5 b/test/attic/files/cli/test2/Main.check.java5
index aca383de3e..aca383de3e 100644
--- a/test/files/cli/test2/Main.check.java5
+++ b/test/attic/files/cli/test2/Main.check.java5
diff --git a/test/files/cli/test2/Main.check.java5_api b/test/attic/files/cli/test2/Main.check.java5_api
index 4ff775c3da..4ff775c3da 100644
--- a/test/files/cli/test2/Main.check.java5_api
+++ b/test/attic/files/cli/test2/Main.check.java5_api
diff --git a/test/files/cli/test2/Main.check.java5_j9 b/test/attic/files/cli/test2/Main.check.java5_j9
index 2dcb6e892a..2dcb6e892a 100644
--- a/test/files/cli/test2/Main.check.java5_j9
+++ b/test/attic/files/cli/test2/Main.check.java5_j9
diff --git a/test/files/cli/test2/Main.check.javac b/test/attic/files/cli/test2/Main.check.javac
index c40c0a7a89..c40c0a7a89 100644
--- a/test/files/cli/test2/Main.check.javac
+++ b/test/attic/files/cli/test2/Main.check.javac
diff --git a/test/files/cli/test2/Main.check.javac5 b/test/attic/files/cli/test2/Main.check.javac5
index 0ac32b056e..0ac32b056e 100644
--- a/test/files/cli/test2/Main.check.javac5
+++ b/test/attic/files/cli/test2/Main.check.javac5
diff --git a/test/files/cli/test2/Main.check.javac6 b/test/attic/files/cli/test2/Main.check.javac6
index 350d3253bc..350d3253bc 100644
--- a/test/files/cli/test2/Main.check.javac6
+++ b/test/attic/files/cli/test2/Main.check.javac6
diff --git a/test/files/cli/test2/Main.check.jikes b/test/attic/files/cli/test2/Main.check.jikes
index 97943e8347..97943e8347 100644
--- a/test/files/cli/test2/Main.check.jikes
+++ b/test/attic/files/cli/test2/Main.check.jikes
diff --git a/test/files/cli/test2/Main.check.jikes5 b/test/attic/files/cli/test2/Main.check.jikes5
index 97943e8347..97943e8347 100644
--- a/test/files/cli/test2/Main.check.jikes5
+++ b/test/attic/files/cli/test2/Main.check.jikes5
diff --git a/test/files/cli/test2/Main.check.scala b/test/attic/files/cli/test2/Main.check.scala
index 7e5f17625b..7e5f17625b 100644
--- a/test/files/cli/test2/Main.check.scala
+++ b/test/attic/files/cli/test2/Main.check.scala
diff --git a/test/files/cli/test2/Main.check.scala_api b/test/attic/files/cli/test2/Main.check.scala_api
index bcb0f0c7fb..bcb0f0c7fb 100644
--- a/test/files/cli/test2/Main.check.scala_api
+++ b/test/attic/files/cli/test2/Main.check.scala_api
diff --git a/test/files/cli/test2/Main.check.scala_j9 b/test/attic/files/cli/test2/Main.check.scala_j9
index 80cbb50fa9..80cbb50fa9 100644
--- a/test/files/cli/test2/Main.check.scala_j9
+++ b/test/attic/files/cli/test2/Main.check.scala_j9
diff --git a/test/files/cli/test2/Main.check.scalac b/test/attic/files/cli/test2/Main.check.scalac
index 8465810d0b..8465810d0b 100644
--- a/test/files/cli/test2/Main.check.scalac
+++ b/test/attic/files/cli/test2/Main.check.scalac
diff --git a/test/files/cli/test2/Main.check.scalaint b/test/attic/files/cli/test2/Main.check.scalaint
index 89b6766bb5..89b6766bb5 100644
--- a/test/files/cli/test2/Main.check.scalaint
+++ b/test/attic/files/cli/test2/Main.check.scalaint
diff --git a/test/files/cli/test2/Main.java b/test/attic/files/cli/test2/Main.java
index f6797632bf..f6797632bf 100644
--- a/test/files/cli/test2/Main.java
+++ b/test/attic/files/cli/test2/Main.java
diff --git a/test/files/cli/test2/Main.scala b/test/attic/files/cli/test2/Main.scala
index 1d43759fdf..11c878b9c0 100644
--- a/test/files/cli/test2/Main.scala
+++ b/test/attic/files/cli/test2/Main.scala
@@ -1,5 +1,5 @@
// @info 1 dependency
-package test2
+package test2
object Main {
def main(args: Array[String]) = {
Console.print("2: ")
diff --git a/test/files/cli/test3/Main.check.j9vm5 b/test/attic/files/cli/test3/Main.check.j9vm5
index a094dc8daf..a094dc8daf 100644
--- a/test/files/cli/test3/Main.check.j9vm5
+++ b/test/attic/files/cli/test3/Main.check.j9vm5
diff --git a/test/files/cli/test3/Main.check.java b/test/attic/files/cli/test3/Main.check.java
index de3eb7b136..de3eb7b136 100644
--- a/test/files/cli/test3/Main.check.java
+++ b/test/attic/files/cli/test3/Main.check.java
diff --git a/test/files/cli/test3/Main.check.java5 b/test/attic/files/cli/test3/Main.check.java5
index de3eb7b136..de3eb7b136 100644
--- a/test/files/cli/test3/Main.check.java5
+++ b/test/attic/files/cli/test3/Main.check.java5
diff --git a/test/files/cli/test3/Main.check.java5_api b/test/attic/files/cli/test3/Main.check.java5_api
index f6112211f0..f6112211f0 100644
--- a/test/files/cli/test3/Main.check.java5_api
+++ b/test/attic/files/cli/test3/Main.check.java5_api
diff --git a/test/files/cli/test3/Main.check.java5_j9 b/test/attic/files/cli/test3/Main.check.java5_j9
index 9e228d7649..9e228d7649 100644
--- a/test/files/cli/test3/Main.check.java5_j9
+++ b/test/attic/files/cli/test3/Main.check.java5_j9
diff --git a/test/files/cli/test3/Main.check.javac b/test/attic/files/cli/test3/Main.check.javac
index 8d235b647b..8d235b647b 100644
--- a/test/files/cli/test3/Main.check.javac
+++ b/test/attic/files/cli/test3/Main.check.javac
diff --git a/test/files/cli/test3/Main.check.javac5 b/test/attic/files/cli/test3/Main.check.javac5
index 3a48fa000e..3a48fa000e 100644
--- a/test/files/cli/test3/Main.check.javac5
+++ b/test/attic/files/cli/test3/Main.check.javac5
diff --git a/test/files/cli/test3/Main.check.javac6 b/test/attic/files/cli/test3/Main.check.javac6
index 677b950aed..677b950aed 100644
--- a/test/files/cli/test3/Main.check.javac6
+++ b/test/attic/files/cli/test3/Main.check.javac6
diff --git a/test/files/cli/test3/Main.check.jikes b/test/attic/files/cli/test3/Main.check.jikes
index 604333e81a..604333e81a 100644
--- a/test/files/cli/test3/Main.check.jikes
+++ b/test/attic/files/cli/test3/Main.check.jikes
diff --git a/test/files/cli/test3/Main.check.jikes5 b/test/attic/files/cli/test3/Main.check.jikes5
index 604333e81a..604333e81a 100644
--- a/test/files/cli/test3/Main.check.jikes5
+++ b/test/attic/files/cli/test3/Main.check.jikes5
diff --git a/test/files/cli/test3/Main.check.scala b/test/attic/files/cli/test3/Main.check.scala
index f78729b9a2..f78729b9a2 100644
--- a/test/files/cli/test3/Main.check.scala
+++ b/test/attic/files/cli/test3/Main.check.scala
diff --git a/test/files/cli/test3/Main.check.scala_api b/test/attic/files/cli/test3/Main.check.scala_api
index 4552819b5b..4552819b5b 100644
--- a/test/files/cli/test3/Main.check.scala_api
+++ b/test/attic/files/cli/test3/Main.check.scala_api
diff --git a/test/files/cli/test3/Main.check.scala_j9 b/test/attic/files/cli/test3/Main.check.scala_j9
index 3804c17636..3804c17636 100644
--- a/test/files/cli/test3/Main.check.scala_j9
+++ b/test/attic/files/cli/test3/Main.check.scala_j9
diff --git a/test/files/cli/test3/Main.check.scalac b/test/attic/files/cli/test3/Main.check.scalac
index 8465810d0b..8465810d0b 100644
--- a/test/files/cli/test3/Main.check.scalac
+++ b/test/attic/files/cli/test3/Main.check.scalac
diff --git a/test/files/cli/test3/Main.check.scalaint b/test/attic/files/cli/test3/Main.check.scalaint
index cffa02c5b6..cffa02c5b6 100644
--- a/test/files/cli/test3/Main.check.scalaint
+++ b/test/attic/files/cli/test3/Main.check.scalaint
diff --git a/test/files/cli/test3/Main.java b/test/attic/files/cli/test3/Main.java
index 208863d012..208863d012 100644
--- a/test/files/cli/test3/Main.java
+++ b/test/attic/files/cli/test3/Main.java
diff --git a/test/files/cli/test3/Main.scala b/test/attic/files/cli/test3/Main.scala
index 63fc11b771..63fc11b771 100644
--- a/test/files/cli/test3/Main.scala
+++ b/test/attic/files/cli/test3/Main.scala
diff --git a/test/debug/buildmanager/.gitignore b/test/debug/buildmanager/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/buildmanager/.gitignore
diff --git a/test/debug/jvm/.gitignore b/test/debug/jvm/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/jvm/.gitignore
diff --git a/test/debug/neg/.gitignore b/test/debug/neg/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/neg/.gitignore
diff --git a/test/debug/pos/.gitignore b/test/debug/pos/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/pos/.gitignore
diff --git a/test/debug/res/.gitignore b/test/debug/res/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/res/.gitignore
diff --git a/test/debug/run/.gitignore b/test/debug/run/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/run/.gitignore
diff --git a/test/debug/scalacheck/.gitignore b/test/debug/scalacheck/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/scalacheck/.gitignore
diff --git a/test/debug/scalap/.gitignore b/test/debug/scalap/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/scalap/.gitignore
diff --git a/test/debug/shootout/.gitignore b/test/debug/shootout/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/shootout/.gitignore
diff --git a/test/files/script/loadAndExecute/loadAndExecute.check b/test/disabled-windows/script/loadAndExecute.check
index ccd8cd6e37..ccd8cd6e37 100644
--- a/test/files/script/loadAndExecute/loadAndExecute.check
+++ b/test/disabled-windows/script/loadAndExecute.check
diff --git a/test/files/script/loadAndExecute/lAndE1.scala b/test/disabled-windows/script/loadAndExecute/lAndE1.scala
index b20d1a9428..b20d1a9428 100755
--- a/test/files/script/loadAndExecute/lAndE1.scala
+++ b/test/disabled-windows/script/loadAndExecute/lAndE1.scala
diff --git a/test/files/script/loadAndExecute/lAndE2.scala b/test/disabled-windows/script/loadAndExecute/lAndE2.scala
index ea15a04d86..ea15a04d86 100755
--- a/test/files/script/loadAndExecute/lAndE2.scala
+++ b/test/disabled-windows/script/loadAndExecute/lAndE2.scala
diff --git a/test/files/script/loadAndExecute/loadAndExecute.scala b/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala
index 2a9718382b..2a9718382b 100755
--- a/test/files/script/loadAndExecute/loadAndExecute.scala
+++ b/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala
diff --git a/test/files/script/utf8.bat b/test/disabled-windows/script/utf8.bat
index 8bc5c886f7..8bc5c886f7 100755
--- a/test/files/script/utf8.bat
+++ b/test/disabled-windows/script/utf8.bat
diff --git a/test/files/script/utf8.check b/test/disabled-windows/script/utf8.check
index 29dc0518ff..29dc0518ff 100644
--- a/test/files/script/utf8.check
+++ b/test/disabled-windows/script/utf8.check
diff --git a/test/files/script/utf8.scala b/test/disabled-windows/script/utf8.scala
index 5366562cee..5dfade0bb2 100755
--- a/test/files/script/utf8.scala
+++ b/test/disabled-windows/script/utf8.scala
@@ -1,6 +1,6 @@
#!/bin/sh
-# fact - A simple Scala script that prints out the factorial of
-# the argument specified on the command line.
+#
+# Checks if UTF-8 output makes it through unmangled.
cygwin=false;
case "`uname`" in
@@ -9,7 +9,7 @@ esac
SOURCE="$0";
if $cygwin; then
- if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ;
+ if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ;
then
format=mixed
else
@@ -18,8 +18,7 @@ then
SOURCE=`cygpath --$format "$SOURCE"`;
fi
-export LC_CTYPE=en_US.UTF-8
-exec scala -nocompdaemon "$SOURCE" "$@"
+exec scala -Dfile.encoding="UTF-8" -nocompdaemon "$SOURCE" "$@"
!#
/*Comment Комментарий*/
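Note: passing -Dfile.encoding=UTF-8 to the launcher pins the JVM's default charset before stdout is wired up, which is what lets the Cyrillic text in the script above survive on machines whose locale is not UTF-8. A small sketch of what the script effectively checks (hypothetical object name):

    object EncodingSketch {
      def main(args: Array[String]) {
        // The charset the JVM resolved at startup; -Dfile.encoding=UTF-8 pins it.
        println(System.getProperty("file.encoding"))
        println("Comment Комментарий") // round-trips only with a UTF-8-capable stdout
      }
    }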
diff --git a/test/disabled/buildmanager/t2651_1/A.scala b/test/disabled/buildmanager/t2651_1/A.scala
new file mode 100644
index 0000000000..d712f6febe
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/A.scala
@@ -0,0 +1 @@
+trait A[T]
diff --git a/test/disabled/buildmanager/t2651_1/B.scala b/test/disabled/buildmanager/t2651_1/B.scala
new file mode 100644
index 0000000000..a8aca3d0ed
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/B.scala
@@ -0,0 +1,2 @@
+trait B[T] extends A[T]
+
diff --git a/test/disabled/buildmanager/t2651_1/C.scala b/test/disabled/buildmanager/t2651_1/C.scala
new file mode 100644
index 0000000000..690dcf518d
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/C.scala
@@ -0,0 +1,3 @@
+object C {
+ new A[Int] {}
+}
diff --git a/test/disabled/buildmanager/t2651_1/D.scala b/test/disabled/buildmanager/t2651_1/D.scala
new file mode 100644
index 0000000000..51273ad986
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/D.scala
@@ -0,0 +1,3 @@
+object D {
+ def x[T](a: A[T]) = a
+}
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala b/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala
new file mode 100644
index 0000000000..574b522149
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala
@@ -0,0 +1,2 @@
+trait A
+
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.check b/test/disabled/buildmanager/t2651_1/t2651_1.check
new file mode 100644
index 0000000000..8d2cbc8194
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/t2651_1.check
@@ -0,0 +1,19 @@
+builder > A.scala B.scala C.scala D.scala
+compiling Set(A.scala, B.scala, C.scala, D.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Class(A))[ tparams: List()]))
+invalidate B.scala because parents have changed [Changed(Class(A))[ tparams: List()]]
+invalidate C.scala because parents have changed [Changed(Class(A))[ tparams: List()]]
+invalidate D.scala because it references changed class [Changed(Class(A))[ tparams: List()]]
+compiling Set(B.scala, C.scala, D.scala)
+B.scala:1: error: A does not take type parameters
+trait B[T] extends A[T]
+ ^
+C.scala:2: error: A does not take type parameters
+ new A[Int] {}
+ ^
+D.scala:2: error: A does not take type parameters
+ def x[T](a: A[T]) = a
+ ^
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.test b/test/disabled/buildmanager/t2651_1/t2651_1.test
new file mode 100644
index 0000000000..4f67d5e233
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/t2651_1.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala D.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/disabled/pos/bug2919.scala b/test/disabled/pos/bug2919.scala
new file mode 100644
index 0000000000..5e51cf9de7
--- /dev/null
+++ b/test/disabled/pos/bug2919.scala
@@ -0,0 +1,12 @@
+import javax.xml.bind.annotation.adapters.XmlAdapter
+import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter
+
+case class Link(
+ @XmlJavaTypeAdapter(classOf[StringOptionAdapter]) val title: Option[String]
+)
+
+class StringOptionAdapter extends XmlAdapter[String, Option[String]] {
+ def unmarshal(str: String) = error("stub")
+ def marshal(op: Option[String]) = error("Stub")
+}
+
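Note: this disabled test only needs the @XmlJavaTypeAdapter annotation to typecheck, so both adapter methods are stubs. For illustration only, a hedged sketch of what a working adapter could look like (not part of the test):

    import javax.xml.bind.annotation.adapters.XmlAdapter

    // Maps absent XML values to None and present ones to Some(...).
    class WorkingStringOptionAdapter extends XmlAdapter[String, Option[String]] {
      def unmarshal(str: String): Option[String] = Option(str)
      def marshal(op: Option[String]): String = op.orNull
    }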
diff --git a/test/disabled/pos/spec-traits.scala b/test/disabled/pos/spec-traits.scala
new file mode 100644
index 0000000000..49a59d5391
--- /dev/null
+++ b/test/disabled/pos/spec-traits.scala
@@ -0,0 +1,83 @@
+trait A[@specialized(Int) T] { def foo: T }
+class B extends A[Int] { val foo = 10 }
+class C extends B
+
+// issue 3309
+class Lazy {
+ def test[U](block: => U): Unit = { block }
+
+ test { lazy val x = 1 }
+}
+
+// issue 3307
+class Bug3307 {
+ def f[Z](block: String => Z) {
+ block("abc")
+ }
+
+ ({ () =>
+ f { implicit x => println(x) } })()
+}
+
+// issue 3301
+ trait T[X]
+
+class Bug3301 {
+ def t[A]: T[A] = error("stub")
+
+ () => {
+ type X = Int
+
+ def foo[X] = t[X]
+ ()
+ }
+}
+// issue 3299
+object Failure {
+ def thunk() {
+ for (i <- 1 to 2) {
+ val Array(a, b) = Array(1,2)
+ ()
+ }
+ }
+}
+
+// issue 3296
+
+object AA
+{
+ def f(block: => Unit) {}
+
+ object BB
+ {
+ f {
+ object CC
+
+ ()
+ }
+ }
+
+ def foo[T](x: T) = { object A; false }
+}
+
+// issue 3292
+import scala.swing._
+import scala.swing.GridBagPanel._
+
+object Grid {
+
+ def later(code : => Unit) =
+ javax.swing.SwingUtilities.invokeLater(new Runnable { def run { code }})
+
+ def test = later {
+ val frame = new Dialog {
+ contents = new GridBagPanel {
+ val c = new Constraints
+ }
+ }
+ }
+
+}
+
+// issue 3325
+object O { def f[@specialized T] { for(k <- Nil: List[T]) { } } }
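Note: spec-traits.scala folds several specialization regressions (the issue numbers cited in its comments) into a single pos test. A minimal standalone sketch of the @specialized pattern its first lines exercise (hypothetical names):

    // With @specialized(Int), the trait gets an Int-specialized variant to avoid boxing.
    trait Box[@specialized(Int) T] { def value: T }

    object SpecSketch {
      def main(args: Array[String]) {
        val b: Box[Int] = new Box[Int] { val value = 42 }
        println(b.value)
      }
    }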
diff --git a/test/files/run/docgenerator.check b/test/disabled/run/docgenerator.check
index dbb7eeed28..dbb7eeed28 100644
--- a/test/files/run/docgenerator.check
+++ b/test/disabled/run/docgenerator.check
diff --git a/test/files/run/docgenerator.scala b/test/disabled/run/docgenerator.scala
index 59f90ba415..8e0c7589ad 100644
--- a/test/files/run/docgenerator.scala
+++ b/test/disabled/run/docgenerator.scala
@@ -10,7 +10,7 @@ object Test {
def main(args: Array[String]) {
// overwrites value of UrlContext.generator in file DocUtil.scala
System.setProperty("doc.generator", "scaladoc")
- var dirname = System.getProperty("scalatest.output")
+ var dirname = System.getProperty("partest.output")
if (dirname eq null) dirname = System.getProperty("java.io.tmpdir")
val tmpDir = new File(dirname)
tmpDir.mkdirs()
@@ -116,7 +116,7 @@ object Foo2 {
// when running that compiler, give it a scala-library to the classpath
docSettings.classpath.value = System.getProperty("java.class.path")
reporter = new ConsoleReporter(docSettings)
- val command = new CompilerCommand(args.toList, docSettings, error, false)
+ val command = new CompilerCommand(args.toList, docSettings)
try {
object compiler extends Global(command.settings, reporter) {
override protected def computeInternalPhases() : Unit = {
diff --git a/test/disabled/run/script-positions.scala b/test/disabled/run/script-positions.scala
new file mode 100644
index 0000000000..2c80d550c0
--- /dev/null
+++ b/test/disabled/run/script-positions.scala
@@ -0,0 +1,86 @@
+import scala.tools.nsc._
+import util.stringFromStream
+
+// Testing "scripts" without the platform delights which accompany actual scripts.
+object Scripts {
+
+ val test1 =
+"""#!/bin/sh
+ exec scala $0 $@
+!#
+
+println("statement 1")
+println("statement 2".thisisborked)
+println("statement 3")
+"""
+
+ val output1 =
+"""thisisborked.scala:6: error: value thisisborked is not a member of java.lang.String
+println("statement 2".thisisborked)
+ ^
+one error found"""
+ val test2 =
+"""#!scala
+// foo
+// bar
+!#
+
+val x = "line 6"
+val y = "line 7"
+val z "line 8""""
+
+ val output2 =
+"""bob.scala:8: error: '=' expected but string literal found.
+val z "line 8"
+ ^
+bob.scala:8: error: illegal start of simple expression
+val z "line 8"
+ ^
+two errors found"""
+}
+
+object Test {
+ import Scripts._
+
+ def settings = new GenericRunnerSettings(println _)
+ settings.nocompdaemon.value = true
+
+ def runScript(code: String): String =
+ stringFromStream(stream =>
+ Console.withOut(stream) {
+ Console.withErr(stream) {
+ ScriptRunner.runCommand(settings, code, Nil)
+ }
+ }
+ )
+
+ val tests: List[(String, String)] = List(
+ test1 -> output1,
+ test2 -> output2
+ )
+ // def lines(s: String) = s split """\r\n|\r|\n""" toList
+ def lines(s: String) = s split "\\n" toList
+
+ // strip the random temp filename from error msgs
+ def stripFilename(s: String) = (s indexOf ".scala:") match {
+ case -1 => s
+ case idx => s drop (idx + 7)
+ }
+ def toLines(text: String) = lines(text) map stripFilename
+
+ def main(args: Array[String]): Unit = {
+ for ((code, expected) <- tests) {
+ val out = toLines(runScript(code))
+ val exp = toLines(expected)
+ val nomatch = out zip exp filter { case (x, y) => x != y }
+ val success = out.size == exp.size && nomatch.isEmpty
+
+ assert(
+ success,
+ "Output doesn't match expected:\n" +
+ "Expected:\n" + expected +
+ "Actual:\n" + out.mkString("\n")
+ )
+ }
+ }
+}
diff --git a/test/disabled/run/sigtp.check b/test/disabled/run/sigtp.check
new file mode 100644
index 0000000000..6b961be3d0
--- /dev/null
+++ b/test/disabled/run/sigtp.check
@@ -0,0 +1,7 @@
+public A Bug.key()
+public Bug<A, B> Bug.foo()
+public Bug<A, B> Bug.next()
+public void Bug.next_$eq(Bug<A, B>)
+public abstract A BugBase.key()
+public abstract E BugBase.next()
+public abstract void BugBase.next_$eq(E)
diff --git a/test/disabled/run/sigtp.scala b/test/disabled/run/sigtp.scala
new file mode 100644
index 0000000000..8cba5a748b
--- /dev/null
+++ b/test/disabled/run/sigtp.scala
@@ -0,0 +1,18 @@
+trait BugBase [A, E] {
+ val key: A
+ var next: E = _
+}
+
+final class Bug[A, B](val key: A) extends BugBase[A, Bug[A, B]] {
+ def foo = next
+}
+
+object Test {
+ def f(clazz: Class[_]) =
+ clazz.getDeclaredMethods.toList.map(_.toGenericString).sorted foreach println
+
+ def main(args: Array[String]): Unit = {
+ f(classOf[Bug[_, _]])
+ f(classOf[BugBase[_, _]])
+ }
+}
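Note: the .check file above lists the Java generic signatures that reflection should report once Scala type parameters are preserved in the bytecode. A small sketch of the same reflection call on an unrelated class (names hypothetical):

    object GenericSigSketch {
      class Pair[A, B](val first: A, val second: B)

      def main(args: Array[String]) {
        // toGenericString keeps type parameters such as A and B in the printed signatures.
        classOf[Pair[_, _]].getDeclaredMethods.toList.map(_.toGenericString).sorted foreach println
      }
    }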
diff --git a/test/disabled/run/t2946/Parsers.scala b/test/disabled/run/t2946/Parsers.scala
new file mode 100644
index 0000000000..c0961034c4
--- /dev/null
+++ b/test/disabled/run/t2946/Parsers.scala
@@ -0,0 +1,4 @@
+class Parser {
+ def parse(t: Any): Unit = {
+ }
+}
diff --git a/test/disabled/run/t2946/ResponseCommon.scala b/test/disabled/run/t2946/ResponseCommon.scala
new file mode 100644
index 0000000000..fa9d8acccb
--- /dev/null
+++ b/test/disabled/run/t2946/ResponseCommon.scala
@@ -0,0 +1,14 @@
+trait ResponseCommon extends Parser {
+ private[this] var paramsParser: Parser = null
+ def withParamsParser(parser: Parser) = {paramsParser = parser; this}
+
+ class Foo {
+ println(paramsParser)
+ }
+
+ override abstract def parse(t: Any): Unit = t match {
+ case ("params", value: List[_]) => value.foreach {paramsParser.parse(_)}
+ case _ => super.parse(t)
+ }
+}
+
diff --git a/test/disabled/run/t2946/Test.scala b/test/disabled/run/t2946/Test.scala
new file mode 100644
index 0000000000..e9d9896a0e
--- /dev/null
+++ b/test/disabled/run/t2946/Test.scala
@@ -0,0 +1,7 @@
+class Test extends Parser with ResponseCommon
+
+object Test {
+ def main(args: Array[String]) {
+ new Test
+ }
+}
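Note: ResponseCommon relies on the stackable-trait idiom (an 'override abstract' parse that delegates to super), so it can only be mixed into a class that already provides a concrete parse, as Test.scala does. A self-contained sketch of the idiom with hypothetical names:

    class Logger { def log(msg: String) { println(msg) } }

    trait Timestamped extends Logger {
      // 'abstract override' stacks on the concrete log and forwards through super.
      abstract override def log(msg: String) { super.log(System.currentTimeMillis + ": " + msg) }
    }

    object StackableSketch {
      def main(args: Array[String]) {
        (new Logger with Timestamped).log("hello")
      }
    }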
diff --git a/test/disabled/scalacheck/redblack.scala b/test/disabled/scalacheck/redblack.scala
new file mode 100644
index 0000000000..301d332334
--- /dev/null
+++ b/test/disabled/scalacheck/redblack.scala
@@ -0,0 +1,157 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+/*
+Properties of a Red & Black Tree:
+
+A node is either red or black.
+The root is black. (This rule is used in some definitions and not others. Since the
+root can always be changed from red to black but not necessarily vice-versa this
+rule has little effect on analysis.)
+All leaves are black.
+Both children of every red node are black.
+Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
+*/
+
+abstract class RedBlackTest extends Properties("RedBlack") {
+ object RedBlackTest extends scala.collection.immutable.RedBlack[Int] {
+ def isSmaller(x: Int, y: Int) = x < y
+ }
+
+ import RedBlackTest._
+
+ def rootIsBlack[A](t: Tree[A]) = t.isBlack
+
+ def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match {
+ case Empty => t.isBlack
+ case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack
+ }
+
+ def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match {
+ case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t))
+ case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
+ case Empty => true
+ }
+
+ def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match {
+ case Empty => List(1)
+ case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
+ case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
+ }
+
+ def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match {
+ case Empty => true
+ case ne: NonEmpty[_] =>
+ (
+ blackNodesToLeaves(ne).removeDuplicates.size == 1
+ && areBlackNodesToLeavesEqual(ne.left)
+ && areBlackNodesToLeavesEqual(ne.right)
+ )
+ }
+
+ def orderIsPreserved[A](t: Tree[A]): Boolean = t match {
+ case Empty => true
+ case ne: NonEmpty[_] =>
+ (
+ (ne.left.iterator map (_._1) forall (isSmaller(_, ne.key)))
+ && (ne.right.iterator map (_._1) forall (isSmaller(ne.key, _)))
+ && (List(ne.left, ne.right) forall orderIsPreserved)
+ )
+ }
+
+ def setup(l: List[Int], invariant: Tree[Unit] => Boolean): (Boolean, Tree[Unit])
+
+ def listNoRepetitions(size: Int) = for {
+ s <- Gen.choose(1, size)
+ l <- Gen.listOfN(size, Gen.choose(0, Int.MaxValue)) suchThat (l => l.size == l.removeDuplicates.size)
+ } yield l
+ def listFewRepetitions(size: Int) = for {
+ s <- Gen.choose(1, size)
+ l <- Gen.listOfN(s, Gen.choose(0, size * 4)) suchThat (l => l.size != l.removeDuplicates.size)
+ } yield l
+ def listManyRepetitions(size: Int) = for {
+ s <- Gen.choose(1, size)
+ l <- Gen.listOfN(s, Gen.choose(0, size)) suchThat (l => l.size != l.removeDuplicates.size)
+ } yield l
+ def listEvenRepetitions(size: Int) = listFewRepetitions(size) map (x =>
+ scala.util.Random.shuffle(x zip x flatMap { case (a, b) => List(a, b) })
+ )
+
+ // Arbitrarily weighted list distribution types
+ val seqType: Gen[Int => Gen[List[Int]]]
+
+ def myGen(sized: Int) = for {
+ size <- Gen.choose(0, sized)
+ seq <- seqType
+ list <- seq(size)
+ } yield list
+
+ property("root is black") = forAll(myGen(10)) { l =>
+ setup(l, rootIsBlack)._1 :| setup(l, rootIsBlack)._2.toString
+ }
+ property("all leaves are black") = forAll(myGen(50)) { l =>
+ setup(l, areAllLeavesBlack)._1 :| setup(l, areAllLeavesBlack)._2.toString
+ }
+ property("children of red nodes are black") = forAll(myGen(50)) { l =>
+ setup(l, areRedNodeChildrenBlack)._1 :| setup(l, areRedNodeChildrenBlack)._2.toString
+ }
+ property("Every path from a node to its descendant leaves contains the same number of black nodes") = forAll(myGen(50)) { l =>
+ setup(l, areBlackNodesToLeavesEqual)._1 :| setup(l, areBlackNodesToLeavesEqual)._2.toString
+ }
+ property("Ordering of keys is preserved") = forAll(myGen(50)) { l =>
+ setup(l, orderIsPreserved)._1 :| setup(l, orderIsPreserved)._2.toString
+ }
+}
+
+object TestInsertion extends RedBlackTest {
+ import RedBlackTest._
+ override val seqType = Gen.frequency(
+ (1, listNoRepetitions _),
+ (1, listManyRepetitions _)
+ )
+
+ property("update adds elements") = forAll(myGen(50)) { l =>
+ val tree = l.foldLeft(Empty: Tree[Unit])((acc, n) => acc update (n, ()))
+ forAll(Gen.pick(1, l)) ( n => !(tree lookup n.head isEmpty) :| "Tree: "+tree+" N: "+n.head )
+ }
+
+ override def setup(l: List[Int], invariant: Tree[Unit] => Boolean) = l.foldLeft((true, Empty: Tree[Unit])) {
+ case ((true, acc), n) =>
+ val newRoot = acc update (n, ())
+ (invariant(newRoot), newRoot)
+ case (failed, _) => failed
+ }
+}
+
+object TestDeletion extends RedBlackTest {
+ import RedBlackTest._
+ override val seqType = Gen.frequency(
+ (2, listFewRepetitions _),
+ (3, listManyRepetitions _),
+ (1, listEvenRepetitions _)
+ )
+
+ property("delete removes elements") = forAll(myGen(50)) { l =>
+ val tree = l.foldLeft(Empty: Tree[Unit])((acc, n) => acc update (n, ()))
+ forAll(Gen.choose(1, l.size)) { numberOfElementsToRemove =>
+ forAll(Gen.pick(numberOfElementsToRemove, l)) { elementsToRemove =>
+ val newTree = elementsToRemove.foldLeft(tree)((acc, n) => acc delete n)
+ (elementsToRemove forall (n => newTree lookup n isEmpty)) :| "Tree: "+tree+"New Tree: "+newTree+" Elements to Remove: "+elementsToRemove
+ }
+ }
+ }
+
+ override def setup(l: List[Int], invariant: Tree[Unit] => Boolean) = l.foldLeft((true, Empty: Tree[Unit])) {
+ case ((true, acc), n) =>
+ val newRoot = if (acc lookup n isEmpty) acc update (n, ()) else acc delete n
+ (invariant(newRoot), newRoot)
+ case (failed, _) => failed
+ }
+}
+
+object Test extends Properties("RedBlack") {
+ include(TestInsertion)
+ include(TestDeletion)
+}
+
diff --git a/test/files/bench/equality/eq.scala b/test/files/bench/equality/eq.scala
new file mode 100755
index 0000000000..4e57a81734
--- /dev/null
+++ b/test/files/bench/equality/eq.scala
@@ -0,0 +1,34 @@
+object eq extends testing.Benchmark {
+
+ def eqtest[T](creator: Int => T, n: Int): Int = {
+ val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
+
+ var sum = 0
+ var i = 0
+ while (i < n) {
+ var j = 0
+ while (j < n) {
+ if (elems(i) eq elems(j)) sum += 1
+ j += 1
+ }
+ i += 1
+ }
+ sum
+ }
+
+ val obj1 = new Object
+ val obj2 = new Object
+
+ def run() {
+ var sum = 0
+ sum += eqtest(x => if (x == 0) obj1 else obj2, 2000)
+ sum += eqtest(x => x, 1000)
+ sum += eqtest(x => x.toChar, 550)
+ sum += eqtest(x => x.toByte, 550)
+ sum += eqtest(x => x.toLong, 550)
+ sum += eqtest(x => x.toShort, 100)
+ sum += eqtest(x => x.toFloat, 100)
+ sum += eqtest(x => x.toDouble, 100)
+ assert(sum == 2958950)
+ }
+}
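Note: testing.Benchmark repeatedly times run() and prints the elapsed milliseconds, which is how the timings in eqeq.eqlog below were collected; the final assert keeps the JIT from discarding the loop. A minimal hedged sketch of the harness (hypothetical benchmark):

    object concatBench extends testing.Benchmark {
      // Timed body; the assert keeps the work observable so it cannot be optimised away.
      def run() {
        var s = ""
        for (i <- 0 until 1000) s += "x"
        assert(s.length == 1000)
      }
    }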
diff --git a/test/files/bench/equality/eqeq.eqlog b/test/files/bench/equality/eqeq.eqlog
new file mode 100644
index 0000000000..d1e27aceed
--- /dev/null
+++ b/test/files/bench/equality/eqeq.eqlog
@@ -0,0 +1,42 @@
+Benchmark results for testing equality operations:
+eq.scala: Base case, use eq equality only
+eqeq.scala: Test case, use == instead of eq.
+All tests run on Thinkpad T400, 1.6.0_12 client VM.
+Test command: java eq 5 5
+ java eqeq 5 5
+eq.scala, no -optimise
+eq$ 109 78 79 63 63
+eq$ 94 63 63 78 78
+eq$ 94 62 62 62 78
+eq$ 94 78 78 78 78
+eq$ 94 78 78 78 78
+eq.scala, with -optimise
+eq$ 421 63 62 47 63
+eq$ 406 62 62 63 62
+eq$ 407 62 62 78 63
+eq$ 406 63 63 62 62
+eq$ 407 62 62 63 47
+eqeq.scala with version of BoxesRuntime as of Nov 13th, no -optimise
+eqeq$ 562 516 516 516 515
+eqeq$ 547 515 515 531 532
+eqeq$ 532 516 516 515 516
+eqeq$ 547 531 531 516 531
+eqeq$ 547 515 515 516 516
+eqeq.scala with version of BoxesRuntime as of Nov 13th, with -optimise
+eqeq$ 1031 390 391 391 391
+eqeq$ 1031 391 391 391 390
+eqeq$ 1031 390 390 391 391
+eqeq$ 1031 406 407 391 390
+eqeq$ 1031 390 390 391 391
+eqeq.scala with 1st optimized of Nov 14th, no -optimise
+eqeq$ 484 421 438 438 437
+eqeq$ 484 438 437 437 438
+eqeq$ 469 437 453 454 438
+eqeq$ 468 437 438 468 438
+eqeq$ 485 437 437 422 438
+eqeq.scala with 1st optimized of Nov 14th, with -optimise
+eqeq$ 1016 375 391 375 375
+eqeq$ 1016 375 391 390 375
+eqeq$ 1016 390 391 375 375
+eqeq$ 1015 375 391 390 375
+eqeq$ 1016 390 375 375 375
diff --git a/test/files/bench/equality/eqeq.scala b/test/files/bench/equality/eqeq.scala
new file mode 100755
index 0000000000..e1fda69c0c
--- /dev/null
+++ b/test/files/bench/equality/eqeq.scala
@@ -0,0 +1,46 @@
+/** benchmark for testing equality.
+ * Mix: == between non-numbers with Object.equals as equality: 66%
+ * 50% of these are tests where eq is true.
+ * == between boxed integers: 17%
+ * == between boxed characters: 5%
+ * == between boxed bytes: 5%
+ * == between boxed longs: 5%
+ * == between boxed shorts: < 1%
+ * == between boxed floats: < 1%
+ * == between boxed doubles: < 1%
+ * In all cases 50% of the tests return true.
+ */
+object eqeq extends testing.Benchmark {
+
+ def eqeqtest[T](creator: Int => T, n: Int): Int = {
+ val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
+
+ var sum = 0
+ var i = 0
+ while (i < n) {
+ var j = 0
+ while (j < n) {
+ if (elems(i) == elems(j)) sum += 1
+ j += 1
+ }
+ i += 1
+ }
+ sum
+ }
+
+ val obj1 = new Object
+ val obj2 = new Object
+
+ def run() {
+ var sum = 0
+ sum += eqeqtest(x => if (x == 0) obj1 else obj2, 2000)
+ sum += eqeqtest(x => x, 1000)
+ sum += eqeqtest(x => x.toChar, 550)
+ sum += eqeqtest(x => x.toByte, 550)
+ sum += eqeqtest(x => x.toLong, 550)
+ sum += eqeqtest(x => x.toShort, 100)
+ sum += eqeqtest(x => x.toFloat, 100)
+ sum += eqeqtest(x => x.toDouble, 100)
+ assert(sum == 2968750)
+ }
+}
diff --git a/test/files/buildmanager/annotated/A.scala b/test/files/buildmanager/annotated/A.scala
new file mode 100644
index 0000000000..4130cf21ec
--- /dev/null
+++ b/test/files/buildmanager/annotated/A.scala
@@ -0,0 +1 @@
+case class A[T](x: String, y: T)
diff --git a/test/files/buildmanager/annotated/annotated.check b/test/files/buildmanager/annotated/annotated.check
new file mode 100644
index 0000000000..ce92c9a294
--- /dev/null
+++ b/test/files/buildmanager/annotated/annotated.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), object A -> List())
diff --git a/test/files/buildmanager/annotated/annotated.test b/test/files/buildmanager/annotated/annotated.test
new file mode 100644
index 0000000000..392e0d365f
--- /dev/null
+++ b/test/files/buildmanager/annotated/annotated.test
@@ -0,0 +1,2 @@
+>>compile A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/freshnames/A.scala b/test/files/buildmanager/freshnames/A.scala
new file mode 100644
index 0000000000..e8ab26ca1e
--- /dev/null
+++ b/test/files/buildmanager/freshnames/A.scala
@@ -0,0 +1,16 @@
+abstract class A {
+
+ var t: List[B]
+
+ def foo(n: String): Option[B] = {
+ t.reverse find (_.names contains n)
+ }
+
+ def bar(n: Int): Option[B] = {
+ t.reverse find (_.names contains n)
+ }
+}
+
+//class A
+case class B(names: List[String])
+
diff --git a/test/files/buildmanager/freshnames/B.scala b/test/files/buildmanager/freshnames/B.scala
new file mode 100644
index 0000000000..d700225c08
--- /dev/null
+++ b/test/files/buildmanager/freshnames/B.scala
@@ -0,0 +1,4 @@
+abstract class C extends A {
+ def test(n: Int) = bar(n)
+}
+
diff --git a/test/files/buildmanager/freshnames/freshnames.check b/test/files/buildmanager/freshnames/freshnames.check
new file mode 100644
index 0000000000..9f05fb8a36
--- /dev/null
+++ b/test/files/buildmanager/freshnames/freshnames.check
@@ -0,0 +1,6 @@
+builder > B.scala A.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), class B -> List(), object B -> List())
diff --git a/test/files/buildmanager/freshnames/freshnames.test b/test/files/buildmanager/freshnames/freshnames.test
new file mode 100644
index 0000000000..20b20298f9
--- /dev/null
+++ b/test/files/buildmanager/freshnames/freshnames.test
@@ -0,0 +1,2 @@
+>>compile B.scala A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/infer/A.scala b/test/files/buildmanager/infer/A.scala
new file mode 100644
index 0000000000..46b5391609
--- /dev/null
+++ b/test/files/buildmanager/infer/A.scala
@@ -0,0 +1,16 @@
+class Foo(flag: Boolean) {
+ val classpath =
+ if (flag)
+ new AClasspath
+ else
+ new BClasspath
+}
+
+class AClasspath extends MergedClasspath[A]
+
+class BClasspath extends MergedClasspath[B]
+
+abstract class MergedClasspath[T]
+
+class A
+class B
diff --git a/test/files/buildmanager/infer/infer.check b/test/files/buildmanager/infer/infer.check
new file mode 100644
index 0000000000..1f736977ff
--- /dev/null
+++ b/test/files/buildmanager/infer/infer.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), class AClasspath -> List(), class B -> List(), class BClasspath -> List(), class Foo -> List(), class MergedClasspath -> List())
diff --git a/test/files/buildmanager/infer/infer.test b/test/files/buildmanager/infer/infer.test
new file mode 100644
index 0000000000..392e0d365f
--- /dev/null
+++ b/test/files/buildmanager/infer/infer.test
@@ -0,0 +1,2 @@
+>>compile A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/overloaded_1/A.scala b/test/files/buildmanager/overloaded_1/A.scala
new file mode 100644
index 0000000000..33b63b8006
--- /dev/null
+++ b/test/files/buildmanager/overloaded_1/A.scala
@@ -0,0 +1,11 @@
+trait As {
+ trait C extends D {
+ override def foo = this /// Shouldn't cause the change
+ override def foo(act: List[D]) = this
+ }
+
+ abstract class D{
+ def foo: D = this
+ def foo(act: List[D]) = this
+ }
+}
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.check b/test/files/buildmanager/overloaded_1/overloaded_1.check
new file mode 100644
index 0000000000..4d643ce6b4
--- /dev/null
+++ b/test/files/buildmanager/overloaded_1/overloaded_1.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class As$D -> List(), object As$C$class -> List(), object As$class -> List(), trait As -> List(), trait As$C -> List())
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.test b/test/files/buildmanager/overloaded_1/overloaded_1.test
new file mode 100644
index 0000000000..392e0d365f
--- /dev/null
+++ b/test/files/buildmanager/overloaded_1/overloaded_1.test
@@ -0,0 +1,2 @@
+>>compile A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/simpletest/A.scala b/test/files/buildmanager/simpletest/A.scala
new file mode 100644
index 0000000000..ef704706bb
--- /dev/null
+++ b/test/files/buildmanager/simpletest/A.scala
@@ -0,0 +1,3 @@
+class A {
+ def foo = 2
+}
diff --git a/test/files/buildmanager/simpletest/B.scala b/test/files/buildmanager/simpletest/B.scala
new file mode 100644
index 0000000000..364dc6e4cb
--- /dev/null
+++ b/test/files/buildmanager/simpletest/B.scala
@@ -0,0 +1,3 @@
+class B extends A {
+ override def foo = 2
+}
diff --git a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala b/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
new file mode 100644
index 0000000000..83d15dc739
--- /dev/null
+++ b/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
@@ -0,0 +1 @@
+class A
diff --git a/test/files/buildmanager/simpletest/simpletest.check b/test/files/buildmanager/simpletest/simpletest.check
new file mode 100644
index 0000000000..95ea2c4c0d
--- /dev/null
+++ b/test/files/buildmanager/simpletest/simpletest.check
@@ -0,0 +1,11 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Removed(Definition(A.foo))))
+invalidate B.scala because inherited method removed [Removed(Definition(A.foo))]
+compiling Set(B.scala)
+B.scala:2: error: method foo overrides nothing
+ override def foo = 2
+ ^
diff --git a/test/files/buildmanager/simpletest/simpletest.test b/test/files/buildmanager/simpletest/simpletest.test
new file mode 100644
index 0000000000..2c0be1502f
--- /dev/null
+++ b/test/files/buildmanager/simpletest/simpletest.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A1.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2280/A.scala b/test/files/buildmanager/t2280/A.scala
new file mode 100644
index 0000000000..5febadeb06
--- /dev/null
+++ b/test/files/buildmanager/t2280/A.scala
@@ -0,0 +1 @@
+class A extends B
diff --git a/test/files/buildmanager/t2280/B.java b/test/files/buildmanager/t2280/B.java
new file mode 100644
index 0000000000..aef8e106e9
--- /dev/null
+++ b/test/files/buildmanager/t2280/B.java
@@ -0,0 +1,2 @@
+public class B {}
+
diff --git a/test/files/buildmanager/t2280/t2280.check b/test/files/buildmanager/t2280/t2280.check
new file mode 100644
index 0000000000..7ea7511c63
--- /dev/null
+++ b/test/files/buildmanager/t2280/t2280.check
@@ -0,0 +1,6 @@
+builder > A.scala B.java
+compiling Set(A.scala, B.java)
+Changes: Map()
+builder > B.java
+compiling Set(B.java)
+Changes: Map(class B -> List())
diff --git a/test/files/buildmanager/t2280/t2280.test b/test/files/buildmanager/t2280/t2280.test
new file mode 100644
index 0000000000..2eda777853
--- /dev/null
+++ b/test/files/buildmanager/t2280/t2280.test
@@ -0,0 +1,2 @@
+>>compile A.scala B.java
+>>compile B.java
diff --git a/test/files/buildmanager/t2556_1/A.scala b/test/files/buildmanager/t2556_1/A.scala
new file mode 100644
index 0000000000..c6e200b217
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/A.scala
@@ -0,0 +1,3 @@
+class A {
+ def x(i: Int) = i+"3"
+}
diff --git a/test/files/buildmanager/t2556_1/B.scala b/test/files/buildmanager/t2556_1/B.scala
new file mode 100644
index 0000000000..8529587b56
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/B.scala
@@ -0,0 +1,3 @@
+class B extends A {
+ def x(s: String) = s+"5"
+}
diff --git a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala b/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
new file mode 100644
index 0000000000..4ac1045e13
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ def x(i: String) = i+"3"
+}
+
diff --git a/test/files/buildmanager/t2556_1/t2556_1.check b/test/files/buildmanager/t2556_1/t2556_1.check
new file mode 100644
index 0000000000..dc9437fa7e
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/t2556_1.check
@@ -0,0 +1,12 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+compiling Set(B.scala)
+B.scala:2: error: overriding method x in class A of type (i: String)java.lang.String;
+ method x needs `override' modifier
+ def x(s: String) = s+"5"
+ ^
diff --git a/test/files/buildmanager/t2556_1/t2556_1.test b/test/files/buildmanager/t2556_1/t2556_1.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/t2556_1.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2556_2/A.scala b/test/files/buildmanager/t2556_2/A.scala
new file mode 100644
index 0000000000..b8da5c8fb1
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/A.scala
@@ -0,0 +1,4 @@
+class A {
+ def x(i: Int) = i+"3"
+}
+
diff --git a/test/files/buildmanager/t2556_2/B.scala b/test/files/buildmanager/t2556_2/B.scala
new file mode 100644
index 0000000000..80ff25d0ca
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/B.scala
@@ -0,0 +1,2 @@
+class B extends A
+
diff --git a/test/files/buildmanager/t2556_2/C.scala b/test/files/buildmanager/t2556_2/C.scala
new file mode 100644
index 0000000000..0ab13e3757
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/C.scala
@@ -0,0 +1,4 @@
+class C extends B {
+ def x(s: String) = s+"5"
+}
+
diff --git a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala b/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
new file mode 100644
index 0000000000..4ac1045e13
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ def x(i: String) = i+"3"
+}
+
diff --git a/test/files/buildmanager/t2556_2/t2556_2.check b/test/files/buildmanager/t2556_2/t2556_2.check
new file mode 100644
index 0000000000..a4d6724b11
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/t2556_2.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala C.scala
+compiling Set(A.scala, B.scala, C.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+invalidate C.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+compiling Set(B.scala, C.scala)
+C.scala:2: error: overriding method x in class A of type (i: String)java.lang.String;
+ method x needs `override' modifier
+ def x(s: String) = s+"5"
+ ^
diff --git a/test/files/buildmanager/t2556_2/t2556_2.test b/test/files/buildmanager/t2556_2/t2556_2.test
new file mode 100644
index 0000000000..9f31bb6409
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/t2556_2.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2556_3/A.scala b/test/files/buildmanager/t2556_3/A.scala
new file mode 100644
index 0000000000..089a05f493
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/A.scala
@@ -0,0 +1,5 @@
+class A {
+ def x = 3
+}
+class B extends A
+
diff --git a/test/files/buildmanager/t2556_3/B.scala b/test/files/buildmanager/t2556_3/B.scala
new file mode 100644
index 0000000000..0ec5ae4b55
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/B.scala
@@ -0,0 +1,5 @@
+object E {
+ def main(args: Array[String]) =
+ println( (new C).x )
+}
+
diff --git a/test/files/buildmanager/t2556_3/C.scala b/test/files/buildmanager/t2556_3/C.scala
new file mode 100644
index 0000000000..403df8455e
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/C.scala
@@ -0,0 +1,2 @@
+class C extends B
+
diff --git a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala b/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
new file mode 100644
index 0000000000..21cb2779f9
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
@@ -0,0 +1,5 @@
+class A {
+ def x = 3
+}
+class B
+
diff --git a/test/files/buildmanager/t2556_3/t2556_3.check b/test/files/buildmanager/t2556_3/t2556_3.check
new file mode 100644
index 0000000000..af0c63eebc
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/t2556_3.check
@@ -0,0 +1,18 @@
+builder > A.scala B.scala C.scala
+compiling Set(A.scala, B.scala, C.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), class B -> List(Changed(Class(B))[List((A,java.lang.Object), (ScalaObject,ScalaObject))]))
+invalidate C.scala because parents have changed [Changed(Class(B))[List((A,java.lang.Object), (ScalaObject,ScalaObject))]]
+invalidate B.scala because it references invalid (no longer inherited) definition [ParentChanged(Class(C))]
+compiling Set(B.scala, C.scala)
+B.scala:3: error: type mismatch;
+ found : C
+ required: ?{val x: ?}
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
+ and method any2Ensuring in object Predef of type [A](x: A)Ensuring[A]
+ are possible conversion functions from C to ?{val x: ?}
+ println( (new C).x )
+ ^
diff --git a/test/files/buildmanager/t2556_3/t2556_3.test b/test/files/buildmanager/t2556_3/t2556_3.test
new file mode 100644
index 0000000000..9f31bb6409
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/t2556_3.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2557/A.scala b/test/files/buildmanager/t2557/A.scala
new file mode 100644
index 0000000000..3be55f19a6
--- /dev/null
+++ b/test/files/buildmanager/t2557/A.scala
@@ -0,0 +1,4 @@
+trait A {
+ def x = 3
+}
+
diff --git a/test/files/buildmanager/t2557/B.scala b/test/files/buildmanager/t2557/B.scala
new file mode 100644
index 0000000000..ea86a90079
--- /dev/null
+++ b/test/files/buildmanager/t2557/B.scala
@@ -0,0 +1,4 @@
+trait B extends A {
+ override def x = super.x * 2
+}
+
diff --git a/test/files/buildmanager/t2557/C.scala b/test/files/buildmanager/t2557/C.scala
new file mode 100644
index 0000000000..dd575ac38d
--- /dev/null
+++ b/test/files/buildmanager/t2557/C.scala
@@ -0,0 +1,3 @@
+trait C extends A {
+ override def x = super.x + 5
+}
diff --git a/test/files/buildmanager/t2557/D.scala b/test/files/buildmanager/t2557/D.scala
new file mode 100644
index 0000000000..4e662a80ce
--- /dev/null
+++ b/test/files/buildmanager/t2557/D.scala
@@ -0,0 +1 @@
+trait D extends C with B
diff --git a/test/files/buildmanager/t2557/E.scala b/test/files/buildmanager/t2557/E.scala
new file mode 100644
index 0000000000..2aee552675
--- /dev/null
+++ b/test/files/buildmanager/t2557/E.scala
@@ -0,0 +1 @@
+trait E extends D
diff --git a/test/files/buildmanager/t2557/F.scala b/test/files/buildmanager/t2557/F.scala
new file mode 100644
index 0000000000..e1996704e7
--- /dev/null
+++ b/test/files/buildmanager/t2557/F.scala
@@ -0,0 +1,4 @@
+object F extends E {
+ def main(args: Array[String]) =
+ println(x)
+}
diff --git a/test/files/buildmanager/t2557/t2557.changes/D2.scala b/test/files/buildmanager/t2557/t2557.changes/D2.scala
new file mode 100644
index 0000000000..67295f8e6d
--- /dev/null
+++ b/test/files/buildmanager/t2557/t2557.changes/D2.scala
@@ -0,0 +1,2 @@
+trait D extends B with C
+
diff --git a/test/files/buildmanager/t2557/t2557.check b/test/files/buildmanager/t2557/t2557.check
new file mode 100644
index 0000000000..f51e801017
--- /dev/null
+++ b/test/files/buildmanager/t2557/t2557.check
@@ -0,0 +1,10 @@
+builder > A.scala B.scala C.scala D.scala E.scala F.scala
+compiling Set(A.scala, B.scala, C.scala, D.scala, E.scala, F.scala)
+Changes: Map()
+builder > D.scala
+compiling Set(D.scala)
+Changes: Map(trait D -> List(Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]))
+invalidate E.scala because parents have changed [Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]]
+invalidate F.scala because parents have changed [Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]]
+compiling Set(E.scala, F.scala)
+Changes: Map(object F -> List(), trait E -> List())
diff --git a/test/files/buildmanager/t2557/t2557.test b/test/files/buildmanager/t2557/t2557.test
new file mode 100644
index 0000000000..6b0103092f
--- /dev/null
+++ b/test/files/buildmanager/t2557/t2557.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala D.scala E.scala F.scala
+>>update D.scala=>D2.scala
+>>compile D.scala
diff --git a/test/files/buildmanager/t2559/A.scala b/test/files/buildmanager/t2559/A.scala
new file mode 100644
index 0000000000..fb4f6e3545
--- /dev/null
+++ b/test/files/buildmanager/t2559/A.scala
@@ -0,0 +1,5 @@
+sealed trait A
+class B extends A
+class C extends A
+//class E extends A
+
diff --git a/test/files/buildmanager/t2559/D.scala b/test/files/buildmanager/t2559/D.scala
new file mode 100644
index 0000000000..906b69a3e7
--- /dev/null
+++ b/test/files/buildmanager/t2559/D.scala
@@ -0,0 +1,8 @@
+object D {
+ def x(a: A) =
+ a match {
+ case _: B => ()
+ case _: C => ()
+ }
+}
+
diff --git a/test/files/buildmanager/t2559/t2559.changes/A2.scala b/test/files/buildmanager/t2559/t2559.changes/A2.scala
new file mode 100644
index 0000000000..8e90594e2c
--- /dev/null
+++ b/test/files/buildmanager/t2559/t2559.changes/A2.scala
@@ -0,0 +1,5 @@
+sealed trait A
+class B extends A
+class C extends A
+class E extends A
+
diff --git a/test/files/buildmanager/t2559/t2559.check b/test/files/buildmanager/t2559/t2559.check
new file mode 100644
index 0000000000..752278fbe8
--- /dev/null
+++ b/test/files/buildmanager/t2559/t2559.check
@@ -0,0 +1,14 @@
+builder > A.scala D.scala
+compiling Set(A.scala, D.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class B -> List(), class C -> List(), class E -> List(Changed(Class(A))[class E extends a sealed trait A]), trait A -> List())
+invalidate D.scala because it references changed class [Changed(Class(A))[class E extends a sealed trait A]]
+compiling Set(D.scala)
+D.scala:3: warning: match is not exhaustive!
+missing combination E
+
+ a match {
+ ^
+Changes: Map(object D -> List())
diff --git a/test/files/buildmanager/t2559/t2559.test b/test/files/buildmanager/t2559/t2559.test
new file mode 100644
index 0000000000..b787c5b39f
--- /dev/null
+++ b/test/files/buildmanager/t2559/t2559.test
@@ -0,0 +1,3 @@
+>>compile A.scala D.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2562/A.scala b/test/files/buildmanager/t2562/A.scala
new file mode 100644
index 0000000000..740cd1e868
--- /dev/null
+++ b/test/files/buildmanager/t2562/A.scala
@@ -0,0 +1,7 @@
+object A
+{
+ def x0 = B.x0
+ def x1 = B.x1
+ def x2 = B.x2
+ def x3 = 3
+}
diff --git a/test/files/buildmanager/t2562/B.scala b/test/files/buildmanager/t2562/B.scala
new file mode 100644
index 0000000000..a524e5cc84
--- /dev/null
+++ b/test/files/buildmanager/t2562/B.scala
@@ -0,0 +1,8 @@
+object B
+{
+ def x0 = A.x1
+ def x1 = A.x2
+ def x2 = A.x3
+}
+
+
diff --git a/test/files/buildmanager/t2562/t2562.changes/A2.scala b/test/files/buildmanager/t2562/t2562.changes/A2.scala
new file mode 100644
index 0000000000..c560e1e816
--- /dev/null
+++ b/test/files/buildmanager/t2562/t2562.changes/A2.scala
@@ -0,0 +1,8 @@
+object A
+{
+ def x0 = B.x0
+ def x1 = B.x1
+ def x2 = B.x2
+ def x3 = "3"
+}
+
diff --git a/test/files/buildmanager/t2562/t2562.check b/test/files/buildmanager/t2562/t2562.check
new file mode 100644
index 0000000000..813d2735e1
--- /dev/null
+++ b/test/files/buildmanager/t2562/t2562.check
@@ -0,0 +1,12 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()java.lang.String flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()java.lang.String flags: <method>]]
+compiling Set(B.scala)
+Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]))
+invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]]
+compiling Set(A.scala, B.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(A.x1))[method x1 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(A.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(B.x1))[method x1 changed from ()Int to ()java.lang.String flags: <method>]))
diff --git a/test/files/buildmanager/t2562/t2562.test b/test/files/buildmanager/t2562/t2562.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2562/t2562.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2649/A.scala b/test/files/buildmanager/t2649/A.scala
new file mode 100644
index 0000000000..86cc3f2c15
--- /dev/null
+++ b/test/files/buildmanager/t2649/A.scala
@@ -0,0 +1,3 @@
+object A {
+ def x(zz: Int, yy: Int) = yy - zz
+}
diff --git a/test/files/buildmanager/t2649/B.scala b/test/files/buildmanager/t2649/B.scala
new file mode 100644
index 0000000000..26c89518cb
--- /dev/null
+++ b/test/files/buildmanager/t2649/B.scala
@@ -0,0 +1,4 @@
+object B {
+ def main(args: Array[String]): Unit =
+ println( A.x(zz = 3, yy = 4) )
+}
diff --git a/test/files/buildmanager/t2649/t2649.changes/A2.scala b/test/files/buildmanager/t2649/t2649.changes/A2.scala
new file mode 100644
index 0000000000..9a6309fca3
--- /dev/null
+++ b/test/files/buildmanager/t2649/t2649.changes/A2.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(yy: Int, zz: Int) = yy - zz
+}
+
diff --git a/test/files/buildmanager/t2649/t2649.check b/test/files/buildmanager/t2649/t2649.check
new file mode 100644
index 0000000000..5b698ec03f
--- /dev/null
+++ b/test/files/buildmanager/t2649/t2649.check
@@ -0,0 +1,9 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int,yy: Int)Int to (yy: Int,zz: Int)Int flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int,yy: Int)Int to (yy: Int,zz: Int)Int flags: <method>]]
+compiling Set(B.scala)
+Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2649/t2649.test b/test/files/buildmanager/t2649/t2649.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2649/t2649.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_1/A.scala b/test/files/buildmanager/t2650_1/A.scala
new file mode 100644
index 0000000000..74714a3c47
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/A.scala
@@ -0,0 +1,4 @@
+trait A {
+ type S[_]
+}
+
diff --git a/test/files/buildmanager/t2650_1/B.scala b/test/files/buildmanager/t2650_1/B.scala
new file mode 100644
index 0000000000..80f0e30259
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/B.scala
@@ -0,0 +1,3 @@
+trait B extends A {
+ type F = S[Int]
+}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala b/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
new file mode 100644
index 0000000000..2b8ead4ff1
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
@@ -0,0 +1,3 @@
+trait A {
+ type S
+}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.check b/test/files/buildmanager/t2650_1/t2650_1.check
new file mode 100644
index 0000000000..ecddb33620
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/t2650_1.check
@@ -0,0 +1,11 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]]
+compiling Set(B.scala)
+B.scala:2: error: B.this.S does not take type parameters
+ type F = S[Int]
+ ^
diff --git a/test/files/buildmanager/t2650_1/t2650_1.test b/test/files/buildmanager/t2650_1/t2650_1.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/t2650_1.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_2/A.scala b/test/files/buildmanager/t2650_2/A.scala
new file mode 100644
index 0000000000..bcea634485
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/A.scala
@@ -0,0 +1,3 @@
+trait A {
+ type S = Int
+}
diff --git a/test/files/buildmanager/t2650_2/B.scala b/test/files/buildmanager/t2650_2/B.scala
new file mode 100644
index 0000000000..22a3a9a48e
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/B.scala
@@ -0,0 +1,4 @@
+trait B extends A {
+ def x: S
+ def y: Int = x
+}
diff --git a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala b/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
new file mode 100644
index 0000000000..8274c1b62d
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
@@ -0,0 +1,4 @@
+trait A {
+ type S = Long
+}
+
diff --git a/test/files/buildmanager/t2650_2/t2650_2.check b/test/files/buildmanager/t2650_2/t2650_2.check
new file mode 100644
index 0000000000..7ab72fb619
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/t2650_2.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]]
+compiling Set(B.scala)
+B.scala:3: error: type mismatch;
+ found : B.this.S
+ required: Int
+ def y: Int = x
+ ^
diff --git a/test/files/buildmanager/t2650_2/t2650_2.test b/test/files/buildmanager/t2650_2/t2650_2.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/t2650_2.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_3/A.scala b/test/files/buildmanager/t2650_3/A.scala
new file mode 100644
index 0000000000..cd13843eb9
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/A.scala
@@ -0,0 +1,4 @@
+trait A {
+ type T = Int
+ def x: T
+}
diff --git a/test/files/buildmanager/t2650_3/B.scala b/test/files/buildmanager/t2650_3/B.scala
new file mode 100644
index 0000000000..46a8cf270a
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/B.scala
@@ -0,0 +1,3 @@
+object B {
+ def x(a: A): Int = a.x
+}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala b/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
new file mode 100644
index 0000000000..e5667b2539
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
@@ -0,0 +1,4 @@
+trait A {
+ type T = Long
+ def x: T
+}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.check b/test/files/buildmanager/t2650_3/t2650_3.check
new file mode 100644
index 0000000000..27be2f5ae8
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/t2650_3.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : a.T
+ required: Int
+ def x(a: A): Int = a.x
+ ^
diff --git a/test/files/buildmanager/t2650_3/t2650_3.test b/test/files/buildmanager/t2650_3/t2650_3.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/t2650_3.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_4/A.scala b/test/files/buildmanager/t2650_4/A.scala
new file mode 100644
index 0000000000..b9a519eb48
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/A.scala
@@ -0,0 +1,5 @@
+trait A {
+ type T = Int
+ type T2 = T
+ def x: T2
+}
diff --git a/test/files/buildmanager/t2650_4/B.scala b/test/files/buildmanager/t2650_4/B.scala
new file mode 100644
index 0000000000..46a8cf270a
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/B.scala
@@ -0,0 +1,3 @@
+object B {
+ def x(a: A): Int = a.x
+}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala b/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
new file mode 100644
index 0000000000..0220e7b7bc
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
@@ -0,0 +1,5 @@
+trait A {
+ type T = Long
+ type T2 = T
+ def x: T2
+}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.check b/test/files/buildmanager/t2650_4/t2650_4.check
new file mode 100644
index 0000000000..ba092d013f
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/t2650_4.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : a.T2
+ required: Int
+ def x(a: A): Int = a.x
+ ^
diff --git a/test/files/buildmanager/t2650_4/t2650_4.test b/test/files/buildmanager/t2650_4/t2650_4.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/t2650_4.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2651_2/A.scala b/test/files/buildmanager/t2651_2/A.scala
new file mode 100644
index 0000000000..d712f6febe
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/A.scala
@@ -0,0 +1 @@
+trait A[T]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala b/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
new file mode 100644
index 0000000000..7fb573e077
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
@@ -0,0 +1 @@
+trait A[S]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.check b/test/files/buildmanager/t2651_2/t2651_2.check
new file mode 100644
index 0000000000..dd789b7565
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/t2651_2.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List())
diff --git a/test/files/buildmanager/t2651_2/t2651_2.test b/test/files/buildmanager/t2651_2/t2651_2.test
new file mode 100644
index 0000000000..d0614473ce
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/t2651_2.test
@@ -0,0 +1,3 @@
+>>compile A.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2651_3/A.scala b/test/files/buildmanager/t2651_3/A.scala
new file mode 100644
index 0000000000..14f9e4662f
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/A.scala
@@ -0,0 +1,3 @@
+trait A[T, S] {
+ def x: T
+}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala b/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
new file mode 100644
index 0000000000..51bf27d1fa
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
@@ -0,0 +1,3 @@
+trait A[T, S] {
+ def x: S
+}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.check b/test/files/buildmanager/t2651_3/t2651_3.check
new file mode 100644
index 0000000000..d4bac196e9
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/t2651_3.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <deferred> <method>]))
diff --git a/test/files/buildmanager/t2651_3/t2651_3.test b/test/files/buildmanager/t2651_3/t2651_3.test
new file mode 100644
index 0000000000..d0614473ce
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/t2651_3.test
@@ -0,0 +1,3 @@
+>>compile A.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2651_4/A.scala b/test/files/buildmanager/t2651_4/A.scala
new file mode 100644
index 0000000000..63f2a1643e
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/A.scala
@@ -0,0 +1,5 @@
+trait A[T, S] {
+ def x: T
+ def y(a: T)
+ def z[B <: T]
+}
diff --git a/test/files/buildmanager/t2651_4/B.scala b/test/files/buildmanager/t2651_4/B.scala
new file mode 100644
index 0000000000..b33dbde676
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/B.scala
@@ -0,0 +1,3 @@
+trait B extends A[Int, String] {
+ def x = 3
+}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala b/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
new file mode 100644
index 0000000000..f155129d13
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
@@ -0,0 +1,5 @@
+trait A[S, T] {
+ def x: T
+ def y(a: T)
+ def z[B <: T]
+}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check
new file mode 100644
index 0000000000..c4ce382b5f
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/t2651_4.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <deferred> <method>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <deferred> <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : Int(3)
+ required: String
+ def x = 3
+ ^
diff --git a/test/files/buildmanager/t2651_4/t2651_4.test b/test/files/buildmanager/t2651_4/t2651_4.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/t2651_4.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2652/A.scala b/test/files/buildmanager/t2652/A.scala
new file mode 100644
index 0000000000..a62506e890
--- /dev/null
+++ b/test/files/buildmanager/t2652/A.scala
@@ -0,0 +1,3 @@
+class A {
+ def x[T](t: T) = t
+}
diff --git a/test/files/buildmanager/t2652/B.scala b/test/files/buildmanager/t2652/B.scala
new file mode 100644
index 0000000000..86d08f0d3d
--- /dev/null
+++ b/test/files/buildmanager/t2652/B.scala
@@ -0,0 +1,4 @@
+object B {
+ val y = (new A).x(3)
+}
+
diff --git a/test/files/buildmanager/t2652/t2652.changes/A2.scala b/test/files/buildmanager/t2652/t2652.changes/A2.scala
new file mode 100644
index 0000000000..29135c0e94
--- /dev/null
+++ b/test/files/buildmanager/t2652/t2652.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ def x[@specialized T](t: T) = t
+}
+
diff --git a/test/files/buildmanager/t2652/t2652.check b/test/files/buildmanager/t2652/t2652.check
new file mode 100644
index 0000000000..0e685c1f94
--- /dev/null
+++ b/test/files/buildmanager/t2652/t2652.check
@@ -0,0 +1,9 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mLc$sp)), Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method>]]
+compiling Set(B.scala)
+Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2652/t2652.test b/test/files/buildmanager/t2652/t2652.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2652/t2652.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2653/A.scala b/test/files/buildmanager/t2653/A.scala
new file mode 100644
index 0000000000..fb17a158c7
--- /dev/null
+++ b/test/files/buildmanager/t2653/A.scala
@@ -0,0 +1,2 @@
+class A[+T]
+
diff --git a/test/files/buildmanager/t2653/B.scala b/test/files/buildmanager/t2653/B.scala
new file mode 100644
index 0000000000..8f55a88e05
--- /dev/null
+++ b/test/files/buildmanager/t2653/B.scala
@@ -0,0 +1,3 @@
+object B {
+ val a: A[Any] = new A[Int]
+}
diff --git a/test/files/buildmanager/t2653/t2653.changes/A2.scala b/test/files/buildmanager/t2653/t2653.changes/A2.scala
new file mode 100644
index 0000000000..51d13cce6e
--- /dev/null
+++ b/test/files/buildmanager/t2653/t2653.changes/A2.scala
@@ -0,0 +1,2 @@
+class A[T]
+
diff --git a/test/files/buildmanager/t2653/t2653.check b/test/files/buildmanager/t2653/t2653.check
new file mode 100644
index 0000000000..0d40601962
--- /dev/null
+++ b/test/files/buildmanager/t2653/t2653.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.<init>))[constructor A changed from ()A[T] to ()A[T] flags: <method>]))
+invalidate B.scala because it references changed class [Changed(Class(A))[ tparams: List((type T,type T))]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : A[Int]
+ required: A[Any]
+ val a: A[Any] = new A[Int]
+ ^
diff --git a/test/files/buildmanager/t2653/t2653.test b/test/files/buildmanager/t2653/t2653.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2653/t2653.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2654/A.scala b/test/files/buildmanager/t2654/A.scala
new file mode 100644
index 0000000000..75f396d039
--- /dev/null
+++ b/test/files/buildmanager/t2654/A.scala
@@ -0,0 +1,2 @@
+class A
+
diff --git a/test/files/buildmanager/t2654/B.scala b/test/files/buildmanager/t2654/B.scala
new file mode 100644
index 0000000000..a18aec3dbe
--- /dev/null
+++ b/test/files/buildmanager/t2654/B.scala
@@ -0,0 +1 @@
+class B extends A
diff --git a/test/files/buildmanager/t2654/t2654.changes/A2.scala b/test/files/buildmanager/t2654/t2654.changes/A2.scala
new file mode 100644
index 0000000000..c302edbd85
--- /dev/null
+++ b/test/files/buildmanager/t2654/t2654.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ private def x = 5
+}
+
diff --git a/test/files/buildmanager/t2654/t2654.check b/test/files/buildmanager/t2654/t2654.check
new file mode 100644
index 0000000000..68f6e8efc0
--- /dev/null
+++ b/test/files/buildmanager/t2654/t2654.check
@@ -0,0 +1,6 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List())
diff --git a/test/files/buildmanager/t2654/t2654.test b/test/files/buildmanager/t2654/t2654.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2654/t2654.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2655/A.scala b/test/files/buildmanager/t2655/A.scala
new file mode 100644
index 0000000000..b2c54ac47d
--- /dev/null
+++ b/test/files/buildmanager/t2655/A.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(i: => String) = ()
+}
+
diff --git a/test/files/buildmanager/t2655/B.scala b/test/files/buildmanager/t2655/B.scala
new file mode 100644
index 0000000000..6c1918c0fb
--- /dev/null
+++ b/test/files/buildmanager/t2655/B.scala
@@ -0,0 +1,3 @@
+object B {
+ val x = A.x("3")
+}
diff --git a/test/files/buildmanager/t2655/t2655.changes/A2.scala b/test/files/buildmanager/t2655/t2655.changes/A2.scala
new file mode 100644
index 0000000000..0d6a7c69bb
--- /dev/null
+++ b/test/files/buildmanager/t2655/t2655.changes/A2.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(i: Function0[String]) = ()
+}
+
diff --git a/test/files/buildmanager/t2655/t2655.check b/test/files/buildmanager/t2655/t2655.check
new file mode 100644
index 0000000000..a4a071ed70
--- /dev/null
+++ b/test/files/buildmanager/t2655/t2655.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : java.lang.String("3")
+ required: () => String
+ val x = A.x("3")
+ ^
diff --git a/test/files/buildmanager/t2655/t2655.test b/test/files/buildmanager/t2655/t2655.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2655/t2655.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2657/A.scala b/test/files/buildmanager/t2657/A.scala
new file mode 100644
index 0000000000..2a6c62d29c
--- /dev/null
+++ b/test/files/buildmanager/t2657/A.scala
@@ -0,0 +1,3 @@
+class A {
+ implicit def y(i: Int): String = i.toString
+}
diff --git a/test/files/buildmanager/t2657/B.scala b/test/files/buildmanager/t2657/B.scala
new file mode 100644
index 0000000000..77869890db
--- /dev/null
+++ b/test/files/buildmanager/t2657/B.scala
@@ -0,0 +1,4 @@
+object B extends A {
+ val x: String = 3
+}
+
diff --git a/test/files/buildmanager/t2657/t2657.changes/A2.scala b/test/files/buildmanager/t2657/t2657.changes/A2.scala
new file mode 100644
index 0000000000..7dc99d425e
--- /dev/null
+++ b/test/files/buildmanager/t2657/t2657.changes/A2.scala
@@ -0,0 +1,3 @@
+class A {
+ def y(i: Int): String = i.toString
+}
diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check
new file mode 100644
index 0000000000..9713f66024
--- /dev/null
+++ b/test/files/buildmanager/t2657/t2657.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)java.lang.String to (i: Int)java.lang.String flags: implicit <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)java.lang.String to (i: Int)java.lang.String flags: implicit <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : Int(3)
+ required: String
+ val x: String = 3
+ ^
diff --git a/test/files/buildmanager/t2657/t2657.test b/test/files/buildmanager/t2657/t2657.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2657/t2657.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2789/A.scala b/test/files/buildmanager/t2789/A.scala
new file mode 100644
index 0000000000..08d5bc840c
--- /dev/null
+++ b/test/files/buildmanager/t2789/A.scala
@@ -0,0 +1,5 @@
+class A {
+ implicit def e: E = new E
+ def x(i: Int)(implicit y: E): String = ""
+}
+class E
diff --git a/test/files/buildmanager/t2789/B.scala b/test/files/buildmanager/t2789/B.scala
new file mode 100644
index 0000000000..dcefbeec1b
--- /dev/null
+++ b/test/files/buildmanager/t2789/B.scala
@@ -0,0 +1,3 @@
+object B extends A {
+ val y = x(3)
+}
diff --git a/test/files/buildmanager/t2789/t2789.changes/A2.scala b/test/files/buildmanager/t2789/t2789.changes/A2.scala
new file mode 100644
index 0000000000..4ba3814e71
--- /dev/null
+++ b/test/files/buildmanager/t2789/t2789.changes/A2.scala
@@ -0,0 +1,5 @@
+class A {
+ def e: E = new E
+ def x(i: Int)(implicit y: E): String = ""
+}
+class E
diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check
new file mode 100644
index 0000000000..78c5119355
--- /dev/null
+++ b/test/files/buildmanager/t2789/t2789.check
@@ -0,0 +1,11 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method>]), class E -> List())
+invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method>]]
+compiling Set(B.scala)
+B.scala:2: error: could not find implicit value for parameter y: E
+ val y = x(3)
+ ^
diff --git a/test/files/buildmanager/t2789/t2789.test b/test/files/buildmanager/t2789/t2789.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2789/t2789.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2790/A.scala b/test/files/buildmanager/t2790/A.scala
new file mode 100644
index 0000000000..6e9c1a90db
--- /dev/null
+++ b/test/files/buildmanager/t2790/A.scala
@@ -0,0 +1,5 @@
+object A {
+ def x(f: String, g: Int): Int = g
+ def x(f: Int, g: Int = 3): Int = g
+}
+
diff --git a/test/files/buildmanager/t2790/B.scala b/test/files/buildmanager/t2790/B.scala
new file mode 100644
index 0000000000..441055ca12
--- /dev/null
+++ b/test/files/buildmanager/t2790/B.scala
@@ -0,0 +1,4 @@
+object B {
+ val y = A.x(5)
+}
+
diff --git a/test/files/buildmanager/t2790/t2790.changes/A2.scala b/test/files/buildmanager/t2790/t2790.changes/A2.scala
new file mode 100644
index 0000000000..704ef4e96e
--- /dev/null
+++ b/test/files/buildmanager/t2790/t2790.changes/A2.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(f: String, g: Int = 3): Int = g
+ def x(f: Int, g: Int): Int = g
+}
diff --git a/test/files/buildmanager/t2790/t2790.check b/test/files/buildmanager/t2790/t2790.check
new file mode 100644
index 0000000000..3a57d28817
--- /dev/null
+++ b/test/files/buildmanager/t2790/t2790.check
@@ -0,0 +1,14 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Added(Definition(A.x)), Changed(Definition(A.x))[value x changed from (f: java.lang.String,g: Int)Int to (f: java.lang.String,g: Int)Int <and> (f: Int,g: Int)Int flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[value x changed from (f: java.lang.String,g: Int)Int to (f: java.lang.String,g: Int)Int <and> (f: Int,g: Int)Int flags: <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : Int(5)
+ required: String
+Error occurred in an application involving default arguments.
+ val y = A.x(5)
+ ^
diff --git a/test/files/buildmanager/t2790/t2790.test b/test/files/buildmanager/t2790/t2790.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2790/t2790.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t3045/A.java b/test/files/buildmanager/t3045/A.java
new file mode 100644
index 0000000000..d1acb00cd6
--- /dev/null
+++ b/test/files/buildmanager/t3045/A.java
@@ -0,0 +1,7 @@
+public interface A {
+ public class C implements A {}
+}
+
+class B {
+ static class C {}
+}
diff --git a/test/files/buildmanager/t3045/t3045.check b/test/files/buildmanager/t3045/t3045.check
new file mode 100644
index 0000000000..5e4e71e045
--- /dev/null
+++ b/test/files/buildmanager/t3045/t3045.check
@@ -0,0 +1,3 @@
+builder > A.java
+compiling Set(A.java)
+Changes: Map()
diff --git a/test/files/buildmanager/t3045/t3045.test b/test/files/buildmanager/t3045/t3045.test
new file mode 100644
index 0000000000..6cf7e35543
--- /dev/null
+++ b/test/files/buildmanager/t3045/t3045.test
@@ -0,0 +1 @@
+>>compile A.java
diff --git a/test/files/buildmanager/t3054/bar/Bar.java b/test/files/buildmanager/t3054/bar/Bar.java
new file mode 100644
index 0000000000..e1b056d4e5
--- /dev/null
+++ b/test/files/buildmanager/t3054/bar/Bar.java
@@ -0,0 +1,7 @@
+package bar;
+import foo.Foo$;
+
+
+public class Bar {
+ void bar() { Foo$.MODULE$.foo(); }
+}
diff --git a/test/files/buildmanager/t3054/foo/Foo.scala b/test/files/buildmanager/t3054/foo/Foo.scala
new file mode 100644
index 0000000000..c4838b9958
--- /dev/null
+++ b/test/files/buildmanager/t3054/foo/Foo.scala
@@ -0,0 +1,5 @@
+package foo
+
+class Foo {
+ def foo = println("foo")
+}
diff --git a/test/files/buildmanager/t3054/t3054.check b/test/files/buildmanager/t3054/t3054.check
new file mode 100644
index 0000000000..97cca8862e
--- /dev/null
+++ b/test/files/buildmanager/t3054/t3054.check
@@ -0,0 +1,3 @@
+builder > bar/Bar.java foo/Foo.scala
+compiling Set(bar/Bar.java, foo/Foo.scala)
+Changes: Map()
diff --git a/test/files/buildmanager/t3054/t3054.test b/test/files/buildmanager/t3054/t3054.test
new file mode 100644
index 0000000000..903df24b13
--- /dev/null
+++ b/test/files/buildmanager/t3054/t3054.test
@@ -0,0 +1 @@
+>>compile bar/Bar.java foo/Foo.scala
diff --git a/test/files/buildmanager/t3133/A.java b/test/files/buildmanager/t3133/A.java
new file mode 100644
index 0000000000..c4e7f3af0e
--- /dev/null
+++ b/test/files/buildmanager/t3133/A.java
@@ -0,0 +1,7 @@
+public class A {
+ class Foo {}
+
+ public A(Foo a) {}
+
+ private void bar(Foo z) {}
+}
diff --git a/test/files/buildmanager/t3133/t3133.check b/test/files/buildmanager/t3133/t3133.check
new file mode 100644
index 0000000000..5e4e71e045
--- /dev/null
+++ b/test/files/buildmanager/t3133/t3133.check
@@ -0,0 +1,3 @@
+builder > A.java
+compiling Set(A.java)
+Changes: Map()
diff --git a/test/files/buildmanager/t3133/t3133.test b/test/files/buildmanager/t3133/t3133.test
new file mode 100644
index 0000000000..6cf7e35543
--- /dev/null
+++ b/test/files/buildmanager/t3133/t3133.test
@@ -0,0 +1 @@
+>>compile A.java
diff --git a/test/files/continuations-neg/function0.check b/test/files/continuations-neg/function0.check
new file mode 100644
index 0000000000..0a66763a0f
--- /dev/null
+++ b/test/files/continuations-neg/function0.check
@@ -0,0 +1,6 @@
+function0.scala:11: error: type mismatch;
+ found : () => Int @scala.util.continuations.cpsParam[Int,Int]
+ required: () => Int
+ val g: () => Int = f
+ ^
+one error found
diff --git a/test/files/continuations-neg/function0.scala b/test/files/continuations-neg/function0.scala
new file mode 100644
index 0000000000..6ef0d98b90
--- /dev/null
+++ b/test/files/continuations-neg/function0.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val f = () => shift { k: (Int=>Int) => k(7) }
+ val g: () => Int = f
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-neg/function2.check b/test/files/continuations-neg/function2.check
new file mode 100644
index 0000000000..4833057652
--- /dev/null
+++ b/test/files/continuations-neg/function2.check
@@ -0,0 +1,6 @@
+function2.scala:11: error: type mismatch;
+ found : () => Int
+ required: () => Int @util.continuations.package.cps[Int]
+ val g: () => Int @cps[Int] = f
+ ^
+one error found
diff --git a/test/files/continuations-neg/function2.scala b/test/files/continuations-neg/function2.scala
new file mode 100644
index 0000000000..402c6dcad8
--- /dev/null
+++ b/test/files/continuations-neg/function2.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val f = () => 7
+ val g: () => Int @cps[Int] = f
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-neg/function3.check b/test/files/continuations-neg/function3.check
new file mode 100644
index 0000000000..4705ad9ed9
--- /dev/null
+++ b/test/files/continuations-neg/function3.check
@@ -0,0 +1,6 @@
+function3.scala:10: error: type mismatch;
+ found : Int @scala.util.continuations.cpsParam[Int,Int]
+ required: Int
+ val g: () => Int = () => shift { k: (Int=>Int) => k(7) }
+ ^
+one error found
diff --git a/test/files/continuations-neg/function3.scala b/test/files/continuations-neg/function3.scala
new file mode 100644
index 0000000000..c4acc4c2e9
--- /dev/null
+++ b/test/files/continuations-neg/function3.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: () => Int = () => shift { k: (Int=>Int) => k(7) }
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-neg/infer0.check b/test/files/continuations-neg/infer0.check
new file mode 100644
index 0000000000..1dd072ef09
--- /dev/null
+++ b/test/files/continuations-neg/infer0.check
@@ -0,0 +1,4 @@
+infer0.scala:11: error: cannot cps-transform expression 8: type arguments [Int(8),String,Int] do not conform to method shiftUnit's type parameter bounds [A,B,C >: B]
+ test(8)
+ ^
+one error found
diff --git a/test/files/continuations-neg/infer0.scala b/test/files/continuations-neg/infer0.scala
new file mode 100644
index 0000000000..9cf69c5d35
--- /dev/null
+++ b/test/files/continuations-neg/infer0.scala
@@ -0,0 +1,14 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x: => Int @cpsParam[String,Int]) = 7
+
+ def main(args: Array[String]): Any = {
+ test(8)
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-neg/infer2.check b/test/files/continuations-neg/infer2.check
new file mode 100644
index 0000000000..59eb670bc3
--- /dev/null
+++ b/test/files/continuations-neg/infer2.check
@@ -0,0 +1,4 @@
+infer2.scala:14: error: illegal answer type modification: scala.util.continuations.cpsParam[String,Int] andThen scala.util.continuations.cpsParam[String,Int]
+ test { sym(); sym() }
+ ^
+one error found
diff --git a/test/files/continuations-neg/infer2.scala b/test/files/continuations-neg/infer2.scala
new file mode 100644
index 0000000000..eaffbc17fc
--- /dev/null
+++ b/test/files/continuations-neg/infer2.scala
@@ -0,0 +1,19 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x: => Int @cpsParam[String,Int]) = 7
+
+ def sym() = shift { k: (Int => String) => 9 }
+
+
+ def main(args: Array[String]): Any = {
+ test { sym(); sym() }
+ }
+
+}
+
+
diff --git a/test/files/continuations-neg/lazy.check b/test/files/continuations-neg/lazy.check
new file mode 100644
index 0000000000..bfa44c59a4
--- /dev/null
+++ b/test/files/continuations-neg/lazy.check
@@ -0,0 +1,6 @@
+lazy.scala:5: error: type mismatch;
+ found : Unit @scala.util.continuations.cpsParam[Unit,Unit]
+ required: Unit
+ def foo = {
+ ^
+one error found
diff --git a/test/files/continuations-neg/lazy.scala b/test/files/continuations-neg/lazy.scala
new file mode 100644
index 0000000000..d150d5fe51
--- /dev/null
+++ b/test/files/continuations-neg/lazy.scala
@@ -0,0 +1,16 @@
+import scala.util.continuations._
+
+object Test {
+
+ def foo = {
+ lazy val x = shift((k:Unit=>Unit)=>k())
+ println(x)
+ }
+
+ def main(args: Array[String]) {
+ reset {
+ foo
+ }
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-neg/t1929.check b/test/files/continuations-neg/t1929.check
new file mode 100644
index 0000000000..f42c3a1e15
--- /dev/null
+++ b/test/files/continuations-neg/t1929.check
@@ -0,0 +1,6 @@
+t1929.scala:8: error: type mismatch;
+ found : Int @scala.util.continuations.cpsParam[String,java.lang.String] @scala.util.continuations.cpsSynth
+ required: Int @scala.util.continuations.cpsParam[Int,java.lang.String]
+ reset {
+ ^
+one error found
diff --git a/test/files/continuations-neg/t1929.scala b/test/files/continuations-neg/t1929.scala
new file mode 100644
index 0000000000..02eda9170d
--- /dev/null
+++ b/test/files/continuations-neg/t1929.scala
@@ -0,0 +1,17 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def main(args : Array[String]) {
+ reset {
+ println("up")
+ val x = shift((k:Int=>String) => k(8) + k(2))
+ println("down " + x)
+ val y = shift((k:Int=>String) => k(3))
+ println("down2 " + y)
+ y + x
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/continuations-neg/t2285.check b/test/files/continuations-neg/t2285.check
new file mode 100644
index 0000000000..d5dff6a4f2
--- /dev/null
+++ b/test/files/continuations-neg/t2285.check
@@ -0,0 +1,6 @@
+t2285.scala:9: error: type mismatch;
+ found : Int @scala.util.continuations.cpsParam[String,String] @scala.util.continuations.cpsSynth
+ required: Int @scala.util.continuations.cpsParam[Int,String]
+ def foo() = reset { bar(); 7 }
+ ^
+one error found
diff --git a/test/files/continuations-neg/t2285.scala b/test/files/continuations-neg/t2285.scala
new file mode 100644
index 0000000000..b906dc455a
--- /dev/null
+++ b/test/files/continuations-neg/t2285.scala
@@ -0,0 +1,11 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def bar() = shift { k: (String => String) => k("1") }
+
+ def foo() = reset { bar(); 7 }
+
+}
diff --git a/test/files/continuations-neg/t2949.check b/test/files/continuations-neg/t2949.check
new file mode 100644
index 0000000000..dd9768807c
--- /dev/null
+++ b/test/files/continuations-neg/t2949.check
@@ -0,0 +1,6 @@
+t2949.scala:13: error: type mismatch;
+ found : Int
+ required: ? @scala.util.continuations.cpsParam[List[?],Any]
+ x * y
+ ^
+one error found
diff --git a/test/files/continuations-neg/t2949.scala b/test/files/continuations-neg/t2949.scala
new file mode 100644
index 0000000000..2d426a4d89
--- /dev/null
+++ b/test/files/continuations-neg/t2949.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def reflect[A,B](xs : List[A]) = shift{ xs.flatMap[B, List[B]] }
+ def reify[A, B](x : A @cpsParam[List[A], B]) = reset{ List(x) }
+
+ def main(args: Array[String]): Unit = println(reify {
+ val x = reflect[Int, Int](List(1,2,3))
+ val y = reflect[Int, Int](List(2,4,8))
+ x * y
+ })
+}
diff --git a/test/files/continuations-neg/trycatch2.check b/test/files/continuations-neg/trycatch2.check
new file mode 100644
index 0000000000..5ff2838bad
--- /dev/null
+++ b/test/files/continuations-neg/trycatch2.check
@@ -0,0 +1,7 @@
+trycatch2.scala:11: error: only simple cps types allowed in try/catch blocks (found: Int @scala.util.continuations.cpsParam[String,Int])
+ def foo1 = try {
+ ^
+trycatch2.scala:19: error: only simple cps types allowed in try/catch blocks (found: Int @scala.util.continuations.cpsParam[String,Int])
+ def foo2 = try {
+ ^
+two errors found
diff --git a/test/files/continuations-neg/trycatch2.scala b/test/files/continuations-neg/trycatch2.scala
new file mode 100644
index 0000000000..d61419169b
--- /dev/null
+++ b/test/files/continuations-neg/trycatch2.scala
@@ -0,0 +1,33 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def fatal[T]: T = throw new Exception
+ def cpsIntStringInt = shift { k:(Int=>String) => k(3); 7 }
+ def cpsIntIntString = shift { k:(Int=>Int) => k(3); "7" }
+
+ def foo1 = try {
+ fatal[Int]
+ cpsIntStringInt
+ } catch {
+ case ex =>
+ cpsIntStringInt
+ }
+
+ def foo2 = try {
+ fatal[Int]
+ cpsIntStringInt
+ } catch {
+ case ex =>
+ cpsIntStringInt
+ }
+
+
+ def main(args: Array[String]): Unit = {
+ println(reset { foo1; "3" })
+ println(reset { foo2; "3" })
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/basics.check b/test/files/continuations-run/basics.check
new file mode 100755
index 0000000000..54c059fdcb
--- /dev/null
+++ b/test/files/continuations-run/basics.check
@@ -0,0 +1,2 @@
+28
+28 \ No newline at end of file
diff --git a/test/files/continuations-run/basics.scala b/test/files/continuations-run/basics.scala
new file mode 100755
index 0000000000..b63710bc64
--- /dev/null
+++ b/test/files/continuations-run/basics.scala
@@ -0,0 +1,23 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def m0() = {
+ shift((k:Int => Int) => k(k(7))) * 2
+ }
+
+ def m1() = {
+ 2 * shift((k:Int => Int) => k(k(7)))
+ }
+
+ def main(args: Array[String]) = {
+
+ println(reset(m0()))
+ println(reset(m1()))
+
+ }
+
+}
diff --git a/test/files/continuations-run/function1.check b/test/files/continuations-run/function1.check
new file mode 100644
index 0000000000..7f8f011eb7
--- /dev/null
+++ b/test/files/continuations-run/function1.check
@@ -0,0 +1 @@
+7
diff --git a/test/files/continuations-run/function1.scala b/test/files/continuations-run/function1.scala
new file mode 100644
index 0000000000..fbd413ed9d
--- /dev/null
+++ b/test/files/continuations-run/function1.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val f = () => shift { k: (Int=>Int) => k(7) }
+ val g: () => Int @cps[Int] = f
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/function4.check b/test/files/continuations-run/function4.check
new file mode 100644
index 0000000000..c7930257df
--- /dev/null
+++ b/test/files/continuations-run/function4.check
@@ -0,0 +1 @@
+7 \ No newline at end of file
diff --git a/test/files/continuations-run/function4.scala b/test/files/continuations-run/function4.scala
new file mode 100644
index 0000000000..2ccd0b4ff2
--- /dev/null
+++ b/test/files/continuations-run/function4.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: () => Int @cps[Int] = () => shift { k: (Int=>Int) => k(7) }
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/function5.check b/test/files/continuations-run/function5.check
new file mode 100644
index 0000000000..c7930257df
--- /dev/null
+++ b/test/files/continuations-run/function5.check
@@ -0,0 +1 @@
+7 \ No newline at end of file
diff --git a/test/files/continuations-run/function5.scala b/test/files/continuations-run/function5.scala
new file mode 100644
index 0000000000..fe528e14e7
--- /dev/null
+++ b/test/files/continuations-run/function5.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: () => Int @cps[Int] = () => 7
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/function6.check b/test/files/continuations-run/function6.check
new file mode 100644
index 0000000000..c7930257df
--- /dev/null
+++ b/test/files/continuations-run/function6.check
@@ -0,0 +1 @@
+7 \ No newline at end of file
diff --git a/test/files/continuations-run/function6.scala b/test/files/continuations-run/function6.scala
new file mode 100644
index 0000000000..54a6ffcc93
--- /dev/null
+++ b/test/files/continuations-run/function6.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: PartialFunction[Int, Int @cps[Int]] = { case x => 7 }
+
+ println(reset(g(2)))
+
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/ifelse0.check b/test/files/continuations-run/ifelse0.check
new file mode 100644
index 0000000000..f8bc79860d
--- /dev/null
+++ b/test/files/continuations-run/ifelse0.check
@@ -0,0 +1,2 @@
+10
+9 \ No newline at end of file
diff --git a/test/files/continuations-run/ifelse0.scala b/test/files/continuations-run/ifelse0.scala
new file mode 100644
index 0000000000..2facab4b98
--- /dev/null
+++ b/test/files/continuations-run/ifelse0.scala
@@ -0,0 +1,18 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = if (x <= 7)
+ shift { k: (Int=>Int) => k(k(k(x))) }
+ else
+ shift { k: (Int=>Int) => k(x) }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test(7)))
+ println(reset(1 + test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/ifelse1.check b/test/files/continuations-run/ifelse1.check
new file mode 100644
index 0000000000..86a3fbc0c1
--- /dev/null
+++ b/test/files/continuations-run/ifelse1.check
@@ -0,0 +1,4 @@
+10
+9
+8
+11 \ No newline at end of file
diff --git a/test/files/continuations-run/ifelse1.scala b/test/files/continuations-run/ifelse1.scala
new file mode 100644
index 0000000000..c624b84b75
--- /dev/null
+++ b/test/files/continuations-run/ifelse1.scala
@@ -0,0 +1,25 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test1(x:Int) = if (x <= 7)
+ shift { k: (Int=>Int) => k(k(k(x))) }
+ else
+ x
+
+ def test2(x:Int) = if (x <= 7)
+ x
+ else
+ shift { k: (Int=>Int) => k(k(k(x))) }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test1(7)))
+ println(reset(1 + test1(8)))
+ println(reset(1 + test2(7)))
+ println(reset(1 + test2(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/ifelse2.check b/test/files/continuations-run/ifelse2.check
new file mode 100644
index 0000000000..f97a95b08d
--- /dev/null
+++ b/test/files/continuations-run/ifelse2.check
@@ -0,0 +1,4 @@
+abort
+()
+alive
+()
diff --git a/test/files/continuations-run/ifelse2.scala b/test/files/continuations-run/ifelse2.scala
new file mode 100644
index 0000000000..506acc4d00
--- /dev/null
+++ b/test/files/continuations-run/ifelse2.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = if (x <= 7)
+ shift { k: (Unit=>Unit) => println("abort") }
+
+ def main(args: Array[String]): Any = {
+ println(reset{ test(7); println("alive") })
+ println(reset{ test(8); println("alive") })
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/ifelse3.check b/test/files/continuations-run/ifelse3.check
new file mode 100644
index 0000000000..95b562c8e6
--- /dev/null
+++ b/test/files/continuations-run/ifelse3.check
@@ -0,0 +1,2 @@
+6
+9
diff --git a/test/files/continuations-run/ifelse3.scala b/test/files/continuations-run/ifelse3.scala
new file mode 100644
index 0000000000..54566a421c
--- /dev/null
+++ b/test/files/continuations-run/ifelse3.scala
@@ -0,0 +1,21 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def util(x: Boolean) = shift { k: (Boolean=>Int) => k(x) }
+
+ def test(x:Int) = if (util(x <= 7))
+ x - 1
+ else
+ x + 1
+
+
+ def main(args: Array[String]): Any = {
+ println(reset(test(7)))
+ println(reset(test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/infer1.scala b/test/files/continuations-run/infer1.scala
new file mode 100644
index 0000000000..10822508e7
--- /dev/null
+++ b/test/files/continuations-run/infer1.scala
@@ -0,0 +1,33 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x: => Int @cpsParam[String,Int]) = 7
+
+ def test2() = {
+ val x = shift { k: (Int => String) => 9 }
+ x
+ }
+
+ def test3(x: => Int @cpsParam[Int,Int]) = 7
+
+
+ def util() = shift { k: (String => String) => "7" }
+
+ def main(args: Array[String]): Any = {
+ test { shift { k: (Int => String) => 9 } }
+ test { shift { k: (Int => String) => 9 }; 2 }
+// test { shift { k: (Int => String) => 9 }; util() } <-- doesn't work
+ test { shift { k: (Int => String) => 9 }; util(); 2 }
+
+
+ test { shift { k: (Int => String) => 9 }; { test3(0); 2 } }
+
+ test3 { { test3(0); 2 } }
+
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/match0.check b/test/files/continuations-run/match0.check
new file mode 100644
index 0000000000..f8bc79860d
--- /dev/null
+++ b/test/files/continuations-run/match0.check
@@ -0,0 +1,2 @@
+10
+9 \ No newline at end of file
diff --git a/test/files/continuations-run/match0.scala b/test/files/continuations-run/match0.scala
new file mode 100644
index 0000000000..b65d343c07
--- /dev/null
+++ b/test/files/continuations-run/match0.scala
@@ -0,0 +1,18 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = x match {
+ case 7 => shift { k: (Int=>Int) => k(k(k(x))) }
+ case 8 => shift { k: (Int=>Int) => k(x) }
+ }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test(7)))
+ println(reset(1 + test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/match1.check b/test/files/continuations-run/match1.check
new file mode 100644
index 0000000000..73053d3f4f
--- /dev/null
+++ b/test/files/continuations-run/match1.check
@@ -0,0 +1,2 @@
+10
+9
diff --git a/test/files/continuations-run/match1.scala b/test/files/continuations-run/match1.scala
new file mode 100644
index 0000000000..20671f26ba
--- /dev/null
+++ b/test/files/continuations-run/match1.scala
@@ -0,0 +1,18 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = x match {
+ case 7 => shift { k: (Int=>Int) => k(k(k(x))) }
+ case _ => x
+ }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test(7)))
+ println(reset(1 + test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/match2.check b/test/files/continuations-run/match2.check
new file mode 100644
index 0000000000..cbf91349cc
--- /dev/null
+++ b/test/files/continuations-run/match2.check
@@ -0,0 +1,2 @@
+B
+B
diff --git a/test/files/continuations-run/match2.scala b/test/files/continuations-run/match2.scala
new file mode 100644
index 0000000000..8b0fb946df
--- /dev/null
+++ b/test/files/continuations-run/match2.scala
@@ -0,0 +1,26 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test1() = {
+ val (a, b) = shift { k: (((String,String)) => String) => k("A","B") }
+ b
+ }
+
+ case class Elem[T,U](a: T, b: U)
+
+ def test2() = {
+ val Elem(a,b) = shift { k: (Elem[String,String] => String) => k(Elem("A","B")) }
+ b
+ }
+
+
+ def main(args: Array[String]): Any = {
+ println(reset(test1()))
+ println(reset(test2()))
+ }
+
+} \ No newline at end of file
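
match2 checks that value definitions with pattern extraction on the left-hand side still work when the right-hand side shifts. Hand-CPS'd, the tuple case looks roughly like this (Match2Sketch and test1CPS are illustrative names):

    object Match2Sketch {
      // The continuation receives the tuple and performs the extraction that
      // the "val (a, b) = shift { ... }" line expresses in the test above.
      def test1CPS(k: ((String, String)) => String): String = k(("A", "B"))

      def main(args: Array[String]): Unit =
        println(test1CPS { case (a, b) => b })  // prints B, as in match2.check
    }
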
diff --git a/test/files/continuations-run/t1807.check b/test/files/continuations-run/t1807.check
new file mode 100644
index 0000000000..56a6051ca2
--- /dev/null
+++ b/test/files/continuations-run/t1807.check
@@ -0,0 +1 @@
+1 \ No newline at end of file
diff --git a/test/files/continuations-run/t1807.scala b/test/files/continuations-run/t1807.scala
new file mode 100644
index 0000000000..278b3a9936
--- /dev/null
+++ b/test/files/continuations-run/t1807.scala
@@ -0,0 +1,14 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val z = reset {
+ val f: (() => Int @cps[Int]) = () => 1
+ f()
+ }
+ println(z)
+ }
+} \ No newline at end of file
diff --git a/test/files/continuations-run/t1808.scala b/test/files/continuations-run/t1808.scala
new file mode 100644
index 0000000000..125c7c1cdf
--- /dev/null
+++ b/test/files/continuations-run/t1808.scala
@@ -0,0 +1,10 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ reset0 { 0 }
+ }
+} \ No newline at end of file
diff --git a/test/files/continuations-run/t1820.scala b/test/files/continuations-run/t1820.scala
new file mode 100644
index 0000000000..893ddab6d1
--- /dev/null
+++ b/test/files/continuations-run/t1820.scala
@@ -0,0 +1,14 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def shifted: Unit @suspendable = shift { (k: Unit => Unit) => () }
+ def test1(b: => Boolean) = {
+ reset {
+ if (b) shifted
+ }
+ }
+ def main(args: Array[String]) = test1(true)
+} \ No newline at end of file
diff --git a/test/files/continuations-run/t1821.check b/test/files/continuations-run/t1821.check
new file mode 100644
index 0000000000..f7b76115db
--- /dev/null
+++ b/test/files/continuations-run/t1821.check
@@ -0,0 +1,4 @@
+()
+()
+()
+() \ No newline at end of file
diff --git a/test/files/continuations-run/t1821.scala b/test/files/continuations-run/t1821.scala
new file mode 100644
index 0000000000..0d5fb553be
--- /dev/null
+++ b/test/files/continuations-run/t1821.scala
@@ -0,0 +1,20 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def suspended[A](x: A): A @suspendable = x
+ def test1[A](x: A): A @suspendable = suspended(x) match { case x => x }
+ def test2[A](x: List[A]): A @suspendable = suspended(x) match { case List(x) => x }
+
+ def test3[A](x: A): A @suspendable = x match { case x => x }
+ def test4[A](x: List[A]): A @suspendable = x match { case List(x) => x }
+
+ def main(args: Array[String]) = {
+ println(reset(test1()))
+ println(reset(test2(List(()))))
+ println(reset(test3()))
+ println(reset(test4(List(()))))
+ }
+} \ No newline at end of file
diff --git a/test/files/continuations-run/t2864.check b/test/files/continuations-run/t2864.check
new file mode 100644
index 0000000000..d411bb7c1a
--- /dev/null
+++ b/test/files/continuations-run/t2864.check
@@ -0,0 +1 @@
+400
diff --git a/test/files/continuations-run/t2864.scala b/test/files/continuations-run/t2864.scala
new file mode 100644
index 0000000000..7a2579e45c
--- /dev/null
+++ b/test/files/continuations-run/t2864.scala
@@ -0,0 +1,30 @@
+import scala.util.continuations._
+object Test {
+
+ def double[B](n : Int)(k : Int => B) : B = k(n * 2)
+
+ def main(args : Array[String]) {
+ reset {
+ val result1 = shift(double[Unit](100))
+ val result2 = shift(double[Unit](result1))
+ println(result2)
+ }
+ }
+
+ def foo: Int @cps[Int] = {
+ val a0 = shift((k:Int=>Int) => k(0))
+ val x0 = 2
+ val a1 = shift((k:Int=>Int) => x0)
+ 0
+ }
+
+/*
+ def bar: ControlContext[Int,Int,Int] = {
+ shiftR((k:Int=>Int) => k(0)).flatMap { a0 =>
+ val x0 = 2
+ shiftR((k:Int=>Int) => x0).map { a1 =>
+ 0
+ }}
+ }
+*/
+} \ No newline at end of file
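
The commented-out bar above hints at the monadic encoding the plugin targets (scala.util.continuations.ControlContext). A self-contained model of that encoding, with CC, shiftR and resetR as illustrative stand-ins for the real library types, shows how the answer type is threaded through map and flatMap:

    // A computation producing an A whose enclosing reset expects a B and returns a C.
    case class CC[A, B, C](run: (A => B) => C) {
      def map[A1](f: A => A1): CC[A1, B, C] =
        CC(k => run(a => k(f(a))))
      def flatMap[A1, B1](f: A => CC[A1, B1, B]): CC[A1, B1, C] =
        CC(k => run(a => f(a).run(k)))
    }

    object CCDemo {
      def shiftR[A, B, C](body: (A => B) => C): CC[A, B, C] = CC(body)
      def resetR[A, C](c: CC[A, A, C]): C = c.run(identity)

      def main(args: Array[String]): Unit = {
        // Mirrors foo/bar above: the second shift ignores its continuation,
        // so its own result (x0 = 2) becomes the answer of the whole reset.
        val prog = shiftR((k: Int => Int) => k(0)).flatMap { a0 =>
          val x0 = 2
          shiftR((k: Int => Int) => x0).map { a1 => 0 }
        }
        println(resetR(prog)) // prints 2
      }
    }
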
diff --git a/test/files/continuations-run/t2934.check b/test/files/continuations-run/t2934.check
new file mode 100644
index 0000000000..a92586538e
--- /dev/null
+++ b/test/files/continuations-run/t2934.check
@@ -0,0 +1 @@
+List(3, 4, 5)
diff --git a/test/files/continuations-run/t2934.scala b/test/files/continuations-run/t2934.scala
new file mode 100644
index 0000000000..a1b8ca9e04
--- /dev/null
+++ b/test/files/continuations-run/t2934.scala
@@ -0,0 +1,10 @@
+import scala.util.continuations._
+
+object Test {
+ def main(args : Array[String]) {
+ println(reset {
+ val x = shift(List(1,2,3).flatMap[Int, List[Int]])
+ List(x + 2)
+ })
+ }
+}
diff --git a/test/files/continuations-run/t3199.check b/test/files/continuations-run/t3199.check
new file mode 100644
index 0000000000..a065247b8c
--- /dev/null
+++ b/test/files/continuations-run/t3199.check
@@ -0,0 +1 @@
+Right(7)
diff --git a/test/files/continuations-run/t3199.scala b/test/files/continuations-run/t3199.scala
new file mode 100644
index 0000000000..7b54793e05
--- /dev/null
+++ b/test/files/continuations-run/t3199.scala
@@ -0,0 +1,20 @@
+import _root_.scala.collection.Seq
+import _root_.scala.util.control.Exception
+import _root_.scala.util.continuations._
+
+object Test {
+
+ trait AbstractResource[+R <: AnyRef] {
+ def reflect[B] : R @cpsParam[B,Either[Throwable, B]] = shift(acquireFor)
+ def acquireFor[B](f : R => B) : Either[Throwable, B] = {
+ import Exception._
+ catching(List(classOf[Throwable]) : _*) either (f(null.asInstanceOf[R]))
+ }
+ }
+
+ def main(args: Array[String]) : Unit = {
+ val x = new AbstractResource[String] { }
+ val result = x.acquireFor( x => 7 )
+ println(result)
+ }
+ }
diff --git a/test/files/continuations-run/t3199b.check b/test/files/continuations-run/t3199b.check
new file mode 100644
index 0000000000..b5d8bb58d9
--- /dev/null
+++ b/test/files/continuations-run/t3199b.check
@@ -0,0 +1 @@
+[1, 2, 3]
diff --git a/test/files/continuations-run/t3199b.scala b/test/files/continuations-run/t3199b.scala
new file mode 100644
index 0000000000..2122c963ac
--- /dev/null
+++ b/test/files/continuations-run/t3199b.scala
@@ -0,0 +1,11 @@
+object Test {
+
+ def test() = {
+ java.util.Arrays.asList(Array(1,2,3):_*)
+ }
+
+ def main(args: Array[String]) = {
+ println(test())
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/t3223.check b/test/files/continuations-run/t3223.check
new file mode 100644
index 0000000000..ec635144f6
--- /dev/null
+++ b/test/files/continuations-run/t3223.check
@@ -0,0 +1 @@
+9
diff --git a/test/files/continuations-run/t3223.scala b/test/files/continuations-run/t3223.scala
new file mode 100644
index 0000000000..efed1ff581
--- /dev/null
+++ b/test/files/continuations-run/t3223.scala
@@ -0,0 +1,19 @@
+import scala.util.continuations._
+object Test {
+
+ def foo(x:Int) = {
+ try {
+ throw new Exception
+ shiftUnit0[Int,Int](7)
+ } catch {
+ case ex =>
+ val g = (a:Int)=>a
+ 9
+ }
+ }
+
+ def main(args: Array[String]) {
+ println(reset(foo(0)))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/t3225.check b/test/files/continuations-run/t3225.check
new file mode 100644
index 0000000000..df1a8a9ce4
--- /dev/null
+++ b/test/files/continuations-run/t3225.check
@@ -0,0 +1,12 @@
+8
+8
+9
+9
+8
+9
+8
+8
+9
+9
+8
+9
diff --git a/test/files/continuations-run/t3225.scala b/test/files/continuations-run/t3225.scala
new file mode 100644
index 0000000000..5b6259c43f
--- /dev/null
+++ b/test/files/continuations-run/t3225.scala
@@ -0,0 +1,56 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ class Bla {
+ val x = 8
+ def y[T] = 9
+ }
+
+/*
+ def bla[A] = shift { k:(Bla=>A) => k(new Bla) }
+*/
+
+ def bla1 = shift { k:(Bla=>Bla) => k(new Bla) }
+ def bla2 = shift { k:(Bla=>Int) => k(new Bla) }
+
+ def fooA = bla2.x
+ def fooB[T] = bla2.y[T]
+
+ def testMono() = {
+ println(reset(bla1).x)
+ println(reset(bla2.x))
+ println(reset(bla2.y[Int]))
+ println(reset(bla2.y))
+ println(reset(fooA))
+ println(reset(fooB))
+ 0
+ }
+
+ def blaX[A] = shift { k:(Bla=>A) => k(new Bla) }
+
+ def fooX[A] = blaX[A].x
+ def fooY[A] = blaX[A].y[A]
+
+ def testPoly() = {
+ println(reset(blaX[Bla]).x)
+ println(reset(blaX[Int].x))
+ println(reset(blaX[Int].y[Int]))
+ println(reset(blaX[Int].y))
+ println(reset(fooX[Int]))
+ println(reset(fooY[Int]))
+ 0
+ }
+
+
+ // TODO: check whether this also applies to a::shift { k => ... }
+
+ def main(args: Array[String]) = {
+ testMono()
+ testPoly()
+ }
+
+}
diff --git a/test/files/continuations-run/trycatch0.check b/test/files/continuations-run/trycatch0.check
new file mode 100644
index 0000000000..36806909d0
--- /dev/null
+++ b/test/files/continuations-run/trycatch0.check
@@ -0,0 +1,2 @@
+10
+10 \ No newline at end of file
diff --git a/test/files/continuations-run/trycatch0.scala b/test/files/continuations-run/trycatch0.scala
new file mode 100644
index 0000000000..ec39863f3f
--- /dev/null
+++ b/test/files/continuations-run/trycatch0.scala
@@ -0,0 +1,25 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def foo = try {
+ shift((k: Int=>Int) => k(7))
+ } catch {
+ case ex =>
+ 9
+ }
+
+ def bar = try {
+ 7
+ } catch {
+ case ex =>
+ shiftUnit0[Int,Int](9)
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(reset { foo + 3 })
+ println(reset { bar + 3 })
+ }
+} \ No newline at end of file
diff --git a/test/files/continuations-run/trycatch1.check b/test/files/continuations-run/trycatch1.check
new file mode 100644
index 0000000000..a028d2b1e1
--- /dev/null
+++ b/test/files/continuations-run/trycatch1.check
@@ -0,0 +1,4 @@
+12
+12
+12
+12 \ No newline at end of file
diff --git a/test/files/continuations-run/trycatch1.scala b/test/files/continuations-run/trycatch1.scala
new file mode 100644
index 0000000000..10dfd30bb2
--- /dev/null
+++ b/test/files/continuations-run/trycatch1.scala
@@ -0,0 +1,48 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def fatal: Int = throw new Exception()
+
+ def foo1 = try {
+ fatal
+ shift((k: Int=>Int) => k(7))
+ } catch {
+ case ex =>
+ 9
+ }
+
+ def foo2 = try {
+ shift((k: Int=>Int) => k(7))
+ fatal
+ } catch {
+ case ex =>
+ 9
+ }
+
+ def bar1 = try {
+ fatal
+ 7
+ } catch {
+ case ex =>
+ shiftUnit0[Int,Int](9) // a regular shift here causes the 'no-symbol does not have owner' compiler error
+ }
+
+ def bar2 = try {
+ 7
+ fatal
+ } catch {
+ case ex =>
+ shiftUnit0[Int,Int](9) // a regular shift here causes the 'no-symbol does not have owner' compiler error
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(reset { foo1 + 3 })
+ println(reset { foo2 + 3 })
+ println(reset { bar1 + 3 })
+ println(reset { bar2 + 3 })
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/while0.check b/test/files/continuations-run/while0.check
new file mode 100644
index 0000000000..d58c55a31d
--- /dev/null
+++ b/test/files/continuations-run/while0.check
@@ -0,0 +1 @@
+9000
diff --git a/test/files/continuations-run/while0.scala b/test/files/continuations-run/while0.scala
new file mode 100644
index 0000000000..46005a4a77
--- /dev/null
+++ b/test/files/continuations-run/while0.scala
@@ -0,0 +1,22 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def foo(): Int @cps[Unit] = 2
+
+ def test(): Unit @cps[Unit] = {
+ var x = 0
+ while (x < 9000) { // pick number large enough to require tail-call opt
+ x += foo()
+ }
+ println(x)
+ }
+
+ def main(args: Array[String]): Any = {
+ reset(test())
+ }
+
+} \ No newline at end of file
diff --git a/test/files/continuations-run/while1.check b/test/files/continuations-run/while1.check
new file mode 100644
index 0000000000..3d5f0b9a46
--- /dev/null
+++ b/test/files/continuations-run/while1.check
@@ -0,0 +1,11 @@
+up
+up
+up
+up
+up
+10
+down
+down
+down
+down
+down
diff --git a/test/files/continuations-run/while1.scala b/test/files/continuations-run/while1.scala
new file mode 100644
index 0000000000..fd41ab36ee
--- /dev/null
+++ b/test/files/continuations-run/while1.scala
@@ -0,0 +1,22 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def foo(): Int @cps[Unit] = shift { k => println("up"); k(2); println("down") }
+
+ def test(): Unit @cps[Unit] = {
+ var x = 0
+ while (x < 9) {
+ x += foo()
+ }
+ println(x)
+ }
+
+ def main(args: Array[String]): Any = {
+ reset(test())
+ }
+
+} \ No newline at end of file
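
In while1 the loop body shifts on every iteration: each "up" is printed before the rest of the loop runs as the captured continuation, and each "down" as those continuations return. A hand-CPS'd equivalent (fooCPS and loop are illustrative names) reproduces the expected output without the plugin:

    object While1Sketch {
      // Roughly what the transform turns the while loop into:
      // the remainder of the loop becomes the continuation passed to foo.
      def fooCPS(k: Int => Unit): Unit = { println("up"); k(2); println("down") }

      def loop(x: Int): Unit =
        if (x < 9) fooCPS(v => loop(x + v))  // loop body, continued recursively
        else println(x)

      def main(args: Array[String]): Unit =
        loop(0)  // prints "up" five times, then 10, then "down" five times
    }
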
diff --git a/test/files/continuations-run/while2.check b/test/files/continuations-run/while2.check
new file mode 100644
index 0000000000..9fe515181b
--- /dev/null
+++ b/test/files/continuations-run/while2.check
@@ -0,0 +1,19 @@
+up
+up
+up
+up
+up
+up
+up
+up
+up
+9000
+down
+down
+down
+down
+down
+down
+down
+down
+down
diff --git a/test/files/continuations-run/while2.scala b/test/files/continuations-run/while2.scala
new file mode 100644
index 0000000000..63f9cb99fe
--- /dev/null
+++ b/test/files/continuations-run/while2.scala
@@ -0,0 +1,23 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def foo1(): Int @cps[Unit] = 2
+ def foo2(): Int @cps[Unit] = shift { k => println("up"); k(2); println("down") }
+
+ def test(): Unit @cps[Unit] = {
+ var x = 0
+ while (x < 9000) { // pick number large enough to require tail-call opt
+ x += (if (x % 1000 != 0) foo1() else foo2())
+ }
+ println(x)
+ }
+
+ def main(args: Array[String]): Any = {
+ reset(test())
+ }
+
+} \ No newline at end of file
diff --git a/test/files/files.iml b/test/files/files.iml
deleted file mode 100644
index ac78e33f39..0000000000
--- a/test/files/files.iml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module relativePaths="true" type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="Scala" name="Scala">
- <configuration />
- </facet>
- </component>
- <component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />
- <content url="file://$MODULE_DIR$" />
- <orderEntry type="library" name="lib1" level="project" />
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="library" name="lib" level="project" />
- </component>
-</module>
-
diff --git a/test/files/jvm/JavaInteraction.scala b/test/files/jvm/JavaInteraction.scala
deleted file mode 100644
index 1316fad5d4..0000000000
--- a/test/files/jvm/JavaInteraction.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-//############################################################################
-// Test Java interaction
-//############################################################################
-
-import java.awt.Color;
-import java.awt.Point;
-
-class ColoredPoint(x: Int, y: Int, c_ : Color) extends Point(x, y) {
- val c: Color = c_;
- def getC(): Color = c;
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val p = new ColoredPoint(5, 7, Color.RED);
- Console.println("p.x = " + p.x);
- Console.println("p.c = " + p.c);
- Console.println("p.getX() = " + p.getX());
- Console.println("p.getC() = " + p.getC());
- }
-}
-
-//############################################################################
diff --git a/test/files/jvm/actor-exceptions.check b/test/files/jvm/actor-exceptions.check
index 021ccea1f1..d86bac9de5 100644
--- a/test/files/jvm/actor-exceptions.check
+++ b/test/files/jvm/actor-exceptions.check
@@ -1,11 +1 @@
-problem
-received A
-problem
-received A
-problem
-received A
-problem
-received A
-problem
-received last A
-slave exited because of java.lang.Exception: unhandled
+OK
diff --git a/test/files/jvm/actor-exceptions.scala b/test/files/jvm/actor-exceptions.scala
index ccec84c56c..3ee4db9ed2 100644
--- a/test/files/jvm/actor-exceptions.scala
+++ b/test/files/jvm/actor-exceptions.scala
@@ -2,39 +2,57 @@
import scala.actors.{Actor, Exit}
import Actor._
-case class MyException(text: String) extends Exception
+case class MyException(text: String) extends Exception {
+ override def fillInStackTrace() = this
+}
+
+case class MyOtherException(text: String) extends Exception {
+ override def fillInStackTrace() = this
+}
object Master extends Actor {
trapExit = true
def act() {
+ try {
link(Slave)
Slave.start()
for (i <- 0 until 10) Slave ! A
react {
- case Exit(from, reason) => println("slave exited because of "+reason)
+ case Exit(from, reason) =>
+ println("OK")
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
}
}
}
object Slave extends Actor {
+ override def toString = "Slave"
override def exceptionHandler: PartialFunction[Exception, Unit] = {
- case MyException(text) => println(text)
+ case MyException(text) =>
+ case other if !other.isInstanceOf[scala.util.control.ControlThrowable] => super.exceptionHandler(other)
}
def act() {
+ try {
var cnt = 0
loop {
react {
case A =>
cnt += 1
if (cnt % 2 != 0) throw MyException("problem")
- if (cnt < 10)
- println("received A")
- else {
- println("received last A")
- throw new Exception("unhandled")
+ if (cnt == 10) {
+ throw MyOtherException("unhandled")
}
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] &&
+ !e.isInstanceOf[MyException] &&
+ !e.isInstanceOf[MyOtherException] =>
+ e.printStackTrace()
+ }
}
}
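
Nearly every act() body in these tests is wrapped in the same guard: print unexpected exceptions for diagnostics, but never catch scala.util.control.ControlThrowable, since react suspends an actor by throwing a control exception that must be allowed to propagate. The guard on its own (SafeRun is an invented name; no actors involved):

    import scala.util.control.ControlThrowable

    object SafeRun {
      // Run a block, surfacing unexpected exceptions while re-throwing
      // control-flow exceptions such as the one used by react.
      def apply(block: => Unit): Unit =
        try block
        catch {
          case ce: ControlThrowable => throw ce          // never swallow control flow
          case e: Throwable         => e.printStackTrace()
        }

      def main(args: Array[String]): Unit = {
        SafeRun(println("ok"))
        SafeRun(throw new RuntimeException("reported, not rethrown"))
      }
    }
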
diff --git a/test/files/jvm/actor-executor.check b/test/files/jvm/actor-executor.check
new file mode 100644
index 0000000000..bdbdb5c6a2
--- /dev/null
+++ b/test/files/jvm/actor-executor.check
@@ -0,0 +1,20 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
diff --git a/test/files/jvm/actor-executor.scala b/test/files/jvm/actor-executor.scala
new file mode 100644
index 0000000000..b1f9caebdd
--- /dev/null
+++ b/test/files/jvm/actor-executor.scala
@@ -0,0 +1,75 @@
+import java.util.concurrent.Executors
+import scala.actors.{Actor, SchedulerAdapter}
+import Actor._
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object One extends AdaptedActor {
+ def act() {
+ try {
+ Two.start()
+ var i = 0
+ loopWhile (i < 10000) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % 1000 == 0)
+ println("One: OK")
+ if (i == 10000)
+ Test.executor.shutdown()
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ try {
+ var i = 0
+ loopWhile (i < 10000) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % 1000 == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+object Test {
+ val executor =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
+
+ val scheduler =
+ new SchedulerAdapter {
+ def execute(block: => Unit) {
+ val task = new Runnable {
+ def run() { block }
+ }
+ try {
+ executor.execute(task)
+ } catch {
+ case ree: java.util.concurrent.RejectedExecutionException =>
+ task.run()
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ One.start()
+ }
+}
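
The interesting part of actor-executor is the SchedulerAdapter: actor tasks are handed to a fixed-size thread pool, and once the pool has been shut down, rejected tasks run on the calling thread so no message handling is silently dropped. The pool-plus-fallback piece is plain JDK code (ExecutorAdapterSketch and its execute method are illustrative names, independent of the actors library):

    import java.util.concurrent.{Executors, RejectedExecutionException}

    object ExecutorAdapterSketch {
      // Fixed thread pool sized to the available processors, as in the test.
      val executor = Executors.newFixedThreadPool(Runtime.getRuntime.availableProcessors())

      // Wrap a by-name block in a Runnable; if the pool rejects it
      // (e.g. after shutdown), run it on the caller's thread instead.
      def execute(block: => Unit): Unit = {
        val task = new Runnable { def run(): Unit = block }
        try executor.execute(task)
        catch { case _: RejectedExecutionException => task.run() }
      }

      def main(args: Array[String]): Unit = {
        for (i <- 1 to 4) execute(println("task " + i))
        executor.shutdown()
      }
    }
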
diff --git a/test/files/jvm/actor-executor2.check b/test/files/jvm/actor-executor2.check
new file mode 100644
index 0000000000..da78f45836
--- /dev/null
+++ b/test/files/jvm/actor-executor2.check
@@ -0,0 +1,21 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+One exited
diff --git a/test/files/jvm/actor-executor2.scala b/test/files/jvm/actor-executor2.scala
new file mode 100644
index 0000000000..f8fcaef69f
--- /dev/null
+++ b/test/files/jvm/actor-executor2.scala
@@ -0,0 +1,88 @@
+import scala.actors.{Actor, SchedulerAdapter, Exit}
+import Actor._
+import java.util.concurrent.{Executors, RejectedExecutionException}
+
+object One extends AdaptedActor {
+ def act() {
+ try {
+ Two.start()
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("One: OK")
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ try {
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object Test {
+ val NUM_MSG = 100000
+
+ val executor =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
+
+ val scheduler =
+ new SchedulerAdapter {
+ def execute(block: => Unit) {
+ val task = new Runnable {
+ def run() { block }
+ }
+ try {
+ executor.execute(task)
+ } catch {
+ case ree: RejectedExecutionException =>
+ task.run() // run task on current thread
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ try {
+ self.trapExit = true
+ link(One)
+ One.start()
+
+ receive {
+ case Exit(from, reason) =>
+ println("One exited")
+ Test.executor.shutdown()
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
diff --git a/test/files/jvm/actor-executor3.check b/test/files/jvm/actor-executor3.check
new file mode 100644
index 0000000000..bdbdb5c6a2
--- /dev/null
+++ b/test/files/jvm/actor-executor3.check
@@ -0,0 +1,20 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
diff --git a/test/files/jvm/actor-executor3.scala b/test/files/jvm/actor-executor3.scala
new file mode 100644
index 0000000000..4fde2c6c5f
--- /dev/null
+++ b/test/files/jvm/actor-executor3.scala
@@ -0,0 +1,62 @@
+import scala.actors.Actor
+import scala.actors.scheduler.ExecutorScheduler
+import java.util.concurrent.Executors
+
+object One extends AdaptedActor {
+ def act() {
+ try {
+ Two.start()
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("One: OK")
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ try {
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object Test {
+ val NUM_MSG = 100000
+
+ val executor =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
+
+ val scheduler = ExecutorScheduler(executor)
+
+ def main(args: Array[String]) {
+ One.start()
+ }
+}
diff --git a/test/files/jvm/actor-getstate.check b/test/files/jvm/actor-getstate.check
new file mode 100644
index 0000000000..2c94e48371
--- /dev/null
+++ b/test/files/jvm/actor-getstate.check
@@ -0,0 +1,2 @@
+OK
+OK
diff --git a/test/files/jvm/actor-getstate.scala b/test/files/jvm/actor-getstate.scala
new file mode 100644
index 0000000000..a6e15a8721
--- /dev/null
+++ b/test/files/jvm/actor-getstate.scala
@@ -0,0 +1,85 @@
+import scala.actors.{Reactor, Actor, TIMEOUT}
+import Actor._
+
+object Test {
+
+ def assert(cond: => Boolean, hint: String) {
+ if (!cond)
+ println("FAIL ["+hint+"]")
+ }
+
+ def expectActorState(a: Reactor[T] forSome { type T }, s: Actor.State.Value) {
+ var done = false
+ var i = 0
+ while (!done) {
+ i = i + 1
+ if (i == 10) { // only wait for 2 seconds total
+ println("FAIL ["+a+": expected "+s+"]")
+ done = true
+ }
+
+ Thread.sleep(200)
+ if (a.getState == s) // success
+ done = true
+ }
+ }
+
+ def main(args: Array[String]) {
+ actor {
+ val a = new Reactor[Any] {
+ def act() {
+ assert(getState == Actor.State.Runnable, "runnable1")
+ react {
+ case 'go =>
+ println("OK")
+ }
+ }
+ }
+ expectActorState(a, Actor.State.New)
+
+ a.start()
+ expectActorState(a, Actor.State.Suspended)
+
+ a ! 'go
+ expectActorState(a, Actor.State.Terminated)
+
+ val b = new Actor {
+ def act() {
+ assert(getState == Actor.State.Runnable, "runnable2: "+getState)
+ react {
+ case 'go =>
+ reactWithin(100000) {
+ case TIMEOUT =>
+ case 'go =>
+ receive {
+ case 'go =>
+ }
+ receiveWithin(100000) {
+ case TIMEOUT =>
+ case 'go =>
+ println("OK")
+ }
+ }
+ }
+ }
+ }
+ expectActorState(b, Actor.State.New)
+
+ b.start()
+ expectActorState(b, Actor.State.Suspended)
+
+ b ! 'go
+ expectActorState(b, Actor.State.TimedSuspended)
+
+ b ! 'go
+ expectActorState(b, Actor.State.Blocked)
+
+ b ! 'go
+ expectActorState(b, Actor.State.TimedBlocked)
+
+ b ! 'go
+ expectActorState(b, Actor.State.Terminated)
+ }
+ }
+
+}
diff --git a/test/files/jvm/actor-link-getstate.check b/test/files/jvm/actor-link-getstate.check
new file mode 100644
index 0000000000..9755447320
--- /dev/null
+++ b/test/files/jvm/actor-link-getstate.check
@@ -0,0 +1,2 @@
+Done
+Terminated
diff --git a/test/files/jvm/actor-link-getstate.scala b/test/files/jvm/actor-link-getstate.scala
new file mode 100644
index 0000000000..c4c33ef752
--- /dev/null
+++ b/test/files/jvm/actor-link-getstate.scala
@@ -0,0 +1,62 @@
+import scala.actors.{Actor, Exit}
+import scala.actors.Actor._
+
+case class MyException(text: String) extends Exception(text) {
+ override def fillInStackTrace() = this
+}
+
+object Slave extends Actor {
+ def act() {
+ try {
+ loop {
+ react {
+ case 'doWork =>
+ Console.err.println("Done")
+ reply('done)
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+object Master extends Actor {
+ override def toString = "Master"
+ def act() {
+ try {
+ link(Slave)
+ Slave ! 'doWork
+ react {
+ case 'done =>
+ throw new MyException("Master crashed")
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ actor {
+ try {
+ self.trapExit = true
+ link(Slave)
+ Slave.start()
+ Master.start()
+ react {
+ case Exit(from, reason) if (from == Slave) =>
+ Console.err.println(Slave.getState)
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+}
diff --git a/test/files/jvm/actor-looping.scala b/test/files/jvm/actor-looping.scala
index 9599adbb2c..475d4754ce 100644
--- a/test/files/jvm/actor-looping.scala
+++ b/test/files/jvm/actor-looping.scala
@@ -6,6 +6,7 @@ object Test {
def main(args: Array[String]) {
val a = actor {
+ try {
var cnt = 0
loop {
react {
@@ -20,6 +21,10 @@ object Test {
}
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
for (i <- 0 until 10) a ! A
diff --git a/test/files/jvm/actor-normal-exit.scala b/test/files/jvm/actor-normal-exit.scala
index 40dc7b7da4..20863d5bb0 100644
--- a/test/files/jvm/actor-normal-exit.scala
+++ b/test/files/jvm/actor-normal-exit.scala
@@ -5,18 +5,28 @@ object Test {
object Master extends Actor {
trapExit = true
def act() {
+ try {
Slave.start()
react {
case Exit(from, reason) =>
println("slave exited for reason " + reason)
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
object Slave extends Actor {
def act() {
+ try {
link(Master)
println("Done")
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
diff --git a/test/files/jvm/actor-receivewithin.scala b/test/files/jvm/actor-receivewithin.scala
index c6818cf211..a5c87c2722 100644
--- a/test/files/jvm/actor-receivewithin.scala
+++ b/test/files/jvm/actor-receivewithin.scala
@@ -29,6 +29,7 @@ object A extends Actor {
}
}
B ! 'next
+ receive { case 'done => }
cnt = 0
while (cnt < 501) {
cnt += 1
@@ -56,6 +57,7 @@ object B extends Actor {
for (_ <- 1 to 500) {
A ! 'msg2
}
+ A ! 'done
}
}
}
diff --git a/test/files/jvm/actor-sync-send-timeout.scala b/test/files/jvm/actor-sync-send-timeout.scala
new file mode 100644
index 0000000000..21e624bd0a
--- /dev/null
+++ b/test/files/jvm/actor-sync-send-timeout.scala
@@ -0,0 +1,47 @@
+import scala.actors.Actor
+
+/* This test is a regression test for SI-4759.
+ */
+object Test {
+ val Runs = 5
+
+ def main(args: Array[String]) = {
+ var i = 0
+ while (i < Runs) {
+ i += 1
+ A1 ! 1
+ Thread.sleep(500)
+ }
+ //println("done sending to A1")
+ }
+}
+
+object A2 extends Actor {
+ this.start()
+ def act() {
+ loop {
+ react {
+ case 'stop =>
+ //println("A2 exiting")
+ exit()
+ case _ =>
+ }
+ }
+ }
+}
+
+object A1 extends Actor {
+ this.start()
+ def act() {
+ var i = 0
+ loopWhile(i < Test.Runs) {
+ i += 1
+ react {
+ case any =>
+ A2 !? (500, any)
+ if (i == Test.Runs)
+ A2 ! 'stop
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/actor-termination.scala b/test/files/jvm/actor-termination.scala
index 19dfaf8e17..d8e44a2797 100644
--- a/test/files/jvm/actor-termination.scala
+++ b/test/files/jvm/actor-termination.scala
@@ -5,9 +5,14 @@ import scala.actors.Actor
object Test {
def main(args: Array[String]) {
Actor.actor {
+ try {
println("I'm going to make you wait.")
Thread.sleep(5000)
println("Ok, I'm done.")
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
}
diff --git a/test/files/jvm/actor-uncaught-exception.check b/test/files/jvm/actor-uncaught-exception.check
new file mode 100644
index 0000000000..2c94e48371
--- /dev/null
+++ b/test/files/jvm/actor-uncaught-exception.check
@@ -0,0 +1,2 @@
+OK
+OK
diff --git a/test/files/jvm/actor-uncaught-exception.scala b/test/files/jvm/actor-uncaught-exception.scala
new file mode 100644
index 0000000000..5ae66de640
--- /dev/null
+++ b/test/files/jvm/actor-uncaught-exception.scala
@@ -0,0 +1,63 @@
+import scala.actors.{Actor, Exit}
+
+class MyException(msg: String) extends Exception(msg) {
+ override def fillInStackTrace() = this
+}
+
+object Test {
+
+ case object StartError extends Actor {
+ def act() {
+ try {
+ throw new MyException("I don't want to run!")
+ } catch {
+ case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
+ !e.isInstanceOf[MyException]) =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ case object MessageError extends Actor {
+ def act() {
+ try {
+ react {
+ case _ => throw new MyException("No message for me!")
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ case object Supervisor extends Actor {
+ def act() {
+ try {
+ trapExit = true
+ link(StartError)
+ link(MessageError)
+ StartError.start()
+ MessageError.start()
+
+ Actor.loop {
+ react {
+ case Exit(actor, reason) =>
+ println("OK")
+ if (actor == StartError)
+ MessageError ! 'ping
+ else
+ exit()
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ Supervisor.start()
+ }
+}
diff --git a/test/files/jvm/actor-uncaught-exception2.check b/test/files/jvm/actor-uncaught-exception2.check
new file mode 100644
index 0000000000..870a5d32f9
--- /dev/null
+++ b/test/files/jvm/actor-uncaught-exception2.check
@@ -0,0 +1,2 @@
+UncaughtException(StartError,None,None,MyException: I don't want to run!)
+UncaughtException(MessageError,Some('ping),Some(Supervisor),MyException: No message for me!)
diff --git a/test/files/jvm/actor-uncaught-exception2.scala b/test/files/jvm/actor-uncaught-exception2.scala
new file mode 100644
index 0000000000..0364cbeb03
--- /dev/null
+++ b/test/files/jvm/actor-uncaught-exception2.scala
@@ -0,0 +1,63 @@
+import scala.actors.{Actor, Exit, Debug}
+
+class MyException(msg: String) extends Exception(msg) {
+ override def fillInStackTrace() = this
+}
+
+object Test {
+
+ case object StartError extends Actor {
+ def act() {
+ try {
+ throw new MyException("I don't want to run!")
+ } catch {
+ case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
+ !e.isInstanceOf[MyException]) =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ case object MessageError extends Actor {
+ def act() {
+ try {
+ react {
+ case _ => throw new MyException("No message for me!")
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ case object Supervisor extends Actor {
+ def act() {
+ try {
+ trapExit = true
+ link(StartError)
+ link(MessageError)
+ StartError.start()
+ MessageError.start()
+
+ Actor.loop {
+ react {
+ case Exit(actor, reason) =>
+ println(reason)
+ if (actor == StartError)
+ MessageError ! 'ping
+ else
+ exit()
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ Supervisor.start()
+ }
+}
diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check
index 128f8e8f6e..e307f8930d 100644
--- a/test/files/jvm/annotations.check
+++ b/test/files/jvm/annotations.check
@@ -23,7 +23,7 @@ public Test4$Foo6(java.lang.String)
public Test4$Foo7()
@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=constructor val)
-private final int Test4$Foo8.n
+public Test4$Foo8(int)
@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com)
private int Test4$Foo9.z
@@ -37,5 +37,16 @@ public int Test4$Foo9.x()
@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://uppla.com)
public void Test4$Foo9.setY(int)
+@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=on param 1)
+public Test4$Foo10(java.lang.String)
+
+@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=on param 2)
+private final java.lang.String Test4$Foo11.name
+
+@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=on param 3)
+public void Test4$Foo12.name_$eq(java.lang.String)
+
0
99
+dylan
+2
diff --git a/test/files/jvm/annotations.scala b/test/files/jvm/annotations.scala
index 227bd919c1..f32eb625e9 100644
--- a/test/files/jvm/annotations.scala
+++ b/test/files/jvm/annotations.scala
@@ -102,17 +102,20 @@ object Test4 {
type myAnn = SourceAnnotation @beanGetter @field
@BeanProperty @myAnn("http://eppli.com") var z = 0
}
+ class Foo10(@SourceAnnotation("on param 1") val name: String)
+ class Foo11(@(SourceAnnotation @scala.annotation.target.field)("on param 2") val name: String)
+ class Foo12(@(SourceAnnotation @scala.annotation.target.setter)("on param 3") var name: String)
def run {
import java.lang.annotation.Annotation
import java.lang.reflect.AnnotatedElement
+ def printSourceAnnotation(a: Annotation) {
+ val ann = a.asInstanceOf[SourceAnnotation]
+ println("@test.SourceAnnotation(mails=" + ann.mails.deepMkString("{", ",", "}") +
+ ", value=" + ann.value + ")")
+ }
def printSourceAnnotations(target: AnnotatedElement) {
//print SourceAnnotation in a predefined way to insure
// against difference in the JVMs (e.g. Sun's vs IBM's)
- def printSourceAnnotation(a: Annotation) {
- val ann = a.asInstanceOf[SourceAnnotation]
- println("@test.SourceAnnotation(mails=" + ann.mails.deepMkString("{", ",", "}") +
- ", value=" + ann.value + ")")
- }
val anns = target.getAnnotations()
anns foreach printSourceAnnotation
if (anns.length > 0) {
@@ -120,6 +123,14 @@ object Test4 {
println
}
}
+ def printParamSourceAnnotations(target: { def getParameterAnnotations(): Array[Array[Annotation]] }) {
+ val anns = target.getParameterAnnotations().flatten
+ anns foreach printSourceAnnotation
+ if (anns.length > 0) {
+ println(target)
+ println
+ }
+ }
printSourceAnnotations(classOf[Foo1])
printSourceAnnotations(classOf[Foo2])
printSourceAnnotations(classOf[Foo3])
@@ -130,8 +141,18 @@ object Test4 {
classOf[Foo7].getDeclaredConstructors foreach printSourceAnnotations
classOf[Foo8].getDeclaredFields foreach printSourceAnnotations
classOf[Foo8].getDeclaredMethods foreach printSourceAnnotations
+ classOf[Foo8].getDeclaredConstructors foreach printParamSourceAnnotations
classOf[Foo9].getDeclaredFields.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
classOf[Foo9].getDeclaredMethods.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
+ classOf[Foo10].getDeclaredFields.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
+ classOf[Foo10].getDeclaredMethods.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
+ classOf[Foo10].getDeclaredConstructors foreach printParamSourceAnnotations
+ classOf[Foo11].getDeclaredFields.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
+ classOf[Foo11].getDeclaredMethods.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
+ classOf[Foo11].getDeclaredConstructors foreach printParamSourceAnnotations
+ classOf[Foo12].getDeclaredFields.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
+ classOf[Foo12].getDeclaredMethods.sortWith((x, y) => x.toString < y.toString) foreach printSourceAnnotations
+ classOf[Foo12].getDeclaredConstructors foreach printParamSourceAnnotations
}
}
@@ -160,6 +181,27 @@ object Test5 {
}
}
+object Test6 {
+ import scala.reflect.BeanProperty
+ import scala.reflect.BooleanBeanProperty
+ class C(@BeanProperty var text: String)
+ class D(@BooleanBeanProperty var prop: Boolean) {
+ @BeanProperty val m: Int = if (prop) 1 else 2
+ }
+
+ def run {
+ val c = new C("bob")
+ c.setText("dylan")
+ println(c.getText())
+ if (new D(true).isProp()) {
+ println(new D(false).getM())
+ }
+ }
+}
+
+// #3345
+class A3345(@volatile private var i:Int)
+
object Test {
def main(args: Array[String]) {
Test1.run
@@ -167,5 +209,6 @@ object Test {
Test3.run // requires the use of -target:jvm-1.5
Test4.run
Test5.run
+ Test6.run
}
}
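
Test6 above checks that @BeanProperty and @BooleanBeanProperty on constructor parameters generate JavaBean-style accessors next to the usual Scala ones. A minimal standalone illustration (Person and BeanSketch are invented names; the annotation lives in scala.reflect here and in scala.beans in later Scala versions):

    import scala.reflect.BeanProperty

    // @BeanProperty on a var parameter makes the compiler emit
    // getText()/setText(String) in addition to text/text_=.
    class Person(@BeanProperty var text: String)

    object BeanSketch {
      def main(args: Array[String]): Unit = {
        val p = new Person("bob")
        p.setText("dylan")
        println(p.getText())  // prints "dylan", matching the updated annotations.check
      }
    }
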
diff --git a/test/files/jvm/bigints.scala b/test/files/jvm/bigints.scala
index f4ca2d17a3..7a797879f2 100644
--- a/test/files/jvm/bigints.scala
+++ b/test/files/jvm/bigints.scala
@@ -1,5 +1,5 @@
//############################################################################
-// BigInt, BigDecimal
+// BigInt, BigDecimal
//############################################################################
//############################################################################
diff --git a/test/files/jvm/bug560bis.scala b/test/files/jvm/bug560bis.scala
index 13bf4b1ae0..b04303c8a0 100644
--- a/test/files/jvm/bug560bis.scala
+++ b/test/files/jvm/bug560bis.scala
@@ -5,14 +5,14 @@ import scala.xml._;
case Seq(a,b,c,d @ _*) => Console.println("cool!")
case _ => Console.println("bah")
}
- def foo(args: List[String]) =
+ def foo(args: List[String]) =
Elem(null,"bla",Null, TopScope, (args map {x => Text(x)}):_*) match {
case Elem(_,_,_,_,Text("1"),_*) =>
Console.println("cool!")
case _ =>
Console.println("bah")
}
-
+
def main(args: Array[String]) = {
val li = List("1","2","3","4")
bar(li)
diff --git a/test/files/jvm/console.scala b/test/files/jvm/console.scala
index b07765675c..6d1aa1eafd 100644
--- a/test/files/jvm/console.scala
+++ b/test/files/jvm/console.scala
@@ -9,6 +9,6 @@ object Test extends Application {
flush
println("..")
println(1)
- printf("Argument nr. %d has value %1.2f\n",
+ printf("Argument nr. %d has value %1.2f\n",
1, 10.0/3)
}
diff --git a/test/files/jvm/daemon-actor-termination.scala b/test/files/jvm/daemon-actor-termination.scala
index 8e64749b1c..6ddfc3139d 100644
--- a/test/files/jvm/daemon-actor-termination.scala
+++ b/test/files/jvm/daemon-actor-termination.scala
@@ -5,6 +5,7 @@ object Test {
class MyDaemon extends DaemonActor {
def act() {
+ try {
react {
case 'hello =>
println("MSG1")
@@ -14,6 +15,10 @@ object Test {
println("done")
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
@@ -21,8 +26,13 @@ object Test {
val daemon = new MyDaemon
daemon.start()
Actor.actor {
+ try {
daemon !? 'hello
println("MSG2")
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
}
diff --git a/test/files/jvm/deprecation.cmds b/test/files/jvm/deprecation.cmds
new file mode 100644
index 0000000000..4c0f73c58b
--- /dev/null
+++ b/test/files/jvm/deprecation.cmds
@@ -0,0 +1,3 @@
+javac Defs.java
+scalac Test_1.scala
+javac Use_2.java
diff --git a/test/files/jvm/deprecation/Defs.java b/test/files/jvm/deprecation/Defs.java
new file mode 100644
index 0000000000..52101c342a
--- /dev/null
+++ b/test/files/jvm/deprecation/Defs.java
@@ -0,0 +1,12 @@
+public class Defs {
+ /** @deprecated */
+ public int i = 1;
+
+ /** @deprecated */
+ public int bar() { return 0; }
+
+ /** @deprecated */
+ public class Inner {
+ public int buz() { return 0; }
+ }
+}
diff --git a/test/files/jvm/deprecation/Test_1.scala b/test/files/jvm/deprecation/Test_1.scala
new file mode 100644
index 0000000000..5bd87b89c4
--- /dev/null
+++ b/test/files/jvm/deprecation/Test_1.scala
@@ -0,0 +1,17 @@
+class Test {
+ def test {
+ val d = new Defs
+ val u = d.i + 1
+ d.i = 2
+ val v = d.bar()
+ val i = new d.Inner
+ val w = i.buz()
+ }
+
+ @deprecated("no longer!") class Inner {
+ @deprecated("uncool") def f: Int = 1
+ @deprecated("this one as well!") var g = -1
+ }
+}
+
+object Test { def main(args: Array[String]) { } }
diff --git a/test/files/jvm/deprecation/Use_2.java b/test/files/jvm/deprecation/Use_2.java
new file mode 100644
index 0000000000..65da8a8fac
--- /dev/null
+++ b/test/files/jvm/deprecation/Use_2.java
@@ -0,0 +1,10 @@
+class Use_2 {
+ public int test() {
+ Test u = new Test();
+ Test.Inner a = u.new Inner();
+ int i = a.f();
+ int j = a.g();
+ a.g_$eq(5);
+ return i + j;
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/future-alarm.check b/test/files/jvm/future-alarm.check
new file mode 100644
index 0000000000..01a87d1c4c
--- /dev/null
+++ b/test/files/jvm/future-alarm.check
@@ -0,0 +1,20 @@
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
diff --git a/test/files/jvm/future-alarm.scala b/test/files/jvm/future-alarm.scala
new file mode 100644
index 0000000000..8ee902b5ea
--- /dev/null
+++ b/test/files/jvm/future-alarm.scala
@@ -0,0 +1,21 @@
+import scala.actors.Futures
+
+object Test {
+ def main(args: Array[String]) {
+ try {
+ for (i <- 1 to 100000) {
+ Futures.alarm(0)
+ if (i % 10000 == 0)
+ println("OK")
+ }
+ for (_ <- 1 to 10) {
+ val ft = Futures.alarm(100)
+ ft()
+ println("OK")
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
diff --git a/test/files/jvm/future-awaitall-zero.check b/test/files/jvm/future-awaitall-zero.check
new file mode 100644
index 0000000000..d86bac9de5
--- /dev/null
+++ b/test/files/jvm/future-awaitall-zero.check
@@ -0,0 +1 @@
+OK
diff --git a/test/files/jvm/future-awaitall-zero.scala b/test/files/jvm/future-awaitall-zero.scala
new file mode 100644
index 0000000000..cd6ba172cc
--- /dev/null
+++ b/test/files/jvm/future-awaitall-zero.scala
@@ -0,0 +1,22 @@
+import scala.actors.Futures._
+import scala.actors.Actor._
+
+object Test {
+ def main(args: Array[String]) {
+ try {
+ val ft1 = future { reactWithin(10000) {
+ case _ => println("FAIL")
+ } }
+
+ val ft2 = future { reactWithin(20000) {
+ case _ => println("FAIL")
+ } }
+
+ val res = awaitAll(0, ft1, ft2)
+ println("OK")
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
diff --git a/test/files/jvm/future-termination.scala b/test/files/jvm/future-termination.scala
index c448a88aa7..f51642cb7a 100644
--- a/test/files/jvm/future-termination.scala
+++ b/test/files/jvm/future-termination.scala
@@ -1,14 +1,19 @@
-import scala.actors.Futures
-
-/* Test that unevaluated futures do not prevent program termination */
-
-object Test {
- def main(args: Array[String]) {
- val meaningOfLife = Futures.future {
- Thread.sleep(5000) // pretend this is a harder problem than it is
- println("I have the answer!")
- 42
+import scala.actors.Futures
+
+/* Test that unevaluated futures do not prevent program termination */
+
+object Test {
+ def main(args: Array[String]) {
+ try {
+ val meaningOfLife = Futures.future {
+ Thread.sleep(5000) // pretend this is a harder problem than it is
+ println("I have the answer!")
+ 42
+ }
+ println("I can't wait that long, bye.")
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
}
- println("I can't wait that long, bye.")
- }
+ }
}
diff --git a/test/files/jvm/inner.scala b/test/files/jvm/inner.scala
index 51e3909ef3..156d9e69a8 100644
--- a/test/files/jvm/inner.scala
+++ b/test/files/jvm/inner.scala
@@ -14,7 +14,7 @@ class A {
trait Itf {
def method1(x: Int): Int
-
+
trait Itf2 extends Itf {
def method2: Unit
}
@@ -53,8 +53,8 @@ class A {
}
object Scalatest {
- private val outputdir = System.getProperty("scalatest.output", "inner-jvm.obj")
- private val scalalib = System.getProperty("scalatest.lib", "")
+ private val outputdir = System.getProperty("partest.output", "inner.obj")
+ private val scalalib = System.getProperty("partest.lib", "")
private val classpath = outputdir + File.pathSeparator + scalalib
private val javabin = {
val jhome = new File(System.getProperty("java.home"))
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index e17cc15a0a..b74ba1be0a 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -19,7 +19,7 @@ scala> defined type alias anotherint
scala> four: anotherint = 4
-scala> <console>:5: error: type mismatch;
+scala> <console>:6: error: type mismatch;
found : java.lang.String("hello")
required: anotherint
val bogus: anotherint = "hello"
@@ -169,7 +169,7 @@ scala> res4: Array[_] = Array(2)
scala> res5: Array[java.lang.String] = Array(abc, abc)
-scala> res6: scala.collection.mutable.GenericArray[_] = GenericArray(1, 2)
+scala> res6: scala.collection.mutable.ArraySeq[_] = ArraySeq(1, 2)
scala> res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2))
@@ -189,7 +189,7 @@ scala>
scala>
scala> | | | | res8: scala.xml.Elem =
<a>
- <b d="dd" c="c"></b></a>
+<b d="dd" c="c"></b></a>
scala>
scala>
@@ -199,9 +199,9 @@ scala>
scala>
scala> | | | res9: java.lang.String =
- hello
- there
-
+hello
+there
+
scala>
scala> | | You typed two blank lines. Starting a new command.
@@ -217,9 +217,9 @@ scala> defined class Exp
defined class Fact
defined class Term
-scala> | | <console>:15: warning: match is not exhaustive!
-missing combination Term
+scala> | | <console>:16: warning: match is not exhaustive!
missing combination Exp
+missing combination Term
def f(e: Exp) = e match { // non-exhaustive warning here
^
@@ -229,7 +229,7 @@ scala>
scala>
plusOne: (x: Int)Int
res0: Int = 6
-res0: java.lang.String = after reset
-<console>:5: error: not found: value plusOne
+res1: java.lang.String = after reset
+<console>:6: error: not found: value plusOne
plusOne(5) // should be undefined now
^
diff --git a/test/files/jvm/interpreter.scala b/test/files/jvm/interpreter.scala
index 2c2756828b..1437b97f79 100644
--- a/test/files/jvm/interpreter.scala
+++ b/test/files/jvm/interpreter.scala
@@ -30,7 +30,7 @@ val atom = new scala.xml.Atom()
class S(override val toString : String)
val fish = new S("fish")
// Test that arrays pretty print nicely.
-val arr = Array("What's", "up", "doc?")
+val arr = Array("What's", "up", "doc?")
// Test that arrays pretty print nicely, even when we give them type Any
val arrInt : Any = Array(1,2,3)
// Test that nested arrays are pretty-printed correctly
@@ -133,8 +133,8 @@ there
// defining and using quoted names should work (ticket #323)
-def `match` = 1
-val x = `match`
+def `match` = 1
+val x = `match`
// multiple classes defined on one line
sealed class Exp; class Fact extends Exp; class Term extends Exp
@@ -151,8 +151,8 @@ def f(e: Exp) = e match {{ // non-exhaustive warning here
var seenNL = false
def write(cbuf: Array[Char], off: Int, len: Int) {
- if (seenNL)
- writer.write(cbuf, off, len)
+ if (seenNL)
+ writer.write(cbuf, off, len)
else {
val slice : Array[Char] = cbuf.slice(off, off+len)
val i = slice.indexOf('\n')
diff --git a/test/disabled/jvm/libnatives-32.so b/test/files/jvm/libnatives-32.so
index ccbcdd646f..ccbcdd646f 100644
--- a/test/disabled/jvm/libnatives-32.so
+++ b/test/files/jvm/libnatives-32.so
Binary files differ
diff --git a/test/disabled/jvm/libnatives-64.so b/test/files/jvm/libnatives-64.so
index 8cc6152057..8cc6152057 100644
--- a/test/disabled/jvm/libnatives-64.so
+++ b/test/files/jvm/libnatives-64.so
Binary files differ
diff --git a/test/disabled/jvm/libnatives.jnilib b/test/files/jvm/libnatives.jnilib
index daac50e3df..daac50e3df 100644
--- a/test/disabled/jvm/libnatives.jnilib
+++ b/test/files/jvm/libnatives.jnilib
Binary files differ
diff --git a/test/files/jvm/methvsfield.java b/test/files/jvm/methvsfield.java
index dadc98669a..c1b2b87b48 100644
--- a/test/files/jvm/methvsfield.java
+++ b/test/files/jvm/methvsfield.java
@@ -1,11 +1,11 @@
// This should be compiled with javac and saved
// in ../lib/methvsfield.jar .
-class MethVsField
+class MethVsField
{
int three = 3;
- int three()
- {
+ int three()
+ {
return 3;
}
}
diff --git a/test/disabled/jvm/mkLibNatives.bat b/test/files/jvm/mkLibNatives.bat
index 100246af79..100246af79 100755
--- a/test/disabled/jvm/mkLibNatives.bat
+++ b/test/files/jvm/mkLibNatives.bat
diff --git a/test/disabled/jvm/mkLibNatives.sh b/test/files/jvm/mkLibNatives.sh
index ed80c24c3e..ed80c24c3e 100755
--- a/test/disabled/jvm/mkLibNatives.sh
+++ b/test/files/jvm/mkLibNatives.sh
diff --git a/test/disabled/jvm/natives-32.dll b/test/files/jvm/natives-32.dll
index a06c1da3e3..a06c1da3e3 100644
--- a/test/disabled/jvm/natives-32.dll
+++ b/test/files/jvm/natives-32.dll
Binary files differ
diff --git a/test/disabled/jvm/natives.c b/test/files/jvm/natives.c
index 7b6d7b5ba5..7b6d7b5ba5 100644
--- a/test/disabled/jvm/natives.c
+++ b/test/files/jvm/natives.c
diff --git a/test/disabled/jvm/natives.check b/test/files/jvm/natives.check
index 2265459198..2265459198 100644
--- a/test/disabled/jvm/natives.check
+++ b/test/files/jvm/natives.check
diff --git a/test/disabled/jvm/natives.h b/test/files/jvm/natives.h
index 0d360d3654..0d360d3654 100644
--- a/test/disabled/jvm/natives.h
+++ b/test/files/jvm/natives.h
diff --git a/test/files/jvm/nest/nest.java b/test/files/jvm/nest/nest.java
new file mode 100644
index 0000000000..3f6f0bebbd
--- /dev/null
+++ b/test/files/jvm/nest/nest.java
@@ -0,0 +1,38 @@
+package nestpkg;
+
+
+/** This file is needed for test 'nest.scala'. It should
+ * be compiled with javac and packaged into lib/nest.jar
+ */
+public class nest {
+ public static class best {
+ public static class rest {
+ public static rest test = new rest();
+ public static int x = 10;
+ public int inc(int i) {
+ return i + 1;
+ }
+ }
+ }
+
+
+ String name = "Outer name";
+
+ public class Inn {
+ int x;
+
+ public Inn(int x) {
+ this.x = x;
+ }
+
+ public void doSomething() {
+ System.out.println("Inn " + name + " x: " + x);
+ }
+ }
+
+ protected class ProtInn {
+ public void doSomething() {
+ System.out.println("ProtInn " + name);
+ }
+ }
+}
diff --git a/test/files/jvm/nest/nest.scala b/test/files/jvm/nest/nest.scala
new file mode 100644
index 0000000000..3ab62484fa
--- /dev/null
+++ b/test/files/jvm/nest/nest.scala
@@ -0,0 +1,21 @@
+//############################################################################
+// Test Scala interaction with Java nested classes and static members.
+//############################################################################
+
+/** found in nest.jar, compiled from nest.java */
+import nestpkg._;
+
+object Test extends Application {
+ val x = nest.best.rest.test
+ Console.println(x.inc(1))
+
+ val o = new nest.best;
+ val r = new nest.best.rest;
+ Console.println(nest.best.rest.test.inc(2))
+ Console.println(nest.best.rest.x)
+
+ print("Instantiating public inner class: ")
+ val outer = new nest
+ val inn = new outer.Inn(42)
+ inn.doSomething
+}
diff --git a/test/files/jvm/protectedacc.scala b/test/files/jvm/protectedacc.scala
index c3b07a0a7e..525725f2b9 100644
--- a/test/files/jvm/protectedacc.scala
+++ b/test/files/jvm/protectedacc.scala
@@ -16,7 +16,7 @@ object Test {
val ji = new p.b.JavaInteraction(Array('a', 'b', 'c'));
(new ji.Inner).m;
-
+
(new p.b.OuterObj.Inner).m
}
}
@@ -36,13 +36,13 @@ package p {
def getA: this.type = this;
}
-
+
/** Test type members */
trait HighlighterXXX {
type Node;
protected def highlight(node : Node) : Unit;
}
-
+
/** Test type parameters */
abstract class PolyA[a] {
protected def m(x: a): Unit;
@@ -119,22 +119,22 @@ package p {
val inc = meth2(1)_;
Console.println("100 = " + inc("10"));
-
+
getA.x;
}
}
}
-
+
trait ScalaAutoEditXXX extends HighlighterXXX {
- trait NodeImpl {
+ trait NodeImpl {
def self : Node;
highlight(self);
}
}
-
+
abstract class X[T] extends PolyA[T] {
- trait Inner extends B {
+ trait Inner extends B {
def self: T;
def self2: Node;
def getB: Inner;
diff --git a/test/files/jvm/reactor-exceptionOnSend.scala b/test/files/jvm/reactor-exceptionOnSend.scala
index 3684943b9b..3d9a042131 100644
--- a/test/files/jvm/reactor-exceptionOnSend.scala
+++ b/test/files/jvm/reactor-exceptionOnSend.scala
@@ -3,7 +3,7 @@ import scala.actors.Actor._
case class MyException(text: String) extends Exception(text)
-object A extends Reactor {
+object A extends Reactor[Any] {
override def exceptionHandler = {
case MyException(text) =>
println("receiver handles exception")
@@ -19,6 +19,7 @@ object A extends Reactor {
var state = 0
def act() {
+ try {
loop {
react {
case 'hello if guard() =>
@@ -26,14 +27,24 @@ object A extends Reactor {
exit()
}
}
+ } catch {
+ case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
+ !e.isInstanceOf[MyException]) =>
+ e.printStackTrace()
+ }
}
}
-object B extends Reactor {
+object B extends Reactor[Any] {
def act() {
+ try {
A.start()
A ! 'hello
A ! 'hello
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
diff --git a/test/files/jvm/reactor-producer-consumer.check b/test/files/jvm/reactor-producer-consumer.check
new file mode 100644
index 0000000000..d971cea19e
--- /dev/null
+++ b/test/files/jvm/reactor-producer-consumer.check
@@ -0,0 +1,10 @@
+42
+42
+42
+42
+42
+42
+42
+42
+42
+42
diff --git a/test/files/jvm/reactor-producer-consumer.scala b/test/files/jvm/reactor-producer-consumer.scala
new file mode 100644
index 0000000000..8a6b17c3ad
--- /dev/null
+++ b/test/files/jvm/reactor-producer-consumer.scala
@@ -0,0 +1,95 @@
+import scala.actors.Reactor
+
+object Test {
+ case class Stop()
+ case class Get(from: Reactor[Any])
+ case class Put(x: Int)
+
+ class UnboundedBuffer extends Reactor[Any] {
+ def act() {
+ try {
+ react {
+ case Stop() =>
+ case Get(from) =>
+ val consumer = from
+ react {
+ case msg @ Put(x) =>
+ consumer ! x
+ act()
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ class Producer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor[Any]) extends Reactor[Any] {
+ def act() {
+ try {
+ var i = 0
+ while (i < n) {
+ i += 1
+ if (delay > 0) Thread.sleep(delay)
+ buf ! Put(42)
+ }
+ parent ! Stop()
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ class Consumer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor[Any]) extends Reactor[Any] {
+ val step = n / 10
+ var i = 0
+ def act() {
+ try {
+ if (i < n) {
+ i += 1
+ if (delay > 0) Thread.sleep(delay)
+ buf ! Get(this)
+ react {
+ case res =>
+ if (i % step == 0)
+ println(res)
+ act()
+ }
+ } else {
+ parent ! Stop()
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ val parent = new Reactor[Any] {
+ def act() {
+ try {
+ val buffer = new UnboundedBuffer
+ buffer.start()
+ val producer = new Producer(buffer, 10000, 0, this)
+ producer.start()
+ val consumer = new Consumer(buffer, 10000, 0, this)
+ consumer.start()
+ react {
+ case Stop() =>
+ react {
+ case Stop() =>
+ buffer ! Stop()
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+ parent.start()
+ }
+}
diff --git a/test/files/jvm/reactor.scala b/test/files/jvm/reactor.scala
index 8985f90569..dbc9a6bdda 100644
--- a/test/files/jvm/reactor.scala
+++ b/test/files/jvm/reactor.scala
@@ -1,13 +1,12 @@
import scala.actors.Reactor
-import scala.actors.Actor._
-case class Ping(from: Reactor)
+case class Ping(from: Reactor[Any])
case object Pong
case object Stop
/**
- * Ping pong example for OutputChannelActor.
+ * Ping pong example for Reactor.
*
* @author Philipp Haller
*/
@@ -20,8 +19,9 @@ object Test {
}
}
-class PingActor(count: Int, pong: Reactor) extends Reactor {
+class PingActor(count: Int, pong: Reactor[Any]) extends Reactor[Any] {
def act() {
+ try {
var pingsLeft = count - 1
pong ! Ping(this)
loop {
@@ -39,11 +39,16 @@ class PingActor(count: Int, pong: Reactor) extends Reactor {
}
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
-class PongActor extends Reactor {
+class PongActor extends Reactor[Any] {
def act() {
+ try {
var pongCount = 0
loop {
react {
@@ -57,5 +62,9 @@ class PongActor extends Reactor {
exit()
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
diff --git a/test/files/jvm/replyablereactor.scala b/test/files/jvm/replyablereactor.scala
index 368d172e3f..e1fabc98da 100644
--- a/test/files/jvm/replyablereactor.scala
+++ b/test/files/jvm/replyablereactor.scala
@@ -1,8 +1,8 @@
-import scala.actors._
-import scala.actors.Actor._
+import scala.actors.ReplyReactor
class MyActor extends ReplyReactor {
def act() {
+ try {
loop {
react {
case 'hello =>
@@ -11,6 +11,10 @@ class MyActor extends ReplyReactor {
exit()
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
@@ -19,8 +23,9 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new ReplyReactor {
def act() {
+ try {
react {
case r: MyActor =>
var i = 0
@@ -38,6 +43,10 @@ object Test {
}
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
b.start()
diff --git a/test/files/jvm/replyablereactor2.scala b/test/files/jvm/replyablereactor2.scala
index 22622274dd..da9e0e269e 100644
--- a/test/files/jvm/replyablereactor2.scala
+++ b/test/files/jvm/replyablereactor2.scala
@@ -3,6 +3,7 @@ import scala.actors.Actor._
class MyActor extends ReplyReactor {
def act() {
+ try {
loop {
react {
case 'hello =>
@@ -11,6 +12,10 @@ class MyActor extends ReplyReactor {
exit()
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
@@ -19,8 +24,9 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new Reactor[Any] {
def act() {
+ try {
react {
case r: MyActor =>
var i = 0
@@ -36,6 +42,10 @@ object Test {
}
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
b.start()
diff --git a/test/files/jvm/replyablereactor3.scala b/test/files/jvm/replyablereactor3.scala
index 676ffe98e6..2c26b8a176 100644
--- a/test/files/jvm/replyablereactor3.scala
+++ b/test/files/jvm/replyablereactor3.scala
@@ -3,6 +3,7 @@ import scala.actors.Actor._
class MyActor extends ReplyReactor {
def act() {
+ try {
loop {
react {
case 'hello =>
@@ -11,6 +12,10 @@ class MyActor extends ReplyReactor {
exit()
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
@@ -19,8 +24,9 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new Reactor[Any] {
def act() {
+ try {
react {
case r: MyActor =>
var i = 0
@@ -35,6 +41,10 @@ object Test {
}
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
b.start()
diff --git a/test/files/jvm/replyablereactor4.scala b/test/files/jvm/replyablereactor4.scala
index d61fb64287..7679a5adf8 100644
--- a/test/files/jvm/replyablereactor4.scala
+++ b/test/files/jvm/replyablereactor4.scala
@@ -3,6 +3,7 @@ import scala.actors.Actor._
class MyActor extends ReplyReactor {
def act() {
+ try {
loop {
react {
case 'hello =>
@@ -11,6 +12,10 @@ class MyActor extends ReplyReactor {
exit()
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
@@ -19,8 +24,9 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new Reactor[Any] {
def act() {
+ try {
react {
case r: MyActor =>
var i = 0
@@ -35,6 +41,10 @@ object Test {
}
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
b.start()
diff --git a/test/files/jvm/replyreactor-react-sender.scala b/test/files/jvm/replyreactor-react-sender.scala
index 1127dfd0a5..c9884295f6 100644
--- a/test/files/jvm/replyreactor-react-sender.scala
+++ b/test/files/jvm/replyreactor-react-sender.scala
@@ -10,6 +10,7 @@ object Test {
val a = new ReplyReactor {
def act() {
+ try {
var i = 0
loopWhile (i < NUM) {
i += 1
@@ -20,12 +21,17 @@ object Test {
} andThen {
b ! 'ok
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
a.start()
b = new ReplyReactor {
def act() {
+ try {
for (_ <- 0 until NUM)
a ! 'hello
react {
@@ -33,6 +39,10 @@ object Test {
case 'ok => println("OK")
case other => println(other)
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
b.start()
diff --git a/test/files/jvm/replyreactor.scala b/test/files/jvm/replyreactor.scala
index 0f452dbc7b..0cecf29ec7 100644
--- a/test/files/jvm/replyreactor.scala
+++ b/test/files/jvm/replyreactor.scala
@@ -1,27 +1,37 @@
-import scala.actors.{Reactor, ReplyReactor}
+import scala.actors.ReplyReactor
object Test {
def main(args: Array[String]) {
val a = new ReplyReactor {
def act() {
+ try {
react {
case 'hello =>
sender ! 'hello
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
a.start()
- val b = new Reactor {
+ val b = new ReplyReactor {
def act() {
+ try {
react {
- case r: Reactor =>
+ case r: ReplyReactor =>
r ! 'hello
react {
case any =>
println(any)
}
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
b.start()
diff --git a/test/files/jvm/scheduler-adapter.scala b/test/files/jvm/scheduler-adapter.scala
index 79c901f91e..d6a8a446a0 100644
--- a/test/files/jvm/scheduler-adapter.scala
+++ b/test/files/jvm/scheduler-adapter.scala
@@ -7,22 +7,32 @@ trait AdaptedActor extends Actor {
object One extends AdaptedActor {
def act() {
+ try {
Two.start()
Two ! 'MsgForTwo
react {
case 'MsgForOne =>
println("One: received msg")
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
object Two extends AdaptedActor {
def act() {
+ try {
react {
case 'MsgForTwo =>
println("Two: received msg")
One ! 'MsgForOne
}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index da11d7c7f0..3f095cb51e 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -1,126 +1,209 @@
-x0 = List(1, 2, 3)
-y0 = List(1, 2, 3)
-x0 eq y0: false - y0 eq x0: false
-x0 equals y0: true - y0 equals x0: true
+a1 = Array[1,2,3]
+_a1 = Array[1,2,3]
+arrayEquals(a1, _a1): true
-x1 = List()
-y1 = List()
-x1 eq y1: true - y1 eq x1: true
+c1 = Cell(a)
+_c1 = Cell(a)
+c1 eq _c1: false, _c1 eq c1: false
+c1 equals _c1: true, _c1 equals c1: true
-x2 = None
-y2 = None
-x2 eq y2: true - y2 eq x2: true
-
-x3 = Array[1,2,3]
-y3 = Array[1,2,3]
-arrayEquals(x3, y3): true
-
-x4 = <na>
-y4 = <na>
-x4(2): 4 - y4(2): 4
-
-x5 = 'hello
-y5 = 'hello
-x5 eq y5: true - y5 eq x5: true
-x5 equals y5: true - y5 equals x5: true
-
-x6 = (BannerLimit,12345)
-y6 = (BannerLimit,12345)
-x6 eq y6: false - y6 eq x6: false
-x6 equals y6: true - y6 equals x6: true
+e1 = Left(1)
+_e1 = Left(1)
+e1 eq _e1: false, _e1 eq e1: false
+e1 equals _e1: true, _e1 equals e1: true
x7 = RoundingMode
y7 = RoundingMode
-x7 eq y7: true - y7 eq x7: true
-x7 equals y7: true - y7 equals x7: true
+x7 eq y7: true, y7 eq x7: true
+x7 equals y7: true, y7 equals x7: true
x8 = WeekDay
y8 = WeekDay
-x8 eq y8: true - y8 eq x8: true
-x8 equals y8: true - y8 equals x8: true
+x8 eq y8: true, y8 eq x8: true
+x8 equals y8: true, y8 equals x8: true
x9 = UP
y9 = UP
-x9 eq y9: true - y9 eq x9: true
-x9 equals y9: true - y9 equals x9: true
+x9 eq y9: true, y9 eq x9: true
+x9 equals y9: true, y9 equals x9: true
x10 = Monday
y10 = Monday
-x10 eq y10: true - y10 eq x10: true
-x10 equals y10: true - y10 equals x10: true
+x10 eq y10: true, y10 eq x10: true
+x10 equals y10: true, y10 equals x10: true
+
+x9 eq x10: false, x10 eq x9: false
+x9 equals x10: false, x10 equals x9: false
+x9 eq y10: false, y10 eq x9: false
+x9 equals y10: false, y10 equals x9: false
+
+f1 = <na>
+_f1 = <na>
+f1(2): 4, _f1(2): 4
+
+xs0 = List(1, 2, 3)
+_xs0 = List(1, 2, 3)
+xs0 eq _xs0: false, _xs0 eq xs0: false
+xs0 equals _xs0: true, _xs0 equals xs0: true
+
+xs1 = List()
+_xs1 = List()
+xs1 eq _xs1: true, _xs1 eq xs1: true
+
+o1 = None
+_o1 = None
+o1 eq _o1: true, _o1 eq o1: true
+
+o2 = Some(1)
+_o2 = Some(1)
+o2 eq _o2: false, _o2 eq o2: false
+o2 equals _o2: true, _o2 equals o2: true
+
+s1 = 'hello
+_s1 = 'hello
+s1 eq _s1: true, _s1 eq s1: true
+s1 equals _s1: true, _s1 equals s1: true
+
+t1 = (BannerLimit,12345)
+_t1 = (BannerLimit,12345)
+t1 eq _t1: false, _t1 eq t1: false
+t1 equals _t1: true, _t1 equals t1: true
+
+x = BitSet(1, 2)
+y = BitSet(1, 2)
+x equals y: true, y equals x: true
+
+x = BitSet(2, 3)
+y = BitSet(2, 3)
+x equals y: true, y equals x: true
+
+x = Map(1 -> A, 2 -> B, 3 -> C)
+y = Map(1 -> A, 2 -> B, 3 -> C)
+x equals y: true, y equals x: true
-x9 eq x10: false - x10 eq x9: false
-x9 equals x10: true - x10 equals x9: true
-x9 eq y10: false - y10 eq x9: false
-x9 equals y10: true - y10 equals x9: true
+x = Set(1, 2)
+y = Set(1, 2)
+x equals y: true, y equals x: true
x = List((buffers,20), (layers,2), (title,3))
y = List((buffers,20), (layers,2), (title,3))
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Map(buffers -> 20, layers -> 2, title -> 3)
y = Map(buffers -> 20, layers -> 2, title -> 3)
-x equals y: true - y equals x: true
-
-x = BitSet(2, 3)
-y = BitSet(2, 3)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Set(5, 3)
y = Set(5, 3)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Queue(a, b, c)
y = Queue(a, b, c)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+y = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x equals y: true, y equals x: true
+
+x = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+y = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x equals y: true, y equals x: true
+
+x = Map(1 -> A, 2 -> B, 3 -> C)
+y = Map(1 -> A, 2 -> B, 3 -> C)
+x equals y: true, y equals x: true
+
+x = TreeSet(1, 2, 3)
+y = TreeSet(1, 2, 3)
+x equals y: true, y equals x: true
x = Stack(c, b, a)
y = Stack(c, b, a)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Stream(0, ?)
+y = Stream(0, ?)
+x equals y: true, y equals x: true
x = Map(42 -> FortyTwo)
y = Map(42 -> FortyTwo)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = TreeSet(0, 2)
y = TreeSet(0, 2)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Vector('a, 'b, 'c)
+y = Vector('a, 'b, 'c)
+x equals y: true, y equals x: true
x = ArrayBuffer(one, two)
y = ArrayBuffer(one, two)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = ArrayBuilder.ofLong
+y = ArrayBuilder.ofLong
+x equals y: true, y equals x: true
+
+x = ArrayBuilder.ofFloat
+y = ArrayBuilder.ofFloat
+x equals y: true, y equals x: true
-x = Map(title -> 3, buffers -> 20, layers -> 2)
-y = Map(title -> 3, buffers -> 20, layers -> 2)
-x equals y: true - y equals x: true
+x = ArrayStack(3, 2, 20)
+y = ArrayStack(3, 2, 20)
+x equals y: true, y equals x: true
x = BitSet(0, 8, 9)
y = BitSet(0, 8, 9)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Map(A -> 1, C -> 3, B -> 2)
+y = Map(B -> 2, C -> 3, A -> 1)
+x equals y: true, y equals x: true
x = Set(layers, buffers, title)
y = Set(layers, buffers, title)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = History()
+y = History()
+x equals y: true, y equals x: true
+
+x = ListBuffer(white, black)
+y = ListBuffer(white, black)
+x equals y: true, y equals x: true
x = Queue(20, 2, 3)
y = Queue(20, 2, 3)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Stack(3, 2, 20)
y = Stack(3, 2, 20)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
-x = ListBuffer(white, black)
-y = ListBuffer(white, black)
-x equals y: true - y equals x: true
+x = abc
+y = abc
+x equals y: true, y equals x: true
+
+x = WrappedArray(1, 2, 3)
+y = WrappedArray(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = xml:src="hello"
+y = xml:src="hello"
+x equals y: true, y equals x: true
+
+x = <title></title>
+y = <title></title>
+x equals y: true, y equals x: true
x = <html><title>title</title><body></body></html>
y = <html><title>title</title><body></body></html>
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = <html>
<body>
- <table cellpadding="2" cellspacing="0">
+ <table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
<th>First Name</th>
@@ -137,10 +220,10 @@ x = <html>
</tr>
</table>
</body>
- </html>
+ </html>
y = <html>
<body>
- <table cellpadding="2" cellspacing="0">
+ <table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
<th>First Name</th>
@@ -157,26 +240,26 @@ y = <html>
</tr>
</table>
</body>
- </html>
-x equals y: true - y equals x: true
+ </html>
+x equals y: true, y equals x: true
x = Tim
y = Tim
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Bob
y = Bob
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = John
y = John
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Bill
y = Bill
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Paul
y = Paul
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 81d21e6dc5..336e339fd4 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -2,17 +2,6 @@
// Serialization
//############################################################################
-import java.lang.System
-
-object EqualityTest {
- def check[A, B](x: A, y: B) {
- println("x = " + x)
- println("y = " + y)
- println("x equals y: " + (x equals y) + " - y equals x: " + (y equals x))
- println()
- }
-}
-
object Serialize {
@throws(classOf[java.io.IOException])
def write[A](o: A): Array[Byte] = {
@@ -29,7 +18,14 @@ object Serialize {
new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer))
in.readObject().asInstanceOf[A]
}
+ def check[A, B](x: A, y: B) {
+ println("x = " + x)
+ println("y = " + y)
+ println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x))
+ println()
+ }
}
+import Serialize._
//############################################################################
// Test classes in package "scala"
@@ -50,92 +46,140 @@ object Test1_scala {
}
import WeekDay._, BigDecimal._, RoundingMode._
- val x0 = List(1, 2, 3)
- val x1 = Nil
- val x2 = None
- val x3 = Array(1, 2, 3)
- val x4 = { x: Int => 2 * x }
- val x5 = 'hello
- val x6 = ("BannerLimit", 12345)
- val x7 = BigDecimal.RoundingMode
- val x8 = WeekDay
- val x9 = UP
- val x10 = Monday
-
+ // in alphabetic order
try {
- val y0: List[Int] = Serialize.read(Serialize.write(x0))
- val y1: List[Nothing] = Serialize.read(Serialize.write(x1))
- val y2: Option[Nothing] = Serialize.read(Serialize.write(x2))
- val y3: Array[Int] = Serialize.read(Serialize.write(x3))
- val y4: Function[Int, Int] = Serialize.read(Serialize.write(x4))
- val y5: Symbol = Serialize.read(Serialize.write(x5))
- val y6: (String, Int) = Serialize.read(Serialize.write(x6))
- val y7: RoundingMode.type = Serialize.read(Serialize.write(x7))
- val y8: WeekDay.type = Serialize.read(Serialize.write(x8))
- val y9: RoundingMode = Serialize.read(Serialize.write(x9))
- val y10: WeekDay = Serialize.read(Serialize.write(x10))
-
- println("x0 = " + x0)
- println("y0 = " + y0)
- println("x0 eq y0: " + (x0 eq y0) + " - y0 eq x0: " + (y0 eq x0))
- println("x0 equals y0: " + (x0 equals y0) + " - y0 equals x0: " + (y0 equals x0))
- println()
- println("x1 = " + x1)
- println("y1 = " + y1)
- println("x1 eq y1: " + (x1 eq y1) + " - y1 eq x1: " + (y1 eq x1))
+ // Array
+ val a1 = Array(1, 2, 3)
+ val _a1: Array[Int] = read(write(a1))
+ println("a1 = " + arrayToString(a1))
+ println("_a1 = " + arrayToString(_a1))
+ println("arrayEquals(a1, _a1): " + arrayEquals(a1, _a1))
println()
- println("x2 = " + x2)
- println("y2 = " + y2)
- println("x2 eq y2: " + (x2 eq y2) + " - y2 eq x2: " + (y2 eq x2))
- println()
- println("x3 = " + arrayToString(x3))
- println("y3 = " + arrayToString(y3))
- println("arrayEquals(x3, y3): " + arrayEquals(x3, y3))
- println()
- println("x4 = <na>")
- println("y4 = <na>")
- println("x4(2): " + x4(2) + " - y4(2): " + y4(2))
- println()
- println("x5 = " + x5)
- println("y5 = " + y5)
- println("x5 eq y5: " + (x5 eq y5) + " - y5 eq x5: " + (y5 eq x5))
- println("x5 equals y5: " + (x5 equals y5) + " - y5 equals x5: " + (y5 equals x5))
+
+ // Cell
+ val c1 = new Cell('a')
+ val _c1: Cell[Char] = read(write(c1))
+ println("c1 = " + c1)
+ println("_c1 = " + _c1)
+ println("c1 eq _c1: " + (c1 eq _c1) + ", _c1 eq c1: " + (_c1 eq c1))
+ println("c1 equals _c1: " + (c1 equals _c1) + ", _c1 equals c1: " + (_c1 equals c1))
println()
- println("x6 = " + x6)
- println("y6 = " + y6)
- println("x6 eq y6: " + (x6 eq y6) + " - y6 eq x6: " + (y6 eq x6))
- println("x6 equals y6: " + (x6 equals y6) + " - y6 equals x6: " + (y6 equals x6))
+
+ // Either
+ val e1 = Left(1)
+ val _e1: Either[Int, String] = read(write(e1))
+ println("e1 = " + e1)
+ println("_e1 = " + _e1)
+ println("e1 eq _e1: " + (e1 eq _e1) + ", _e1 eq e1: " + (_e1 eq e1))
+ println("e1 equals _e1: " + (e1 equals _e1) + ", _e1 equals e1: " + (_e1 equals e1))
println()
+
+ // Enumeration
+ val x7 = BigDecimal.RoundingMode
+ val y7: RoundingMode.type = read(write(x7))
println("x7 = " + x7)
println("y7 = " + y7)
- println("x7 eq y7: " + (x7 eq y7) + " - y7 eq x7: " + (y7 eq x7))
- println("x7 equals y7: " + (x7 equals y7) + " - y7 equals x7: " + (y7 equals x7))
+ println("x7 eq y7: " + (x7 eq y7) + ", y7 eq x7: " + (y7 eq x7))
+ println("x7 equals y7: " + (x7 equals y7) + ", y7 equals x7: " + (y7 equals x7))
println()
+
+ val x8 = WeekDay
+ val y8: WeekDay.type = read(write(x8))
println("x8 = " + x8)
println("y8 = " + y8)
- println("x8 eq y8: " + (x8 eq y8) + " - y8 eq x8: " + (y8 eq x8))
- println("x8 equals y8: " + (x8 equals y8) + " - y8 equals x8: " + (y8 equals x8))
+ println("x8 eq y8: " + (x8 eq y8) + ", y8 eq x8: " + (y8 eq x8))
+ println("x8 equals y8: " + (x8 equals y8) + ", y8 equals x8: " + (y8 equals x8))
println()
+
+ val x9 = UP
+ val y9: RoundingMode = read(write(x9))
println("x9 = " + x9)
println("y9 = " + y9)
- println("x9 eq y9: " + (x9 eq y9) + " - y9 eq x9: " + (y9 eq x9))
- println("x9 equals y9: " + (x9 equals y9) + " - y9 equals x9: " + (y9 equals x9))
+ println("x9 eq y9: " + (x9 eq y9) + ", y9 eq x9: " + (y9 eq x9))
+ println("x9 equals y9: " + (x9 equals y9) + ", y9 equals x9: " + (y9 equals x9))
println()
+
+ val x10 = Monday
+ val y10: WeekDay = read(write(x10))
println("x10 = " + x10)
println("y10 = " + y10)
- println("x10 eq y10: " + (x10 eq y10) + " - y10 eq x10: " + (y10 eq x10))
- println("x10 equals y10: " + (x10 equals y10) + " - y10 equals x10: " + (y10 equals x10))
+ println("x10 eq y10: " + (x10 eq y10) + ", y10 eq x10: " + (y10 eq x10))
+ println("x10 equals y10: " + (x10 equals y10) + ", y10 equals x10: " + (y10 equals x10))
+ println()
+
+ println("x9 eq x10: " + (x9 eq x10) + ", x10 eq x9: " + (x10 eq x9))
+ println("x9 equals x10: " + (x9 equals x10) + ", x10 equals x9: " + (x10 equals x9))
+ println("x9 eq y10: " + (x9 eq y10) + ", y10 eq x9: " + (y10 eq x9))
+ println("x9 equals y10: " + (x9 equals y10) + ", y10 equals x9: " + (y10 equals x9))
println()
- println("x9 eq x10: " + (x9 eq x10) + " - x10 eq x9: " + (x10 eq x9))
- println("x9 equals x10: " + (x9 equals x10) + " - x10 equals x9: " + (x10 equals x9))
- println("x9 eq y10: " + (x9 eq y10) + " - y10 eq x9: " + (y10 eq x9))
- println("x9 equals y10: " + (x9 equals y10) + " - y10 equals x9: " + (y10 equals x9))
+
+ // Function
+ val f1 = { x: Int => 2 * x }
+ val _f1: Function[Int, Int] = read(write(f1))
+ println("f1 = <na>")
+ println("_f1 = <na>")
+ println("f1(2): " + f1(2) + ", _f1(2): " + _f1(2))
+ println()
+
+ // List
+ val xs0 = List(1, 2, 3)
+ val _xs0: List[Int] = read(write(xs0))
+ println("xs0 = " + xs0)
+ println("_xs0 = " + _xs0)
+ println("xs0 eq _xs0: " + (xs0 eq _xs0) + ", _xs0 eq xs0: " + (_xs0 eq xs0))
+ println("xs0 equals _xs0: " + (xs0 equals _xs0) + ", _xs0 equals xs0: " + (_xs0 equals xs0))
+ println()
+
+ val xs1 = Nil
+ val _xs1: List[Nothing] = read(write(xs1))
+ println("xs1 = " + xs1)
+ println("_xs1 = " + _xs1)
+ println("xs1 eq _xs1: " + (xs1 eq _xs1) + ", _xs1 eq xs1: " + (_xs1 eq xs1))
+ println()
+
+ // Option
+ val o1 = None
+ val _o1: Option[Nothing] = read(write(o1))
+ println("o1 = " + o1)
+ println("_o1 = " + _o1)
+ println("o1 eq _o1: " + (o1 eq _o1) + ", _o1 eq o1: " + (_o1 eq o1))
+ println()
+
+ val o2 = Some(1)
+ val _o2: Option[Int] = read(write(o2))
+ println("o2 = " + o2)
+ println("_o2 = " + _o2)
+ println("o2 eq _o2: " + (o2 eq _o2) + ", _o2 eq o2: " + (_o2 eq o2))
+ println("o2 equals _o2: " + (o2 equals _o2) + ", _o2 equals o2: " + (_o2 equals o2))
+ println()
+/*
+ // Responder
+ val r1 = Responder.constant("xyz")
+ val _r1: Responder[String] = read(write(r1))
+ check(r1, _r1)
+*/
+ // Symbol
+ val s1 = 'hello
+ val _s1: Symbol = read(write(s1))
+ println("s1 = " + s1)
+ println("_s1 = " + _s1)
+ println("s1 eq _s1: " + (s1 eq _s1) + ", _s1 eq s1: " + (_s1 eq s1))
+ println("s1 equals _s1: " + (s1 equals _s1) + ", _s1 equals s1: " + (_s1 equals s1))
+ println()
+
+ // Tuple
+ val t1 = ("BannerLimit", 12345)
+ val _t1: (String, Int) = read(write(t1))
+ println("t1 = " + t1)
+ println("_t1 = " + _t1)
+ println("t1 eq _t1: " + (t1 eq _t1) + ", _t1 eq t1: " + (_t1 eq t1))
+ println("t1 equals _t1: " + (t1 equals _t1) + ", _t1 equals t1: " + (_t1 equals t1))
println()
}
catch {
case e: Exception =>
- e.printStackTrace()
println("Error in Test1_scala: " + e)
+ throw e
}
}
@@ -145,50 +189,97 @@ object Test1_scala {
@serializable
object Test2_immutable {
import scala.collection.immutable.{
- BitSet, ListMap, ListSet, Queue, Stack, TreeSet, TreeMap}
-
- val x1 = List(
- Pair("buffers", 20),
- Pair("layers", 2),
- Pair("title", 3)
- )
-
- val x2 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3)
-
- val x3 = {
- val bs = new collection.mutable.BitSet()
- bs += 2; bs += 3
- bs.toImmutable
- }
-
- val x4 = new ListSet[Int]() + 3 + 5
-
- val x5 = Queue("a", "b", "c")
-
- val x6 = new Stack().push("a", "b", "c")
-
- val x7 = new TreeMap[Int, String] + (42 -> "FortyTwo")
-
- val x8 = new TreeSet[Int]() + 2 + 0
+ BitSet, HashMap, HashSet, ListMap, ListSet, Queue, Range, SortedMap,
+ SortedSet, Stack, Stream, TreeMap, TreeSet, Vector}
+ // in alphabetic order
try {
- val y1: List[Pair[String, Int]] = Serialize.read(Serialize.write(x1))
- val y2: ListMap[String, Int] = Serialize.read(Serialize.write(x2))
- val y3: BitSet = Serialize.read(Serialize.write(x3))
- val y4: ListSet[Int] = Serialize.read(Serialize.write(x4))
- val y5: Queue[String] = Serialize.read(Serialize.write(x5))
- val y6: Stack[String] = Serialize.read(Serialize.write(x6))
- val y7: TreeMap[Int, String] = Serialize.read(Serialize.write(x7))
- val y8: TreeSet[Int] = Serialize.read(Serialize.write(x8))
-
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
- EqualityTest.check(x3, y3)
- EqualityTest.check(x4, y4)
- EqualityTest.check(x5, y5)
- EqualityTest.check(x6, y6)
- EqualityTest.check(x7, y7)
- EqualityTest.check(x8, y8)
+ // BitSet
+ val bs1 = BitSet.empty + 1 + 2
+ val _bs1: BitSet = read(write(bs1))
+ check(bs1, _bs1)
+
+ val bs2 = {
+ val bs = new collection.mutable.BitSet()
+ bs += 2; bs += 3
+ bs.toImmutable
+ }
+ val _bs2: BitSet = read(write(bs2))
+ check(bs2, _bs2)
+
+ // HashMap
+ val hm1 = new HashMap[Int, String] + (1 -> "A", 2 -> "B", 3 -> "C")
+ val _hm1: HashMap[Int, String] = read(write(hm1))
+ check(hm1, _hm1)
+
+ // HashSet
+ val hs1 = new HashSet[Int] + 1 + 2
+ val _hs1: HashSet[Int] = read(write(hs1))
+ check(hs1, _hs1)
+
+ // List
+ val xs1 = List(("buffers", 20), ("layers", 2), ("title", 3))
+ val _xs1: List[(String, Int)] = read(write(xs1))
+ check(xs1, _xs1)
+
+ // ListMap
+ val lm1 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3)
+ val _lm1: ListMap[String, Int] = read(write(lm1))
+ check(lm1, _lm1)
+
+ // ListSet
+ val ls1 = new ListSet[Int] + 3 + 5
+ val _ls1: ListSet[Int] = read(write(ls1))
+ check(ls1, _ls1)
+
+ // Queue
+ val q1 = Queue("a", "b", "c")
+ val _q1: Queue[String] = read(write(q1))
+ check(q1, _q1)
+
+ // Range
+ val r1 = 0 until 10
+ val _r1: Range = read(write(r1))
+ check(r1, _r1)
+
+ val r2 = Range.Long(0L, 10L, 1)
+ val _r2: r2.type = read(write(r2))
+ check(r2, _r2)
+
+ // SortedMap
+ val sm1 = SortedMap.empty[Int, String] + (2 -> "B", 3 -> "C", 1 -> "A")
+ val _sm1: SortedMap[Int, String] = read(write(sm1))
+ check(sm1, _sm1)
+
+ // SortedSet
+ val ss1 = SortedSet.empty[Int] + 2 + 3 + 1
+ val _ss1: SortedSet[Int] = read(write(ss1))
+ check(ss1, _ss1)
+
+ // Stack
+ val s1 = new Stack().push("a", "b", "c")
+ val _s1: Stack[String] = read(write(s1))
+ check(s1, _s1)
+
+ // Stream
+ val st1 = Stream.range(0, 10)
+ val _st1: Stream[Int] = read(write(st1))
+ check(st1, _st1)
+
+ // TreeMap
+ val tm1 = new TreeMap[Int, String] + (42 -> "FortyTwo")
+ val _tm1: TreeMap[Int, String] = read(write(tm1))
+ check(tm1, _tm1)
+
+ // TreeSet
+ val ts1 = new TreeSet[Int]() + 2 + 0
+ val _ts1: TreeSet[Int] = read(write(ts1))
+ check(ts1, _ts1)
+
+ // Vector
+ val v1 = Vector('a, 'b, 'c)
+ val _v1: Vector[Symbol] = read(write(v1))
+ check(v1, _v1)
}
catch {
case e: Exception =>
@@ -201,65 +292,110 @@ object Test2_immutable {
// Test classes in package "scala.collection.mutable"
object Test3_mutable {
+ import scala.reflect.ClassManifest
import scala.collection.mutable.{
- ArrayBuffer, BitSet, HashMap, HashSet, History, LinkedList, ListBuffer,
- Publisher, Queue, Stack}
-
- val x0 = new ArrayBuffer[String]
- x0 ++= List("one", "two")
-
- val x2 = new BitSet()
- x2 += 0
- x2 += 8
- x2 += 9
-
- val x1 = new HashMap[String, Int]
- x1 ++= Test2_immutable.x1
-
- val x3 = new HashSet[String]
- x3 ++= Test2_immutable.x1.map(p => p._1)
-
- @serializable
- class Feed extends Publisher[String, Feed]
-
- val x8 = new History[String, Feed]
-
- val x4 = new LinkedList[Int](2, null)
- x4.append(new LinkedList(3, null))
-
- val x7 = new ListBuffer[String]
- x7 ++= List("white", "black")
-
- val x5 = new Queue[Int]
- x5 ++= Test2_immutable.x1.map(p => p._2)
-
- val x6 = new Stack[Int]
- x6 ++= x5
+ ArrayBuffer, ArrayBuilder, ArrayStack, BitSet, DoubleLinkedList,
+ HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
+ Stack, StringBuilder, WrappedArray}
+ // in alphabetic order
try {
- val y0: ArrayBuffer[String] = Serialize.read(Serialize.write(x0))
- val y1: HashMap[String, Int] = Serialize.read(Serialize.write(x1))
- val y2: BitSet = Serialize.read(Serialize.write(x2))
- val y3: HashSet[String] = Serialize.read(Serialize.write(x3))
-// val y4: LinkedList[Int] = Serialize.read(Serialize.write(x4))
- val y5: Queue[Int] = Serialize.read(Serialize.write(x5))
- val y6: Stack[Int] = Serialize.read(Serialize.write(x6))
- val y7: ListBuffer[String] = Serialize.read(Serialize.write(x7))
- val y8: History[String, Feed] = Serialize.read(Serialize.write(x8))
-
- EqualityTest.check(x0, y0)
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
- EqualityTest.check(x3, y3)
- //EqualityTest.check(x4, y4) //todo
- EqualityTest.check(x5, y5)
- EqualityTest.check(x6, y6)
- EqualityTest.check(x7, y7)
- //EqualityTest.check(x8, y8) //todo
+ // ArrayBuffer
+ val ab1 = new ArrayBuffer[String]
+ ab1 ++= List("one", "two")
+ val _ab1: ArrayBuffer[String] = read(write(ab1))
+ check(ab1, _ab1)
+
+ // ArrayBuilder
+ val abu1 = ArrayBuilder.make[Long]
+ val _abu1: ArrayBuilder[ClassManifest[Long]] = read(write(abu1))
+ check(abu1, _abu1)
+
+ val abu2 = ArrayBuilder.make[Float]
+ val _abu2: ArrayBuilder[ClassManifest[Float]] = read(write(abu2))
+ check(abu2, _abu2)
+
+ // ArrayStack
+ val as1 = new ArrayStack[Int]
+ as1 ++= List(20, 2, 3).iterator
+ val _as1: ArrayStack[Int] = read(write(as1))
+ check(as1, _as1)
+
+ // BitSet
+ val bs1 = new BitSet()
+ bs1 += 0
+ bs1 += 8
+ bs1 += 9
+ val _bs1: BitSet = read(write(bs1))
+ check(bs1, _bs1)
+/*
+ // DoubleLinkedList
+ val dl1 = new DoubleLinkedList[Int](2, null)
+ dl1.append(new DoubleLinkedList(3, null))
+ val _dl1: DoubleLinkedList[Int] = read(write(dl1))
+ check(dl1, _dl1)
+*/
+ // HashMap
+ val hm1 = new HashMap[String, Int]
+ hm1 ++= List(("A", 1), ("B", 2), ("C", 3)).iterator
+ val _hm1: HashMap[String, Int] = read(write(hm1))
+ check(hm1, _hm1)
+
+ // HashSet
+ val hs1 = new HashSet[String]
+ hs1 ++= List("layers", "buffers", "title").iterator
+ val _hs1: HashSet[String] = read(write(hs1))
+ check(hs1, _hs1)
+
+ // History
+ @serializable
+ class Feed extends Publisher[String]
+
+ val h1 = new History[String, Int]
+ val _h1: History[String, Int] = read(write(h1))
+ check(h1, _h1)
+/*
+ // LinkedList
+ val ll1 = new LinkedList[Int](2, null)
+ ll1.append(new LinkedList(3, null))
+ val _ll1: LinkedList[Int] = read(write(ll1))
+ check(ll1, _ll1)
+*/
+ // ListBuffer
+ val lb1 = new ListBuffer[String]
+ lb1 ++= List("white", "black")
+ val _lb1: ListBuffer[String] = read(write(lb1))
+ check(lb1, _lb1)
+
+ // Publisher
+
+ // Queue
+ val q1 = new Queue[Int]
+ q1 ++= List(20, 2, 3).iterator
+ val _q1: Queue[Int] = read(write(q1))
+ check(q1, _q1)
+
+ // Stack
+ val s1 = new Stack[Int]
+ s1 pushAll q1
+ val _s1: Stack[Int] = read(write(s1))
+ check(s1, _s1)
+
+ // StringBuilder
+ val sb1 = new StringBuilder
+ sb1 append "abc"
+ val _sb1: StringBuilder = read(write(sb1))
+ check(sb1, _sb1)
+
+ // WrappedArray
+ val wa1 = WrappedArray.make(Array(1, 2, 3))
+ val _wa1: WrappedArray[Int] = read(write(wa1))
+ check(wa1, _wa1)
}
catch {
case e: Exception =>
println("Error in Test3_mutable: " + e)
+ throw e
}
}
@@ -267,15 +403,31 @@ object Test3_mutable {
// Test classes in package "scala.xml"
object Test4_xml {
- import scala.xml.Elem
-
- val x1 = <html><title>title</title><body></body></html>;
+ import scala.xml.{Attribute, Document, Elem, Null, PrefixedAttribute, Text}
case class Person(name: String, age: Int)
- class AddressBook(a: Person*) {
- private val people: List[Person] = a.toList
- def toXHTML =
+ try {
+ // Attribute
+ val a1 = new PrefixedAttribute("xml", "src", Text("hello"), Null)
+ val _a1: Attribute = read(write(a1))
+ check(a1, _a1)
+
+ // Document
+ val d1 = new Document
+ d1.docElem = <title></title>
+ d1.encoding = Some("UTF-8")
+ val _d1: Document = read(write(d1))
+ check(d1, _d1)
+
+ // Elem
+ val e1 = <html><title>title</title><body></body></html>;
+ val _e1: Elem = read(write(e1))
+ check(e1, _e1)
+
+ class AddressBook(a: Person*) {
+ private val people: List[Person] = a.toList
+ def toXHTML =
<table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
@@ -287,30 +439,26 @@ object Test4_xml {
<td> { p.age.toString() } </td>
</tr> }
</table>;
- }
+ }
- val people = new AddressBook(
- Person("Tom", 20),
- Person("Bob", 22),
- Person("James", 19))
+ val people = new AddressBook(
+ Person("Tom", 20),
+ Person("Bob", 22),
+ Person("James", 19))
- val x2 =
- <html>
+ val e2 =
+ <html>
<body>
- { people.toXHTML }
+ { people.toXHTML }
</body>
- </html>;
-
- try {
- val y1: scala.xml.Elem = Serialize.read(Serialize.write(x1))
- val y2: scala.xml.Elem = Serialize.read(Serialize.write(x2))
-
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
+ </html>;
+ val _e2: Elem = read(write(e2))
+ check(e2, _e2)
}
catch {
case e: Exception =>
println("Error in Test4_xml: " + e)
+ throw e
}
}
@@ -339,11 +487,11 @@ object Test5 {
val x2 = bob
try {
- val y1: Person = Serialize.read(Serialize.write(x1))
- val y2: Employee = Serialize.read(Serialize.write(x2))
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
+ check(x1, y1)
+ check(x2, y2)
}
catch {
case e: Exception =>
@@ -369,13 +517,13 @@ object Test6 {
val x3 = paul
try {
- val y1: Person = Serialize.read(Serialize.write(x1))
- val y2: Employee = Serialize.read(Serialize.write(x2))
- val y3: Person = Serialize.read(Serialize.write(x3))
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
+ val y3: Person = read(write(x3))
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
- EqualityTest.check(x3, y3)
+ check(x1, y1)
+ check(x2, y2)
+ check(x3, y3)
}
catch {
case e: Exception =>
@@ -384,6 +532,31 @@ object Test6 {
}
//############################################################################
+// Nested objects cannot get readResolve automatically because after deserialization
+// they would be null (they are treated as lazy vals)
+@serializable
+class Outer {
+
+ @serializable
+ object Inner
+}
+
+object Test7 {
+ val x = new Outer
+ x.Inner // initialize
+ try {
+ val y:Outer = read(write(x))
+ if (y.Inner == null)
+ println("Inner object is null")
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test7: " + e)
+ }
+
+}
+
+//############################################################################
// Test code
object Test {
@@ -394,6 +567,7 @@ object Test {
Test4_xml
Test5
Test6
+ Test7
}
}
diff --git a/test/files/jvm/stringbuilder.scala b/test/files/jvm/stringbuilder.scala
index bacd13c715..8f73cfab7b 100644
--- a/test/files/jvm/stringbuilder.scala
+++ b/test/files/jvm/stringbuilder.scala
@@ -35,7 +35,7 @@ Scala is a general purpose programming language designed to express common progr
val j3 = j2; j3 setCharAt (0, j3 charAt 2)
val s3 = s2; s3(0) = s3(2)
//println("j3="+j3+", s3="+s3)//debug
- assertEquals("s3.toString equals j3.toString", true, s3.toString equals j3.toString)
+ assertEquals("s3.toString equals j3.toString", true, s3.toString equals j3.toString)
}
}
@@ -53,7 +53,7 @@ object Test2 extends TestCase("append") with Assert {
val j1 = new java.lang.StringBuilder // Java 1.5+
val s1 = new StringBuilder
j1 append "###" append Array('0', '1', '2') append "xyz".subSequence(0, 3)
- s1 append "###" append Array('0', '1', '2') append List('x', 'y', 'z')
+ s1 append "###" appendAll Array('0', '1', '2') appendAll List('x', 'y', 'z')
assertEquals("s1.toString equals j1.toString", true, s1.toString equals j1.toString)
}
}
@@ -72,7 +72,7 @@ object Test3 extends TestCase("insert") with Assert {
val j1 = new java.lang.StringBuilder // Java 1.5+
val s1 = new StringBuilder
j1 insert (0, "###") insert (0, Array('0', '1', '2')) insert (0, "xyz".subSequence(0, 3))
- s1 insert (0, "###") insert (0, Array('0', '1', '2')) insert (0, List('x', 'y', 'z'))
+ s1 insert (0, "###") insertAll (0, Array('0', '1', '2')) insertAll (0, List('x', 'y', 'z'))
//println("j1="+j1+", s1="+s1)//debug
assertEquals("s1.toString equals j1.toString", true, s1.toString equals j1.toString)
diff --git a/test/files/jvm/t1449.check b/test/files/jvm/t1449.check
new file mode 100644
index 0000000000..d81cc0710e
--- /dev/null
+++ b/test/files/jvm/t1449.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/jvm/t1449.scala b/test/files/jvm/t1449.scala
new file mode 100644
index 0000000000..3822cf7dd5
--- /dev/null
+++ b/test/files/jvm/t1449.scala
@@ -0,0 +1,25 @@
+import scala.actors.Actor._
+import scala.actors.Future
+import scala.actors.Futures._
+object Test {
+ def main(args: Array[String]) {
+ val a = actor {
+ try {
+ react {
+ case ft: Future[a] =>
+ println(ft())
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ try {
+ val ft = future { 42 }
+ a ! ft
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
diff --git a/test/files/jvm/t1461.scala b/test/files/jvm/t1461.scala
index f0e3cea6cd..a963ec6a12 100644
--- a/test/files/jvm/t1461.scala
+++ b/test/files/jvm/t1461.scala
@@ -3,7 +3,7 @@ object Test {
def main(args: Array[String]) {
val jl = classOf[Foo].getMethod("jl", classOf[Baz[_]])
jl.getGenericParameterTypes // works fine
-
+
val l = classOf[Foo].getMethod("l", classOf[Baz[_]])
// By debugger inspection l.signature is (Ltest/Baz<J>;)V
l.getGenericParameterTypes // throws GenericSignatureFormatError
diff --git a/test/files/jvm/t1464/MyTrait.scala b/test/files/jvm/t1464/MyTrait.scala
index 0b8ccc412b..014ddf86c3 100644
--- a/test/files/jvm/t1464/MyTrait.scala
+++ b/test/files/jvm/t1464/MyTrait.scala
@@ -1,5 +1,5 @@
trait MyTrait {
type K
def findChildByClass[T <: K with MyTrait]: Unit
-
+
}
diff --git a/test/files/jvm/t1600.scala b/test/files/jvm/t1600.scala
new file mode 100644
index 0000000000..7e23687425
--- /dev/null
+++ b/test/files/jvm/t1600.scala
@@ -0,0 +1,76 @@
+
+/**
+ * Checks that serialization of hash-based collections works correctly if the hashCode
+ * changes on deserialization.
+ */
+object Test {
+
+ import collection._
+ def main(args: Array[String]) {
+ for (i <- Seq(0, 1, 2, 10, 100)) {
+ def entries = (0 until i).map(i => (new Foo, i)).toList
+ def elements = entries.map(_._1)
+
+ val maps = Seq[Map[Foo, Int]](new mutable.HashMap, new mutable.LinkedHashMap,
+ immutable.HashMap.empty).map(_ ++ entries)
+ test[Map[Foo, Int]](maps, entries.size, assertMap _)
+
+ val sets = Seq[Set[Foo]](new mutable.HashSet, new mutable.LinkedHashSet,
+ immutable.HashSet.empty).map(_ ++ elements)
+ test[Set[Foo]](sets, entries.size, assertSet _)
+ }
+ }
+
+ private def test[A <: AnyRef](collections: Seq[A], expectedSize: Int, assertFunction: (A, Int) => Unit) {
+ for (collection <- collections) {
+ assertFunction(collection, expectedSize)
+
+ val bytes = toBytes(collection)
+ Foo.hashCodeModifier = 1
+ val deserializedCollection = toObject[A](bytes)
+
+ assertFunction(deserializedCollection, expectedSize)
+ assert(deserializedCollection.getClass == collection.getClass,
+ "collection class should remain the same after deserialization ("+deserializedCollection.getClass+" != "+collection.getClass+")")
+ Foo.hashCodeModifier = 0
+ }
+ }
+
+ private def toObject[A](bytes: Array[Byte]): A = {
+ val in = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(bytes))
+ in.readObject.asInstanceOf[A]
+ }
+
+ private def toBytes(o: AnyRef): Array[Byte] = {
+ val bos = new java.io.ByteArrayOutputStream
+ val out = new java.io.ObjectOutputStream(bos)
+ out.writeObject(o)
+ out.close
+ bos.toByteArray
+ }
+
+ private def assertMap[A, B](map: Map[A, B], expectedSize: Int) {
+ assert(expectedSize == map.size, "expected map size: " + expectedSize + ", actual size: " + map.size)
+ map.foreach { case (k, v) =>
+ assert(map.contains(k), "contains should return true for key in the map, key: " + k)
+ assert(map(k) == v)
+ }
+ }
+
+ private def assertSet[A](set: Set[A], expectedSize: Int) {
+ assert(expectedSize == set.size, "expected set size: " + expectedSize + ", actual size: " + set.size)
+ set.foreach { e => assert(set.contains(e), "contains should return true for element in the set, element: " + e) }
+ }
+
+ object Foo {
+ /* Used to simulate a hashCode change caused by deserializing an instance with an
+ * identity-based hashCode in another JVM.
+ */
+ var hashCodeModifier = 0
+ }
+
+ @serializable
+ class Foo {
+ override def hashCode = System.identityHashCode(this) + Foo.hashCodeModifier
+ }
+}
diff --git a/test/files/jvm/t1948.scala b/test/files/jvm/t1948.scala
index fc1fedac1f..084c956398 100644
--- a/test/files/jvm/t1948.scala
+++ b/test/files/jvm/t1948.scala
@@ -4,9 +4,21 @@ import scala.actors.Actor._
object Test {
def main (args: Array[String]) {
- val actors = (1 to 1000).toList map { x => actor { loop { react {
- case x: Array[Int] => reply ("OK"); exit }}}}
+ val actors = (1 to 1000).toList map { x => actor {
+ try {
+ loop { react {
+ case x: Array[Int] => reply ("OK"); exit }}
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ } }
+ try {
actors foreach { x => x !? new Array[Int] (1000000) }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
diff --git a/test/files/jvm/t2359.scala b/test/files/jvm/t2359.scala
index 1b4d5e0a27..69c69d7b3b 100644
--- a/test/files/jvm/t2359.scala
+++ b/test/files/jvm/t2359.scala
@@ -3,19 +3,44 @@ import scala.actors.Futures._
object Test {
def main(args: Array[String]) {
val x = future {
+ try {
System.out.println(1)
future {
+ try {
System.out.println(2)
future {
+ try {
System.out.println(3)
future {
+ try {
System.out.println(4)
future {
+ try {
System.out.println(5)
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}()
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}()
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}()
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}()
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}()
}
}
diff --git a/test/files/jvm/t2470.check b/test/files/jvm/t2470.check
new file mode 100644
index 0000000000..11539b2a9f
--- /dev/null
+++ b/test/files/jvm/t2470.check
@@ -0,0 +1 @@
+List(@Action(block=ACTION))
diff --git a/test/files/jvm/t2470.cmds b/test/files/jvm/t2470.cmds
new file mode 100644
index 0000000000..b4ef0f4aeb
--- /dev/null
+++ b/test/files/jvm/t2470.cmds
@@ -0,0 +1,3 @@
+javac Action.java Task.java
+scalac Test_1.scala
+scalac Read_Classfile_2.scala
diff --git a/test/files/jvm/t2470/Action.java b/test/files/jvm/t2470/Action.java
new file mode 100644
index 0000000000..62c71c43f1
--- /dev/null
+++ b/test/files/jvm/t2470/Action.java
@@ -0,0 +1,6 @@
+import java.lang.annotation.*;
+
+@Retention(value=RetentionPolicy.RUNTIME)
+public @interface Action {
+ Task.Scope block() default Task.Scope.ACTION;
+}
diff --git a/test/files/jvm/t2470/Read_Classfile_2.scala b/test/files/jvm/t2470/Read_Classfile_2.scala
new file mode 100644
index 0000000000..07961776dd
--- /dev/null
+++ b/test/files/jvm/t2470/Read_Classfile_2.scala
@@ -0,0 +1,3 @@
+class Read {
+ val t = Test
+}
diff --git a/test/files/jvm/t2470/Task.java b/test/files/jvm/t2470/Task.java
new file mode 100644
index 0000000000..64004b5867
--- /dev/null
+++ b/test/files/jvm/t2470/Task.java
@@ -0,0 +1,3 @@
+class Task {
+ public enum Scope { ACTION, HIKA }
+}
diff --git a/test/files/jvm/t2470/Test_1.scala b/test/files/jvm/t2470/Test_1.scala
new file mode 100644
index 0000000000..00cf287482
--- /dev/null
+++ b/test/files/jvm/t2470/Test_1.scala
@@ -0,0 +1,11 @@
+object Test {
+ class Foo {
+ @Action(block = Task.Scope.ACTION)
+ def foo = 0
+ }
+
+ def main(args: Array[String]) {
+ val m = classOf[Foo].getDeclaredMethods().find(_.toString.contains("foo")).get
+ println(m.getAnnotations().toList)
+ }
+}
diff --git a/test/files/jvm/t2515.scala b/test/files/jvm/t2515.scala
deleted file mode 100644
index 83cb058168..0000000000
--- a/test/files/jvm/t2515.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-import scala.actors.{Futures, TIMEOUT}
-import scala.actors.Actor._
-
-object Test {
-
- def compute(): Option[Boolean] = {
- val fts = for (j <- 0 until 5) yield Futures.future {
- receiveWithin (100) {
- case TIMEOUT => true
- case other => false
- }
- }
- val done = Futures.awaitAll(2000, fts.toArray: _*) // list to array, as varargs
- if (done.contains(None))
- None
- else
- Some(true)
- }
-
- def main(args:Array[String]) : Unit = {
- val format = new java.text.DecimalFormat("000.00'ms'")
- var iter = 1
- val done = 11
- while (iter < done) {
- val start = System.nanoTime()
- val result = compute()
- val time = System.nanoTime() - start
- result match {
- case Some(result) =>
- //printf("Iteration %2d succeeded after %s %n", iter, format.format(time / 1e6))
- printf("Iteration %2d succeeded%n", iter)
- iter += 1
- case None =>
- printf(">>>> Iteration %2d failed after %s <<<<< %n", iter, format.format(time / 1e6))
- iter = done
- }
- }
- }
-
-}
diff --git a/test/files/jvm/t2530.scala b/test/files/jvm/t2530.scala
index 642fb05a5d..c2925a92d1 100644
--- a/test/files/jvm/t2530.scala
+++ b/test/files/jvm/t2530.scala
@@ -50,6 +50,7 @@ case class Matrix(numRows: Int, numCols: Int, values: Array[Double]) {
val rows = for (j <- 0 until m) yield {
Futures.future {
+ try {
val b_j = new Array[Double](n)
var k = 0
while (k < n) { // sadly, while loops are still faster than for loops
@@ -69,13 +70,22 @@ case class Matrix(numRows: Int, numCols: Int, values: Array[Double]) {
}
//printf("future %d of %d completed.%n", j, m)
j
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
}
}
// rows.foreach { x=> x() } // This appears to force sequential execution, so use:
// timeout is 10 years; see http://lampsvn.epfl.ch/trac/scala/ticket/2515
-
- val done = Futures.awaitAll(10*365*24*60*60*1000, rows.toArray : _*) // list to array, as varargs.
+ val done: List[Option[Any]] = try {
+ Futures.awaitAll(10*365*24*60*60*1000, rows.toArray : _*) // list to array, as varargs.
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ List()
+ }
if (done.contains(None))
None
diff --git a/test/files/jvm/t2570.check b/test/files/jvm/t2570.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/jvm/t2570.check
diff --git a/test/files/jvm/t2570/Test.scala b/test/files/jvm/t2570/Test.scala
new file mode 100644
index 0000000000..7944aedae6
--- /dev/null
+++ b/test/files/jvm/t2570/Test.scala
@@ -0,0 +1,3 @@
+class Test2 extends Test1[Test3[Test4]]
+class Test4
+object Test extends Application {}
\ No newline at end of file
diff --git a/test/files/jvm/t2570/Test1.java b/test/files/jvm/t2570/Test1.java
new file mode 100644
index 0000000000..f305736581
--- /dev/null
+++ b/test/files/jvm/t2570/Test1.java
@@ -0,0 +1,2 @@
+public class Test1<T extends Test3> {
+}
\ No newline at end of file
diff --git a/test/files/jvm/t2570/Test3.java b/test/files/jvm/t2570/Test3.java
new file mode 100644
index 0000000000..97603b5ca6
--- /dev/null
+++ b/test/files/jvm/t2570/Test3.java
@@ -0,0 +1,2 @@
+public class Test3<T> {
+}
\ No newline at end of file
diff --git a/test/files/jvm/t2585.check b/test/files/jvm/t2585.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/jvm/t2585.check
diff --git a/test/files/jvm/t2585/Test.java b/test/files/jvm/t2585/Test.java
new file mode 100644
index 0000000000..51fe20d81e
--- /dev/null
+++ b/test/files/jvm/t2585/Test.java
@@ -0,0 +1,16 @@
+class J { S s ; }
+
+public class Test {
+ public static void main(String[] args) {
+ final X x = new X();
+ final OuterImpl o = new OuterImpl(x);
+
+ final OuterImpl.Inner i1 = o.newInner();
+ i1.getT().getI().getT().getI(); // <--- Error: "The method getI() is undefined for the type Object"
+
+ final Outer<X>.Inner i2 = o.newInner();
+ i2.getT().getI().getT().getI(); // <--- Error: "The method getI() is undefined for the type Object"
+
+ HashMap<String, String> map = new HashMap<String, String>();
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/t2585/genericouter.scala b/test/files/jvm/t2585/genericouter.scala
new file mode 100644
index 0000000000..e06aa8101e
--- /dev/null
+++ b/test/files/jvm/t2585/genericouter.scala
@@ -0,0 +1,25 @@
+case class S(n:Int)
+
+trait TraversableLike[+A, +Repr] {
+ class WithFilter(p: A => Boolean)
+ def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
+}
+
+class HashMap[K, +V] extends TraversableLike[(K, V), HashMap[K, V]]
+
+class Outer[T](val t: T) {
+ class Inner {
+ def getT : T = t
+ }
+}
+
+class OuterImpl(x: X) extends Outer[X](x) {
+ def newInner = new Inner
+}
+
+class X {
+ def getI : Outer[X]#Inner = {
+ val oImpl = new OuterImpl(this)
+ new oImpl.Inner
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/t2827.check b/test/files/jvm/t2827.check
new file mode 100644
index 0000000000..c318e01ae5
--- /dev/null
+++ b/test/files/jvm/t2827.check
@@ -0,0 +1,3 @@
+Larry
+Curly
+Moe
diff --git a/test/files/jvm/t2827.scala b/test/files/jvm/t2827.scala
new file mode 100644
index 0000000000..d89e68516b
--- /dev/null
+++ b/test/files/jvm/t2827.scala
@@ -0,0 +1,14 @@
+object Stooges extends Enumeration {
+ type Stooge = Value
+ val Larry, Curly, Moe = Value
+ def nextStooge(v:Stooges.Stooge):Stooges.Stooge =
+ Stooges((v.id+1) % Stooges.maxId)
+}
+
+object Test {
+ def main(args: Array[String]) {
+ println(Stooges.Larry)
+ println(Stooges.Curly)
+ println(Stooges.Moe)
+ }
+}
diff --git a/test/files/jvm/t3003.check b/test/files/jvm/t3003.check
new file mode 100644
index 0000000000..c69e389d13
--- /dev/null
+++ b/test/files/jvm/t3003.check
@@ -0,0 +1 @@
+List(List(@Annot(optionType=class java.lang.String)))
diff --git a/test/files/jvm/t3003.cmds b/test/files/jvm/t3003.cmds
new file mode 100644
index 0000000000..c00396627c
--- /dev/null
+++ b/test/files/jvm/t3003.cmds
@@ -0,0 +1,2 @@
+javac Annot.java
+scalac Test_1.scala
diff --git a/test/files/jvm/t3003/Annot.java b/test/files/jvm/t3003/Annot.java
new file mode 100644
index 0000000000..1d5f206fd7
--- /dev/null
+++ b/test/files/jvm/t3003/Annot.java
@@ -0,0 +1,4 @@
+@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
+public @interface Annot {
+ Class<?> optionType();
+}
diff --git a/test/files/jvm/t3003/Test_1.scala b/test/files/jvm/t3003/Test_1.scala
new file mode 100644
index 0000000000..ec7f220c94
--- /dev/null
+++ b/test/files/jvm/t3003/Test_1.scala
@@ -0,0 +1,8 @@
+class C {
+ @Annot(optionType=classOf[String]) val k = 0
+}
+object Test {
+ def main(args: Array[String]) {
+ println(classOf[C].getDeclaredFields.toList.sortBy(f => f.getName).map(f => f.getAnnotations.toList))
+ }
+}
diff --git a/test/files/jvm/t3102.check b/test/files/jvm/t3102.check
new file mode 100644
index 0000000000..d705e0b20e
--- /dev/null
+++ b/test/files/jvm/t3102.check
@@ -0,0 +1,2 @@
+42
+OK
diff --git a/test/files/jvm/t3102.scala b/test/files/jvm/t3102.scala
new file mode 100644
index 0000000000..fbcf2e60e6
--- /dev/null
+++ b/test/files/jvm/t3102.scala
@@ -0,0 +1,36 @@
+import scala.actors.{Actor, TIMEOUT}
+import Actor._
+
+object Test {
+ def main(args: Array[String]) {
+ val a = actor {
+ try {
+ react {
+ case 'hello =>
+ reply(42)
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+
+ val b = actor {
+ try {
+ self.trapExit = true
+ val ft = a !! 'hello
+ println(ft())
+ // no message should be left over in mailbox
+ reactWithin(0) {
+ case TIMEOUT =>
+ println("OK")
+ case any =>
+ println(any)
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/t3356.check b/test/files/jvm/t3356.check
new file mode 100644
index 0000000000..6a9284d0aa
--- /dev/null
+++ b/test/files/jvm/t3356.check
@@ -0,0 +1 @@
+sending download requests
diff --git a/test/files/jvm/t3356.scala b/test/files/jvm/t3356.scala
new file mode 100644
index 0000000000..5626281e7d
--- /dev/null
+++ b/test/files/jvm/t3356.scala
@@ -0,0 +1,54 @@
+import scala.actors.{Actor, Exit, !, UncaughtException}
+import Actor._
+
+case class ImageInfo(text: String) {
+ def downloadImage(): ImageData = {
+ ImageData(text)
+ }
+}
+
+case class ImageData(text: String)
+case class Download(info: ImageInfo)
+
+object Test {
+
+ def scanForImageInfo(url: String): List[ImageInfo] =
+ List(ImageInfo("A"), ImageInfo("B"))
+
+ def renderImage(data: ImageData) {
+ println("rendering image "+data.text)
+ }
+
+ def renderImages(url: String) {
+ val imageInfos = scanForImageInfo(url)
+ println("sending download requests")
+ val dataFutures = for (info <- imageInfos) yield {
+ val loader = link {
+ react { case Download(info) =>
+ throw new Exception("no connection")
+ reply(info.downloadImage())
+ }; {}
+ }
+ loader !! Download(info)
+ }
+ var i = 0
+ loopWhile (i < imageInfos.size) {
+ i += 1
+ val FutureInput = dataFutures(i-1).inputChannel
+ react {
+ case FutureInput ! (data @ ImageData(_)) =>
+ renderImage(data)
+ case Exit(from, UncaughtException(_, Some(Download(info)), _, _, cause)) =>
+ println("Couldn't download image "+info+" because of "+cause)
+ }
+ }
+ println("OK, all images rendered.")
+ }
+
+ def main(args: Array[String]) {
+ actor {
+ renderImages("panorama.epfl.ch")
+ }
+ }
+
+}
diff --git a/test/files/jvm/t3365.check b/test/files/jvm/t3365.check
new file mode 100644
index 0000000000..0944b17279
--- /dev/null
+++ b/test/files/jvm/t3365.check
@@ -0,0 +1,5 @@
+'hello
+'hello
+'hello
+'hello
+'hello
diff --git a/test/files/jvm/t3365.scala b/test/files/jvm/t3365.scala
new file mode 100644
index 0000000000..b94e804e63
--- /dev/null
+++ b/test/files/jvm/t3365.scala
@@ -0,0 +1,65 @@
+import scala.actors.{ReplyReactor, Channel, Actor, Future}
+
+case class ChannelMsg(chan: Channel[Any])
+
+class MyActor extends Actor {
+ def act() {
+ try {
+ val chan = new Channel[Any](this)
+ loop {
+ react {
+ case other: ReplyReactor =>
+ other ! ChannelMsg(chan)
+ loop {
+ chan.react {
+ case 'hello =>
+ reply('hello)
+ case 'stop =>
+ exit()
+ }
+ }
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val a = new MyActor
+ a.start()
+
+ val b = new Actor {
+ def act() {
+ try {
+ react {
+ case ChannelMsg(c) =>
+ var i = 0
+ loop {
+ i += 1
+ val ft: Future[Any] = c !! 'hello
+ ft.inputChannel.react {
+ case msg =>
+ if (i % 10000 == 0)
+ println(msg)
+ if (i >= 50000) {
+ c ! 'stop
+ exit()
+ }
+ }
+ }
+ }
+ } catch {
+ case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
+ e.printStackTrace()
+ }
+ }
+ }
+ b.start()
+
+ a ! b
+ }
+}
diff --git a/test/files/jvm/t3407.check b/test/files/jvm/t3407.check
new file mode 100644
index 0000000000..a133c88bbe
--- /dev/null
+++ b/test/files/jvm/t3407.check
@@ -0,0 +1,10 @@
+result: 42
+result: 42
+result: 42
+result: 42
+result: 42
+result: 42
+result: 42
+result: 42
+result: 42
+result: 42
diff --git a/test/files/jvm/t3407.scala b/test/files/jvm/t3407.scala
new file mode 100644
index 0000000000..6c2ce85c71
--- /dev/null
+++ b/test/files/jvm/t3407.scala
@@ -0,0 +1,19 @@
+import scala.actors._, scala.actors.Actor._
+
+object Test {
+
+ def main(args: Array[String]) {
+ for (i <- 1 to 10) {
+ val ft = Futures.future { 42 }
+ println("result: " + ft())
+ }
+
+ for (i <- 1 to 10) {
+ receiveWithin(0) {
+ case TIMEOUT =>
+ case msg => println("unexpected: " + msg)
+ }
+ }
+ }
+
+}
diff --git a/test/files/jvm/t3412-channel.check b/test/files/jvm/t3412-channel.check
new file mode 100644
index 0000000000..954c6e835d
--- /dev/null
+++ b/test/files/jvm/t3412-channel.check
@@ -0,0 +1,10 @@
+6
+6
+6
+6
+6
+6
+6
+6
+6
+6
diff --git a/test/files/jvm/t3412-channel.scala b/test/files/jvm/t3412-channel.scala
new file mode 100644
index 0000000000..fcc439b488
--- /dev/null
+++ b/test/files/jvm/t3412-channel.scala
@@ -0,0 +1,38 @@
+import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
+
+object Test {
+
+ def main(args: Array[String]) {
+
+ actor {
+ val C: Channel[Int] = new Channel[Int](self)
+
+ def respondAll(fts: List[Future[Int]], cnt: Int): Unit =
+ fts match {
+ case List() => C ! 0
+ case ft :: rest =>
+ if (cnt % 100 == 0)
+ println(ft())
+ respondAll(rest, cnt + 1)
+ }
+
+ actor {
+ val fts = for (_ <- 1 to 1000)
+ yield C !! (3, {case x: Int => x})
+
+ actor {
+ respondAll(fts.toList, 0)
+ }
+ }
+
+ loop {
+ C.react {
+ case 0 => exit()
+ case i => reply(i * 2)
+ }
+ }
+ }
+
+ }
+
+}
diff --git a/test/files/jvm/t3412.check b/test/files/jvm/t3412.check
new file mode 100644
index 0000000000..954c6e835d
--- /dev/null
+++ b/test/files/jvm/t3412.check
@@ -0,0 +1,10 @@
+6
+6
+6
+6
+6
+6
+6
+6
+6
+6
diff --git a/test/files/jvm/t3412.scala b/test/files/jvm/t3412.scala
new file mode 100644
index 0000000000..ced15ab5dc
--- /dev/null
+++ b/test/files/jvm/t3412.scala
@@ -0,0 +1,32 @@
+import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
+
+object Test {
+
+ def main(args: Array[String]) {
+
+ val a = actor {
+ loop { react {
+ case i: Int => reply(i * 2)
+ case 'stop => exit()
+ } }
+ }
+
+ val fts = for (_ <- 1 to 1000)
+ yield a !! (3, {case x: Int => x})
+
+ def respondAll(fts: List[Future[Int]], cnt: Int): Unit =
+ fts match {
+ case List() => a ! 'stop
+ case ft :: rest =>
+ if (cnt % 100 == 0)
+ println(ft())
+ respondAll(rest, cnt + 1)
+ }
+
+ actor {
+ respondAll(fts.toList, 0)
+ }
+
+ }
+
+}
diff --git a/test/files/jvm/t3415/Hello.java b/test/files/jvm/t3415/Hello.java
new file mode 100644
index 0000000000..ae7206453b
--- /dev/null
+++ b/test/files/jvm/t3415/Hello.java
@@ -0,0 +1,3 @@
+public @interface Hello {
+ String msg() default "hoi";
+}
diff --git a/test/files/jvm/t3415/HelloWorld.scala b/test/files/jvm/t3415/HelloWorld.scala
new file mode 100644
index 0000000000..53bf55e444
--- /dev/null
+++ b/test/files/jvm/t3415/HelloWorld.scala
@@ -0,0 +1,4 @@
+object Test extends Application {
+ @Hello
+ def foo() { }
+}
diff --git a/test/files/jvm/t3470.check b/test/files/jvm/t3470.check
new file mode 100644
index 0000000000..94cb526756
--- /dev/null
+++ b/test/files/jvm/t3470.check
@@ -0,0 +1,3 @@
+A: started: 1
+A: started: 2
+A: started: 3
diff --git a/test/files/jvm/t3470.scala b/test/files/jvm/t3470.scala
new file mode 100644
index 0000000000..5e4242cdd7
--- /dev/null
+++ b/test/files/jvm/t3470.scala
@@ -0,0 +1,30 @@
+import scala.actors._
+
+object Test {
+
+ def expectActorState(a: Reactor[T] forSome { type T }, s: Actor.State.Value) {
+ var done = false
+ var i = 0
+ while (!done) {
+ i = i + 1
+ if (i == 10) { // only wait for 2 seconds total
+ println("FAIL ["+a+": expected "+s+"]")
+ done = true
+ }
+
+ Thread.sleep(200)
+ if (a.getState == s) // success
+ done = true
+ }
+ }
+
+ def main(args: Array[String]) {
+ val a = new Actor { var c = 0; def act() = { c += 1; println("A: started: " + c) } }
+ a.start()
+ expectActorState(a, Actor.State.Terminated)
+ a.restart()
+ expectActorState(a, Actor.State.Terminated)
+ a.restart()
+ }
+
+}
diff --git a/test/files/jvm/t3838.check b/test/files/jvm/t3838.check
new file mode 100644
index 0000000000..154227a350
--- /dev/null
+++ b/test/files/jvm/t3838.check
@@ -0,0 +1 @@
+caught java.lang.RuntimeException: unhandled timeout
diff --git a/test/files/jvm/t3838.scala b/test/files/jvm/t3838.scala
new file mode 100644
index 0000000000..ba8f15fc31
--- /dev/null
+++ b/test/files/jvm/t3838.scala
@@ -0,0 +1,15 @@
+import scala.actors.Actor._
+
+object Test {
+ def main(args: Array[String]) {
+ actor {
+ try {
+ receiveWithin(1) {
+ case str: String => println(str)
+ }
+ } catch {
+ case e: Exception => println("caught "+e)
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/throws-annot.scala b/test/files/jvm/throws-annot.scala
index 90b58b9976..b679b6c121 100644
--- a/test/files/jvm/throws-annot.scala
+++ b/test/files/jvm/throws-annot.scala
@@ -43,24 +43,24 @@ object TestThrows {
/** Test the top-level mirror that is has the annotations. */
object TL {
-
+
@throws(classOf[IOException])
def read(): Int = 0
-
+
@throws(classOf[ClassCastException])
@throws(classOf[IOException])
def readWith2(): Int = 0
-
+
@throws(classOf[IOException])
@Deprecated
@throws(classOf[NullPointerException])
def readMixed(): Int = 0
-
+
@Deprecated
@throws(classOf[IOException])
@throws(classOf[NullPointerException])
def readMixed2(): Int = 0
-
+
@Deprecated
def readNoEx(): Int = 0
}
diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala
index 51a848240a..cb00da4c29 100644
--- a/test/files/jvm/typerep.scala
+++ b/test/files/jvm/typerep.scala
@@ -109,7 +109,7 @@ object testFuncs {
def f5(f: Int => Int, x: Int) = f(x)
println(getType(f5 _))
println(getType(f5(f1, 1)))
- println
+ println
}
class Foo {
diff --git a/test/files/jvm/unittest_io.scala b/test/files/jvm/unittest_io.scala
index 0cf1165ec8..42c793f35a 100644
--- a/test/files/jvm/unittest_io.scala
+++ b/test/files/jvm/unittest_io.scala
@@ -8,15 +8,15 @@ object Test extends TestConsoleMain {
)
class ReadlinesTest extends TestCase("scala.io.Source method getLines()") {
-
- val src = Source.fromString("""
+
+ val src = Source.fromString("""
This is a file
it is split on several lines.
isn't it?
""")
- def runTest() = assertEquals("wrong number of lines",src.getLines("\n").toList.length,5) // five new lines in there
- //for(val line <- src.getLines) {
+ def runTest() = assertEquals("wrong number of lines",src.getLines.toList.length,5) // five new lines in there
+ //for (line <- src.getLines) {
// Console.print(line)
//}
}
diff --git a/test/files/jvm/unittest_xml.scala b/test/files/jvm/unittest_xml.scala
index 1569bb13af..1c36e746ea 100644
--- a/test/files/jvm/unittest_xml.scala
+++ b/test/files/jvm/unittest_xml.scala
@@ -12,7 +12,7 @@ object Test {
val hasBar = new HasKeyValue("bar")
x match {
//case Node("foo", hasBar(z), _*) => z
- case Node("foo", md, _*) if !hasBar.unapplySeq(md).isEmpty =>
+ case Node("foo", md, _*) if !hasBar.unapplySeq(md).isEmpty =>
md("bar")(0)
case _ => new Atom(3)
}
@@ -22,8 +22,8 @@ object Test {
var x: MetaData = Null
var s: NamespaceBinding = TopScope
- // testing method def apply(uri:String, scp:NamespaceBinding, k:String): Seq[Node]
- // def apply(k:String): Seq[Node]
+ // testing method def apply(uri:String, scp:NamespaceBinding, k:String): Seq[Node]
+ // def apply(k:String): Seq[Node]
assertEquals("absent element (prefixed) 1", null, x("za://foo.com", s, "bar" ))
assertEquals("absent element (unprefix) 1", null, x("bar"))
@@ -51,8 +51,8 @@ object Test {
val z = <foo bar="gar"/>
val z2 = <foo/>
- assertEquals("attribute extractor 1", Text("gar"), domatch(z))
- assertEquals("attribute extractor 2", new Atom(3), domatch(z2))
+ assertEquals("attribute extractor 1", Text("gar"), domatch(z))
+ assertEquals("attribute extractor 2", new Atom(3), domatch(z2))
}
}
@@ -61,14 +61,14 @@ object Test {
def runTest() = {
assertTrue(Utility.isNameStart('b'))
assertFalse(Utility.isNameStart(':'))
-
-
+
+
val x = <foo>
<toomuchws/>
</foo>
val y = xml.Utility.trim(x)
-
+
assertEquals("trim 1 ", 1, y match { case <foo><toomuchws/></foo> => 1 })
val x2 = <foo>
@@ -100,7 +100,7 @@ object Test {
def main(args:Array[String]) = {
val ts = new TestSuite(
new MetaDataTest,
- new UtilityTest
+ new UtilityTest
)
val tr = new TestResult()
ts.run(tr)
diff --git a/test/files/jvm/xml01.scala b/test/files/jvm/xml01.scala
index e305f516d7..574d20720f 100644
--- a/test/files/jvm/xml01.scala
+++ b/test/files/jvm/xml01.scala
@@ -1,7 +1,7 @@
import java.io.StringReader
import org.xml.sax.InputSource
-import scala.testing.SUnit._
+import scala.testing.SUnit._
import scala.util.logging._
import scala.xml._
@@ -18,29 +18,27 @@ object Test extends Application with Assert {
val c = new Node {
def label = "hello"
- override def hashCode() =
+ override def hashCode() =
Utility.hashCode(prefix, label, attributes.hashCode(), scope.hashCode(), child);
def child = Elem(null, "world", e, sc);
//def attributes = e;
override def text = ""
}
- assertSameElements(List(3), List(3))
-
println("equality")
- assertEquals(c, parsedxml11)
- assertEquals(parsedxml1, parsedxml11)
- assertSameElements(List(parsedxml1), List(parsedxml11))
- assertSameElements(Array(parsedxml1).toList, List(parsedxml11))
+ assertEqualsXML(c, parsedxml11)
+ assertEqualsXML(parsedxml1, parsedxml11)
+ assertSameElementsXML(List(parsedxml1), List(parsedxml11))
+ assertSameElementsXML(Array(parsedxml1).toList, List(parsedxml11))
val x2 = "<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>";
val i = new InputSource(new StringReader(x2))
val x2p = XML.load(i)
- assertEquals(x2p, Elem(null, "book" , e, sc,
- Elem(null, "author", e, sc,Text("Peter Buneman")),
- Elem(null, "author", e, sc,Text("Dan Suciu")),
+ assertEqualsXML(x2p, Elem(null, "book" , e, sc,
+ Elem(null, "author", e, sc,Text("Peter Buneman")),
+ Elem(null, "author", e, sc,Text("Dan Suciu")),
Elem(null, "title" , e, sc,Text("Data on ze web"))));
val xmlFile2 = "<bib><book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book><book><author>John Mitchell</author><title>Foundations of Programming Languages</title></book></bib>";
@@ -51,9 +49,9 @@ object Test extends Application with Assert {
println("xpath \\")
- assertSameElements(parsedxml1 \ "_" , List(Elem(null,"world", e, sc)))
+ assertSameElementsXML(parsedxml1 \ "_" , List(Elem(null,"world", e, sc)))
- assertSameElements(parsedxml1 \ "world", List(Elem(null,"world", e, sc)))
+ assertSameElementsXML(parsedxml1 \ "world", List(Elem(null,"world", e, sc)))
/*
Console.println( parsedxml2 \ "_" );
@@ -63,13 +61,13 @@ object Test extends Application with Assert {
};
*/
- assertSameElements(
- parsedxml2 \ "_" ,
-
+ assertSameElementsXML(
+ parsedxml2 \ "_" ,
+
List(
Elem(null,"book", e, sc,
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
+ Elem(null,"author", e, sc, Text("Peter Buneman")),
+ Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"title" , e, sc, Text("Data on ze web"))),
Elem(null,"book",e,sc,
Elem(null,"author",e,sc,Text("John Mitchell")),
@@ -77,13 +75,13 @@ object Test extends Application with Assert {
);
assertEquals( (parsedxml2 \ "author").length, 0 );
- assertSameElements(
- parsedxml2 \ "book",
-
+ assertSameElementsXML(
+ parsedxml2 \ "book",
+
List(
Elem(null,"book",e,sc,
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
+ Elem(null,"author", e, sc, Text("Peter Buneman")),
+ Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"title" , e, sc, Text("Data on ze web"))),
Elem(null,"book",e,sc,
Elem(null,"author", e, sc, Text("John Mitchell")),
@@ -91,49 +89,49 @@ object Test extends Application with Assert {
)
);
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \ "_" \ "_",
List(
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
+ Elem(null,"author", e, sc, Text("Peter Buneman")),
+ Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"title" , e, sc, Text("Data on ze web")),
Elem(null,"author", e, sc, Text("John Mitchell")),
Elem(null,"title" , e, sc, Text("Foundations of Programming Languages"))
)
);
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \ "_" \ "author",
List(
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
+ Elem(null,"author", e, sc, Text("Peter Buneman")),
+ Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"author", e, sc, Text("John Mitchell"))
)
-
+
);
- assertSameElements( (parsedxml2 \ "_" \ "_" \ "author"), List() );
+ assertSameElementsXML( (parsedxml2 \ "_" \ "_" \ "author"), List() );
Console.println("xpath \\\\ DESCENDANTS");
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \\ "author",
List(
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
+ Elem(null,"author", e, sc, Text("Peter Buneman")),
+ Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"author", e, sc, Text("John Mitchell"))
)
);
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \\ "title",
@@ -144,25 +142,25 @@ object Test extends Application with Assert {
println(
- (parsedxml2 \\ "book" ){ n:Node => n \ "title" == "Data on ze web" }
+ (parsedxml2 \\ "book" ){ n:Node => (n \ "title") xml_== "Data on ze web" }
);
- assertEquals(
+ assertEqualsXML(
(new NodeSeq { val theSeq = List( parsedxml2 ) }) \\ "_",
List(
Elem(null,"bib",e,sc,
Elem(null,"book",e,sc,
- Elem(null, "author", e, sc, Text("Peter Buneman")),
- Elem(null, "author", e, sc, Text("Dan Suciu")),
+ Elem(null, "author", e, sc, Text("Peter Buneman")),
+ Elem(null, "author", e, sc, Text("Dan Suciu")),
Elem(null, "title" , e, sc, Text("Data on ze web"))),
Elem(null,"book",e,sc,
Elem(null,"author",e,sc,Text("John Mitchell")),
Elem(null,"title",e,sc,Text("Foundations of Programming Languages")))),
Elem(null,"book",e,sc,
- Elem(null,"author",e,sc,Text("Peter Buneman")),
- Elem(null,"author",e,sc,Text("Dan Suciu")),
+ Elem(null,"author",e,sc,Text("Peter Buneman")),
+ Elem(null,"author",e,sc,Text("Dan Suciu")),
Elem(null,"title",e,sc,Text("Data on ze web"))),
Elem(null,"author",e,sc,Text("Peter Buneman")),
//Text("Peter Buneman"),
@@ -191,14 +189,14 @@ object Test extends Application with Assert {
val zz1 = <xml:group><a/><b/><c/></xml:group>
- assertTrue(zx1 == zz1)
+ assertTrue(zx1 xml_== zz1)
assertTrue(zz1.length == 3)
// unparsed
- val uup = <xml:unparsed>&<<>""^%@$!#</xml:unparsed>
- assertTrue(uup == "&<<>\"\"^%@$!#")
- // test unicode escapes backslash u
+ // val uup = <xml:unparsed>&<<>""^%@$!#</xml:unparsed>
+ // assertTrue(uup == "&<<>\"\"^%@$!#")
+ // test unicode escapes backslash u
println("attribute value normalization")
val xmlAttrValueNorm = "<personne id='p0003' nom='&#x015e;ahingöz' />";
diff --git a/test/files/jvm/xml02.scala b/test/files/jvm/xml02.scala
index 0cbeb27ce2..02aabf3678 100644
--- a/test/files/jvm/xml02.scala
+++ b/test/files/jvm/xml02.scala
@@ -18,10 +18,10 @@ object Test extends TestConsoleMain {
object XmlEx extends TestCase("attributes") with Assert {
override def runTest = {
- assertTrue("@one", ax \ "@foo" == "bar") // uses NodeSeq.view!
- assertTrue("@two", ax \ "@foo" == xml.Text("bar")) // dto.
- assertTrue("@three", bx \ "@foo" == "bar&x") // dto.
- assertTrue ("@four", (bx \ "@foo") sameElements List(xml.Text("bar&x")))
+ assertTrue("@one", (ax \ "@foo") xml_== "bar") // uses NodeSeq.view!
+ assertTrue("@two", (ax \ "@foo") xml_== xml.Text("bar")) // dto.
+ assertTrue("@three", (bx \ "@foo") xml_== "bar&x") // dto.
+ assertTrue ("@four", (bx \ "@foo") xml_sameElements List(xml.Text("bar&x")))
assertEquals("@five", "<hello foo=\"bar&amp;x\"></hello>", bx.toString)
}
}
@@ -29,28 +29,28 @@ object Test extends TestConsoleMain {
object XmlEy extends TestCase("attributes with namespace") with Assert {
override def runTest = {
val z = ax \ "@{the namespace from outer space}foo"
- assertTrue("@six", ax \ "@{the namespace from outer space}foo" == "baz")
- assertTrue("@eight", cx \ "@{the namespace from outer space}foo" == "baz")
-
+ assertTrue("@six", (ax \ "@{the namespace from outer space}foo") xml_== "baz")
+ assertTrue("@eight", (cx \ "@{the namespace from outer space}foo") xml_== "baz")
+
try {
ax \ "@"
assertTrue("wrong1", false)
} catch {
- case _: IllegalArgumentException =>
+ case _: IllegalArgumentException =>
}
try {
ax \ "@{"
assertTrue("wrong2", false)
} catch {
- case _: IllegalArgumentException =>
+ case _: IllegalArgumentException =>
}
try {
ax \ "@{}"
assertTrue("wrong3", false)
} catch {
- case _: IllegalArgumentException =>
+ case _: IllegalArgumentException =>
}
-
+
}
}
@@ -58,8 +58,8 @@ object Test extends TestConsoleMain {
override def runTest = {
assertTrue(<hello/> match { case <hello/> => true; case _ => false; })
assertTrue(<x:ga xmlns:x="z"/> match { case <x:ga/> => true; case _ => false; });
- assertTrue(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if n \ "@foo" == "bar" => true; })
- assertTrue(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if n \ "@foo" == "bar" => true; })
+ assertTrue(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if (n \ "@foo") xml_== "bar" => true; })
+ assertTrue(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if (n \ "@foo") xml_== "bar" => true; })
}
}
diff --git a/test/files/jvm/xmlattr.scala b/test/files/jvm/xmlattr.scala
index a947adf231..4dda84373a 100644
--- a/test/files/jvm/xmlattr.scala
+++ b/test/files/jvm/xmlattr.scala
@@ -3,7 +3,7 @@ import xml.{NodeSeq, Null, Text, UnprefixedAttribute}
object Test extends TestConsoleMain {
def suite = new TestSuite(UnprefixedAttributeTest, AttributeWithOptionTest)
-
+
object UnprefixedAttributeTest extends TestCase("UnprefixedAttribute") with Assert {
override def runTest {
var x = new UnprefixedAttribute("foo","bar", Null)
@@ -13,13 +13,13 @@ object Test extends TestConsoleMain {
assertEquals(Text("bar"), x("foo"))
assertEquals(None, x.get("no_foo"))
assertEquals(null, x("no_foo"))
-
+
val y = x.remove("foo")
assertEquals(Null, y)
val z = new UnprefixedAttribute("foo", null:NodeSeq, x)
assertEquals(None, z.get("foo"))
-
+
var appended = x append x append x append x
var len = 0; while (appended ne Null) {
appended = appended.next
diff --git a/test/files/jvm/xmlmore.scala b/test/files/jvm/xmlmore.scala
index 0ba60b05b7..9f37ad61e6 100644
--- a/test/files/jvm/xmlmore.scala
+++ b/test/files/jvm/xmlmore.scala
@@ -11,7 +11,7 @@ Ours is the portal of hope, come as you are."
Mevlana Celaleddin Rumi]]>
val nazim = <foo>{myBreak}</foo> // shows use of unparsed
-
+
Console println com
Console println pi
Console println crz // this guy will escaped, and rightly so
diff --git a/test/files/jvm/xmlpull.scala b/test/files/jvm/xmlpull.scala
index d2bb72a071..9ba7d4cf02 100644
--- a/test/files/jvm/xmlpull.scala
+++ b/test/files/jvm/xmlpull.scala
@@ -5,7 +5,7 @@ import scala.io.Source
object Test {
val src = Source.fromString("<hello><world/>!</hello>")
-
+
def main(args: Array[String]) {
var er = new XMLEventReader(src)
er.next match {
@@ -28,4 +28,4 @@ object Test {
//println("6")
}
}
-
+
diff --git a/test/files/jvm/xmlstuff.scala b/test/files/jvm/xmlstuff.scala
index 46faf283dc..3508070007 100644
--- a/test/files/jvm/xmlstuff.scala
+++ b/test/files/jvm/xmlstuff.scala
@@ -59,23 +59,23 @@ passed ok
<baz bazValue="8"/>
<bar value="5" gi='go'/>
</foo>;
-
+
val pelems_1 = for( val x <- p \ "bar"; val y <- p \ "baz" ) yield {
Text(x.attributes("value").toString + y.attributes("bazValue").toString+ "!")
};
val pelems_2 = new NodeSeq { val theSeq = List(Text("38!"),Text("58!")) };
- assertSameElements(pelems_1, pelems_2)
+ assertSameElementsXML(pelems_1, pelems_2)
- assertEquals(p \\ "@bazValue", Text("8"))
+ assertEqualsXML(p \\ "@bazValue", Text("8"))
- val books =
+ val books =
<bks>
<book><title>Blabla</title></book>
<book><title>Blubabla</title></book>
<book><title>Baaaaaaalabla</title></book>
</bks>;
- val reviews =
+ val reviews =
<reviews>
<entry><title>Blabla</title>
<remarks>
@@ -97,7 +97,7 @@ passed ok
println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
for (t <- books \\ "title";
r <- reviews \\ "entry"
- if r \ "title" == t) yield
+ if (r \ "title") xml_== t) yield
<result>
{ t }
{ r \ "remarks" }
@@ -105,32 +105,32 @@ passed ok
));
// example
- println(
+ println(
for (t @ <book><title>Blabla</title></book> <- new NodeSeq { val theSeq = books.child }.toList)
yield t
);
-val phoneBook =
+val phoneBook =
<phonebook>
<descr>
- This is the <b>phonebook</b> of the
+ This is the <b>phonebook</b> of the
<a href="http://acme.org">ACME</a> corporation.
</descr>
<entry>
- <name>John</name>
+ <name>John</name>
<phone where="work"> +41 21 693 68 67</phone>
<phone where="mobile">+41 79 602 23 23</phone>
</entry>
</phonebook>;
-val addrBook =
+val addrBook =
<addrbook>
<descr>
- This is the <b>addressbook</b> of the
+ This is the <b>addressbook</b> of the
<a href="http://acme.org">ACME</a> corporation.
</descr>
<entry>
- <name>John</name>
+ <name>John</name>
<street> Elm Street</street>
<city>Dolphin City</city>
</entry>
@@ -139,14 +139,14 @@ val addrBook =
println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
for (t <- addrBook \\ "entry";
r <- phoneBook \\ "entry"
- if t \ "name" == r \ "name") yield
+ if (t \ "name") xml_== (r \ "name")) yield
<result>
{ t.child }
{ r \ "phone" }
</result>
));
-
+
/* namespaces */
// begin tmp
println("namespaces")
@@ -168,9 +168,9 @@ val addrBook =
import scala.xml.dtd.ELEMENTS
import scala.xml.dtd.ContentModel._
vtor.setContentModel(
- ELEMENTS(
+ ELEMENTS(
Sequ(
- Letter(ElemName("bar")),
+ Letter(ElemName("bar")),
Star(Letter(ElemName("baz"))) )));
}
@@ -178,11 +178,11 @@ val addrBook =
{
import scala.xml.dtd.MIXED
import scala.xml.dtd.ContentModel._
-
+
vtor.setContentModel(
MIXED(
- Alt(Letter(ElemName("bar")),
- Letter(ElemName("baz")),
+ Alt(Letter(ElemName("bar")),
+ Letter(ElemName("baz")),
Letter(ElemName("bal")))));
}
@@ -194,20 +194,20 @@ val addrBook =
vtor.setContentModel(null)
vtor.setMetaData(List())
assertEquals(vtor( <foo bar="hello"/> ), false)
-
- {
- import scala.xml.dtd._
+
+ {
+ import scala.xml.dtd._
vtor setMetaData List(AttrDecl("bar", "CDATA", IMPLIED))
}
assertEquals(vtor(<foo href="http://foo.com" bar="hello"/>), false)
assertEquals(vtor(<foo bar="hello"/>), true)
- {
+ {
import scala.xml.dtd._
vtor.setMetaData(List(AttrDecl("bar","CDATA",REQUIRED)))
}
assertEquals( vtor( <foo href="http://foo.com" /> ), false )
assertEquals( vtor( <foo bar="http://foo.com" /> ), true )
-
+
}
}
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
new file mode 100644
index 0000000000..ed9c46c3db
--- /dev/null
+++ b/test/files/lib/scalacheck.jar.desired.sha1
@@ -0,0 +1 @@
+4c76385b1a9cb7cd619739776b940d98c4aadc6d ?scalacheck.jar
diff --git a/test/files/neg/abstract-vars.check b/test/files/neg/abstract-vars.check
new file mode 100644
index 0000000000..8aa47745f6
--- /dev/null
+++ b/test/files/neg/abstract-vars.check
@@ -0,0 +1,21 @@
+abstract-vars.scala:5: error: class Fail1 needs to be abstract, since variable x is not defined
+(Note that variables need to be initialized to be defined)
+class Fail1 extends A {
+ ^
+abstract-vars.scala:9: error: class Fail2 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that variables need to be initialized to be defined)
+class Fail2 extends A { }
+ ^
+abstract-vars.scala:11: error: class Fail3 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that an abstract var requires a setter in addition to the getter)
+class Fail3 extends A {
+ ^
+abstract-vars.scala:14: error: class Fail4 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that an abstract var requires a setter in addition to the getter)
+class Fail4 extends A {
+ ^
+abstract-vars.scala:18: error: class Fail5 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that an abstract var requires a getter in addition to the setter)
+class Fail5 extends A {
+ ^
+5 errors found
diff --git a/test/files/neg/abstract-vars.scala b/test/files/neg/abstract-vars.scala
new file mode 100644
index 0000000000..df6109d3a8
--- /dev/null
+++ b/test/files/neg/abstract-vars.scala
@@ -0,0 +1,29 @@
+abstract class A {
+ var x: Int
+}
+
+class Fail1 extends A {
+ var x: Int
+}
+
+class Fail2 extends A { }
+
+class Fail3 extends A {
+ val x: Int = 5
+}
+class Fail4 extends A {
+ def x: Int = 5
+}
+
+class Fail5 extends A {
+ def x_=(y: Int) = ()
+}
+
+class Success1 extends A {
+ val x: Int = 5
+ def x_=(y: Int) = ()
+}
+
+class Success2 extends A {
+ var x: Int = 5
+}
diff --git a/test/files/neg/accesses.scala b/test/files/neg/accesses.scala
index 2a6b45c35b..934e83e705 100644
--- a/test/files/neg/accesses.scala
+++ b/test/files/neg/accesses.scala
@@ -6,7 +6,7 @@ abstract class A {
private[p1] def f4: Unit
protected[p1] def f5: Unit
}
-
+
abstract class OK1 extends A {
private[p1] def f2: Unit
protected[p2] def f3: Unit
diff --git a/test/files/neg/array-not-seq.check b/test/files/neg/array-not-seq.check
new file mode 100644
index 0000000000..c16ecdad72
--- /dev/null
+++ b/test/files/neg/array-not-seq.check
@@ -0,0 +1,7 @@
+array-not-seq.scala:2: error: An Array will no longer match as Seq[_].
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ ^
+error: An Array will no longer match as Seq[_].
+error: An Array will no longer match as Seq[_].
+error: An Array will no longer match as Seq[_].
+four errors found
diff --git a/test/files/neg/array-not-seq.flags b/test/files/neg/array-not-seq.flags
new file mode 100644
index 0000000000..4e9f7e4a56
--- /dev/null
+++ b/test/files/neg/array-not-seq.flags
@@ -0,0 +1 @@
+-Xmigration -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/array-not-seq.scala b/test/files/neg/array-not-seq.scala
new file mode 100644
index 0000000000..5f367bdd85
--- /dev/null
+++ b/test/files/neg/array-not-seq.scala
@@ -0,0 +1,26 @@
+object Test {
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ def f2(x: Any) = x match {
+ case _: Seq[_] => true
+ case _ => false
+ }
+
+ def f3(x: Any) = x match {
+ case _: Array[_] => true
+ case _ => false
+ }
+
+ def f4(x: Any) = x.isInstanceOf[Traversable[_]]
+
+ def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
+ case (Some(_: Seq[_]), Nil, _) => 1
+ case (None, List(_: List[_], _), _) => 2
+ case _ => 3
+ }
+
+ def main(args: Array[String]): Unit = {
+ // println(f1(Array(1)))
+ // println(f2(Array(1)))
+ // println(f3(Array(1))
+ }
+}
diff --git a/test/files/neg/bug1011.scala b/test/files/neg/bug1011.scala
index 7dc3f49469..57a6ad7b45 100644
--- a/test/files/neg/bug1011.scala
+++ b/test/files/neg/bug1011.scala
@@ -3,7 +3,7 @@ import scala.xml._;
abstract class Test {
//val entity : String;
- def primitiveHeader : NodeSeq =
+ def primitiveHeader : NodeSeq =
Group({
<dl><code>{Text(entity)}</code>
<code>{Text(entity)}</code>
diff --git a/test/files/neg/bug1112.scala b/test/files/neg/bug1112.scala
index 1a88629faf..b2a374c785 100644
--- a/test/files/neg/bug1112.scala
+++ b/test/files/neg/bug1112.scala
@@ -1,13 +1,13 @@
// checks that error doesn't crash the compiler
-// (due to isFunctionType normalizing Type1 to a function type,
+// (due to isFunctionType normalizing Type1 to a function type,
// but then the code that used that test not using the normalized type for further operations)
class Test {
type Type1 = () => Unit
-
+
def call(p: Int)(f: => Type1) = {
f()
}
-
+
def run = {
call(0,() => System.out.println("here we are"))
}
diff --git a/test/files/neg/bug112706A.scala b/test/files/neg/bug112706A.scala
index 11304720bc..b7799af96d 100644
--- a/test/files/neg/bug112706A.scala
+++ b/test/files/neg/bug112706A.scala
@@ -2,7 +2,7 @@ package test;
trait Test {
def foo(p : List[Tuple2[String,String]]) = {
for (t <- p) t._1 match {
- case Tuple2(node,_) =>
+ case Tuple2(node,_) =>
}
}
}
diff --git a/test/files/neg/bug1183.scala b/test/files/neg/bug1183.scala
index a845126488..ee9385fc1a 100644
--- a/test/files/neg/bug1183.scala
+++ b/test/files/neg/bug1183.scala
@@ -13,7 +13,7 @@ object Test extends TestConsoleMain {
case class Bar(i:Int)
}
-
+
class Test717 extends TestCase("#717 test path of case classes") {
val foo1 = new Foo(1)
@@ -31,7 +31,7 @@ object Test extends TestConsoleMain {
class Baz
object Bam
object Bar
-
+
def unapply(s : String) : Option[Bar] = Some(new Bar(s))
}
diff --git a/test/files/neg/bug1275.check b/test/files/neg/bug1275.check
index 9f806c0689..6ee8365796 100644
--- a/test/files/neg/bug1275.check
+++ b/test/files/neg/bug1275.check
@@ -1,4 +1,6 @@
-bug1275.scala:13: error: The kind of type MyType does not conform to the expected kind of type MyType[+t] <: TestCovariance.Seq[t] in trait Seq.
- def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s = xs f
- ^
+bug1275.scala:11: error: type mismatch;
+ found : xs.MyType[a]
+ required: s
+ = xs f
+ ^
one error found
diff --git a/test/files/neg/bug1275.scala b/test/files/neg/bug1275.scala
index 7b9b044346..1175b30763 100644
--- a/test/files/neg/bug1275.scala
+++ b/test/files/neg/bug1275.scala
@@ -1,14 +1,15 @@
-// tested using Scala compiler version 2.6.0-RC1 -- (c) 2002-2009 LAMP/EPFL
-
-// prompted by "Covariant return types" mailing list question
-object TestCovariance {
-
- // see Type constructor polymorphism in http://www.scala-lang.org/docu/changelog.html
- trait Seq[+t] {
- type MyType[+t] <: Seq[t]
-
- def f: MyType[t]
- }
-
- def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s = xs f
-}
+object Test {
+ trait Seq[+t] {
+ type MyType[+t] <: Seq[t]
+
+ def f: MyType[t]
+ }
+
+ // illegal abstract type member refinement: changes the arity of MyType
+ // the error is pretty strange, since the compiler forms the illegal type xs.MyType[a] anyway
+ def span[a, s <: Seq[a] { type MyType/*look ma, no type parameters!*/ <: s } ](xs: s): s
+ = xs f
+// ^
+// found : xs.MyType[a]
+// required: s
+}
\ No newline at end of file
diff --git a/test/files/neg/bug1279a.check b/test/files/neg/bug1279a.check
deleted file mode 100644
index edfd1fe871..0000000000
--- a/test/files/neg/bug1279a.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug1279a.scala:34: error: type mismatch;
- found : first.selfType
- required: M{type T <: this.T}
- def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
- ^
-one error found
diff --git a/test/files/neg/bug1286.check b/test/files/neg/bug1286.check
index 9bf63252cc..734964e9cf 100644
--- a/test/files/neg/bug1286.check
+++ b/test/files/neg/bug1286.check
@@ -1,2 +1,7 @@
-error: fatal error: Companions 'object Foo' and 'trait Foo' must be defined in same file.
-one error found
+a.scala:1: error: Companions 'object Foo' and 'trait Foo' must be defined in same file
+trait Foo {
+ ^
+b.scala:1: error: Companions 'trait Foo' and 'object Foo' must be defined in same file
+object Foo extends Foo {
+ ^
+two errors found
diff --git a/test/files/neg/bug1392.check b/test/files/neg/bug1392.check
deleted file mode 100644
index e4c9630435..0000000000
--- a/test/files/neg/bug1392.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1392.scala:1: error: object Int is not a value
-object X extends Application { Int }
- ^
-one error found
diff --git a/test/files/neg/bug1392.scala b/test/files/neg/bug1392.scala
deleted file mode 100644
index 54a4b9e908..0000000000
--- a/test/files/neg/bug1392.scala
+++ /dev/null
@@ -1 +0,0 @@
-object X extends Application { Int }
diff --git a/test/files/neg/bug1523.scala b/test/files/neg/bug1523.scala
index 219fb0c060..b8754f4e15 100644
--- a/test/files/neg/bug1523.scala
+++ b/test/files/neg/bug1523.scala
@@ -1,5 +1,5 @@
-object test {
+object test {
def bug(x: Any) = x
-
+
def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a")
}
diff --git a/test/files/neg/bug1623.scala b/test/files/neg/bug1623.scala
index 1c368e7ef5..d4bd6f4e86 100644
--- a/test/files/neg/bug1623.scala
+++ b/test/files/neg/bug1623.scala
@@ -1,7 +1,7 @@
package test
-trait A
-trait B
+trait A
+trait B
class BImpl extends B {
this: A =>
diff --git a/test/files/neg/bug1878.check b/test/files/neg/bug1878.check
index 5484d675af..f760781fa0 100644
--- a/test/files/neg/bug1878.check
+++ b/test/files/neg/bug1878.check
@@ -1,10 +1,15 @@
bug1878.scala:3: error: _* may only come last
val err1 = "" match { case Seq(f @ _*, ',') => f }
^
+bug1878.scala:3: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: java.lang.String
+ val err1 = "" match { case Seq(f @ _*, ',') => f }
+ ^
bug1878.scala:9: error: _* may only come last
val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
^
bug1878.scala:13: error: _* may only come last
case <p> { _* } </p> =>
^
-three errors found
+four errors found
diff --git a/test/files/neg/bug1878.scala b/test/files/neg/bug1878.scala
index df0e1b6caf..32d5ce5e0f 100644
--- a/test/files/neg/bug1878.scala
+++ b/test/files/neg/bug1878.scala
@@ -1,13 +1,13 @@
object Test extends Application {
- // illegal
+ // illegal
val err1 = "" match { case Seq(f @ _*, ',') => f }
-
+
// no error
val List(List(arg1, _*), _) = List(List(1,2,3), List(4,5,6))
-
+
// illegal
val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
-
+
// illegal - bug #1764
null match {
case <p> { _* } </p> =>
diff --git a/test/files/neg/bug2148.check b/test/files/neg/bug2148.check
new file mode 100644
index 0000000000..22be424c39
--- /dev/null
+++ b/test/files/neg/bug2148.check
@@ -0,0 +1,4 @@
+bug2148.scala:9: error: type A is not a stable prefix
+ val b = new A with A#A1
+ ^
+one error found
diff --git a/test/files/neg/bug2148.scala b/test/files/neg/bug2148.scala
new file mode 100644
index 0000000000..c0521d9864
--- /dev/null
+++ b/test/files/neg/bug2148.scala
@@ -0,0 +1,10 @@
+class A {
+ var i = 0
+ trait A1 extends A {
+ i += 1
+ }
+}
+
+object Bob {
+ val b = new A with A#A1
+}
\ No newline at end of file
diff --git a/test/files/neg/bug2206.check b/test/files/neg/bug2206.check
new file mode 100644
index 0000000000..3deb4d99ef
--- /dev/null
+++ b/test/files/neg/bug2206.check
@@ -0,0 +1,5 @@
+bug2206.scala:10: error: value f is not a member of o.A
+ Note: implicit method ax is not applicable here because it comes after the application point and it lacks an explicit result type
+ a.f()
+ ^
+one error found
diff --git a/test/files/neg/bug2206.scala b/test/files/neg/bug2206.scala
new file mode 100644
index 0000000000..cd2ec225e9
--- /dev/null
+++ b/test/files/neg/bug2206.scala
@@ -0,0 +1,15 @@
+object o {
+ class A
+
+ class AX {
+ def f() { }
+ }
+
+ import Implicits._
+ val a = new A
+ a.f()
+
+ object Implicits {
+ implicit def ax(a: A) = new AX
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/bug2213.check b/test/files/neg/bug2213.check
new file mode 100644
index 0000000000..b24f7dc554
--- /dev/null
+++ b/test/files/neg/bug2213.check
@@ -0,0 +1,15 @@
+bug2213.scala:9: error: class C needs to be abstract, since:
+value y in class A of type Int is not defined
+value x in class A of type Int is not defined
+method g in class A of type => Int is not defined
+method f in class A of type => Int is not defined
+class C extends A {}
+ ^
+bug2213.scala:11: error: object creation impossible, since:
+value y in class A of type Int is not defined
+value x in class A of type Int is not defined
+method g in class A of type => Int is not defined
+method f in class A of type => Int is not defined
+object Q extends A { }
+ ^
+two errors found
diff --git a/test/files/neg/bug2213.scala b/test/files/neg/bug2213.scala
new file mode 100644
index 0000000000..af1df3ccfe
--- /dev/null
+++ b/test/files/neg/bug2213.scala
@@ -0,0 +1,11 @@
+abstract class A {
+ def f: Int
+ def g: Int
+
+ val x: Int
+ val y: Int
+}
+
+class C extends A {}
+
+object Q extends A { }
\ No newline at end of file
diff --git a/test/files/neg/bug278.check b/test/files/neg/bug278.check
index a3d44f6508..ad0a97371e 100644
--- a/test/files/neg/bug278.check
+++ b/test/files/neg/bug278.check
@@ -1,4 +1,7 @@
-bug278.scala:5: error: overloaded method value a with alternatives => (C.this.A) => Unit <and> => () => Unit does not take type parameters
+bug278.scala:5: error: overloaded method value a with alternatives:
+ => (C.this.A) => Unit <and>
+ => () => Unit
+ does not take type parameters
a[A]
^
bug278.scala:4: error: method a is defined twice
diff --git a/test/files/neg/bug3209.check b/test/files/neg/bug3209.check
new file mode 100644
index 0000000000..fa50f4ce1d
--- /dev/null
+++ b/test/files/neg/bug3209.check
@@ -0,0 +1,4 @@
+bug3209.scala:2: error: expected start of definition
+package test
+^
+one error found
diff --git a/test/files/neg/bug3209.scala b/test/files/neg/bug3209.scala
new file mode 100644
index 0000000000..d893726659
--- /dev/null
+++ b/test/files/neg/bug3209.scala
@@ -0,0 +1,2 @@
+@javax.annotation.Generated(Array("test"))
+package test
\ No newline at end of file
diff --git a/test/files/neg/bug3631.check b/test/files/neg/bug3631.check
new file mode 100644
index 0000000000..12d94aa4dc
--- /dev/null
+++ b/test/files/neg/bug3631.check
@@ -0,0 +1,4 @@
+bug3631.scala:3: error: Implementation restriction: case classes cannot have more than 22 parameters.
+case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
+ ^
+one error found
diff --git a/test/files/neg/bug3631.scala b/test/files/neg/bug3631.scala
new file mode 100644
index 0000000000..bcf91619ee
--- /dev/null
+++ b/test/files/neg/bug3631.scala
@@ -0,0 +1,3 @@
+case class X22(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) { }
+
+case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
\ No newline at end of file
diff --git a/test/files/neg/bug414.check b/test/files/neg/bug414.check
index c0f039ad26..ec23e26337 100644
--- a/test/files/neg/bug414.check
+++ b/test/files/neg/bug414.check
@@ -1,7 +1,3 @@
-bug414.scala:1: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-case class Empty[a] extends IntMap[a];
- ^
bug414.scala:5: error: pattern type is incompatible with expected type;
found : object Empty
required: IntMap[a]
@@ -12,5 +8,4 @@ bug414.scala:7: error: type mismatch;
required: a
case _ =>
^
-one warning found
two errors found
diff --git a/test/files/neg/bug414.scala b/test/files/neg/bug414.scala
index 7983fe88b9..2bc83eedcb 100644
--- a/test/files/neg/bug414.scala
+++ b/test/files/neg/bug414.scala
@@ -1,4 +1,4 @@
-case class Empty[a] extends IntMap[a];
+case class Empty[a]() extends IntMap[a];
case class Node[a](left: IntMap[a], keyVal: Pair[Int, a], right: IntMap[a]) extends IntMap[a];
abstract class IntMap[a] {
def lookup(key: Int): a = this match {
diff --git a/test/files/neg/bug520.scala b/test/files/neg/bug520.scala
index 076aca3122..949a509518 100644
--- a/test/files/neg/bug520.scala
+++ b/test/files/neg/bug520.scala
@@ -4,6 +4,6 @@ object test {
assert(keyword != null);
}
- def verifyKeyword(source : java.io.File, pos : Int) =
+ def verifyKeyword(source : java.io.File, pos : Int) =
verifyKeyword("", source, pos);
}
diff --git a/test/files/neg/bug558.scala b/test/files/neg/bug558.scala
index 58b030347c..4941a06c3b 100644
--- a/test/files/neg/bug558.scala
+++ b/test/files/neg/bug558.scala
@@ -11,7 +11,7 @@ abstract class NewModel {
val parent : SymbolURL;
final val top = parent.top;
final val source = top.file;
-
+
}
abstract class RootURL extends SymbolURL {
final val top : RootURL = this;
diff --git a/test/files/neg/bug563.scala b/test/files/neg/bug563.scala
index d8e026e656..3261491dcd 100644
--- a/test/files/neg/bug563.scala
+++ b/test/files/neg/bug563.scala
@@ -1,7 +1,7 @@
object Test {
def map[A,R](a : List[A], f : A => R) : List[R] = a.map(f);
-
- def split(sn : Iterable[List[Cell[int]]]) : unit =
+
+ def split(sn : Iterable[List[Cell[Int]]]) : Unit =
for (n <- sn)
map(n,ptr => new Cell(ptr.elem));
}
diff --git a/test/files/neg/bug576.scala b/test/files/neg/bug576.scala
index fd83217a45..fa7ee6019f 100644
--- a/test/files/neg/bug576.scala
+++ b/test/files/neg/bug576.scala
@@ -4,16 +4,16 @@ abstract class BaseListXXX {
type Node <: BaseNode;
abstract class BaseNode {
}
-}
+}
trait PriorityTreeXXX extends BaseListXXX {
type Node <: BasicTreeNode;
-
+
trait BasicTreeNode extends BaseNode {
- def sibling: Node;
+ def sibling: Node;
def insert(dir : Int, node : Node) = {
if (true) sibling.insert(node);
//else insert(node);
-
+
}
def insert(node : Node) : Unit = {}
}
diff --git a/test/files/neg/bug588.scala b/test/files/neg/bug588.scala
index f30937377e..1bc6d2680f 100644
--- a/test/files/neg/bug588.scala
+++ b/test/files/neg/bug588.scala
@@ -1,15 +1,15 @@
abstract class Test0 {
- def visit(f: Int => Unit): Boolean
+ def visit(f: Int => Unit): Boolean
def visit(f: Int => String): Boolean
}
trait Test {
type TypeA <: TraitA;
type TypeB <: TypeA with TraitB;
-
+
def f(node : TypeA) : Unit;
def f(brac : TypeB) : Unit;
-
+
trait TraitA;
trait TraitB;
-
+
}
diff --git a/test/files/neg/bug591.scala b/test/files/neg/bug591.scala
index 0f0b02395c..5f2397e6a9 100644
--- a/test/files/neg/bug591.scala
+++ b/test/files/neg/bug591.scala
@@ -1,17 +1,17 @@
abstract class BaseList {
type Node <: BaseNode;
-
-
+
+
abstract class BaseNode {
protected def self : Node;
private[BaseList] def self00 = self;
def dirty : Unit = {}
def replaceWith(node : Node) = {}
}
-
+
implicit def baseNode2Node(bnode : BaseNode): Node = bnode.self00;
-
+
}
@@ -20,22 +20,22 @@ trait BaseFlow extends BaseList {
type Flow <: FlowBase;
type Output <: OutputBase;
type Input <: InputBase;
-
+
abstract class FlowBase {
-
+
}
trait OutputBase extends FlowBase {
-
+
}
trait InputBase extends FlowBase {
-
+
}
-
+
trait BFNode extends BaseNode {
private var input : Input = _;
private var output : Output = _;
-
+
def input_=(in : Input) = {}
-
+
}
}
diff --git a/test/files/neg/bug608.check b/test/files/neg/bug608.check
index 4e8eb4d709..a8e32e4c10 100644
--- a/test/files/neg/bug608.check
+++ b/test/files/neg/bug608.check
@@ -1,6 +1,6 @@
bug608.scala:16: error: type mismatch;
- found : (a) => a
- required: (ha) => ?
- = g(f(x).bimap(id))
- ^
+ found : hs{type a = ha}
+ required: hs{type s = hs; type a = ha}
+ = g(f(x).bimap(id))
+ ^
one error found
diff --git a/test/files/neg/bug608.scala b/test/files/neg/bug608.scala
index 24f515651a..1f12764dd1 100644
--- a/test/files/neg/bug608.scala
+++ b/test/files/neg/bug608.scala
@@ -1,17 +1,17 @@
trait CrashDueToTypeError {
- def id[a](x :a) :a = x
+ def id[a](x :a) :a = x
- trait Bifunctor {
- type a; // content
- type s <: Bifunctor
+ trait Bifunctor {
+ type a; // content
+ type s <: Bifunctor
- // uncomment this-vvvvvvvvvvvvvvvvvvvvvvvvvvvv, and it compiles
- def bimap[c](f :a=>c) :s{/*type s=Bifunctor.this.s;*/type a=c; }
- }
+ // uncomment this-vvvvvvvvvvvvvvvvvvvvvvvvvvvv, and it compiles
+ def bimap[c](f :a=>c) :s{/*type s=Bifunctor.this.s;*/type a=c; }
+ }
- def hylo[hs <: Bifunctor,ha,hb,hc]
- (f :hb=>hs{type s=hs; type a=ha},
- g :hs{type s=hs; type a=ha}=>hc)(x :hb)
- :hc
- = g(f(x).bimap(id))
+ def hylo[hs <: Bifunctor,ha,hb,hc]
+ (f :hb=>hs{type s=hs; type a=ha},
+ g :hs{type s=hs; type a=ha}=>hc)(x :hb)
+ :hc
+ = g(f(x).bimap(id))
}
diff --git a/test/files/neg/bug692.scala b/test/files/neg/bug692.scala
index 24e1d2fea3..f230a6bacf 100644
--- a/test/files/neg/bug692.scala
+++ b/test/files/neg/bug692.scala
@@ -5,15 +5,15 @@ abstract class test3 {
abstract class RefType[C <: AnyRef] extends Type[C];
case class ObjectType() extends RefType[AnyRef];
abstract class ClassType[C <: Z, Z <: AnyRef](zuper : RefType[Z]) extends RefType[C];
-
+
case class FooType() extends ClassType[Foo,AnyRef](ObjectType());
implicit def typeOfFoo = FooType();
case class BarType[T3 <: Foo](tpeT : RefType[T3]) extends ClassType[Bar[T3],Foo](FooType);
- implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] =
+ implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] =
BarType(elem);
-
+
class Foo[A <: AnyRef];
class Bar[A <: Foo](implicit tpeA : Type[A]) extends Foo;
diff --git a/test/files/neg/bug693.scala b/test/files/neg/bug693.scala
index 3a9e6247a8..d2074bee38 100644
--- a/test/files/neg/bug693.scala
+++ b/test/files/neg/bug693.scala
@@ -1,5 +1,5 @@
abstract class test4 {
trait Type;
val x : Type = null;
- val x : Int = 10;
+ val x : Int = 10;
}
diff --git a/test/files/neg/bug700.check b/test/files/neg/bug700.check
index 33a67e5094..5c2854069c 100644
--- a/test/files/neg/bug700.check
+++ b/test/files/neg/bug700.check
@@ -1,4 +1,4 @@
bug700.scala:6: error: method foobar in trait Foo is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'
- def foobar: unit = super.foobar
+ def foobar: Unit = super.foobar
^
one error found
diff --git a/test/files/neg/bug700.scala b/test/files/neg/bug700.scala
index 7477bb54f6..b08c8b5529 100644
--- a/test/files/neg/bug700.scala
+++ b/test/files/neg/bug700.scala
@@ -3,7 +3,7 @@ trait Foo {
}
trait Bar extends Foo {
- def foobar: unit = super.foobar
+ def foobar: Unit = super.foobar
}
// the following definition breaks the compiler
diff --git a/test/files/neg/bug715.scala b/test/files/neg/bug715.scala
index 87b2525a63..a5ccd567f9 100644
--- a/test/files/neg/bug715.scala
+++ b/test/files/neg/bug715.scala
@@ -1,4 +1,4 @@
-package test;
+package test;
trait B {
type Node <: NodeImpl;
trait NodeImpl {
diff --git a/test/files/neg/bug783.scala b/test/files/neg/bug783.scala
index 3818910b8e..0920089509 100644
--- a/test/files/neg/bug783.scala
+++ b/test/files/neg/bug783.scala
@@ -18,7 +18,7 @@ object Main extends Application {
def globalInit : Global;
final def globalInit0 = globalInit.asInstanceOf[global.type];
}
-
+
object global0 extends Global {
object analyzer extends Analyzer {
type Global = global0.type;
diff --git a/test/files/neg/bug798.scala b/test/files/neg/bug798.scala
index a2bf66d19b..b4a1939444 100644
--- a/test/files/neg/bug798.scala
+++ b/test/files/neg/bug798.scala
@@ -4,5 +4,5 @@ trait Test[Bracks <: Bracks] {
class C[T]
val bracks : Bracks;
val singletons = f(bracks);
-
+
}
diff --git a/test/files/neg/bug836.scala b/test/files/neg/bug836.scala
index 3633b816c6..de23cf5387 100644
--- a/test/files/neg/bug836.scala
+++ b/test/files/neg/bug836.scala
@@ -10,7 +10,7 @@ abstract class A {
}
class B extends A {
- type MyObj = ObjImpl
+ type MyObj = ObjImpl
val myString: S = "hello"
val realString: String = myString // error: type missmatch
}
diff --git a/test/files/neg/bug856.check b/test/files/neg/bug856.check
index e1d0801c5f..168855d6a2 100644
--- a/test/files/neg/bug856.check
+++ b/test/files/neg/bug856.check
@@ -1,4 +1,6 @@
-bug856.scala:3: error: class ComplexRect needs to be abstract, since method _2 in trait Product2 of type => Double is not defined
+bug856.scala:3: error: class ComplexRect needs to be abstract, since:
+method _2 in trait Product2 of type => Double is not defined
+method canEqual in trait Equals of type (that: Any)Boolean is not defined
class ComplexRect(val _1:Double, _2:Double) extends Complex {
^
one error found
diff --git a/test/files/neg/bug856.scala b/test/files/neg/bug856.scala
index fea216bfad..e50084b6bd 100644
--- a/test/files/neg/bug856.scala
+++ b/test/files/neg/bug856.scala
@@ -1,4 +1,4 @@
-trait Complex extends Product2[Double,Double]
+trait Complex extends Product2[Double,Double]
class ComplexRect(val _1:Double, _2:Double) extends Complex {
override def toString = "ComplexRect("+_1+","+_2+")"
diff --git a/test/files/neg/bug875.check b/test/files/neg/bug875.check
index d547c8d69c..16a982241e 100644
--- a/test/files/neg/bug875.check
+++ b/test/files/neg/bug875.check
@@ -4,8 +4,8 @@ bug875.scala:3: error: no `: _*' annotation allowed here
^
bug875.scala:6: error: no `: _*' annotation allowed here
(such annotations are only allowed in arguments to *-parameters)
- mkList(xs: _*)
- ^
+ mkList1(xs: _*)
+ ^
bug875.scala:15: error: no `: _*' annotation allowed here
(such annotations are only allowed in arguments to *-parameters)
f(true, 1, xs: _*)
diff --git a/test/files/neg/bug875.scala b/test/files/neg/bug875.scala
index 9c579b0166..38affd5a43 100644
--- a/test/files/neg/bug875.scala
+++ b/test/files/neg/bug875.scala
@@ -1,9 +1,9 @@
object Test extends Application {
val xs = List(4, 5, 6)
val ys = List(1, 2, 3, xs: _*)
- def mkList(x: Int) = List(x)
- def mkList(x: Boolean) = List(x)
- mkList(xs: _*)
+ def mkList1(x: Int) = List(x)
+ def mkList2(x: Boolean) = List(x)
+ mkList1(xs: _*)
def f(x: Int*) = List(x: _*)
diff --git a/test/files/neg/bug876.scala b/test/files/neg/bug876.scala
index 8a94dd9db5..f5e1f9c663 100644
--- a/test/files/neg/bug876.scala
+++ b/test/files/neg/bug876.scala
@@ -11,11 +11,11 @@ object AssertionError extends AnyRef with Application
class Manager
{
final class B {}
-
+
val map = new HashMap[A, B]
}
-
-
+
+
def test[T](f: => T) { f }
test {
diff --git a/test/files/neg/bug877.scala b/test/files/neg/bug877.scala
index 5e132a1dd4..8cb98279db 100644
--- a/test/files/neg/bug877.scala
+++ b/test/files/neg/bug877.scala
@@ -1,3 +1,3 @@
-class A
+class A
trait Foo extends A(22A, Bug!) {}
diff --git a/test/files/neg/bug882.check b/test/files/neg/bug882.check
index 8f47fefd9b..4e3e6d0860 100644
--- a/test/files/neg/bug882.check
+++ b/test/files/neg/bug882.check
@@ -1,4 +1,4 @@
-bug882.scala:2: error: traits cannot have type parameters with context bounds `: ...'
+bug882.scala:2: error: traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'
trait SortedSet[A <% Ordered[A]] {
^
one error found
diff --git a/test/files/neg/bug910.check b/test/files/neg/bug910.check
index fe4ad4fca4..2bc2d986fa 100644
--- a/test/files/neg/bug910.check
+++ b/test/files/neg/bug910.check
@@ -1,6 +1,6 @@
bug910.scala:4: error: type mismatch;
found : Seq[Char]
- required: scala.Seq[int]
- val y: Seq[int] = rest
+ required: scala.Seq[Int]
+ val y: Seq[Int] = rest
^
one error found
diff --git a/test/files/neg/bug910.scala b/test/files/neg/bug910.scala
index 2f28ea408f..540ee7001d 100644
--- a/test/files/neg/bug910.scala
+++ b/test/files/neg/bug910.scala
@@ -1,7 +1,7 @@
object RegExpTest1 extends Application {
def co(x: Seq[Char]) = x match {
case Seq('s','c','a','l','a', rest @ _*) =>
- val y: Seq[int] = rest
+ val y: Seq[Int] = rest
y
}
}
diff --git a/test/files/neg/bug944.scala b/test/files/neg/bug944.scala
index dc80e5f49f..352269a80b 100644
--- a/test/files/neg/bug944.scala
+++ b/test/files/neg/bug944.scala
@@ -1,6 +1,6 @@
object TooManyArgsFunction {
- val f = (a1:Int, a2:Int, a3:Int, a4:Int, a5:Int, a6:Int, a7:Int, a8:Int,
- a9:Int, a10:Int, a11:Int, a12:Int, a13:Int, a14:Int, a15:Int,
- a16:Int, a17:Int, a18:Int, a19:Int, a20:Int, a21:Int, a22:Int,
+ val f = (a1:Int, a2:Int, a3:Int, a4:Int, a5:Int, a6:Int, a7:Int, a8:Int,
+ a9:Int, a10:Int, a11:Int, a12:Int, a13:Int, a14:Int, a15:Int,
+ a16:Int, a17:Int, a18:Int, a19:Int, a20:Int, a21:Int, a22:Int,
a23:Int) => 1
}
diff --git a/test/files/neg/bug961.check b/test/files/neg/bug961.check
index 8b407d1d0c..439ed98675 100644
--- a/test/files/neg/bug961.check
+++ b/test/files/neg/bug961.check
@@ -1,9 +1,4 @@
-bug961.scala:4: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
- private case class B_inner extends A
- ^
bug961.scala:11: error: Temp.this.B of type object Temp.B does not take parameters
B() match {
^
-one warning found
one error found
diff --git a/test/files/neg/bug961.scala b/test/files/neg/bug961.scala
index 15309b96b4..088bddd7ee 100644
--- a/test/files/neg/bug961.scala
+++ b/test/files/neg/bug961.scala
@@ -1,7 +1,7 @@
-object Temp{
+object Temp {
abstract class A
- object B{
- private case class B_inner extends A
+ object B {
+ private case class B_inner() extends A
def apply: A = B_inner()
def unapply(a: A) = a match {
case B_inner() => true
diff --git a/test/files/neg/bug987.scala b/test/files/neg/bug987.scala
index 0c65b98049..5b2addf5ef 100644
--- a/test/files/neg/bug987.scala
+++ b/test/files/neg/bug987.scala
@@ -1,4 +1,4 @@
-// tested using Scala compiler version 2.4.0-RC1 -- (c) 2002-2009 LAMP/EPFL
+// tested using Scala compiler version 2.4.0-RC1 -- (c) 2002-2010 LAMP/EPFL
// Many thanks to all at LAMP for the work that goes into Scala.
diff --git a/test/files/neg/checksensible.scala b/test/files/neg/checksensible.scala
index 092c08592f..06452e5674 100644
--- a/test/files/neg/checksensible.scala
+++ b/test/files/neg/checksensible.scala
@@ -18,7 +18,7 @@ class Test {
def main(args: Array[String]) = {
val in = new java.io.FileInputStream(args(0))
- var c = 0
+ var c = 0
while ((c = in.read) != -1)
print(c.toChar)
diff --git a/test/files/neg/constrs.check b/test/files/neg/constrs.check
index 3524709785..4f4a12bc13 100644
--- a/test/files/neg/constrs.check
+++ b/test/files/neg/constrs.check
@@ -8,7 +8,7 @@ constrs.scala:10: error: called constructor's definition must precede calling co
def this() = this("abc")
^
constrs.scala:12: error: called constructor's definition must precede calling constructor's definition
- def this(x: boolean) = this(x)
+ def this(x: Boolean) = this(x)
^
constrs.scala:16: error: type mismatch;
found : Int(1)
diff --git a/test/files/neg/constrs.scala b/test/files/neg/constrs.scala
index 969f593a2d..016df098f0 100644
--- a/test/files/neg/constrs.scala
+++ b/test/files/neg/constrs.scala
@@ -9,7 +9,7 @@ object test {
class Foo(x: Int) {
def this() = this("abc")
def this(x: String) = this(1)
- def this(x: boolean) = this(x)
+ def this(x: Boolean) = this(x)
}
class Bar[a](x: a) {
diff --git a/test/files/neg/depmet_1.check b/test/files/neg/depmet_1.check
new file mode 100644
index 0000000000..7a4f845fd5
--- /dev/null
+++ b/test/files/neg/depmet_1.check
@@ -0,0 +1,10 @@
+depmet_1.scala:2: error: illegal dependent method type: parameter appears in the type of another parameter in the same section or an earlier one
+ def precise0(y: x.type)(x: String): Unit = {}
+ ^
+depmet_1.scala:3: error: illegal dependent method type: parameter appears in the type of another parameter in the same section or an earlier one
+ def precise1(x: String, y: x.type): Unit = {}
+ ^
+depmet_1.scala:4: error: not found: value y
+ def precise2[T <: y.type](y: String): Unit = {}
+ ^
+three errors found
diff --git a/test/files/neg/depmet_1.flags b/test/files/neg/depmet_1.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/neg/depmet_1.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/neg/depmet_1.scala b/test/files/neg/depmet_1.scala
new file mode 100644
index 0000000000..fc672e1ed8
--- /dev/null
+++ b/test/files/neg/depmet_1.scala
@@ -0,0 +1,5 @@
+object Test {
+ def precise0(y: x.type)(x: String): Unit = {}
+ def precise1(x: String, y: x.type): Unit = {}
+ def precise2[T <: y.type](y: String): Unit = {}
+}
\ No newline at end of file
diff --git a/test/files/neg/forward.scala b/test/files/neg/forward.scala
index d5c0851f09..3774fa838f 100644
--- a/test/files/neg/forward.scala
+++ b/test/files/neg/forward.scala
@@ -5,20 +5,20 @@ object Test {
{
def f: Int = x;
val x: Int = f;
- }
+ }
{
def f: Int = g;
val x: Int = f;
def g: Int = x;
- }
+ }
{
def f: Int = g;
var x: Int = f;
def g: Int = x;
- }
+ }
{
def f: Int = g;
Console.println("foo");
def g: Int = f;
- }
+ }
}
diff --git a/test/files/neg/gadts1.scala b/test/files/neg/gadts1.scala
index 67aef4f2d9..1b455f408d 100644
--- a/test/files/neg/gadts1.scala
+++ b/test/files/neg/gadts1.scala
@@ -1,8 +1,8 @@
object Test{
abstract class Number
-case class Int(n: int) extends Number
-case class Double(d: double) extends Number
+case class Int(n: scala.Int) extends Number
+case class Double(d: scala.Double) extends Number
trait Term[+a]
case class Cell[a](var x: a) extends Term[a]
@@ -10,15 +10,15 @@ case class NumTerm(val n: Number) extends Term[Number]
class IntTerm(n: Int) extends NumTerm(n) with Term[Int]
-def f[a](t:Term[a], c:Cell[a]): unit =
- t match {
- case NumTerm(n) => c.x = Double(1.0)
+def f[a](t:Term[a], c:Cell[a]): Unit =
+ t match {
+ case NumTerm(n) => c.x = Double(1.0)
}
val x:Term[Number] = NumTerm(Int(5))
-def main(args: Array[String]): unit = {
+def main(args: Array[String]): Unit = {
val cell = Cell[Int](Int(6))
Console.println(cell)
f[Int](new IntTerm(Int(5)), cell)
diff --git a/test/files/neg/illegal-stmt-start.check b/test/files/neg/illegal-stmt-start.check
new file mode 100644
index 0000000000..01747524f8
--- /dev/null
+++ b/test/files/neg/illegal-stmt-start.check
@@ -0,0 +1,4 @@
+illegal-stmt-start.scala:3: error: illegal start of statement (no modifiers allowed here)
+ private def bar {}
+ ^
+one error found
diff --git a/test/files/neg/illegal-stmt-start.scala b/test/files/neg/illegal-stmt-start.scala
new file mode 100644
index 0000000000..48ae0a8b0a
--- /dev/null
+++ b/test/files/neg/illegal-stmt-start.scala
@@ -0,0 +1,5 @@
+class Test {
+ def foo {
+ private def bar {}
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/implicits.check b/test/files/neg/implicits.check
index d94e1f27f2..d975ccfa84 100644
--- a/test/files/neg/implicits.check
+++ b/test/files/neg/implicits.check
@@ -1,12 +1,3 @@
-implicits.scala:21: error: type mismatch;
- found : Pos
- required: ?{val +: ?}
-Note that implicit conversions are not applicable because they are ambiguous:
- both method any2plus in object Sub of type (x: Any)Sub.Plus
- and method pos2int in object Super of type (p: Pos)int
- are possible conversion functions from Pos to ?{val +: ?}
- f(p+1)
- ^
implicits.scala:38: error: type mismatch;
found : test2.HSome[java.lang.String,test2.HMap]
required: Int
@@ -17,4 +8,4 @@ implicits.scala:46: error: type mismatch;
required: scala.List[Mxml]
children.toList.flatMap ( e => {
^
-three errors found
+two errors found
diff --git a/test/files/neg/implicits.scala b/test/files/neg/implicits.scala
index be85029660..e908fb03e8 100644
--- a/test/files/neg/implicits.scala
+++ b/test/files/neg/implicits.scala
@@ -3,21 +3,21 @@ class Pos
class Super
object Super {
- implicit def pos2int(p: Pos): int = 0
-}
+ implicit def pos2int(p: Pos): Int = 0
+}
object Sub extends Super {
class Plus(x: Any) {
def +(y: String): String = x.toString + y
}
implicit def any2plus(x: Any): Plus = new Plus(x)
-}
+}
object Test {
import Super._
import Sub._
val p = new Pos
- def f(x: int): int = x
+ def f(x: Int): Int = x
f(p+1)
}
@@ -33,7 +33,7 @@ object test2 {
val set = HEmpty + 3 + "3"
implicit def select[T](t: HSome[T,_]) = t.head
implicit def selectTail[L](t: HSome[_,L]) = t.tail
-
+
def foo(x: Int) = 3
foo(set)
}
@@ -55,4 +55,4 @@ class Mxml {
}
-}
+}
diff --git a/test/files/neg/java-access-neg.check b/test/files/neg/java-access-neg.check
new file mode 100644
index 0000000000..af2812b579
--- /dev/null
+++ b/test/files/neg/java-access-neg.check
@@ -0,0 +1,16 @@
+S2.scala:12: error: method packageAbstract overrides nothing
+ override private[b] def packageAbstract() = () // fail
+ ^
+S2.scala:16: error: method packageConcrete overrides nothing
+ override private[b] def packageConcrete() = () // fail
+ ^
+S2.scala:36: error: method packageConcrete overrides nothing
+ override protected[b] def packageConcrete() = () // fail
+ ^
+S2.scala:47: error: method packageConcrete overrides nothing
+ override private[a] def packageConcrete() = () // fail
+ ^
+S2.scala:58: error: method packageConcrete overrides nothing
+ override def packageConcrete() = () // fail
+ ^
+5 errors found
diff --git a/test/files/neg/java-access-neg/J.java b/test/files/neg/java-access-neg/J.java
new file mode 100644
index 0000000000..4f2024673c
--- /dev/null
+++ b/test/files/neg/java-access-neg/J.java
@@ -0,0 +1,15 @@
+package a.b;
+
+public abstract class J {
+ public J() { }
+ J(int x1) { }
+ protected J(int x1, int x2) { }
+
+ abstract void packageAbstract();
+ protected abstract void protectedAbstract();
+ public abstract void publicAbstract();
+
+ void packageConcrete() { return; }
+ protected void protectedConcrete() { return; }
+ public void publicConcrete() { return; }
+}
diff --git a/test/files/neg/java-access-neg/S2.scala b/test/files/neg/java-access-neg/S2.scala
new file mode 100644
index 0000000000..dd0af8d39d
--- /dev/null
+++ b/test/files/neg/java-access-neg/S2.scala
@@ -0,0 +1,61 @@
+package a.b
+package c
+
+import a.b.J
+
+/** Variations of java-access-pos with us in a nested package.
+ */
+
+/** Declaring "override" all the time.
+ */
+class S1 extends J {
+ override private[b] def packageAbstract() = () // fail
+ override protected[b] def protectedAbstract() = ()
+ override def publicAbstract() = ()
+
+ override private[b] def packageConcrete() = () // fail
+ override protected[b] def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
+
+/** Implementing abstracts.
+ */
+class S2 extends J {
+ private[b] def packageAbstract() = () // fail
+ protected[b] def protectedAbstract() = ()
+ def publicAbstract() = ()
+}
+
+/** Widening access.
+ */
+class S3 extends J {
+ protected[b] def packageAbstract() = () // fail
+ protected[b] def protectedAbstract() = ()
+ def publicAbstract() = ()
+
+ override protected[b] def packageConcrete() = () // fail
+ override protected[b] def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
+/** More widening.
+ */
+class S4 extends J {
+ private[a] def packageAbstract() = () // fail
+ protected[a] def protectedAbstract() = ()
+ def publicAbstract() = ()
+
+ override private[a] def packageConcrete() = () // fail
+ override protected[a] def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
+/** Yet more widening.
+ */
+class S5 extends J {
+ def packageAbstract() = () // fail
+ def protectedAbstract() = ()
+ def publicAbstract() = ()
+
+ override def packageConcrete() = () // fail
+ override def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
diff --git a/test/files/neg/lazy-override.scala b/test/files/neg/lazy-override.scala
index f41d7f038b..a0f6e3bd73 100644
--- a/test/files/neg/lazy-override.scala
+++ b/test/files/neg/lazy-override.scala
@@ -5,7 +5,7 @@
lazy val y: Int = { print("/*A.y*/"); 2 }
}
-
+
class B extends A {
// lazy overrides strict val
override lazy val x: Int = { print("/*B.x*/"); 3 }
diff --git a/test/files/neg/lazyvals.scala b/test/files/neg/lazyvals.scala
index f92534f506..bbc4fe114a 100644
--- a/test/files/neg/lazyvals.scala
+++ b/test/files/neg/lazyvals.scala
@@ -1,7 +1,7 @@
/** Test which should fail compilation */
class Lazy {
-
+
// no abstract lazy values
lazy val t: Int
@@ -31,7 +31,7 @@ object T2 {
lazy val y: Int = { print("/*A.y*/"); 2 }
}
-
+
class B extends A {
// lazy overrides strict val
override lazy val x: Int = { print("/*B.x*/"); 3 }
diff --git a/test/files/neg/lubs.scala b/test/files/neg/lubs.scala
index 3524fa4d87..639117f7bf 100644
--- a/test/files/neg/lubs.scala
+++ b/test/files/neg/lubs.scala
@@ -18,7 +18,7 @@ object test2 {
class D extends A { type T = D }
def f = if (1 == 2) new C else new D
-
+
val x1: A { type T } = f
val x2: A { type T >: Null <: A } = f
val x3: A { type T >: Null <: A { type T >: Null <: A } } = f
diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check
new file mode 100644
index 0000000000..4c8c58e0fd
--- /dev/null
+++ b/test/files/neg/migration28.check
@@ -0,0 +1,8 @@
+migration28.scala:5: error: method ++= in class Stack is deprecated: use pushAll
+ s ++= List(1,2,3)
+ ^
+migration28.scala:7: error: method foreach in class Stack has changed semantics:
+Stack iterator and foreach now traverse in FIFO order.
+ s foreach (_ => ())
+ ^
+two errors found
diff --git a/test/files/neg/migration28.flags b/test/files/neg/migration28.flags
new file mode 100644
index 0000000000..197b3198c8
--- /dev/null
+++ b/test/files/neg/migration28.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xmigration
diff --git a/test/files/neg/migration28.scala b/test/files/neg/migration28.scala
new file mode 100644
index 0000000000..346c1a6448
--- /dev/null
+++ b/test/files/neg/migration28.scala
@@ -0,0 +1,12 @@
+object Test {
+ import scala.collection.mutable._
+
+ val s = new Stack[Int]
+ s ++= List(1,2,3)
+ s map (_ + 1)
+ s foreach (_ => ())
+
+ def main(args: Array[String]): Unit = {
+
+ }
+}
diff --git a/test/files/neg/multi-array.flags b/test/files/neg/multi-array.flags
new file mode 100644
index 0000000000..c36e713ab8
--- /dev/null
+++ b/test/files/neg/multi-array.flags
@@ -0,0 +1 @@
+-deprecation \ No newline at end of file
diff --git a/test/files/neg/names-defaults-neg-ref.check b/test/files/neg/names-defaults-neg-ref.check
index ac37816762..00052c72dc 100644
--- a/test/files/neg/names-defaults-neg-ref.check
+++ b/test/files/neg/names-defaults-neg-ref.check
@@ -2,7 +2,7 @@ names-defaults-neg-ref.scala:3: error: in anonymous class $anon, multiple overlo
The members with defaults are defined in trait B2235 and trait A2235.
new A2235 with B2235
^
-names-defaults-neg-ref.scala:7: error: in class A, multiple overloaded alternatives of method foo define default arguments()
+names-defaults-neg-ref.scala:7: error: in class A, multiple overloaded alternatives of method foo define default arguments.
class A {
^
names-defaults-neg-ref.scala:17: error: in class C, multiple overloaded alternatives of method bar define default arguments.
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index e47cf8c420..c695dd37dc 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -1,4 +1,4 @@
-names-defaults-neg.scala:66: error: not enough arguments for method apply: (a: Int,b: String)(c: Int*)Fact in object Fact.
+names-defaults-neg.scala:65: error: not enough arguments for method apply: (a: Int,b: String)(c: Int*)Fact in object Fact.
Unspecified value parameter b.
val fac = Fact(1)(2, 3)
^
@@ -8,103 +8,142 @@ names-defaults-neg.scala:5: error: type mismatch;
test1(b = 2, a = "#")
^
names-defaults-neg.scala:8: error: positional after named argument.
- test1(a = 1, "*")
- ^
-names-defaults-neg.scala:9: error: positional after named argument.
test1(b = "(*", 23)
^
-names-defaults-neg.scala:14: error: reference to x is ambiguous; it is both, a parameter
+names-defaults-neg.scala:13: error: reference to x is ambiguous; it is both, a parameter
name of the method and the name of a variable currently in scope.
test2(x = 1)
^
-names-defaults-neg.scala:16: error: not found: value c
+names-defaults-neg.scala:15: error: not found: value c
test1(c = 0, b = "joke")
^
-names-defaults-neg.scala:17: error: not found: value m
+names-defaults-neg.scala:16: error: not found: value m
test7((m = 1)) // named arguments must be top-level assignments
^
-names-defaults-neg.scala:18: error: not found: value m
+names-defaults-neg.scala:17: error: not found: value m
test7({m = 1})
^
-names-defaults-neg.scala:19: error: not found: value m
+names-defaults-neg.scala:18: error: not found: value m
test7 { m = 1 } // no named arguments in argument block
^
-names-defaults-neg.scala:20: error: reference to x is ambiguous; it is both, a parameter
+names-defaults-neg.scala:19: error: reference to x is ambiguous; it is both, a parameter
name of the method and the name of a variable currently in scope.
test8(x = 1)
^
-names-defaults-neg.scala:23: error: parameter specified twice: a
+names-defaults-neg.scala:22: error: parameter specified twice: a
test1(1, a = 2)
^
-names-defaults-neg.scala:24: error: parameter specified twice: b
+names-defaults-neg.scala:23: error: parameter specified twice: b
test1(b = 1, b = "2")
^
-names-defaults-neg.scala:27: error: {
+names-defaults-neg.scala:26: error: {
val x$1: Int(3) = 3;
val x$2: Int(1) = 1;
Test.this.test3(1, 3)
} of type Int does not take parameters
test3(b = 3, a = 1)(3)
^
-names-defaults-neg.scala:36: error: ambiguous reference to overloaded definition,
+names-defaults-neg.scala:35: error: ambiguous reference to overloaded definition,
both method f in object t1 of type (b: String,a: Int)java.lang.String
and method f in object t1 of type (a: Int,b: String)java.lang.String
match argument types (b: java.lang.String,a: Int)
t1.f(b = "dkljf", a = 1)
^
-names-defaults-neg.scala:43: error: ambiguous reference to overloaded definition,
+names-defaults-neg.scala:42: error: ambiguous reference to overloaded definition,
both method f in object t3 of type (a2: Int)(b: Int)java.lang.String
and method f in object t3 of type (a1: Int)java.lang.String
match argument types (Int)
t3.f(1)
^
-names-defaults-neg.scala:44: error: ambiguous reference to overloaded definition,
+names-defaults-neg.scala:43: error: ambiguous reference to overloaded definition,
both method f in object t3 of type (a2: Int)(b: Int)java.lang.String
and method f in object t3 of type (a1: Int)java.lang.String
match argument types (Int)
t3.f(1)(2)
^
-names-defaults-neg.scala:50: error: ambiguous reference to overloaded definition,
+names-defaults-neg.scala:49: error: ambiguous reference to overloaded definition,
both method g in object t7 of type (a: B)java.lang.String
and method g in object t7 of type (a: C,b: Int*)java.lang.String
match argument types (C)
t7.g(new C()) // ambigous reference
^
-names-defaults-neg.scala:54: error: parameter specified twice: b
+names-defaults-neg.scala:53: error: parameter specified twice: b
test5(a = 1, b = "dkjl", b = "dkj")
^
-names-defaults-neg.scala:55: error: parameter specified twice: b
+names-defaults-neg.scala:54: error: parameter specified twice: b
test5(1, "2", b = 3)
^
-names-defaults-neg.scala:56: error: when using named arguments, the vararg parameter has to be specified exactly once
+names-defaults-neg.scala:55: error: when using named arguments, the vararg parameter has to be specified exactly once
test5(b = "dlkj")
^
-names-defaults-neg.scala:62: error: ambiguous reference to overloaded definition,
+names-defaults-neg.scala:61: error: ambiguous reference to overloaded definition,
both method f in object t8 of type (b: String,a: Int)java.lang.String
and method f in object t8 of type (a: Int,b: java.lang.Object)java.lang.String
match argument types (a: Int,b: java.lang.String) and expected result type Any
println(t8.f(a = 0, b = "1")) // ambigous reference
^
-names-defaults-neg.scala:70: error: wrong number of arguments for <none>: (x: Int,y: String)A1
+names-defaults-neg.scala:69: error: wrong number of arguments for <none>: (x: Int,y: String)A1
A1() match { case A1(_) => () }
^
-names-defaults-neg.scala:77: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])T[T[List[T[X forSome { type X }]]]] exist so that it can be applied to arguments (List[Int])
+names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])T[T[List[T[X forSome { type X }]]]] exist so that it can be applied to arguments (List[Int])
--- because ---
argument expression's type is not compatible with formal parameter type;
found : List[Int]
required: ?T[ ?T[ scala.List[?T[ X forSome { type X } ]] ] ]
-Error occured in an application involving default arguments.
+Error occurred in an application involving default arguments.
test4()
^
-names-defaults-neg.scala:80: error: type mismatch;
+names-defaults-neg.scala:79: error: type mismatch;
found : List[Int]
required: scala.List[scala.List[?]]
def test6[T](x: List[List[T]] = List(1,2)) = x
^
-names-defaults-neg.scala:83: error: type mismatch;
+names-defaults-neg.scala:82: error: type mismatch;
found : Int
required: String
-Error occured in an application involving default arguments.
+Error occurred in an application involving default arguments.
new A2[String]()
^
-25 errors found
+names-defaults-neg.scala:86: error: module extending its companion class cannot use default constructor arguments
+ object C extends C()
+ ^
+names-defaults-neg.scala:120: error: reference to var2 is ambiguous; it is both, a parameter
+name of the method and the name of a variable currently in scope.
+ delay(var2 = 40)
+ ^
+names-defaults-neg.scala:123: error: missing parameter type for expanded function ((x$1) => a = x$1)
+ val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
+ ^
+names-defaults-neg.scala:124: error: parameter specified twice: a
+ val taf3 = testAnnFun(b = _: String, a = get(8))
+ ^
+names-defaults-neg.scala:125: error: wrong number of parameters; expected = 2
+ val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
+ ^
+names-defaults-neg.scala:133: error: variable definition needs type because the name is used as named argument the definition.
+ def t3 { var x = t.f(x = 1) }
+ ^
+names-defaults-neg.scala:136: error: variable definition needs type because the name is used as named argument the definition.
+ object t6 { var x = t.f(x = 1) }
+ ^
+names-defaults-neg.scala:139: error: variable definition needs type because the name is used as named argument the definition.
+ class t9 { var x = t.f(x = 1) }
+ ^
+names-defaults-neg.scala:153: error: variable definition needs type because the name is used as named argument the definition.
+ def u3 { var x = u.f(x = 1) }
+ ^
+names-defaults-neg.scala:156: error: variable definition needs type because the name is used as named argument the definition.
+ def u6 { var x = u.f(x = "32") }
+ ^
+names-defaults-neg.scala:159: error: reference to x is ambiguous; it is both, a parameter
+name of the method and the name of a variable currently in scope.
+ def u9 { var x: Int = u.f(x = 1) }
+ ^
+names-defaults-neg.scala:166: error: variable definition needs type because the name is used as named argument the definition.
+ class u15 { var x = u.f(x = 1) }
+ ^
+names-defaults-neg.scala:169: error: reference to x is ambiguous; it is both, a parameter
+name of the method and the name of a variable currently in scope.
+ class u18 { var x: Int = u.f(x = 1) }
+ ^
+37 errors found
diff --git a/test/files/neg/names-defaults-neg.scala b/test/files/neg/names-defaults-neg.scala
index 5829da361c..daa97a8638 100644
--- a/test/files/neg/names-defaults-neg.scala
+++ b/test/files/neg/names-defaults-neg.scala
@@ -5,7 +5,6 @@ object Test extends Application {
test1(b = 2, a = "#")
// mixing named and positional
- test1(a = 1, "*")
test1(b = "(*", 23)
// assignment / names
@@ -82,6 +81,11 @@ object Test extends Application {
// correct error message
new A2[String]()
+ object t3648 {
+ class C(val s: String = "")
+ object C extends C()
+ }
+
// DEFINITIONS
def test1(a: Int, b: String) = a +": "+ b
def test2(x: Unit) = println("test2")
@@ -107,3 +111,63 @@ case class Fact(a: Int, b: String)(c: Int*)
case class A1(x: Int = 1, y: String = "2")
class A2[T](a: T = 1)
+
+
+// anonymous functions
+object anfun {
+ var var2 = 0
+ def delay(var2: => Unit) { var2 }
+ delay(var2 = 40)
+
+ def testAnnFun(a: Int, b: String) = println(a +": "+ b)
+ val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
+ val taf3 = testAnnFun(b = _: String, a = get(8))
+ val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
+}
+
+object t3685 {
+ object t { def f(x: Int) = x }
+
+ def t1 { def x = t.f(x = 1) }
+ def t2 { val x = t.f(x = 1) }
+ def t3 { var x = t.f(x = 1) }
+ object t4 { def x = t.f(x = 1) }
+ object t5 { val x = t.f(x = 1) }
+ object t6 { var x = t.f(x = 1) }
+ class t7 { def x = t.f(x = 1) }
+ class t8 { val x = t.f(x = 1) }
+ class t9 { var x = t.f(x = 1) }
+
+ def t10 { def x: Int = t.f(x = 1) }
+ def t11 { val x: Int = t.f(x = 1) }
+ def t12 { var x: Int = t.f(x = 1) }
+ class t13 { def x: Int = t.f(x = 1) }
+ class t14 { val x: Int = t.f(x = 1) }
+ class t15 { var x: Int = t.f(x = 1) }
+
+
+ object u { def f[T](x: T) = 100 }
+
+ def u1 { def x = u.f(x = 1) }
+ def u2 { val x = u.f(x = 1) }
+ def u3 { var x = u.f(x = 1) }
+ def u4 { def x = u.f(x = "23") }
+ def u5 { val x = u.f(x = "32") }
+ def u6 { var x = u.f(x = "32") }
+ def u7 { def x: Int = u.f(x = 1) }
+ def u8 { val x: Int = u.f(x = 1) }
+ def u9 { var x: Int = u.f(x = 1) }
+ def u10 { def x: Int = u.f(x = "32") }
+ def u11 { val x: Int = u.f(x = "32") }
+ def u12 { var x: Int = u.f(x = "32") }
+
+ class u13 { def x = u.f(x = 1) }
+ class u14 { val x = u.f(x = 1) }
+ class u15 { var x = u.f(x = 1) }
+ class u16 { def x: Int = u.f(x = 1) }
+ class u17 { val x: Int = u.f(x = 1) }
+ class u18 { var x: Int = u.f(x = 1) }
+ class u19 { def x: Int = u.f(x = "32") }
+ class u20 { val x: Int = u.f(x = "32") }
+ class u21 { var x: Int = u.f(x = "32") }
+}
diff --git a/test/files/neg/null-unsoundness.scala b/test/files/neg/null-unsoundness.scala
index 15dedfbb97..f4ba859bc0 100644
--- a/test/files/neg/null-unsoundness.scala
+++ b/test/files/neg/null-unsoundness.scala
@@ -12,4 +12,4 @@ object Test extends A with Application {
type A = C
y = 42
}
-
+
diff --git a/test/files/neg/overload-msg.check b/test/files/neg/overload-msg.check
new file mode 100644
index 0000000000..780830bff9
--- /dev/null
+++ b/test/files/neg/overload-msg.check
@@ -0,0 +1,13 @@
+overload-msg.scala:3: error: overloaded method value + with alternatives:
+ (Double)Double <and>
+ (Float)Float <and>
+ (Long)Long <and>
+ (scala.Int)scala.Int <and>
+ (Char)scala.Int <and>
+ (Short)scala.Int <and>
+ (Byte)scala.Int <and>
+ (java.lang.String)java.lang.String
+ cannot be applied to (Int(in method f))
+ def f[Int](y: Int) = x + y
+ ^
+one error found
diff --git a/test/files/neg/overload-msg.scala b/test/files/neg/overload-msg.scala
new file mode 100644
index 0000000000..896722262d
--- /dev/null
+++ b/test/files/neg/overload-msg.scala
@@ -0,0 +1,4 @@
+// type parameter shadows actual type, massive overload error confuses.
+class A(x: Int) {
+ def f[Int](y: Int) = x + y
+}
diff --git a/test/files/neg/overload.check b/test/files/neg/overload.check
index 0faa97adb1..abfabaf3f2 100644
--- a/test/files/neg/overload.check
+++ b/test/files/neg/overload.check
@@ -1,6 +1,6 @@
overload.scala:10: error: ambiguous reference to overloaded definition,
both method f in class D of type (x: Any)Unit
-and method f in class C of type (x: int)Unit
+and method f in class C of type (x: Int)Unit
match argument types (Int)
(new D).f(1)
^
diff --git a/test/files/neg/overload.scala b/test/files/neg/overload.scala
index 311ea3874b..6ad911e90e 100644
--- a/test/files/neg/overload.scala
+++ b/test/files/neg/overload.scala
@@ -1,5 +1,5 @@
class C {
- def f(x: int) {}
+ def f(x: Int) {}
}
class D extends C {
diff --git a/test/files/neg/override.check b/test/files/neg/override.check
new file mode 100644
index 0000000000..0336fb2b11
--- /dev/null
+++ b/test/files/neg/override.check
@@ -0,0 +1,5 @@
+override.scala:9: error: overriding type T in trait A with bounds >: Int <: Int;
+ type T in trait B with bounds >: String <: String has incompatible type
+ lazy val x : A with B = x
+ ^
+one error found
diff --git a/test/files/neg/override.scala b/test/files/neg/override.scala
new file mode 100755
index 0000000000..764b06603a
--- /dev/null
+++ b/test/files/neg/override.scala
@@ -0,0 +1,15 @@
+trait X {
+ trait A { type T >: Int <: Int }
+ val x : A
+ var n : x.T = 3
+}
+
+trait Y extends X {
+ trait B { type T >: String <: String }
+ lazy val x : A with B = x
+ n = "foo"
+}
+
+object Test extends Application {
+ new Y {}
+}
diff --git a/test/files/neg/pat_unreachable.scala b/test/files/neg/pat_unreachable.scala
index 04a7fa580c..527731e58a 100644
--- a/test/files/neg/pat_unreachable.scala
+++ b/test/files/neg/pat_unreachable.scala
@@ -8,7 +8,7 @@ object Test extends Application {
case Seq(x, y, _*) => x::y::Nil
case Seq(x, y) => List(x, y)
}
-
+
def not_unreachable(xs:Seq[Char]) = xs match {
case Seq(x, y, _*) => x::y::Nil
case Seq(x) => List(x)
diff --git a/test/files/neg/patmat-type-check.check b/test/files/neg/patmat-type-check.check
new file mode 100644
index 0000000000..ab638b616d
--- /dev/null
+++ b/test/files/neg/patmat-type-check.check
@@ -0,0 +1,21 @@
+patmat-type-check.scala:18: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: java.lang.String
+ def f1 = "bob".reverse match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+patmat-type-check.scala:19: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: Array[Char]
+ def f2 = "bob".toArray match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+patmat-type-check.scala:23: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: Test.Bop2
+ def f3(x: Bop2) = x match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+patmat-type-check.scala:27: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: Test.Bop3[T]
+ def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+four errors found
diff --git a/test/files/neg/patmat-type-check.scala b/test/files/neg/patmat-type-check.scala
new file mode 100644
index 0000000000..f6658b0187
--- /dev/null
+++ b/test/files/neg/patmat-type-check.scala
@@ -0,0 +1,28 @@
+object Test
+{
+ def s1 = "bob".toList match { case Seq('b', 'o', 'b') => true } // list ok
+
+ // not final, allowed
+ class Bop
+ def s2(x: Bop) = x match { case Seq('b', 'o', 'b') => true }
+
+ // covariance, allowed
+ final class Bop4[+T]
+ def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
+
+ // contravariance, allowed
+ final class Bop5[T, U, -V]
+ def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
+
+ // String and Array are final/invariant, disallowed
+ def f1 = "bob".reverse match { case Seq('b', 'o', 'b') => true } // fail
+ def f2 = "bob".toArray match { case Seq('b', 'o', 'b') => true } // fail
+
+ // final, no type parameters, should be disallowed
+ final class Bop2
+ def f3(x: Bop2) = x match { case Seq('b', 'o', 'b') => true } // fail
+
+ // final, invariant type parameter, should be disallowed
+ final class Bop3[T]
+ def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true } // fail
+}
diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check
index 1c46b6c9e5..ca769300c0 100644
--- a/test/files/neg/patmatexhaust.check
+++ b/test/files/neg/patmatexhaust.check
@@ -15,8 +15,8 @@ missing combination Qult Qult
def ma3(x:Mult) = (x,x) match { // not exhaustive
^
patmatexhaust.scala:49: warning: match is not exhaustive!
-missing combination Gu
missing combination Gp
+missing combination Gu
def ma4(x:Deep) = x match { // missing cases: Gu, Gp
^
diff --git a/test/files/neg/patmatexhaust.scala b/test/files/neg/patmatexhaust.scala
index b172df8740..81812d6fd3 100644
--- a/test/files/neg/patmatexhaust.scala
+++ b/test/files/neg/patmatexhaust.scala
@@ -22,14 +22,14 @@ class TestSealedExhaustive { // compile only
def ma3(x:Mult) = (x,x) match { // not exhaustive
case (Kult(_), Qult()) => // Kult missing
- //case Pair(Kult(_), Kult(_)) =>
+ //case Pair(Kult(_), Kult(_)) =>
case (Qult(), Kult(_)) => // Qult missing
- //case Pair(Qult(), Qult()) =>
+ //case Pair(Qult(), Qult()) =>
}
def ma3u(x:Mult) = ((x,x) : @unchecked) match { // not exhaustive, but not checked!
- case (Kult(_), Qult()) =>
- case (Qult(), Kult(_)) =>
+ case (Kult(_), Qult()) =>
+ case (Qult(), Kult(_)) =>
}
sealed abstract class Deep
@@ -37,22 +37,22 @@ class TestSealedExhaustive { // compile only
case object Ga extends Deep
sealed class Gp extends Deep
case object Gu extends Gp
-
+
def zma3(x:Deep) = x match { // exhaustive!
case _ =>
}
def zma4(x:Deep) = x match { // exhaustive!
- case Ga =>
+ case Ga =>
case _ =>
}
def ma4(x:Deep) = x match { // missing cases: Gu, Gp
- case Ga =>
+ case Ga =>
}
def ma5(x:Deep) = x match { // Gp
case Gu =>
- case _ if 1 == 0 =>
+ case _ if 1 == 0 =>
case Ga =>
}
@@ -65,7 +65,7 @@ class TestSealedExhaustive { // compile only
case 1::2::Nil =>
case _ =>
}
-
+
sealed class B
case class B1() extends B
case object B2 extends B
@@ -85,11 +85,11 @@ class TestSealedExhaustive { // compile only
def ma10(x: C) = x match { // exhaustive
case C4() => true
case C2 | C6 => true
- }
-
+ }
+
def redundant = 1 match { // include this otherwise script won't test this in files/neg
case 1 =>
- case 1 =>
+ case 1 =>
}
}
diff --git a/test/files/neg/patternalts.scala b/test/files/neg/patternalts.scala
index 539df43201..56b682b0ec 100644
--- a/test/files/neg/patternalts.scala
+++ b/test/files/neg/patternalts.scala
@@ -1,5 +1,5 @@
object Test {
List(1) match {
case List(x) | List() => Console.println(x)
- }
+ }
}
diff --git a/test/files/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1 b/test/files/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 23c54fb2de..0000000000
--- a/test/files/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cf80703ed94f5176dc099c60b32d0483322de9eb ?plugins.jar
diff --git a/test/files/neg/plugin-before-parser/lib/plugins.jar.desired.sha1 b/test/files/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 27c022e853..0000000000
--- a/test/files/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2e05e73ed90aaf286fa6555d992c1da18de3ceca ?plugins.jar
diff --git a/test/files/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1 b/test/files/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
deleted file mode 100644
index f1fb45387c..0000000000
--- a/test/files/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d96a5f1b3885225a5e203ae59ae2f2f3e3c65c9b ?plugins.jar
diff --git a/test/files/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1 b/test/files/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 9706b6dd60..0000000000
--- a/test/files/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ee728763d50a19e725f729797d5afab05d395c4e ?plugins.jar
diff --git a/test/files/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1 b/test/files/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
deleted file mode 100644
index b07cece8b9..0000000000
--- a/test/files/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-398fa866d1d5c6608c9607dd1079afc12182119f ?plugins.jar
diff --git a/test/files/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1 b/test/files/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 78a4fc0c98..0000000000
--- a/test/files/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b2166b6bc2a55264d959e613fa52695295db2281 ?plugins.jar
diff --git a/test/files/neg/saito.scala b/test/files/neg/saito.scala
index b4fcd598e4..bfabb4edf4 100644
--- a/test/files/neg/saito.scala
+++ b/test/files/neg/saito.scala
@@ -2,13 +2,13 @@ class B {}
class A { self: B =>
def m(): B = {
this
- }
+ }
}
object Exec{
def main(args: Array[String]): Unit = {
val a: A = new A; // should not be allowed
- val b: B = a.m();
+ val b: B = a.m();
}
}
diff --git a/test/files/neg/sensitive.scala b/test/files/neg/sensitive.scala
index f435a385e1..fa1b940844 100644
--- a/test/files/neg/sensitive.scala
+++ b/test/files/neg/sensitive.scala
@@ -5,8 +5,8 @@ object Admin extends Certificate;
class SecurityViolationException extends Exception
object Sensitive {
- def makeSensitive(credentials: Certificate): Sensitive =
- if (credentials == Admin) new Sensitive()
+ def makeSensitive(credentials: Certificate): Sensitive =
+ if (credentials == Admin) new Sensitive()
else throw new SecurityViolationException
}
class Sensitive private () {
@@ -16,4 +16,4 @@ object Attacker {
val x = Sensitive.makeSensitive(null)
val y = new Sensitive()
}
-
+
diff --git a/test/files/neg/spec-overrides.check b/test/files/neg/spec-overrides.check
new file mode 100644
index 0000000000..639186af40
--- /dev/null
+++ b/test/files/neg/spec-overrides.check
@@ -0,0 +1,7 @@
+spec-overrides.scala:8: error: Type parameter has to be specialized at least for the same types as in the overridden method. Missing types: Int
+ override def a[@specialized(Double) T](t: T): List[T] = Nil
+ ^
+spec-overrides.scala:12: error: Type parameter has to be specialized at least for the same types as in the overridden method. Missing types: Int
+ override def a[T](t: T): List[T] = Nil
+ ^
+two errors found
diff --git a/test/files/neg/spec-overrides.scala b/test/files/neg/spec-overrides.scala
new file mode 100644
index 0000000000..27d39ad4a4
--- /dev/null
+++ b/test/files/neg/spec-overrides.scala
@@ -0,0 +1,26 @@
+class P {
+ def a[@specialized(Int) T](t: T): List[T] = List(t)
+}
+class FX extends P {
+ override def a[@specialized(Int) T](t: T): List[T] = Nil
+}
+class FX1 extends P {
+ override def a[@specialized(Double) T](t: T): List[T] = Nil
+}
+
+class FX2 extends P {
+ override def a[T](t: T): List[T] = Nil
+}
+
+object Test extends Application {
+ val fx = new FX
+ val p = new P
+
+ println(fx.a(3))
+ println((fx: P).a(3))
+ println((fx: P).a(3.0))
+
+
+ // val d = new Derived[Int]
+ // println((d: Base[Int]).m(10))
+}
diff --git a/test/files/neg/structural.scala b/test/files/neg/structural.scala
index 181a32654e..de7b6efed2 100644
--- a/test/files/neg/structural.scala
+++ b/test/files/neg/structural.scala
@@ -1,54 +1,54 @@
object Test extends Application {
-
+
def f(x: { type D; def m: D }): Null = null
-
+
class Tata
-
+
abstract class Toto[A <: Object] {
type B <: Object
-
+
def f1[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: A): Object; val x: A }) = x.m[Tata](x.x) //fail
def f2[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: B): Object; val x: B }) = x.m[Tata](x.x) //fail
def f3[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: C): Object; val x: C }) = x.m[Tata](x.x) //fail
def f4[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: D): Object; val x: D }) = x.m[Tata](x.x) //fail
def f5[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: E): Object; val x: Tata }) = x.m[Tata](x.x) //suceed
-
+
def f6[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): A }) = x.m[Tata](null) //suceed
def f7[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): B }) = x.m[Tata](null) //suceed
def f8[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): C }) = x.m[Tata](null) //suceed
def f9[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): D }) = x.m[Tata](null) //fail
def f0[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): E }) = x.m[Tata](null) //suceed
-
+
}
-
+
val tata = new Tata
val toto = new Toto[Tata] {
type B = Tata
}
-
+
//toto.f1[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Tata): Object = null; val x = tata })
//toto.f2[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Tata): Object = null; val x = tata })
//toto.f3[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Tata): Object = null; val x = tata })
//toto.f4[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: D): Object = null; val x = tata })
toto.f5[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: E): Object = null; val x: Test.Tata = tata })
-
+
toto.f6[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): Tata = null })
toto.f7[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): Tata = null })
toto.f8[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): Tata = null })
//toto.f9[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): D = null })
toto.f0[Tata](new Object{ type D = Tata; def m[E >: Null <: Object](x: Object): E = null })
-
+
/* Bug #1246 */
type Summable[T] = { def +(v : T) : T }
def sum[T <: Summable[T]](xs : List[T]) = xs.reduceLeft[T](_ + _)
-
+
/* Bug #1004 & #967 */
type S1 = { def f(p: this.type): Unit }
val s1 = new { def f(p: this.type): Unit = () }
-
+
type S2 = { type T; def f(p: T): Unit }
//val s2: S2 = new { type T = A; def f(p: T): Unit = () }
-
+
def s3[U >: Null <: Object](p: { def f(p: U): Unit; def u: U }) = ()
-
+
}
diff --git a/test/files/neg/switch.scala b/test/files/neg/switch.scala
index da7c867ba0..a3dfd869d6 100644
--- a/test/files/neg/switch.scala
+++ b/test/files/neg/switch.scala
@@ -16,44 +16,44 @@ object Main {
case 'f' | 'g' => true
case _ => false
}
-
+
def succ2(c: Char) = (c: @switch) match {
case 'A' | 'B' | 'C' => true
case Other.C2 => true
case Other.C4 => true
case _ => false
}
-
+
// has a guard
def fail1(c: Char) = (c: @switch) match {
case 'A' | 'B' | 'C' => true
case x if x == 'A' => true
case _ => false
}
-
+
// throwing in @unchecked on the next two to make sure
// multiple annotations are processed correctly
-
+
// thinks a val in an object is constant... so naive
def fail2(c: Char) = (c: @switch @unchecked) match {
case 'A' => true
case Other.C1 => true
case _ => false
}
-
+
// more naivete
def fail3(c: Char) = (c: @unchecked @switch) match {
case 'A' => true
case Other.C3 => true
case _ => false
}
-
+
// guard case done correctly
def succ3(c: Char) = (c: @switch) match {
case 'A' | 'B' | 'C' => true
case x => x == 'A'
}
-
+
// some ints just to mix it up a bit
def succ4(x: Int, y: Int) = ((x+y): @switch) match {
case 1 => 5
@@ -62,5 +62,5 @@ object Main {
case 4 => 50
case 5|6|7|8 => 100
case _ => -1
- }
+ }
}
diff --git a/test/files/neg/t0117.scala b/test/files/neg/t0117.scala
index dd200b1cf9..81167205a7 100644
--- a/test/files/neg/t0117.scala
+++ b/test/files/neg/t0117.scala
@@ -3,4 +3,4 @@ trait B extends A { println(super[A].a) }
object Test extends Application {
new B {}
}
-
+
diff --git a/test/files/neg/t0152.scala b/test/files/neg/t0152.scala
index eef69bf767..8b2cae685f 100644
--- a/test/files/neg/t0152.scala
+++ b/test/files/neg/t0152.scala
@@ -8,6 +8,6 @@ trait PlusOne extends Value[Int] {
object Test extends Application {
object boom extends Value[java.lang.String]("foo") with PlusOne
-
+
println(boom.value) // class cast exception!
}
diff --git a/test/files/neg/t0218.scala b/test/files/neg/t0218.scala
index 282e85e814..3d1f52e39a 100644
--- a/test/files/neg/t0218.scala
+++ b/test/files/neg/t0218.scala
@@ -1,4 +1,4 @@
-trait APQ {
+trait APQ {
class Placement {
}
@@ -6,7 +6,7 @@ trait APQ {
type PP = P
- def pq(numQueens: int, numRows: int) : List[Placement] = {
- List(new PP)
+ def pq(numQueens: Int, numRows: Int) : List[Placement] = {
+ List(new PP)
}
}
diff --git a/test/files/neg/t0226.check b/test/files/neg/t0226.check
index e27ffbc1e1..af81e41a6a 100644
--- a/test/files/neg/t0226.check
+++ b/test/files/neg/t0226.check
@@ -4,8 +4,7 @@ t0226.scala:5: error: not found: type A1
t0226.scala:5: error: not found: type A1
(implicit _1: Foo[List[A1]], _2: Foo[A2]): Foo[Tuple2[List[A1], A2]] =
^
-t0226.scala:8: error: diverging implicit expansion for type Test.this.Foo[((List[Char], Int), (object Nil, Int))]
-starting with method list2Foo in class Test
+t0226.scala:8: error: could not find implicit value for parameter rep: Test.this.Foo[((List[Char], Int), (object Nil, Int))]
foo(((List('b'), 3), (Nil, 4)))
^
three errors found
diff --git a/test/files/neg/t0503.scala b/test/files/neg/t0503.scala
index 322e1ad132..a9b5dcb1dd 100644
--- a/test/files/neg/t0503.scala
+++ b/test/files/neg/t0503.scala
@@ -1,3 +1,3 @@
val x = new { } with { }
-trait A
+trait A
val y = new { } with A
diff --git a/test/files/neg/t0528neg.scala b/test/files/neg/t0528neg.scala
index 911745b763..30d20c95b1 100644
--- a/test/files/neg/t0528neg.scala
+++ b/test/files/neg/t0528neg.scala
@@ -3,7 +3,7 @@ trait Sequ[+A] {
}
class RichStr extends Sequ[Char] {
- // override to a primitve array
+ // override to a primitive array
def toArray: Array[Char] = new Array[Char](10)
}
diff --git a/test/files/neg/t0764.scala b/test/files/neg/t0764.scala
index daeeb21d91..9aebe04b79 100644
--- a/test/files/neg/t0764.scala
+++ b/test/files/neg/t0764.scala
@@ -9,6 +9,6 @@ trait Node extends NotNull { outer =>
class Main[NextType <: Node](value: Node { type T = NextType })
extends Top[Node { type T = NextType }] {
-
+
new Main[AType]( (value: AType).prepend )
}
diff --git a/test/files/neg/t0851.check b/test/files/neg/t0851.check
deleted file mode 100644
index 61d2a98632..0000000000
--- a/test/files/neg/t0851.check
+++ /dev/null
@@ -1,9 +0,0 @@
-t0851.scala:14: error: not enough arguments for method apply: (v1: Int,v2: String)java.lang.String in trait Function2.
-Unspecified value parameter v2.
- println(f(1))
- ^
-t0851.scala:22: error: not enough arguments for method apply: (v1: Int,v2: String)java.lang.String in trait Function2.
-Unspecified value parameter v2.
- println(fn(1))
- ^
-two errors found
diff --git a/test/files/neg/t0851.scala b/test/files/neg/t0851.scala
deleted file mode 100644
index b28be2c697..0000000000
--- a/test/files/neg/t0851.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package test
-
-// This gives now type errors about missing parameters, which seems OK to me.
-// The tests just make sure it does not crash
-
-object test1 {
- case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){
- def apply(t : T) = (s:T2) => f(t,s)
- def apply(p : (T,T2)) = f(p._1,p._2)
- }
- implicit def g[T](f : (T,String) => String) = Foo(f)
- def main(args : Array[String]) : Unit = {
- val f = (x:Int,s:String) => s + x
- println(f(1))
- ()
- }
-}
-object Main {
- def main(args : Array[String]) {
- val fn = (a : Int, str : String) => "a: " + a + ", str: " + str
- implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null)
- println(fn(1))
- ()
- }
-}
diff --git a/test/files/neg/t1422.check b/test/files/neg/t1422.check
new file mode 100644
index 0000000000..5931fcb049
--- /dev/null
+++ b/test/files/neg/t1422.check
@@ -0,0 +1,4 @@
+t1422.scala:1: error: private[this] not allowed for case class parameters
+case class A(private[this] val foo:String)
+ ^
+one error found
diff --git a/test/files/neg/t1422.scala b/test/files/neg/t1422.scala
new file mode 100644
index 0000000000..751f05a764
--- /dev/null
+++ b/test/files/neg/t1422.scala
@@ -0,0 +1 @@
+case class A(private[this] val foo:String)
diff --git a/test/files/neg/t1477.check b/test/files/neg/t1477.check
new file mode 100644
index 0000000000..e497637857
--- /dev/null
+++ b/test/files/neg/t1477.check
@@ -0,0 +1,5 @@
+t1477.scala:13: error: overriding type V in trait C with bounds >: Nothing <: Middle.this.D;
+ type V is a volatile type; cannot override a type with non-volatile upper bound
+ type V <: (D with U)
+ ^
+one error found
diff --git a/test/files/neg/t1477.scala b/test/files/neg/t1477.scala
new file mode 100644
index 0000000000..3ecd275a55
--- /dev/null
+++ b/test/files/neg/t1477.scala
@@ -0,0 +1,25 @@
+object Test extends Application {
+ trait A
+ trait B extends A
+
+ trait C {
+ type U
+ trait D { type T >: B <: A }
+ type V <: D
+ val y: V#T = new B { }
+ }
+
+ trait Middle extends C {
+ type V <: (D with U)
+ }
+
+ class D extends Middle {
+ trait E
+ trait F { type T = E }
+ type U = F
+ def frob(arg : E) : E = arg
+ frob(y)
+ }
+
+ new D
+}
diff --git a/test/files/neg/t1705.scala b/test/files/neg/t1705.scala
index fabdca0ec6..bf1fcea815 100644
--- a/test/files/neg/t1705.scala
+++ b/test/files/neg/t1705.scala
@@ -17,7 +17,7 @@ object crashing {
}
}
}
-/*
+/*
Infinite loop in Typer.addLocals. Printing all calls to it:
diff --git a/test/files/neg/t2139.check b/test/files/neg/t2139.check
new file mode 100644
index 0000000000..e26f290761
--- /dev/null
+++ b/test/files/neg/t2139.check
@@ -0,0 +1,6 @@
+t2139.scala:13: error: type mismatch;
+ found : Int(4)
+ required: Nothing
+ val z:Int=(u.f _)(4)
+ ^
+one error found
diff --git a/test/files/neg/t2139.scala b/test/files/neg/t2139.scala
new file mode 100644
index 0000000000..4f09b5a5d1
--- /dev/null
+++ b/test/files/neg/t2139.scala
@@ -0,0 +1,15 @@
+/*
+ NOTE: if inference is changed so that
+ T is inferred to be Int, rather than Nothing,
+ the piece of code below will start to compile OK.
+ In that case, see ticket #2139, and make sure that
+ the generated code will no longer crash!
+*/
+class U {
+ def f[T](x:T):T=x
+}
+object H extends Application {
+ val u=new U
+ val z:Int=(u.f _)(4)
+ println("done")
+}
diff --git a/test/files/neg/t2179.check b/test/files/neg/t2179.check
new file mode 100644
index 0000000000..aa94fabe1f
--- /dev/null
+++ b/test/files/neg/t2179.check
@@ -0,0 +1,9 @@
+t2179.scala:2: error: inferred type arguments [scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}] do not conform to method reduceLeft's type parameter bounds [B >: List[Double]]
+ (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
+ ^
+t2179.scala:2: error: type mismatch;
+ found : (Any, Any) => scala.collection.immutable.IndexedSeq[Double]
+ required: (scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}, List[Double]) => scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}
+ (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
+ ^
+two errors found
diff --git a/test/files/neg/t2179.scala b/test/files/neg/t2179.scala
new file mode 100755
index 0000000000..89e22b6e2a
--- /dev/null
+++ b/test/files/neg/t2179.scala
@@ -0,0 +1,3 @@
+object Test {
+ (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
+}
diff --git a/test/files/neg/t2386.check b/test/files/neg/t2386.check
new file mode 100644
index 0000000000..2caa46c731
--- /dev/null
+++ b/test/files/neg/t2386.check
@@ -0,0 +1,4 @@
+t2386.scala:2: error: could not find implicit value for evidence parameter of type scala.reflect.ClassManifest[Array[_ >: java.lang.String with Int]]
+ val a = Array(Array(1, 2), Array("a","b"))
+ ^
+one error found
diff --git a/test/files/neg/t2386.scala b/test/files/neg/t2386.scala
new file mode 100644
index 0000000000..56146cc5c3
--- /dev/null
+++ b/test/files/neg/t2386.scala
@@ -0,0 +1,3 @@
+object Test {
+ val a = Array(Array(1, 2), Array("a","b"))
+}
diff --git a/test/files/neg/t2416.check b/test/files/neg/t2416.check
new file mode 100644
index 0000000000..0899ad09d5
--- /dev/null
+++ b/test/files/neg/t2416.check
@@ -0,0 +1,10 @@
+t2416.scala:3: error: type arguments [Int] do not conform to trait A's type parameter bounds [X <: Double]
+ def x : A[Int]#B = 10 // no you won't
+ ^
+t2416.scala:8: error: type arguments [Boolean] do not conform to type B's type parameter bounds [Y <: Double]
+ def x : A#B[Boolean] = 10 // seriously?
+ ^
+t2416.scala:13: error: type arguments [String] do not conform to type B's type parameter bounds [Z <: Double]
+ type C[Z <: A] = Z#B[String] // nuh-uh!
+ ^
+three errors found
diff --git a/test/files/neg/t2416.scala b/test/files/neg/t2416.scala
new file mode 100644
index 0000000000..6bb57a984b
--- /dev/null
+++ b/test/files/neg/t2416.scala
@@ -0,0 +1,14 @@
+object t2416a {
+ trait A[X <: Double] { type B = X }
+ def x : A[Int]#B = 10 // no you won't
+}
+
+object t2416b {
+ trait A{type B[Y <: Double] = Int}
+ def x : A#B[Boolean] = 10 // seriously?
+}
+
+object t2416c {
+ trait A{type B[Z <: Double] = Int}
+ type C[Z <: A] = Z#B[String] // nuh-uh!
+} \ No newline at end of file
diff --git a/test/files/neg/t2421b.check b/test/files/neg/t2421b.check
new file mode 100644
index 0000000000..f666a7d9d7
--- /dev/null
+++ b/test/files/neg/t2421b.check
@@ -0,0 +1,4 @@
+t2421b.scala:12: error: could not find implicit value for parameter aa: Test.F[Test.A]
+ f
+ ^
+one error found \ No newline at end of file
diff --git a/test/files/neg/t2421b.scala b/test/files/neg/t2421b.scala
new file mode 100644
index 0000000000..a8d22f285e
--- /dev/null
+++ b/test/files/neg/t2421b.scala
@@ -0,0 +1,17 @@
+object Test {
+ class A
+ class B
+ class C
+ class F[X]
+
+ def f(implicit aa: F[A]) = println(aa)
+
+ // implicit def a : F[A] = new F[A]()
+ implicit def b[X <: B] = new F[X]()
+
+ f
+}
+
+/* bug:
+error: type arguments [Test2.A] do not conform to method b's type parameter bounds [X <: Test2.B]
+*/ \ No newline at end of file
diff --git a/test/files/neg/t2462a.check b/test/files/neg/t2462a.check
new file mode 100644
index 0000000000..040a01f3a1
--- /dev/null
+++ b/test/files/neg/t2462a.check
@@ -0,0 +1,4 @@
+t2462a.scala:2: error: Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[String].
+ List(1,2,3).map[Int, List[String]](x => 1)
+ ^
+one error found
diff --git a/test/files/neg/t2462a.scala b/test/files/neg/t2462a.scala
new file mode 100644
index 0000000000..2d523b4dd8
--- /dev/null
+++ b/test/files/neg/t2462a.scala
@@ -0,0 +1,3 @@
+object Test {
+ List(1,2,3).map[Int, List[String]](x => 1)
+} \ No newline at end of file
diff --git a/test/files/neg/t2462b.check b/test/files/neg/t2462b.check
new file mode 100644
index 0000000000..bc0d9aa469
--- /dev/null
+++ b/test/files/neg/t2462b.check
@@ -0,0 +1,14 @@
+t2462b.scala:6: warning: Invalid implicitNotFound message for trait Meh in package test:
+The type parameters Too, Elem referenced in the message of the @implicitNotFound annotation are not defined by trait Meh.
+trait Meh[-From, +To]
+ ^
+t2462b.scala:9: warning: Invalid implicitNotFound message for trait Meh2 in package test:
+The type parameter Elem referenced in the message of the @implicitNotFound annotation is not defined by trait Meh2.
+trait Meh2[-From, +To]
+ ^
+t2462b.scala:12: error: overriding method x in class thankyoupartest of type => Int;
+ method x needs `override' modifier
+class testmustfail extends thankyoupartest { def x = 43 }
+ ^
+two warnings found
+one error found
diff --git a/test/files/neg/t2462b.scala b/test/files/neg/t2462b.scala
new file mode 100644
index 0000000000..7a1389cc8e
--- /dev/null
+++ b/test/files/neg/t2462b.scala
@@ -0,0 +1,12 @@
+package test
+
+import scala.annotation.implicitNotFound
+
+@implicitNotFound(msg = "Cannot construct a collection of type ${Too} with elements of type ${Elem} based on a collection of type ${From}.")
+trait Meh[-From, +To]
+
+@implicitNotFound(msg = "Cannot construct a collection of type ${To} ${Elem}.")
+trait Meh2[-From, +To]
+
+class thankyoupartest { def x = 42 }
+class testmustfail extends thankyoupartest { def x = 43 }
diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check
new file mode 100644
index 0000000000..771624e8d9
--- /dev/null
+++ b/test/files/neg/t2641.check
@@ -0,0 +1,35 @@
+t2641.scala:19: error: illegal cyclic reference involving trait ManagedSeq
+ with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
+ ^
+t2641.scala:17: error: illegal inheritance;
+ self-type ManagedSeq does not conform to ManagedSeqStrict[A]'s selftype ManagedSeqStrict[A]
+ extends ManagedSeqStrict[A]
+ ^
+t2641.scala:18: error: illegal inheritance;
+ self-type ManagedSeq does not conform to scala.collection.TraversableView[A,ManagedSeqStrict[A]]'s selftype scala.collection.TraversableView[A,ManagedSeqStrict[A]]
+ with TraversableView[A, ManagedSeqStrict[A]]
+ ^
+t2641.scala:17: error: illegal inheritance;
+ self-type ManagedSeq does not conform to ScalaObject's selftype ScalaObject
+ extends ManagedSeqStrict[A]
+ ^
+t2641.scala:25: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = typer
+ trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
+ ^
+t2641.scala:27: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = namer
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
+t2641.scala:27: error: illegal inheritance; superclass Any
+ is not a subclass of the superclass ManagedSeqStrict
+ of the mixin trait Transformed
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
+t2641.scala:27: error: illegal inheritance; superclass Any
+ is not a subclass of the superclass Object
+ of the mixin trait Sliced
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
+t2641.scala:28: error: value managedIterator is not a member of ManagedSeq
+ override def managedIterator = self.managedIterator slice (from, until)
+ ^
+9 errors found
diff --git a/test/files/neg/t2641.scala b/test/files/neg/t2641.scala
new file mode 100644
index 0000000000..68a4ca35b2
--- /dev/null
+++ b/test/files/neg/t2641.scala
@@ -0,0 +1,31 @@
+import scala.collection._
+import scala.collection.generic._
+import scala.collection.mutable.Builder
+
+
+abstract class ManagedSeqStrict[+A]
+ extends Traversable[A]
+ with GenericTraversableTemplate[A, ManagedSeqStrict]
+{
+ override def companion: GenericCompanion[ManagedSeqStrict] = null
+
+ override def foreach[U](f: A => U): Unit =
+ null
+}
+
+trait ManagedSeq[+A, +Coll]
+ extends ManagedSeqStrict[A]
+ with TraversableView[A, ManagedSeqStrict[A]]
+ with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
+{ self =>
+
+ override def underlying = throw new Exception("no underlying")
+
+ //trait Transformed[+B] extends ManagedSeq[B] with super.Transformed[B]
+ trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
+
+ trait Sliced extends Transformed[A] with super.Sliced {
+ override def managedIterator = self.managedIterator slice (from, until)
+ }
+
+}
diff --git a/test/files/neg/t2773.check b/test/files/neg/t2773.check
new file mode 100644
index 0000000000..6e88762144
--- /dev/null
+++ b/test/files/neg/t2773.check
@@ -0,0 +1,7 @@
+t2773.scala:5: error: x is not a member of c
+ import c.x
+ ^
+t2773.scala:6: error: not found: value x
+ println(x)
+ ^
+two errors found
diff --git a/test/files/neg/t2773.scala b/test/files/neg/t2773.scala
new file mode 100755
index 0000000000..aaa6351c83
--- /dev/null
+++ b/test/files/neg/t2773.scala
@@ -0,0 +1,8 @@
+class C(x: Int) { def foo = x }
+
+object Test {
+ val c = new C(0)
+ import c.x
+ println(x)
+}
+
diff --git a/test/files/neg/t2775.check b/test/files/neg/t2775.check
new file mode 100644
index 0000000000..a30d35fdd9
--- /dev/null
+++ b/test/files/neg/t2775.check
@@ -0,0 +1,4 @@
+t2775.scala:1: error: cannot find class manifest for element type B.this.T
+trait B[S] { type T = S; val c = new Array[T](1) }
+ ^
+one error found
diff --git a/test/files/neg/t2775.scala b/test/files/neg/t2775.scala
new file mode 100644
index 0000000000..9e4f2f606d
--- /dev/null
+++ b/test/files/neg/t2775.scala
@@ -0,0 +1 @@
+trait B[S] { type T = S; val c = new Array[T](1) }
diff --git a/test/files/neg/t2779.check b/test/files/neg/t2779.check
new file mode 100644
index 0000000000..4f94a780a1
--- /dev/null
+++ b/test/files/neg/t2779.check
@@ -0,0 +1,4 @@
+t2779.scala:16: error: method f is defined twice
+ override def f = List(M1)
+ ^
+one error found
diff --git a/test/files/neg/t2779.scala b/test/files/neg/t2779.scala
new file mode 100755
index 0000000000..d025055aa0
--- /dev/null
+++ b/test/files/neg/t2779.scala
@@ -0,0 +1,25 @@
+abstract class M
+{
+ def f: List[M] = Nil
+}
+
+object M1 extends M
+
+object M2 extends M
+{
+ override def f = List(M1)
+}
+
+object M3 extends M
+{
+ override def f = List(M1)
+ override def f = List(M1)
+}
+
+object M4 extends M
+{
+ override def f = List(
+ M3,
+ M2
+ )
+}
diff --git a/test/files/neg/t2801.check b/test/files/neg/t2801.check
new file mode 100644
index 0000000000..25320de5bc
--- /dev/null
+++ b/test/files/neg/t2801.check
@@ -0,0 +1,6 @@
+t2801.scala:2: error: type mismatch;
+ found : Null(null)
+ required: A
+ def f[A <: AnyRef] = { val a: A = null ; a }
+ ^
+one error found
diff --git a/test/files/neg/t2801.scala b/test/files/neg/t2801.scala
new file mode 100644
index 0000000000..d425f58b56
--- /dev/null
+++ b/test/files/neg/t2801.scala
@@ -0,0 +1,3 @@
+object Test {
+ def f[A <: AnyRef] = { val a: A = null ; a }
+}
diff --git a/test/files/neg/t2870.check b/test/files/neg/t2870.check
new file mode 100644
index 0000000000..6577577d3f
--- /dev/null
+++ b/test/files/neg/t2870.check
@@ -0,0 +1,7 @@
+t2870.scala:1: error: not found: type Jar
+class Jars(jar: Jar)
+ ^
+t2870.scala:6: error: illegal cyclic reference involving value <import>
+ val scala = fromClasspathString(javaClassPath)
+ ^
+two errors found
diff --git a/test/files/neg/t2870.scala b/test/files/neg/t2870.scala
new file mode 100755
index 0000000000..59fba3e76d
--- /dev/null
+++ b/test/files/neg/t2870.scala
@@ -0,0 +1,9 @@
+class Jars(jar: Jar)
+
+object Jars {
+ import scala.util.Properties.javaClassPath
+
+ val scala = fromClasspathString(javaClassPath)
+
+ def fromClasspathString(s: String): Jars = null
+}
diff --git a/test/files/neg/t2918.check b/test/files/neg/t2918.check
new file mode 100644
index 0000000000..e67f24ec57
--- /dev/null
+++ b/test/files/neg/t2918.check
@@ -0,0 +1,7 @@
+t2918.scala:2: error: cyclic aliasing or subtyping involving type A
+ def g[X, A[X] <: A[X]](x: A[X]) = x
+ ^
+t2918.scala:2: error: A does not take type parameters
+ def g[X, A[X] <: A[X]](x: A[X]) = x
+ ^
+two errors found
diff --git a/test/files/neg/t2918.scala b/test/files/neg/t2918.scala
new file mode 100755
index 0000000000..03477ccfbf
--- /dev/null
+++ b/test/files/neg/t2918.scala
@@ -0,0 +1,3 @@
+object Test {
+ def g[X, A[X] <: A[X]](x: A[X]) = x
+}
diff --git a/test/files/neg/t3006.check b/test/files/neg/t3006.check
new file mode 100644
index 0000000000..9a90d32b28
--- /dev/null
+++ b/test/files/neg/t3006.check
@@ -0,0 +1,6 @@
+t3006.scala:8: error: type mismatch;
+ found : java.lang.String("H")
+ required: Int
+ println(A(3) + "H")
+ ^
+one error found
diff --git a/test/files/neg/t3006.scala b/test/files/neg/t3006.scala
new file mode 100755
index 0000000000..f476c1717d
--- /dev/null
+++ b/test/files/neg/t3006.scala
@@ -0,0 +1,10 @@
+object Test extends Application {
+ case class A(x: Int);
+
+ class Foo(a: A) { println("Foo created!"); def +(x: Int) = new A(this.a.x + x); }
+
+ implicit def aToFoo(x: A) = new Foo(x);
+
+ println(A(3) + "H")
+
+}
diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check
new file mode 100644
index 0000000000..32809b0669
--- /dev/null
+++ b/test/files/neg/t3015.check
@@ -0,0 +1,11 @@
+t3015.scala:7: error: scrutinee is incompatible with pattern type;
+ found : _$1 where type _$1
+ required: java.lang.String
+ val b(foo) = "foo"
+ ^
+t3015.scala:7: error: type mismatch;
+ found : _$1(in value foo) where type _$1(in value foo) <: java.lang.String
+ required: (some other)_$1(in value foo) where type (some other)_$1(in value foo)
+ val b(foo) = "foo"
+ ^
+two errors found
diff --git a/test/files/neg/t3015.scala b/test/files/neg/t3015.scala
new file mode 100644
index 0000000000..a23e96776d
--- /dev/null
+++ b/test/files/neg/t3015.scala
@@ -0,0 +1,8 @@
+class UnApp[P] {
+ def unapply(a: P): Option[P] = Some(a)
+}
+
+object Test extends Application {
+ val b: UnApp[_] = new UnApp[String]
+ val b(foo) = "foo"
+}
diff --git a/test/files/neg/t3115.check b/test/files/neg/t3115.check
new file mode 100644
index 0000000000..04f64eec3f
--- /dev/null
+++ b/test/files/neg/t3115.check
@@ -0,0 +1,10 @@
+t3115.scala:6: error: object Math in object sc is deprecated:
+ println(sc.Math)
+ ^
+t3115.scala:7: error: object Math in package scala is deprecated: use scala.math package instead
+ println(scala.Math)
+ ^
+t3115.scala:8: error: object Math in package scala is deprecated: use scala.math package instead
+ scala.Math.Pi
+ ^
+three errors found
diff --git a/test/files/neg/t3115.flags b/test/files/neg/t3115.flags
new file mode 100644
index 0000000000..d1b831ea87
--- /dev/null
+++ b/test/files/neg/t3115.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t3115.scala b/test/files/neg/t3115.scala
new file mode 100755
index 0000000000..4aeeb4063e
--- /dev/null
+++ b/test/files/neg/t3115.scala
@@ -0,0 +1,9 @@
+object sc {
+ @deprecated("") object Math
+}
+
+object Test {
+ println(sc.Math)
+ println(scala.Math)
+ scala.Math.Pi
+}
diff --git a/test/files/neg/t3118.check b/test/files/neg/t3118.check
new file mode 100644
index 0000000000..da00f1c330
--- /dev/null
+++ b/test/files/neg/t3118.check
@@ -0,0 +1,7 @@
+t3118.scala:6: error: value C is not a member of O1
+ println(x.C()) // should not be accessible
+ ^
+t3118.scala:7: error: type C is not a member of O1
+ println(new x.C) // is correctly not accessible
+ ^
+two errors found
diff --git a/test/files/neg/t3118.scala b/test/files/neg/t3118.scala
new file mode 100644
index 0000000000..75f3b8f50c
--- /dev/null
+++ b/test/files/neg/t3118.scala
@@ -0,0 +1,8 @@
+class O1 {
+ private[this] case class C()
+
+ val x = new O1
+
+ println(x.C()) // should not be accessible
+ println(new x.C) // is correctly not accessible
+}
diff --git a/test/files/neg/t3222.check b/test/files/neg/t3222.check
new file mode 100644
index 0000000000..6170827cc9
--- /dev/null
+++ b/test/files/neg/t3222.check
@@ -0,0 +1,13 @@
+t3222.scala:1: error: not found: type B
+@throws(classOf[B])
+ ^
+t3222.scala:4: error: not found: type D
+ def foo(@throws(classOf[D]) x: Int) {}
+ ^
+t3222.scala:3: error: not found: type C
+ @throws(classOf[C])
+ ^
+t3222.scala:6: error: not found: type E
+ @throws(classOf[E])
+ ^
+four errors found
diff --git a/test/files/neg/t3222.scala b/test/files/neg/t3222.scala
new file mode 100644
index 0000000000..448292e8a7
--- /dev/null
+++ b/test/files/neg/t3222.scala
@@ -0,0 +1,9 @@
+@throws(classOf[B])
+class ExceptionTest {
+ @throws(classOf[C])
+ def foo(@throws(classOf[D]) x: Int) {}
+
+ @throws(classOf[E])
+ type t = String
+}
+
diff --git a/test/files/neg/t3224.check b/test/files/neg/t3224.check
new file mode 100644
index 0000000000..29304c567a
--- /dev/null
+++ b/test/files/neg/t3224.check
@@ -0,0 +1,6 @@
+t3224.scala:29: error: polymorphic expression cannot be instantiated to expected type;
+ found : [T]Array[T]
+ required: List[?]
+ println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1))
+ ^
+one error found
diff --git a/test/files/neg/t3224.scala b/test/files/neg/t3224.scala
new file mode 100755
index 0000000000..6b6ed95bcf
--- /dev/null
+++ b/test/files/neg/t3224.scala
@@ -0,0 +1,30 @@
+object Texts{
+ def textL[T](list: List[T]) = {
+ list match{
+ case List() => "Empty"
+ case List(_) => "One"
+ case List(_*) => "Many"
+ }
+ }
+
+ def textA[T](array: Array[T]) = {
+ array match{
+ case Array() => "Empty"
+ case Array(_) => "One"
+ case Array(_*) => "Many"
+ }
+ }
+}
+
+object Test extends Application {
+
+ implicit def array2list[T](array: Array[T]) = {
+ println(array.toList.size)
+ array.toList
+ }
+
+
+ println(Texts textL List()); println(Texts textL List(1)); println(Texts textL List(1, 1));
+
+ println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1))
+}
diff --git a/test/files/neg/t3399.check b/test/files/neg/t3399.check
new file mode 100644
index 0000000000..eb6c679704
--- /dev/null
+++ b/test/files/neg/t3399.check
@@ -0,0 +1,4 @@
+t3399.scala:23: error: could not find implicit value for parameter e: =:=[Nats.Add[Nats._1,Nats._1],Nats._1]
+ implicitly[ Add[_1, _1] =:= _1]
+ ^
+one error found
diff --git a/test/files/neg/t3399.scala b/test/files/neg/t3399.scala
new file mode 100644
index 0000000000..b1fe4e5ee2
--- /dev/null
+++ b/test/files/neg/t3399.scala
@@ -0,0 +1,24 @@
+object Nats {
+ sealed trait Nat {
+ // fold right on N, N-1, ..., 1
+ type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] <: Type
+ }
+ sealed trait _0 extends Nat {
+ type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] = Init
+ }
+ sealed trait Succ[N <: Nat] extends Nat {
+ type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] =
+ F#Apply[Succ[N], N#FoldR[Init, Type, F]]
+ }
+
+ type Add[A <: Nat, B <: Nat] = A#FoldR[B, Nat, Inc]
+ trait Fold[-Elem, Value] {
+ type Apply[N <: Elem, Acc <: Value] <: Value
+ }
+ type Inc = Fold[Any, Nat] {
+ type Apply[N <: Any, Acc <: Nat] = Succ[Acc]
+ }
+
+ type _1 = Succ[_0]
+ implicitly[ Add[_1, _1] =:= _1]
+}
\ No newline at end of file
diff --git a/test/files/neg/t3403.check b/test/files/neg/t3403.check
new file mode 100644
index 0000000000..e52d140e6a
--- /dev/null
+++ b/test/files/neg/t3403.check
@@ -0,0 +1,4 @@
+t3403.scala:2: error: implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import
+class Foo { @bp var bar: Int = 1 }
+ ^
+one error found
diff --git a/test/files/neg/t3403.scala b/test/files/neg/t3403.scala
new file mode 100644
index 0000000000..8be6ab2a31
--- /dev/null
+++ b/test/files/neg/t3403.scala
@@ -0,0 +1,2 @@
+import scala.reflect.{BeanProperty => bp}
+class Foo { @bp var bar: Int = 1 }
diff --git a/test/files/neg/t3453.check b/test/files/neg/t3453.check
new file mode 100644
index 0000000000..52c948128c
--- /dev/null
+++ b/test/files/neg/t3453.check
@@ -0,0 +1,21 @@
+t3453.scala:18: error: type mismatch;
+ found : A
+ required: B
+ new A
+ ^
+t3453.scala:36: error: type mismatch;
+ found : A
+ required: B
+ new A
+ ^
+t3453.scala:50: error: type mismatch;
+ found : A
+ required: B
+ new A
+ ^
+t3453.scala:64: error: type mismatch;
+ found : A
+ required: B
+ new A
+ ^
+four errors found
diff --git a/test/files/neg/t3453.scala b/test/files/neg/t3453.scala
new file mode 100644
index 0000000000..0f1c6e0282
--- /dev/null
+++ b/test/files/neg/t3453.scala
@@ -0,0 +1,66 @@
+// test shadowing of implicits by synonymous non-implicit symbols
+// whether they be inherited, imported (explicitly or using a wildcard) or defined directly
+class A
+class B
+
+trait S {
+ implicit def aToB(a: A): B = new B
+}
+
+class T1 extends S {
+ def x: B = {
+ val aToB = 3
+ // ok: doesn't compile, because aToB method requires 'T.this.' prefix
+ //aToB(new A)
+
+ // bug: compiles, using T.this.aToB,
+ // despite it not being accessible without a prefix
+ new A
+ }
+}
+
+object O {
+ implicit def aToB(a: A): B = new B
+}
+
+class T2a {
+ import O._
+
+ def x: B = {
+ val aToB = 3
+ // ok: doesn't compile, because aToB method requires 'T.this.' prefix
+ //aToB(new A)
+
+ // bug: compiles, using T.this.aToB,
+ // despite it not being accessible without a prefix
+ new A
+ }
+}
+
+class T2b {
+ import O.aToB
+
+ def x: B = {
+ val aToB = 3
+ // ok: doesn't compile, because aToB method requires 'T.this.' prefix
+ //aToB(new A)
+
+ // bug: compiles, using T.this.aToB,
+ // despite it not being accessible without a prefix
+ new A
+ }
+}
+
+class T3 {
+ implicit def aToB(a: A): B = new B
+
+ def x: B = {
+ val aToB = 3
+ // ok: doesn't compile, because aToB method requires 'T.this.' prefix
+ //aToB(new A)
+
+ // bug: compiles, using T.this.aToB,
+ // despite it not being accessible without a prefix
+ new A
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t3507.check b/test/files/neg/t3507.check
new file mode 100644
index 0000000000..1246a20d09
--- /dev/null
+++ b/test/files/neg/t3507.check
@@ -0,0 +1,4 @@
+t3507.scala:13: error: could not find implicit value for evidence parameter of type Manifest[object _1.b.c]
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
+ ^
+one error found
diff --git a/test/files/neg/t3507.scala b/test/files/neg/t3507.scala
new file mode 100644
index 0000000000..32688d3934
--- /dev/null
+++ b/test/files/neg/t3507.scala
@@ -0,0 +1,15 @@
+class A {
+ object b {
+ object c
+ }
+ def m = b.c
+}
+
+object Test {
+ var a: A = new A // mutable
+ val c /*: object _1.b.c forSome { val _1: A } */ = a.m // widening using existential
+
+ def mani[T: Manifest](x: T) = ()
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
+ // --> _1 is not in scope here
+}
\ No newline at end of file
diff --git a/test/files/neg/t3604.check b/test/files/neg/t3604.check
new file mode 100644
index 0000000000..b07c5c9c71
--- /dev/null
+++ b/test/files/neg/t3604.check
@@ -0,0 +1,7 @@
+t3604.scala:3: error: in XML literal: expected closing tag of abbr
+ <abbr></div>
+ ^
+t3604.scala:3: error: start tag was here: abbr>
+ <abbr></div>
+ ^
+two errors found
diff --git a/test/files/neg/t3604.scala b/test/files/neg/t3604.scala
new file mode 100644
index 0000000000..f890a58e58
--- /dev/null
+++ b/test/files/neg/t3604.scala
@@ -0,0 +1,6 @@
+object Main {
+ <div>
+ <abbr></div>
+ { "..." }
+ </div>
+}
diff --git a/test/files/neg/t3653.check b/test/files/neg/t3653.check
new file mode 100644
index 0000000000..ac6e2ca9dc
--- /dev/null
+++ b/test/files/neg/t3653.check
@@ -0,0 +1,7 @@
+t3653.scala:3: error: double definition:
+method x:(implicit x: Int)Int and
+method x:(i: Int)Int at line 2
+have same type after erasure: (x: Int)Int
+ def x(implicit x: Int) = 5
+ ^
+one error found
diff --git a/test/files/neg/t3653.scala b/test/files/neg/t3653.scala
new file mode 100644
index 0000000000..96cf96008a
--- /dev/null
+++ b/test/files/neg/t3653.scala
@@ -0,0 +1,4 @@
+class B {
+ def x(i: Int) = 3
+ def x(implicit x: Int) = 5
+}
\ No newline at end of file
diff --git a/test/files/neg/t3663.check b/test/files/neg/t3663.check
new file mode 100644
index 0000000000..09ea25ad91
--- /dev/null
+++ b/test/files/neg/t3663.check
@@ -0,0 +1,4 @@
+main.scala:11: error: variable foo cannot be accessed in test.Test
+ println(t.foo)
+ ^
+one error found
diff --git a/test/files/neg/t3663/PackageProtected.java b/test/files/neg/t3663/PackageProtected.java
new file mode 100644
index 0000000000..f4535a55b4
--- /dev/null
+++ b/test/files/neg/t3663/PackageProtected.java
@@ -0,0 +1,5 @@
+package test;
+
+class PackageProtected {
+ int foo;
+}
diff --git a/test/files/neg/t3663/main.scala b/test/files/neg/t3663/main.scala
new file mode 100644
index 0000000000..29619550cc
--- /dev/null
+++ b/test/files/neg/t3663/main.scala
@@ -0,0 +1,14 @@
+package test
+
+final class Test extends PackageProtected {
+ def bar = foo
+}
+
+package another {
+ object Main {
+ def bug(t: Test) {
+ // Can always be replicated.
+ println(t.foo)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t3691.check b/test/files/neg/t3691.check
new file mode 100644
index 0000000000..1b548cc84d
--- /dev/null
+++ b/test/files/neg/t3691.check
@@ -0,0 +1,16 @@
+t3691.scala:4: error: type mismatch;
+ found : java.lang.Object with Test.A[String]
+ required: AnyRef{type A[x]}
+ val b = (new A[String]{}): { type A[x] } // not ok
+ ^
+t3691.scala:5: error: type mismatch;
+ found : java.lang.Object with Test.A[String]
+ required: AnyRef{type A}
+ val c = (new A[String]{}): { type A } // not ok
+ ^
+t3691.scala:7: error: type mismatch;
+ found : java.lang.Object{type A = String}
+ required: AnyRef{type A[X]}
+ val x = (new { type A = String }): { type A[X] } // not ok
+ ^
+three errors found
diff --git a/test/files/neg/t3691.scala b/test/files/neg/t3691.scala
new file mode 100644
index 0000000000..69e8bef630
--- /dev/null
+++ b/test/files/neg/t3691.scala
@@ -0,0 +1,11 @@
+object Test {
+ trait A[X] { type A[x <: X] = x }
+ val a = (new A[String]{}): { type A[x <: String] } // ok
+ val b = (new A[String]{}): { type A[x] } // not ok
+ val c = (new A[String]{}): { type A } // not ok
+
+ val x = (new { type A = String }): { type A[X] } // not ok
+//a: AnyRef{type A[X]}
+
+ identity[x.A[Any]] _
+}
\ No newline at end of file
diff --git a/test/files/neg/t3692.check b/test/files/neg/t3692.check
new file mode 100644
index 0000000000..ce89a6563d
--- /dev/null
+++ b/test/files/neg/t3692.check
@@ -0,0 +1,14 @@
+t3692.scala:11: warning: type Integer in package scala is deprecated: use <code>java.lang.Integer</code> instead
+ case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
+ ^
+t3692.scala:12: warning: type Integer in package scala is deprecated: use <code>java.lang.Integer</code> instead
+ case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
+ ^
+t3692.scala:13: warning: type Integer in package scala is deprecated: use <code>java.lang.Integer</code> instead
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ ^
+t3692.scala:13: error: unreachable code
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ ^
+three warnings found
+one error found
diff --git a/test/files/neg/t3692.scala b/test/files/neg/t3692.scala
new file mode 100644
index 0000000000..78b0e4b843
--- /dev/null
+++ b/test/files/neg/t3692.scala
@@ -0,0 +1,17 @@
+object ManifestTester {
+ def main(args: Array[String]) = {
+ val map = Map("John" -> 1, "Josh" -> 2)
+ new ManifestTester().toJavaMap(map)
+ }
+}
+
+class ManifestTester {
+ private final def toJavaMap[T, V](map: Map[T, V])(implicit m1: Manifest[T], m2: Manifest[V]): java.util.Map[_, _] = {
+ map match {
+ case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
+ case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ case _ => new java.util.HashMap[T, V]
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t3757.check b/test/files/neg/t3757.check
new file mode 100644
index 0000000000..1507df8c4f
--- /dev/null
+++ b/test/files/neg/t3757.check
@@ -0,0 +1,4 @@
+B.scala:4: error: method foo overrides nothing
+ override def foo = "B"
+ ^
+one error found
diff --git a/test/files/neg/t3757/A.java b/test/files/neg/t3757/A.java
new file mode 100644
index 0000000000..37da86fe15
--- /dev/null
+++ b/test/files/neg/t3757/A.java
@@ -0,0 +1,5 @@
+package a;
+
+public abstract class A {
+ abstract String foo(); // package protected!
+}
\ No newline at end of file
diff --git a/test/files/neg/t3757/B.scala b/test/files/neg/t3757/B.scala
new file mode 100644
index 0000000000..68766a9f6e
--- /dev/null
+++ b/test/files/neg/t3757/B.scala
@@ -0,0 +1,5 @@
+package b
+
+class B extends a.A {
+ override def foo = "B"
+}
\ No newline at end of file
diff --git a/test/files/neg/t3769.check b/test/files/neg/t3769.check
new file mode 100644
index 0000000000..7537ed2ddf
--- /dev/null
+++ b/test/files/neg/t3769.check
@@ -0,0 +1,10 @@
+t3769.scala:2: error: in XML literal: expected closing tag of a
+ val x = <b> <c><a></c> {"text"} </b>
+ ^
+t3769.scala:2: error: start tag was here: a>
+ val x = <b> <c><a></c> {"text"} </b>
+ ^
+t3769.scala:2: error: ';' expected but '{' found.
+ val x = <b> <c><a></c> {"text"} </b>
+ ^
+three errors found
diff --git a/test/files/neg/t3769.scala b/test/files/neg/t3769.scala
new file mode 100644
index 0000000000..0132b59edf
--- /dev/null
+++ b/test/files/neg/t3769.scala
@@ -0,0 +1,3 @@
+object Test {
+ val x = <b> <c><a></c> {"text"} </b>
+}
diff --git a/test/files/neg/t3773.check b/test/files/neg/t3773.check
new file mode 100644
index 0000000000..29a3e14d83
--- /dev/null
+++ b/test/files/neg/t3773.check
@@ -0,0 +1,4 @@
+t3773.scala:3: error: method elements in trait IterableLike is deprecated: use `iterator' instead
+ for ((v, t) <- m.elements) ()
+ ^
+one error found
diff --git a/test/files/neg/t3773.flags b/test/files/neg/t3773.flags
new file mode 100644
index 0000000000..d1b831ea87
--- /dev/null
+++ b/test/files/neg/t3773.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t3773.scala b/test/files/neg/t3773.scala
new file mode 100644
index 0000000000..3b92ed2d23
--- /dev/null
+++ b/test/files/neg/t3773.scala
@@ -0,0 +1,5 @@
+object t {
+ val m = Map(1 -> "one")
+ for ((v, t) <- m.elements) ()
+}
+
diff --git a/test/files/neg/t3774.check b/test/files/neg/t3774.check
new file mode 100644
index 0000000000..f1a787a7a1
--- /dev/null
+++ b/test/files/neg/t3774.check
@@ -0,0 +1,7 @@
+t3774.scala:4: error: overloaded method value ++ with alternatives:
+ [B1 >: List[Int]](xs: scala.collection.TraversableOnce[((Int, Int), B1)])scala.collection.immutable.Map[(Int, Int),B1] <and>
+ [B >: ((Int, Int), List[Int]),That](that: scala.collection.TraversableOnce[B])(implicit bf: scala.collection.generic.CanBuildFrom[scala.collection.immutable.Map[(Int, Int),List[Int]],B,That])That
+ cannot be applied to (scala.collection.immutable.IndexedSeq[((Int, Int), scala.collection.immutable.Range.Inclusive with scala.collection.immutable.Range.ByOne)])
+ Map[(Int,Int),List[Int]]() ++ (for(x <- 0 to 1 ; y <- 0 to 1) yield {(x,y)-> (0 to 1)})
+ ^
+one error found
diff --git a/test/files/neg/t3774.scala b/test/files/neg/t3774.scala
new file mode 100644
index 0000000000..2869925b01
--- /dev/null
+++ b/test/files/neg/t3774.scala
@@ -0,0 +1,5 @@
+// This used to hang the lub process. Now it rejects the file. This is still not correct,
+// but we can solve this only after a redesign of lub a la dot.
+object Hang {
+ Map[(Int,Int),List[Int]]() ++ (for(x <- 0 to 1 ; y <- 0 to 1) yield {(x,y)-> (0 to 1)})
+}
diff --git a/test/files/neg/t3776.check b/test/files/neg/t3776.check
new file mode 100644
index 0000000000..e8798df6f3
--- /dev/null
+++ b/test/files/neg/t3776.check
@@ -0,0 +1,4 @@
+t3776.scala:8: error: value someOperation is not a member of _$1
+ def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v
+ ^
+one error found
diff --git a/test/files/neg/t3776.scala b/test/files/neg/t3776.scala
new file mode 100644
index 0000000000..454f914316
--- /dev/null
+++ b/test/files/neg/t3776.scala
@@ -0,0 +1,10 @@
+import util.parsing.combinator.{PackratParsers, RegexParsers}
+
+object MyParser extends RegexParsers with PackratParsers {
+}
+
+object Test {
+ class ParsedAs(a: String) (implicit pattern: MyParser.Parser[_]) {
+ def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v
+ }
+}
diff --git a/test/files/neg/t3873.check b/test/files/neg/t3873.check
new file mode 100644
index 0000000000..54d6abdf63
--- /dev/null
+++ b/test/files/neg/t3873.check
@@ -0,0 +1,6 @@
+t3873.scala:11: error: type mismatch;
+ found : Test.a.B
+ required: a.B
+ wrongf(new A)(a.b) // should not compile -- TODO: improve error message? the "a" is ambiguous
+ ^
+one error found
diff --git a/test/files/neg/t3873.flags b/test/files/neg/t3873.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/neg/t3873.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/neg/t3873.scala b/test/files/neg/t3873.scala
new file mode 100644
index 0000000000..e7815f0937
--- /dev/null
+++ b/test/files/neg/t3873.scala
@@ -0,0 +1,12 @@
+class A {
+ class B
+ def b: B = new B
+}
+
+object Test {
+ def wrongf(a: A)(b: a.B): a.B = b
+
+ val a = new A
+ wrongf(a)(a.b)
+ wrongf(new A)(a.b) // should not compile -- TODO: improve error message? the "a" is ambiguous
+}
\ No newline at end of file
diff --git a/test/files/neg/t742.check b/test/files/neg/t742.check
new file mode 100644
index 0000000000..f587948ef1
--- /dev/null
+++ b/test/files/neg/t742.check
@@ -0,0 +1,5 @@
+t742.scala:5: error: kinds of the type arguments (Crash._1,Crash._2,Any) do not conform to the expected kinds of the type parameters (type m,type n,type z).
+Crash._1's type parameters do not match type m's expected parameters: type s1 has one type parameter, but type n has two
+ type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
+ ^
+one error found
diff --git a/test/files/neg/t742.scala b/test/files/neg/t742.scala
new file mode 100644
index 0000000000..bb1c2f85ea
--- /dev/null
+++ b/test/files/neg/t742.scala
@@ -0,0 +1,8 @@
+object Crash {
+ type mul[m[n[s[_], z], z], n[s[_], z], z] = m[n, z]
+ type _1[s1[_], z1] = s1[z1]
+ type _2[s1[_], z1] = s1[z1]
+ type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
+ // _1[_2, Zero]
+ // _2[Zero]
+}
\ No newline at end of file
diff --git a/test/files/neg/t771.check b/test/files/neg/t771.check
new file mode 100644
index 0000000000..c0d1e002f8
--- /dev/null
+++ b/test/files/neg/t771.check
@@ -0,0 +1,4 @@
+t771.scala:4: error: trait Iterator is abstract; cannot be instantiated
+ def c[A](it:java.util.Iterator[A]) = new scala.Iterator[A]
+ ^
+one error found
diff --git a/test/files/neg/t771.scala b/test/files/neg/t771.scala
new file mode 100755
index 0000000000..26bf441648
--- /dev/null
+++ b/test/files/neg/t771.scala
@@ -0,0 +1,5 @@
+class Foo {
+ def a = c(b)
+ def b[List[AnyRef]] = new java.util.Iterator[List[Object]] { }
+ def c[A](it:java.util.Iterator[A]) = new scala.Iterator[A]
+}
diff --git a/test/files/neg/tailrec.check b/test/files/neg/tailrec.check
index 22d70e82a0..27d99f632e 100644
--- a/test/files/neg/tailrec.check
+++ b/test/files/neg/tailrec.check
@@ -1,10 +1,16 @@
-tailrec.scala:6: error: could not optimize @tailrec annotated method
+tailrec.scala:43: error: could not optimize @tailrec annotated method: it contains a recursive call not in tail position
def facfail(n: Int): Int =
^
-tailrec.scala:42: error: could not optimize @tailrec annotated method
+tailrec.scala:50: error: could not optimize @tailrec annotated method: it is neither private nor final so can be overridden
@tailrec def fail1(x: Int): Int = fail1(x)
^
-tailrec.scala:45: error: could not optimize @tailrec annotated method
- @tailrec def fail2[T](xs: List[T]): List[T] = xs match {
- ^
-three errors found
+tailrec.scala:53: error: could not optimize @tailrec annotated method: it contains a recursive call not in tail position
+ @tailrec final def fail2[T](xs: List[T]): List[T] = xs match {
+ ^
+tailrec.scala:59: error: could not optimize @tailrec annotated method: it is called recursively with different type arguments
+ @tailrec final def fail3[T](x: Int): Int = fail3(x - 1)
+ ^
+tailrec.scala:63: error: could not optimize @tailrec annotated method: it changes type of 'this' on a polymorphic recursive call
+ @tailrec final def fail4[U](other: Tom[U], x: Int): Int = other.fail4[U](other, x - 1)
+ ^
+5 errors found
diff --git a/test/files/neg/tailrec.scala b/test/files/neg/tailrec.scala
index 4c45672f93..6d836df0e3 100644
--- a/test/files/neg/tailrec.scala
+++ b/test/files/neg/tailrec.scala
@@ -1,53 +1,65 @@
import scala.annotation.tailrec
// putting @tailrec through the paces
-object Main {
- @tailrec
- def facfail(n: Int): Int =
- if (n == 0) 1
- else n * facfail(n - 1)
-
+object Winners {
@tailrec
def facsucc(n: Int, acc: Int): Int =
if (n == 0) acc
else facsucc(n - 1, n * acc)
- @tailrec def loopy1(x: Int): Int = loopy1(x - 1)
-
+ @tailrec def loopsucc1(x: Int): Int = loopsucc1(x - 1)
+ @tailrec def loopsucc2[T](x: Int): Int = loopsucc2[T](x - 1)
+
def ding {
object dong {
- @tailrec def loopy2(x: Int): Int = loopy2(x)
+ @tailrec def loopsucc3(x: Int): Int = loopsucc3(x)
}
()
}
-
+
def inner(q: Int) = {
@tailrec
- def loopy3(x: Int): Int = loopy3(x + 1)
-
- loopy3(q)
+ def loopsucc4(x: Int): Int = loopsucc4(x + 1)
+
+ loopsucc4(q)
+ }
+
+ object innerBob {
+ @tailrec def loopsucc5(x: Int): Int = loopsucc5(x)
}
}
-class Bob {
- // these should work
+class Winners {
@tailrec private def succ1(x: Int): Int = succ1(x)
@tailrec final def succ2(x: Int): Int = succ2(x)
@tailrec final def succ3[T](in: List[T], acc: List[T]): List[T] = in match {
case Nil => Nil
case x :: xs => succ3(xs, x :: acc)
}
+}
+
+object Failures {
+ @tailrec
+ def facfail(n: Int): Int =
+ if (n == 0) 1
+ else n * facfail(n - 1)
+}
+class Failures {
// not private, not final
@tailrec def fail1(x: Int): Int = fail1(x)
-
+
// a typical between-chair-and-keyboard error
- @tailrec def fail2[T](xs: List[T]): List[T] = xs match {
+ @tailrec final def fail2[T](xs: List[T]): List[T] = xs match {
case Nil => Nil
- case x :: xs => x :: fail2(xs)
+ case x :: xs => x :: fail2[T](xs)
}
-
- object innerBob {
- @tailrec def succ4(x: Int): Int = succ4(x)
+
+ // unsafe
+ @tailrec final def fail3[T](x: Int): Int = fail3(x - 1)
+
+ // unsafe
+ class Tom[T](x: Int) {
+ @tailrec final def fail4[U](other: Tom[U], x: Int): Int = other.fail4[U](other, x - 1)
}
}
diff --git a/test/files/neg/tcpoly_infer_ticket1162.scala b/test/files/neg/tcpoly_infer_ticket1162.scala
index 0552b42a22..b88bd358d9 100644
--- a/test/files/neg/tcpoly_infer_ticket1162.scala
+++ b/test/files/neg/tcpoly_infer_ticket1162.scala
@@ -1,8 +1,8 @@
object Test {
- trait Expression[A,B]
+ trait Expression[A,B]
case class Lift[A,B,F[_]]() extends Expression[F[A],F[B]]
-
+
def simplify[A,B]: Expression[A,B] = Lift[A,B]()
}
diff --git a/test/files/neg/tcpoly_ticket2101.scala b/test/files/neg/tcpoly_ticket2101.scala
index 68f061ce70..3af07acfac 100644
--- a/test/files/neg/tcpoly_ticket2101.scala
+++ b/test/files/neg/tcpoly_ticket2101.scala
@@ -4,10 +4,10 @@ class T2[X] extends T[T2, X] // ill-typed
// Forall Y. T2[Y] <: T[T2, X]
// debugging before fix:
-// def isSubType0 -->
+// def isSubType0 -->
// case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => println("<:<PT: "+((tparams1, res1), (tparams2, res2))) //@MDEBUG
// (tparams1.length == tparams2.length &&
-// List.forall2(tparams1, tparams2)
+// List.forall2(tparams1, tparams2)
// ((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
// res1 <:< res2.substSym(tparams2, tparams1))
@@ -22,7 +22,7 @@ class T2[X] extends T[T2, X] // ill-typed
// (tparams1.length == tparams2.length &&
// {
// val tpsFresh = cloneSymbols(tparams1) // @M cloneSymbols(tparams2) should be equivalent -- TODO: check
-// List.forall2(tparams1, tparams2)
+// List.forall2(tparams1, tparams2)
// ((p1, p2) => p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
-// res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
+// res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
// })
diff --git a/test/files/neg/tcpoly_typealias.scala b/test/files/neg/tcpoly_typealias.scala
index 6c7f80cc0b..96e9349298 100644
--- a/test/files/neg/tcpoly_typealias.scala
+++ b/test/files/neg/tcpoly_typealias.scala
@@ -12,7 +12,7 @@ trait A3 {
trait FooCov[+x]
trait FooCon[-x]
-trait FooBound[+x <: String]
+trait FooBound[+x <: String]
trait BOk1 extends A {
type m[+x] = FooCov[x]
@@ -30,8 +30,8 @@ trait BOk4 extends A3 {
type m[+x] = FooCov[x] // weaker variance
}
-// there are two aspects to check:
- // does type alias signature (not considering RHS) correspond to abstract type member in super class
+// there are two aspects to check:
+ // does type alias signature (not considering RHS) correspond to abstract type member in super class
// does RHS correspond to the type alias sig
trait BInv extends A{
type m[x] = FooCov[x] // error: invariant x in alias def
diff --git a/test/files/neg/tcpoly_variance_enforce.scala b/test/files/neg/tcpoly_variance_enforce.scala
index f3962d6757..417fc5470a 100644
--- a/test/files/neg/tcpoly_variance_enforce.scala
+++ b/test/files/neg/tcpoly_variance_enforce.scala
@@ -6,7 +6,7 @@ trait coll3[m[x]]
trait coll4[m[x <: y], y]
-class FooInvar[x]
+class FooInvar[x]
class FooContra[-x]
class FooCov[+x]
class FooString[+x <: String]
@@ -15,15 +15,15 @@ object fcollok extends coll[FooCov]
object fcollinv extends coll[FooInvar] // error
object fcollcon extends coll[FooContra] // error
object fcollwb extends coll[FooString] // error
-
+
object fcoll2ok extends coll2[FooCov] // error
object fcoll2inv extends coll2[FooInvar] // error
-object fcoll2con extends coll2[FooContra]
+object fcoll2con extends coll2[FooContra]
object fcoll2wb extends coll2[FooString] // error
-
-object fcoll3ok extends coll3[FooCov]
-object fcoll3inv extends coll3[FooInvar]
-object fcoll3con extends coll3[FooContra]
+
+object fcoll3ok extends coll3[FooCov]
+object fcoll3inv extends coll3[FooInvar]
+object fcoll3con extends coll3[FooContra]
object fcoll3wb extends coll3[FooString] // error
object fcoll4ok extends coll4[FooString, String]
@@ -33,7 +33,7 @@ object fcoll4_2 extends coll4[FooString, Any] // error
object test {
var ok: coll[FooCov] = _
-
+
def x: coll[FooInvar] = error("foo") // error
def y: coll[FooContra] = error("foo") // error
}
diff --git a/test/files/neg/typeerror.check b/test/files/neg/typeerror.check
index 3e21a79ad5..3ce11dad8a 100644
--- a/test/files/neg/typeerror.check
+++ b/test/files/neg/typeerror.check
@@ -1,6 +1,6 @@
typeerror.scala:6: error: type mismatch;
found : Long(in method add)
- required: Long(in package scala)
+ required: scala.Long
else add2(x.head, y.head) :: add(x.tail, y.tail)
^
one error found
diff --git a/test/files/neg/unit2anyref.check b/test/files/neg/unit2anyref.check
index 7af4564ffb..2616fd35f9 100644
--- a/test/files/neg/unit2anyref.check
+++ b/test/files/neg/unit2anyref.check
@@ -1,10 +1,8 @@
unit2anyref.scala:2: error: type mismatch;
found : Unit
required: AnyRef
-Note that implicit conversions are not applicable because they are ambiguous:
- both method any2stringadd in object Predef of type (x: Any)scala.runtime.StringAdd
- and method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
- are possible conversion functions from Unit to AnyRef
+Note: primitive types are not implicitly converted to AnyRef.
+You can safely force boxing by casting x.asInstanceOf[AnyRef].
val x: AnyRef = () // this should not succeed.
^
one error found
diff --git a/test/files/neg/variances.check b/test/files/neg/variances.check
index e6abdcbb53..d395e45e4e 100644
--- a/test/files/neg/variances.check
+++ b/test/files/neg/variances.check
@@ -7,7 +7,10 @@ variances.scala:14: error: covariant type A occurs in contravariant position in
variances.scala:16: error: covariant type A occurs in invariant position in supertype test.C[A] with ScalaObject of object Baz
object Baz extends C[A]
^
-variances.scala:63: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{def m: (A) => A} of value x
+variances.scala:63: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: (A) => A} of value x
val x: T[A] {
^
-four errors found
+variances.scala:79: error: covariant type T occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
+ def foo: B[A]
+ ^
+5 errors found
diff --git a/test/files/neg/variances.scala b/test/files/neg/variances.scala
index 67783bc4aa..57abba130d 100644
--- a/test/files/neg/variances.scala
+++ b/test/files/neg/variances.scala
@@ -1,7 +1,7 @@
package test
trait Vector[+A] {
- def append(x: Vector[A]): Vector[A]
+ def append(x: Vector[A]): Vector[A]
private[this] def append3(x: Vector[A]): Vector[A] = append(x)
}
@@ -11,8 +11,8 @@ object Covariant {
class Foo[+A] {
private[this] var a : A = _
def getA : A = a
- private[this] def setA(a : A) = this.a = a
-
+ private[this] def setA(a : A) = this.a = a
+
object Baz extends C[A]
trait Convert[B] {
def b2a(b : B) : A
@@ -22,8 +22,8 @@ object Covariant {
class Foo2[+A] {
private[this] var a : A = _
def getA : A = a
- private[this] def setA(a : A) = this.a = a
-
+ private[this] def setA(a : A) = this.a = a
+
{
trait Convert[B] {
def b2a(b : B) : A
@@ -35,8 +35,8 @@ object Covariant {
class Foo3[+A] {
private[this] var a : A = _
def getA : A = a
- private[this] def setA(a : A) = this.a = a
-
+ private[this] def setA(a : A) = this.a = a
+
private[this] trait Convert[B] {
def b2a(b : B) : A
def doit(b : B) = setA(b2a(b))
@@ -71,3 +71,11 @@ object Covariant {
val t: T[Any] = ST
t.x.m(new Object)
}
+
+object TestAlias {
+ class B[-T]
+ trait C[+T] {
+ type A = T
+ def foo: B[A]
+ }
+}
diff --git a/test/files/neg/viewtest.scala b/test/files/neg/viewtest.scala
index 778e672d91..ddb7fa4a3b 100644
--- a/test/files/neg/viewtest.scala
+++ b/test/files/neg/viewtest.scala
@@ -12,13 +12,13 @@ trait Ordered[+a] {
*/
def compareTo [b >: a <% Ordered[b]](that: b): Int
- def < [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) < 0
+ def < [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) < 0
- def > [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) > 0
+ def > [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) > 0
- def <= [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) <= 0
+ def <= [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) <= 0
- def >= [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) >= 0
+ def >= [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) >= 0
}
@@ -30,14 +30,14 @@ object O {
case _ => -(y compareTo x)
}
}
- implicit def view2(x: char): Ordered[char] = new Ordered[char] {
- def compareTo [b >: char <% Ordered[b]](y: b): Int = y match {
- case y1: char => x - y1
+ implicit def view2(x: Char): Ordered[Char] = new Ordered[Char] {
+ def compareTo [b >: Char <% Ordered[b]](y: b): Int = y match {
+ case y1: Char => x - y1
case _ => -(y compareTo x)
}
}
- implicit def view3[a <% Ordered[a]](x: List[a]): Ordered[List[a]] =
+ implicit def view3[a <% Ordered[a]](x: List[a]): Ordered[List[a]] =
new Ordered[List[a]] {
def compareTo [b >: List[a] <% Ordered[b]](y: b): Int = y match {
case y1: List[a] => compareLists(x, y1)
@@ -72,7 +72,7 @@ class Node[a <% Ordered[a]](elem: a, l: Tree[a], r: Tree[a]) extends Tree[a] {
if (x == elem) this
else if (x < elem) new Node(elem, l insert x, r)
else new Node(elem, l, r insert x)
- def elements: List[a] =
+ def elements: List[a] =
l.elements ::: List(elem) ::: r.elements
}
@@ -86,7 +86,7 @@ case class Str(elem: String) extends Ordered[Str] {
object Test {
import O._
- private def toCharList(s: String): List[Char] =
+ private def toCharList(s: String): List[Char] =
if (s.length() == 0) List()
else s.charAt(0) :: toCharList(s.substring(1))
@@ -106,7 +106,7 @@ object Test {
Console.println(t.elements)
}
{
- var t: Tree[List[char]] = Empty
+ var t: Tree[List[Char]] = Empty
for (s <- args) {
t = t insert toCharList(s)
}
diff --git a/test/files/pos/List1.scala b/test/files/pos/List1.scala
index 1321d95c20..9d3a51f4e3 100644
--- a/test/files/pos/List1.scala
+++ b/test/files/pos/List1.scala
@@ -17,7 +17,7 @@ object lists {
def isEmpty = false;
def head = x;
def tail = xs;
- }
+ }
def foo = {
val intnil = Nil[Int];
diff --git a/test/files/pos/MailBox.scala b/test/files/pos/MailBox.scala
index 67b923ea3e..35b38f4725 100644
--- a/test/files/pos/MailBox.scala
+++ b/test/files/pos/MailBox.scala
@@ -23,7 +23,7 @@ class MailBox {
private val sent = new LinkedList[Any];
private var lastSent = sent;
private val receivers = new LinkedList[Receiver];
- private var lastReceiver = receivers;
+ private var lastReceiver = receivers;
def send(msg: Any): Unit = synchronized {
var r = receivers;
@@ -58,7 +58,7 @@ class MailBox {
}
f(msg)
}
-
+
def receiveWithin[a](msec: Long)(f: PartialFunction[Any, a]): a = {
val msg: Any = synchronized {
var s = sent;
diff --git a/test/files/pos/Transactions.scala b/test/files/pos/Transactions.scala
index ed989e178e..9b4388300b 100644
--- a/test/files/pos/Transactions.scala
+++ b/test/files/pos/Transactions.scala
@@ -28,7 +28,7 @@ class Transaction {
var next: Transaction = null
def this(hd: Transaction, tl: Transaction) = { this(); this.head = head; this.next = next }
-
+
def makeAbort() = synchronized {
while (status != Transaction.Aborted && status != Transaction.Committed) {
status = Transaction.Abortable
@@ -48,7 +48,7 @@ class Transaction {
case ex: AbortException => abort(); None
case ex: Throwable => abort(); throw ex
}
-
+
}
trait Transactional {
@@ -58,7 +58,7 @@ trait Transactional {
/** copy back snapshot */
def rollBack(): Unit
-
+
var readers: Transaction
var writer: Transaction
@@ -66,11 +66,11 @@ trait Transactional {
if (writer == null) null
else if (writer.status == Transaction.Running) writer
else {
- if (writer.status != Transaction.Committed) rollBack();
- writer = null;
- null
+ if (writer.status != Transaction.Committed) rollBack();
+ writer = null;
+ null
}
-
+
def getter(thisTrans: Transaction) {
if (writer == thisTrans) return
var r = readers
@@ -96,7 +96,7 @@ trait Transactional {
synchronized {
val w = currentWriter()
if (w != null)
- if (thisTrans.id < w.id) { w.makeAbort(); rollBack() }
+ if (thisTrans.id < w.id) { w.makeAbort(); rollBack() }
else throw new AbortException
var r = readers
while (r != null && r.head.status != Transaction.Running) { r = r.next; readers = r }
@@ -111,4 +111,4 @@ trait Transactional {
}
}
}
-
+
diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala
index d1bd6ba264..0819379d86 100644
--- a/test/files/pos/annotations.scala
+++ b/test/files/pos/annotations.scala
@@ -1,4 +1,5 @@
class ann(i: Int) extends Annotation
+class cfann(x: String) extends ClassfileAnnotation
// annotations on abstract types
abstract class C1[@serializable @cloneable +T, U, V[_]]
@@ -35,6 +36,10 @@ object Test {
// annotation on annotation constructor
@(ann @ann(100))(200) def foo() = 300
+
+ // #2984
+ private final val NAMESPACE = "/info"
+ @cfann(x = NAMESPACE + "/index") def index = "success"
}
// test forward references to getters / setters
@@ -84,3 +89,22 @@ trait BeanF {
def isG(): Boolean
def setG(nb: Boolean): Unit
}
+
+
+class Ann3(arr: Array[String]) extends ClassfileAnnotation
+class Ann4(i: Int) extends ClassfileAnnotation
+class Ann5(value: Class[_]) extends ClassfileAnnotation
+
+object Test3 {
+ final val i = 1083
+ final val cls = classOf[String]
+}
+
+class Test4 {
+ @Ann3(arr = Array("dlkfj", "DSF"))
+ @Ann4(i = 2908)
+ @Ann4(i = Test3.i)
+ @Ann5(value = classOf[Int])
+ @Ann5(Test3.cls)
+ def foo {}
+}
diff --git a/test/files/pos/arrays2.scala b/test/files/pos/arrays2.scala
index 795c486e37..2d5409cbb8 100644
--- a/test/files/pos/arrays2.scala
+++ b/test/files/pos/arrays2.scala
@@ -11,8 +11,8 @@ object arrays2 {
// #2422
object arrays4 {
- val args = Array[String]("World")
- "Hello %1$s".format(args: _*)
+ val args = Array[String]("World")
+ "Hello %1$s".format(args: _*)
}
// #2461
diff --git a/test/files/pos/bug0031.scala b/test/files/pos/bug0031.scala
index aa787ca794..ec6eae9282 100644
--- a/test/files/pos/bug0031.scala
+++ b/test/files/pos/bug0031.scala
@@ -4,7 +4,7 @@ object Main {
def ensure(postcondition: a => Boolean): a
}
- def require[a](precondition: => Boolean)(command: => a): Ensure[a] =
+ def require[a](precondition: => Boolean)(command: => a): Ensure[a] =
if (precondition)
new Ensure[a] {
def ensure(postcondition: a => Boolean): a = {
diff --git a/test/files/pos/bug0066.scala b/test/files/pos/bug0066.scala
index 2153264e7a..9317da7165 100644
--- a/test/files/pos/bug0066.scala
+++ b/test/files/pos/bug0066.scala
@@ -3,5 +3,5 @@ class GBTree[A, B] /*with Map[A, B, GBTree[A,B]]*/ {
case class Node[A,B](key:A,value:B,smaller:Node[A,B],bigger:Node[A,B])
extends Tree[A,B];
case class Nil[A,B]() extends Tree[A,B];
-
+
}
diff --git a/test/files/pos/bug0069.scala b/test/files/pos/bug0069.scala
index 113820613f..5a8c15cd5a 100644
--- a/test/files/pos/bug0069.scala
+++ b/test/files/pos/bug0069.scala
@@ -8,4 +8,4 @@ object testCQ {
*/
}
-
+
diff --git a/test/files/pos/bug0091.scala b/test/files/pos/bug0091.scala
index 54c821b41c..d491b7cfb9 100644
--- a/test/files/pos/bug0091.scala
+++ b/test/files/pos/bug0091.scala
@@ -1,6 +1,6 @@
class Bug {
def main(args: Array[String]) = {
var msg: String = null; // no bug if "null" instead of "_"
- val f: PartialFunction[Any, unit] = { case 42 => msg = "coucou" };
+ val f: PartialFunction[Any, Unit] = { case 42 => msg = "coucou" };
}
}
diff --git a/test/files/pos/bug0599.scala b/test/files/pos/bug0599.scala
index 885159af66..6125b99ce2 100644
--- a/test/files/pos/bug0599.scala
+++ b/test/files/pos/bug0599.scala
@@ -15,4 +15,4 @@ abstract class FooA {
a.xxx;
doB.xxx;
}
- }
+ }
diff --git a/test/files/pos/bug0646.scala b/test/files/pos/bug0646.scala
index 64214f65b1..6146e60020 100644
--- a/test/files/pos/bug0646.scala
+++ b/test/files/pos/bug0646.scala
@@ -2,7 +2,7 @@ object xfor {
import scala.xml.NodeSeq
- val books =
+ val books =
<bks>
<title>Blabla</title>
<title>Blubabla</title>
@@ -10,10 +10,10 @@ object xfor {
</bks>;
new NodeSeq { val theSeq = books.child } match {
- case t @ <title>Blabla</title> => t
+ case t @ Seq(<title>Blabla</title>) => t
}
- //val n: NodeSeq = new NodeSeq { val theSeq = books.child }
+ //val n: NodeSeq = new NodeSeq { val theSeq = books.child }
//n match {
// case t @ <title>Blabla</title> => t
//}
diff --git a/test/files/pos/bug1075.scala b/test/files/pos/bug1075.scala
index 936ef72272..0f518b24db 100644
--- a/test/files/pos/bug1075.scala
+++ b/test/files/pos/bug1075.scala
@@ -5,7 +5,7 @@ class Directory(var dir_ : String)
}
dir_ = dir_.replaceAll("/{2,}", "/")
- def this(serialized : Array[byte]) = {
+ def this(serialized : Array[Byte]) = {
this(new String(serialized, "UTF-8"))
}
diff --git a/test/files/pos/bug1090.scala b/test/files/pos/bug1090.scala
index a9bce90b00..dca762af4f 100644
--- a/test/files/pos/bug1090.scala
+++ b/test/files/pos/bug1090.scala
@@ -10,7 +10,7 @@ object Test {
type Node = Core.this.Node
}
def f(manager : Manager) = manager.iterator.foreach{
- case node : NodeImpl =>
+ case node : NodeImpl =>
}
}
}
diff --git a/test/files/pos/bug1123.scala b/test/files/pos/bug1123.scala
index 3812fa3eb3..a7b009cbbe 100644
--- a/test/files/pos/bug1123.scala
+++ b/test/files/pos/bug1123.scala
@@ -7,5 +7,5 @@ object Test {
}
def f = extraListener.h
}
- def main(args : Array[String]) : Unit = (new Editor).f
+ def main(args : Array[String]) : Unit = (new Editor).f
}
diff --git a/test/files/pos/bug1168.scala b/test/files/pos/bug1168.scala
index 58407e328e..ae0ffb3cac 100644
--- a/test/files/pos/bug1168.scala
+++ b/test/files/pos/bug1168.scala
@@ -1,5 +1,5 @@
object Test extends Application {
-
+
trait SpecialException {}
try {
diff --git a/test/files/pos/bug1210a.scala b/test/files/pos/bug1210a.scala
index b3492f96e4..fbb0a611d6 100644
--- a/test/files/pos/bug1210a.scala
+++ b/test/files/pos/bug1210a.scala
@@ -1,9 +1,9 @@
// both styles of abstraction should behave the same
// related to 1210 because that bug broke the OO version below
-trait OO {
+trait OO {
abstract class Test { self =>
type T
-
+
val v: Test {type T = self.T} = self.v.v
}
}
diff --git a/test/files/pos/bug122.scala b/test/files/pos/bug122.scala
index 630e24ce4a..e3daeef73e 100644
--- a/test/files/pos/bug122.scala
+++ b/test/files/pos/bug122.scala
@@ -1,4 +1,4 @@
class L {
- val List(v:Int, 2) = List(2, v:Int)
+ val List(v:Int, 2) = List(2, v:Int)
val (a:Int, b:Int) = (1, a)
}
diff --git a/test/files/pos/bug1237.scala b/test/files/pos/bug1237.scala
index 7777372138..0d1dd05d50 100644
--- a/test/files/pos/bug1237.scala
+++ b/test/files/pos/bug1237.scala
@@ -1,11 +1,11 @@
-class HelloWorld {
- def main(args: Array[String]) {
+class HelloWorld {
+ def main(args: Array[String]) {
object TypeBool;
trait Fct {
def g(x : Int) = TypeBool // breaks.
-
+
// def g(x : Int) = 3 // fine.
}
diff --git a/test/files/pos/bug1272.scala b/test/files/pos/bug1272.scala
index d86a909ae5..aab1a886c7 100644
--- a/test/files/pos/bug1272.scala
+++ b/test/files/pos/bug1272.scala
@@ -2,8 +2,8 @@ object ImplicitTest {
implicit val i : Int = 10
implicit def a(implicit i : Int) : Array[Byte] = null
implicit def b[T](implicit i : Int) : Array[T] = null
-
+
def fn[T](implicit x : T) = 0
-
+
val x = fn[Array[Byte]]
 }
\ No newline at end of file
diff --git a/test/files/pos/bug1279a.scala b/test/files/pos/bug1279a.scala
index 7568d3afcd..9212b583d4 100644
--- a/test/files/pos/bug1279a.scala
+++ b/test/files/pos/bug1279a.scala
@@ -1,3 +1,4 @@
+// see #13
// providing the type parameter in the recursive call to all4Impl
// avoids the problem
@@ -31,8 +32,9 @@ abstract class M
object Unrelated
{
- def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
+ // TODO!!! fix this bug for real, it compiles successfully, but weird types are inferred
+ // def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
// compiles successfully
-// def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
+ def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
}
diff --git a/test/files/pos/bug1292.scala b/test/files/pos/bug1292.scala
index 83a996d530..3ed153abf2 100644
--- a/test/files/pos/bug1292.scala
+++ b/test/files/pos/bug1292.scala
@@ -1,5 +1,5 @@
trait Foo[T <: Foo[T, Enum], Enum <: Enumeration] {
- type StV = Enum#Value
+ type StV = Enum#Value
type Meta = MegaFoo[T, Enum]
type Slog <: Enumeration
diff --git a/test/files/pos/bug1385.scala b/test/files/pos/bug1385.scala
index 55356c1f25..59953bcc39 100644
--- a/test/files/pos/bug1385.scala
+++ b/test/files/pos/bug1385.scala
@@ -1,3 +1,3 @@
-@serializable object Test {
+@serializable object Test {
private def readResolve:AnyRef = this
}
diff --git a/test/files/pos/bug1560.scala b/test/files/pos/bug1560.scala
index 384e808e4b..b1d04153b3 100644
--- a/test/files/pos/bug1560.scala
+++ b/test/files/pos/bug1560.scala
@@ -2,10 +2,10 @@ object Test extends Application {
trait C[T] {
def t: T
}
-
+
def b: Option[C[_]] = null
-
+
def c = b match {
case Some(b) => b.t
- }
+ }
}
diff --git a/test/files/pos/bug1565.scala b/test/files/pos/bug1565.scala
index df333151d5..030086c541 100644
--- a/test/files/pos/bug1565.scala
+++ b/test/files/pos/bug1565.scala
@@ -3,7 +3,7 @@ object Bug1565 {
def x() = { 0; (a : Int, b : Int) => println(List(a, b)) ; 0 }
(a : Int, b : Int) => println(List(a, b))
-
+
// various function syntaxes to exercise the parser
val xs = List(1,2,3)
xs.filter(x => x < 2)
diff --git a/test/files/pos/bug1737/A.java b/test/files/pos/bug1737/A.java
new file mode 100644
index 0000000000..ee87e29a35
--- /dev/null
+++ b/test/files/pos/bug1737/A.java
@@ -0,0 +1,3 @@
+public interface A<T extends String> {
+ T get();
+}
\ No newline at end of file
diff --git a/test/files/pos/bug1737/B.java b/test/files/pos/bug1737/B.java
new file mode 100644
index 0000000000..28a1907a04
--- /dev/null
+++ b/test/files/pos/bug1737/B.java
@@ -0,0 +1 @@
+public abstract class B implements A {}
\ No newline at end of file
diff --git a/test/files/pos/bug1737/c.scala b/test/files/pos/bug1737/c.scala
new file mode 100644
index 0000000000..782ec18b9e
--- /dev/null
+++ b/test/files/pos/bug1737/c.scala
@@ -0,0 +1,4 @@
+class C extends B {
+ this: A[_] =>
+ def get = "foo"
+}
\ No newline at end of file
diff --git a/test/files/pos/bug2018.scala b/test/files/pos/bug2018.scala
new file mode 100644
index 0000000000..1736c394c9
--- /dev/null
+++ b/test/files/pos/bug2018.scala
@@ -0,0 +1,15 @@
+class A {
+ val b = new B
+
+ def getChildren = List(new A).iterator
+
+ class B {
+ private def check = true
+
+ private def getAncestor(p: A): A = {
+ val c = (p.getChildren.find(_.b.check)) match {case Some(d) => d case None => p}
+
+ if (c == p) p else c.b.getAncestor(c)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug2023.scala b/test/files/pos/bug2023.scala
index 21c6fc96a6..de3e848fbd 100644
--- a/test/files/pos/bug2023.scala
+++ b/test/files/pos/bug2023.scala
@@ -3,11 +3,11 @@ trait C[A]
object C {
implicit def ipl[A](implicit from: A => Ordered[A]): C[A] = null
}
-
+
object P {
def foo[A](i: A, j: A)(implicit c: C[A]): Unit = ()
}
-
+
class ImplicitChainTest {
def testTrivial: Unit = {
P.foo('0', '9')
diff --git a/test/files/pos/bug2081.scala b/test/files/pos/bug2081.scala
index 52388464a5..d772c02dc2 100644
--- a/test/files/pos/bug2081.scala
+++ b/test/files/pos/bug2081.scala
@@ -7,5 +7,5 @@ object ScalaForRubyists {
val x = 10.days
// a couple parser corner cases I wanted not to break
- val y = 5.e0 + 5e7
+ val y = 5.e0 + 5e7
}
diff --git a/test/files/pos/bug2168.scala b/test/files/pos/bug2168.scala
index 1cf73446a8..d7bfa9687b 100644
--- a/test/files/pos/bug2168.scala
+++ b/test/files/pos/bug2168.scala
@@ -2,5 +2,5 @@ object Test extends Application {
def foo1(x: AnyRef) = x match { case x: Function0[_] => x() }
def foo2(x: AnyRef) = x match { case x: Function0[Any] => x() }
}
-
-
+
+
diff --git a/test/files/pos/bug2187-2.scala b/test/files/pos/bug2187-2.scala
new file mode 100644
index 0000000000..3f2742dd89
--- /dev/null
+++ b/test/files/pos/bug2187-2.scala
@@ -0,0 +1,7 @@
+class Test {
+ def test[A](list: List[A]) = list match {
+ case Seq(x, y) => "xy"
+ case Seq(x) => "x"
+ case _ => "something else"
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug2310.scala b/test/files/pos/bug2310.scala
new file mode 100644
index 0000000000..e08411a3df
--- /dev/null
+++ b/test/files/pos/bug2310.scala
@@ -0,0 +1,38 @@
+import scala.Stream._
+
+object consistencyError {
+ /* this gives an error:
+ Consistency problem compiling (virtual file)!
+ Trying to call method body%1(List(scala.collection.immutable.Stream[A])) with arguments (List(tp2, temp6, temp5))
+ case (l #:: ls, rs) => None
+ ^
+ scala.tools.nsc.symtab.Types$TypeError: too many arguments for method body%1: (val rs: scala.collection.immutable.Stream[A])None.type
+
+ two errors found
+ vss(0) =
+ args = List(tp2, temp6, temp5)
+ vss(1) = value rs, value ls, value l
+ args = List(tp2, temp6, temp5)
+ targets(0) = FinalState(,scala.None)
+ targets(1) = FinalState(,scala.None)
+ labels(1) = method body%1
+ labels(0) = method body%0
+ bx = 1
+ label.tpe = (val rs: scala.collection.immutable.Stream[A])None.type
+ */
+ def crash[A](lefts: Stream[A], rights: Stream[A]) = (lefts, rights) match {
+ case (Stream.Empty, Stream.Empty) => None
+ case (l #:: ls, rs) => None
+ }
+
+ // These work
+ // def works1[A](lefts: Stream[A]) = lefts match {
+ // case Stream.Empty => None
+ // case l #:: ls => None
+ // }
+ //
+ // def works2[A](lefts: Stream[A], rights: Stream[A]) = (lefts, rights) match {
+ // case (Stream.Empty, Stream.Empty) => None
+ // case (ls, rs) => None
+ // }
+}
diff --git a/test/files/pos/bug2409/J.java b/test/files/pos/bug2409/J.java
new file mode 100644
index 0000000000..6b7c45ff6d
--- /dev/null
+++ b/test/files/pos/bug2409/J.java
@@ -0,0 +1,4 @@
+class J {
+ static class J2 { }
+ J(J2 j2) { }
+}
diff --git a/test/files/pos/bug2409/bug2409.scala b/test/files/pos/bug2409/bug2409.scala
new file mode 100644
index 0000000000..5775008fc4
--- /dev/null
+++ b/test/files/pos/bug2409/bug2409.scala
@@ -0,0 +1 @@
+object S { new J(null) }
\ No newline at end of file
diff --git a/test/files/pos/bug247.scala b/test/files/pos/bug247.scala
index e976404e61..983b7998a9 100644
--- a/test/files/pos/bug247.scala
+++ b/test/files/pos/bug247.scala
@@ -12,7 +12,7 @@ class TreeMapFactory[KEY](newOrder:Order[KEY]) extends MapFactory[KEY] {
def Empty[V] = new TreeMap[KEY,V](new TreeMapFactory[KEY](order));
}
-class Tree[KEY,Entry](order:Order[KEY]) {
+class Tree[KEY,Entry](order:Order[KEY]) {
def size =0;
}
diff --git a/test/files/pos/bug262.scala b/test/files/pos/bug262.scala
index ec6187b36b..b81490977c 100644
--- a/test/files/pos/bug262.scala
+++ b/test/files/pos/bug262.scala
@@ -1,11 +1,11 @@
object O {
abstract class A {
- def f:A;
+ def f:A;
}
class B extends A {
def f = if(1 == 2) new C else new D;
}
- class C extends A {
+ class C extends A {
def f = this;
}
class D extends A {
diff --git a/test/files/pos/bug2691.scala b/test/files/pos/bug2691.scala
new file mode 100644
index 0000000000..d289605ba6
--- /dev/null
+++ b/test/files/pos/bug2691.scala
@@ -0,0 +1,10 @@
+object Breakdown {
+ def unapplySeq(x: Int): Some[List[String]] = Some(List("", "there"))
+}
+object Test {
+ 42 match {
+ case Breakdown("") => // needed to trigger bug
+ case Breakdown("foo") => // needed to trigger bug
+ case Breakdown("", who) => println ("hello " + who)
+ }
+}
diff --git a/test/files/pos/bug287.scala b/test/files/pos/bug287.scala
index 81a01951b2..8e5e8831c1 100644
--- a/test/files/pos/bug287.scala
+++ b/test/files/pos/bug287.scala
@@ -1,7 +1,7 @@
object testBuf {
class mystream extends java.io.BufferedOutputStream(new java.io.FileOutputStream("/dev/null")) {
def w( x:String ):Unit = {
- val foo = new Array[byte](2);
+ val foo = new Array[Byte](2);
// write( byte[] ) is defined in FilterOutputStream, the superclass of BufferedOutputStream
super.write( foo ); // error
diff --git a/test/files/pos/bug2939.scala b/test/files/pos/bug2939.scala
new file mode 100644
index 0000000000..67677f2f18
--- /dev/null
+++ b/test/files/pos/bug2939.scala
@@ -0,0 +1,13 @@
+import collection._
+
+object Proxies {
+ class C1 extends MapProxy[Int,Int] { def self = Map[Int,Int]() }
+ class C2 extends mutable.MapProxy[Int,Int] { def self = mutable.Map[Int,Int]() }
+ class C3 extends immutable.MapProxy[Int,Int] { def self = immutable.Map[Int,Int]() }
+
+ class C4 extends SetProxy[Int] { def self = Set[Int]() }
+ class C5 extends mutable.SetProxy[Int] { def self = mutable.Set[Int]() }
+ class C6 extends immutable.SetProxy[Int] { def self = immutable.Set[Int]() }
+
+ class C7 extends SeqProxy[Int] { def self = Seq[Int]() }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug2945.scala b/test/files/pos/bug2945.scala
new file mode 100644
index 0000000000..762bdb61e1
--- /dev/null
+++ b/test/files/pos/bug2945.scala
@@ -0,0 +1,12 @@
+object Foo {
+ def test(s: String) = {
+ (s: Seq[Char]) match {
+ case Seq('f', 'o', 'o', ' ', rest1 @ _*) =>
+ rest1
+ case Seq('b', 'a', 'r', ' ', ' ', rest2 @ _*) =>
+ rest2
+ case _ =>
+ s
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3020.scala b/test/files/pos/bug3020.scala
new file mode 100644
index 0000000000..cb429cd94f
--- /dev/null
+++ b/test/files/pos/bug3020.scala
@@ -0,0 +1,9 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ var x = true
+
+ ( { if (x) new scala.util.Random() } .asInstanceOf[Runnable] )
+ }
+}
+
+
diff --git a/test/files/pos/bug3097.flags b/test/files/pos/bug3097.flags
new file mode 100644
index 0000000000..144ddac9d3
--- /dev/null
+++ b/test/files/pos/bug3097.flags
@@ -0,0 +1 @@
+-unchecked -Xfatal-warnings
diff --git a/test/files/pos/bug3097.scala b/test/files/pos/bug3097.scala
new file mode 100644
index 0000000000..a034b960f7
--- /dev/null
+++ b/test/files/pos/bug3097.scala
@@ -0,0 +1,31 @@
+package seal
+
+sealed trait ISimpleValue
+
+sealed trait IListValue extends ISimpleValue {
+ def items: List[IAtomicValue[_]]
+}
+sealed trait IAtomicValue[O] extends ISimpleValue {
+ def data: O
+}
+
+sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] { }
+sealed trait IDoubleValue extends IAbstractDoubleValue[Double]
+
+case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue
+class DoubleValue(val data: Double) extends IDoubleValue {
+ def asDouble = data
+}
+
+object Test {
+ /**
+ * @param args the command line arguments
+ */
+ def main(args: Array[String]): Unit = {
+ val v: ISimpleValue = new DoubleValue(1)
+ v match {
+ case m: IListValue => println("list")
+ case a: IAtomicValue[_] => println("atomic")
+ }
+ }
+}
diff --git a/test/files/pos/bug3136.scala b/test/files/pos/bug3136.scala
new file mode 100644
index 0000000000..6a5850aeb0
--- /dev/null
+++ b/test/files/pos/bug3136.scala
@@ -0,0 +1,19 @@
+class Type
+class Symbol
+case class PolyType(tps: List[Symbol], res: Type) extends Type
+class OtherType extends Type
+
+// case class NullaryMethodType(tp: Type) extends Type
+
+object NullaryMethodType {
+ def apply(resTpe: Type): Type = PolyType(List(), resTpe)
+ def unapply(tp: Type): Option[(Type)] = None
+}
+
+object Test {
+ def TEST(tp: Type): String =
+ tp match {
+ case PolyType(ps1, PolyType(ps2, res @ PolyType(a, b))) => "1"+tp // couldn't find a simpler version that still crashes
+ case NullaryMethodType(meh) => "2"+meh
+ }
+}
diff --git a/test/files/pos/bug3175.scala b/test/files/pos/bug3175.scala
new file mode 100644
index 0000000000..497ff8255c
--- /dev/null
+++ b/test/files/pos/bug3175.scala
@@ -0,0 +1,7 @@
+object Test {
+ def f(g:{val update:Unit}) = g.update
+
+ def main(args: Array[String]): Unit = {
+
+ }
+}
diff --git a/test/files/pos/bug3252.flags b/test/files/pos/bug3252.flags
new file mode 100644
index 0000000000..eb4d19bcb9
--- /dev/null
+++ b/test/files/pos/bug3252.flags
@@ -0,0 +1 @@
+-optimise
\ No newline at end of file
diff --git a/test/files/pos/bug3252.scala b/test/files/pos/bug3252.scala
new file mode 100644
index 0000000000..4b8e862714
--- /dev/null
+++ b/test/files/pos/bug3252.scala
@@ -0,0 +1,15 @@
+class A {
+ def f(x : Boolean) : Thread = {
+ g {
+ x match {
+ case false =>
+ B.h { }
+ }
+ }
+ }
+
+ private def g[T](block : => T) = error("")
+}
+object B {
+ def h(block : => Unit) : Nothing = error("")
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3278.scala b/test/files/pos/bug3278.scala
new file mode 100644
index 0000000000..788ec75d26
--- /dev/null
+++ b/test/files/pos/bug3278.scala
@@ -0,0 +1,15 @@
+class Foo
+class Test {
+ def update[B](x : B, b : Int) {}
+ def apply[B](x : B) = 1
+}
+
+object Test {
+ def main(a : Array[String]) {
+ val a = new Test
+ val f = new Foo
+ a(f) = 1 //works
+ a(f) = a(f) + 1 //works
+ a(f) += 1 //error: reassignment to val
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3411.scala b/test/files/pos/bug3411.scala
new file mode 100644
index 0000000000..b76fec66a6
--- /dev/null
+++ b/test/files/pos/bug3411.scala
@@ -0,0 +1,8 @@
+object A {
+ def g(c: PartialFunction[Any,Unit]) {}
+
+ def f {
+ lazy val x = 0
+ g { case `x` => }
+ }
+}
diff --git a/test/files/pos/bug3420.flags b/test/files/pos/bug3420.flags
new file mode 100644
index 0000000000..ea03113c66
--- /dev/null
+++ b/test/files/pos/bug3420.flags
@@ -0,0 +1 @@
+-optimise -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/bug3420.scala b/test/files/pos/bug3420.scala
new file mode 100644
index 0000000000..0fc56ed67b
--- /dev/null
+++ b/test/files/pos/bug3420.scala
@@ -0,0 +1,5 @@
+class C {
+ val cv = Map[Int, Int](1 -> 2)
+ lazy val cl = Map[Int, Int](1 -> 2)
+ def cd = Map[Int, Int](1 -> 2)
+}
diff --git a/test/files/pos/bug3430.flags b/test/files/pos/bug3430.flags
new file mode 100644
index 0000000000..eb4d19bcb9
--- /dev/null
+++ b/test/files/pos/bug3430.flags
@@ -0,0 +1 @@
+-optimise
\ No newline at end of file
diff --git a/test/files/pos/bug3430.scala b/test/files/pos/bug3430.scala
new file mode 100644
index 0000000000..4990abb2a1
--- /dev/null
+++ b/test/files/pos/bug3430.scala
@@ -0,0 +1,13 @@
+// package com.example
+
+object A {
+ def f1(f: String => Boolean) = f("a")
+
+ def f2(): Boolean =
+ f1 { s1 =>
+ f1 { s2 =>
+ while (true) { }
+ true
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3480.scala b/test/files/pos/bug3480.scala
new file mode 100644
index 0000000000..830365170b
--- /dev/null
+++ b/test/files/pos/bug3480.scala
@@ -0,0 +1,4 @@
+object Test {
+ val List(_*) = List(1)
+ val Array( who, what @ _* ) = "Eclipse plugin cannot not handle this" split (" ")
+}
diff --git a/test/files/pos/bug3495.flags b/test/files/pos/bug3495.flags
new file mode 100644
index 0000000000..08de722af0
--- /dev/null
+++ b/test/files/pos/bug3495.flags
@@ -0,0 +1 @@
+-Dsoot.class.path=bin:.
diff --git a/test/files/pos/bug3495.scala b/test/files/pos/bug3495.scala
new file mode 100644
index 0000000000..8d5dff4302
--- /dev/null
+++ b/test/files/pos/bug3495.scala
@@ -0,0 +1,2 @@
+class Foo { }
+
diff --git a/test/files/pos/bug3521/DoubleValue.java b/test/files/pos/bug3521/DoubleValue.java
new file mode 100644
index 0000000000..28f05cd972
--- /dev/null
+++ b/test/files/pos/bug3521/DoubleValue.java
@@ -0,0 +1,7 @@
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface DoubleValue {
+ double value();
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3521/a.scala b/test/files/pos/bug3521/a.scala
new file mode 100644
index 0000000000..94bb451fc3
--- /dev/null
+++ b/test/files/pos/bug3521/a.scala
@@ -0,0 +1,4 @@
+class Test {
+ @DoubleValue(-0.05)
+ var a = 0
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3568.scala b/test/files/pos/bug3568.scala
new file mode 100644
index 0000000000..950c16539b
--- /dev/null
+++ b/test/files/pos/bug3568.scala
@@ -0,0 +1,46 @@
+import scala.annotation._
+import scala.annotation.unchecked._
+import scala.collection._
+
+
+package object buffer {
+ val broken = new ArrayVec2() // commenting out this line causes the file to compile.
+
+ val works = Class.forName("buffer.ArrayVec2").newInstance().asInstanceOf[ArrayVec2]
+}
+
+package buffer {
+ object Main {
+ // ArrayVec2 can be compiled, instantiated and used.
+ def main(args: Array[String]) { println(works) }
+ }
+
+ trait ElemType { type Element; type Component <: ElemType }
+ trait Float1 extends ElemType { type Element = Float; type Component = Float1}
+ class Vec2 extends ElemType { type Element = Vec2; type Component = Float1 }
+
+ abstract class BaseSeq[T <: ElemType, E]
+ extends IndexedSeq[E] with IndexedSeqOptimized[E, IndexedSeq[E]] {
+ def length = 1
+ def apply(i: Int) :E
+ }
+
+ abstract class GenericSeq[T <: ElemType] extends BaseSeq[T, T#Element]
+ trait DataArray[T <: ElemType] extends BaseSeq[T, T#Element]
+ trait DataView[T <: ElemType] extends BaseSeq[T, T#Element]
+ abstract class BaseFloat1 extends BaseSeq[Float1, Float]
+
+ class ArrayFloat1 extends BaseFloat1 with DataArray[Float1] {
+ def apply(i: Int) :Float = 0f
+ }
+
+ class ViewFloat1 extends BaseFloat1 with DataView[Float1] {
+ def apply(i: Int) :Float = 0f
+ }
+
+ class ArrayVec2(val backingSeq: ArrayFloat1)
+ extends GenericSeq[Vec2] with DataArray[Vec2] {
+ def this() = this(new ArrayFloat1)
+ def apply(i: Int) :Vec2 = null
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3570.scala b/test/files/pos/bug3570.scala
new file mode 100644
index 0000000000..8921f83b2a
--- /dev/null
+++ b/test/files/pos/bug3570.scala
@@ -0,0 +1,7 @@
+class test {
+ object Break extends Throwable
+ def break = throw Break
+ def block(x: => Unit) {
+ try { x } catch { case e: Break.type => }
+ }
+}
diff --git a/test/files/pos/bug3578.scala b/test/files/pos/bug3578.scala
new file mode 100644
index 0000000000..306cde811b
--- /dev/null
+++ b/test/files/pos/bug3578.scala
@@ -0,0 +1,30 @@
+object Test {
+ sealed abstract class JValue {
+ def ++(other: JValue) = {
+ def append(value1: JValue, value2: JValue): JValue = (value1, value2) match {
+ case (JNothing, x) => x
+ case (x, JNothing) => x
+ case (JObject(xs), x: JField) => JObject(xs ::: List(x))
+ case (x: JField, JObject(xs)) => JObject(x :: xs)
+ case (JArray(xs), JArray(ys)) => JArray(xs ::: ys)
+ case (JArray(xs), v: JValue) => JArray(xs ::: List(v))
+ case (v: JValue, JArray(xs)) => JArray(v :: xs)
+ case (f1: JField, f2: JField) => JObject(f1 :: f2 :: Nil)
+ case (JField(n, v1), v2: JValue) => JField(n, append(v1, v2))
+ case (x, y) => JArray(x :: y :: Nil)
+ }
+ append(this, other)
+ }
+ }
+
+ case object JNothing extends JValue
+ case object JNull extends JValue
+ case class JString(s: String) extends JValue
+ case class JDouble(num: Double) extends JValue
+ case class JInt(num: BigInt) extends JValue
+ case class JBool(value: Boolean) extends JValue
+ case class JField(name: String, value: JValue) extends JValue
+ case class JObject(obj: List[JField]) extends JValue
+ case class JArray(arr: List[JValue]) extends JValue
+}
+
diff --git a/test/files/pos/bug430-feb09.scala b/test/files/pos/bug430-feb09.scala
index 1499f32b7a..bba8996e4e 100644
--- a/test/files/pos/bug430-feb09.scala
+++ b/test/files/pos/bug430-feb09.scala
@@ -13,12 +13,12 @@ package c.scala {
case class C[T]()
}
-// Doesn't compile: type Nothing is not a member of d.scala
+// Doesn't compile: type Nothing is not a member of d.scala
package d.scala.d {
case class D[T]()
}
-// Doesn't compile: type Any is not a member of e.scala
+// Doesn't compile: type Any is not a member of e.scala
package e.scala {
case class E[T >: Nothing]()
}
diff --git a/test/files/pos/bug432.scala b/test/files/pos/bug432.scala
index 8e3097ac9d..087fd70aba 100644
--- a/test/files/pos/bug432.scala
+++ b/test/files/pos/bug432.scala
@@ -1,2 +1,2 @@
-case class Tata
+case class Tata()
object Tata
diff --git a/test/files/pos/bug460.scala b/test/files/pos/bug460.scala
index 3fc13e4dd0..466d06c2ad 100644
--- a/test/files/pos/bug460.scala
+++ b/test/files/pos/bug460.scala
@@ -1,8 +1,8 @@
object Bug460 {
def testFun(x : Int, y : Int) = x + y
- val fn = testFun _
-
- fn(1, 2) // Ok
+ val fn = testFun _
+
+ fn(1, 2) // Ok
(testFun(_, _))(1, 2) // Ok
(testFun _).apply(1, 2)
(testFun _)(1, 2) // Error! (but no longer)
diff --git a/test/files/pos/bug516.scala b/test/files/pos/bug516.scala
index ce4e0e3dd6..735b259436 100644
--- a/test/files/pos/bug516.scala
+++ b/test/files/pos/bug516.scala
@@ -4,7 +4,7 @@ import scala.collection.script._;
class Members;
object subscriber extends Subscriber[Message[String] with Undoable, Members] {
- def notify(pub: Members, event: Message[String] with Undoable): Unit =
+ def notify(pub: Members, event: Message[String] with Undoable): Unit =
(event: Message[String]) match {
case Include(l, elem) => Console.println("ADD: " + elem);
case Remove(l, elem) => Console.println("REM: " + elem);
@@ -12,4 +12,4 @@ object subscriber extends Subscriber[Message[String] with Undoable, Members] {
//case r : Remove [HasTree] with Undoable =>
}
}
-
+
diff --git a/test/files/pos/bug577.scala b/test/files/pos/bug577.scala
index ede45399a0..236c1395e2 100644
--- a/test/files/pos/bug577.scala
+++ b/test/files/pos/bug577.scala
@@ -1,15 +1,15 @@
trait PriorityTree {
type Node <: BasicTreeNode;
-
+
val top = initTree;
top.next = (initTree);
top.next.prev = (top);
-
+
def initTree : Node;
+
+
-
-
-
+
trait BasicTreeNode {
private[PriorityTree] var next : Node = _;
private[PriorityTree] var prev : Node = _;
diff --git a/test/files/pos/bug599.scala b/test/files/pos/bug599.scala
index 968e2deaee..53f205a26b 100644
--- a/test/files/pos/bug599.scala
+++ b/test/files/pos/bug599.scala
@@ -16,4 +16,4 @@ abstract class FooA {
val aaa: InnerB.this.B = doB
aaa.xxx;
}
- }
+ }
diff --git a/test/files/pos/bug602.scala b/test/files/pos/bug602.scala
index 18dd405645..6062b976b6 100644
--- a/test/files/pos/bug602.scala
+++ b/test/files/pos/bug602.scala
@@ -10,5 +10,5 @@ case class Span[K <: Ordered[K]](low: Option[K], high: Option[K]) extends Functi
case Span(Some(low), None) => (k >= low)
case Span(None, Some(high)) => (k <= high)
case _ => false
- }
+ }
}
diff --git a/test/files/pos/bug613.scala b/test/files/pos/bug613.scala
index cf179420f4..92bdd1b8ee 100644
--- a/test/files/pos/bug613.scala
+++ b/test/files/pos/bug613.scala
@@ -3,9 +3,9 @@ class Outer extends Application {
abstract class C {
val x: Int
}
- val foo = new C {
+ val foo = new C {
class I {
- val z = y
+ val z = y
}
val x = (new I).z
}
diff --git a/test/files/pos/bug616.scala b/test/files/pos/bug616.scala
index bb91c732a6..074ad190da 100644
--- a/test/files/pos/bug616.scala
+++ b/test/files/pos/bug616.scala
@@ -1,7 +1,7 @@
object testImplicit {
implicit def foo2bar(foo: Foo): Bar = foo.bar
class Foo(val bar: Bar) {
- def testCoercion = {val a = this; a.baz} // here, foo2bar is inferred by the compiler, as expected
+ def testCoercion = {val a = this; a.baz} // here, foo2bar is inferred by the compiler, as expected
//def testCoercionThisImplicit = baz // --> error: not found: value baz
def testCoercionThisExplicit: Any = this.baz // --> error: value baz is not a member of Foo
}
diff --git a/test/files/pos/bug651.scala b/test/files/pos/bug651.scala
index c146446af9..44d20ad580 100644
--- a/test/files/pos/bug651.scala
+++ b/test/files/pos/bug651.scala
@@ -4,12 +4,12 @@ trait Test3 {
trait MatchableImpl {
trait MatchImpl;
}
-
+
trait BracePairImpl {
trait BraceImpl extends MatchableImpl {
private object MyMatch1 extends MatchImpl;
protected def match0 : MatchImpl = MyMatch1;
-
+
}
}
}
diff --git a/test/files/pos/bug675.scala b/test/files/pos/bug675.scala
index 51f56920b5..3deb7a2b14 100644
--- a/test/files/pos/bug675.scala
+++ b/test/files/pos/bug675.scala
@@ -7,7 +7,7 @@ trait T {
}
trait X {
def foo : Foo = FOO_0;
- }
+ }
}
object Test extends Application {
@@ -15,5 +15,5 @@ object Test extends Application {
val x = new t.X{}
Console.println(x.foo)
}
-
-
+
+
diff --git a/test/files/pos/bug690.scala b/test/files/pos/bug690.scala
index a93c54f007..3fcdca785d 100644
--- a/test/files/pos/bug690.scala
+++ b/test/files/pos/bug690.scala
@@ -10,5 +10,5 @@ trait test {
override def foo(t : T) = super.foo(t);
}
def t : T;
- M0.foo(t);
+ M0.foo(t);
}
diff --git a/test/files/pos/bug711.scala b/test/files/pos/bug711.scala
index 70fcc7f0d0..25cd6d3c13 100644
--- a/test/files/pos/bug711.scala
+++ b/test/files/pos/bug711.scala
@@ -2,7 +2,7 @@ abstract class Component
class Button extends Component {
def sayHey: Unit = Console.println("Hey, I'm a button") }
-
+
abstract class Origin {
val delegate: Component }
diff --git a/test/files/pos/bug715.cmds b/test/files/pos/bug715.cmds
new file mode 100644
index 0000000000..2836967fca
--- /dev/null
+++ b/test/files/pos/bug715.cmds
@@ -0,0 +1,2 @@
+scalac meredith_1.scala
+scalac runner_2.scala
diff --git a/test/files/pos/bug715/meredith_1.scala b/test/files/pos/bug715/meredith_1.scala
index 4be7b48908..3ed2e57d7a 100644
--- a/test/files/pos/bug715/meredith_1.scala
+++ b/test/files/pos/bug715/meredith_1.scala
@@ -9,7 +9,7 @@ trait XMLRenderer {
classOf[java.lang.Boolean],
classOf[java.lang.Integer],
classOf[java.lang.Float],
- classOf[java.lang.String]
+ classOf[java.lang.String]
// more to come
)
@@ -21,14 +21,14 @@ trait XMLRenderer {
value match {
case null => Text( "null" )
case vUnmatched =>
- if (value.isInstanceOf[java.lang.Boolean])
+ if (value.isInstanceOf[java.lang.Boolean])
Text( value.asInstanceOf[java.lang.Boolean].toString )
- else if (value.isInstanceOf[java.lang.Integer])
+ else if (value.isInstanceOf[java.lang.Integer])
Text( value.asInstanceOf[java.lang.Integer].toString )
- else if (value.isInstanceOf[java.lang.Float])
+ else if (value.isInstanceOf[java.lang.Float])
Text( value.asInstanceOf[java.lang.Float].toString )
- // else if (value.isInstanceOf[T])
- // pojo2XML( value.asInstanceOf[T] )
+ // else if (value.isInstanceOf[T])
+ // pojo2XML( value.asInstanceOf[T] )
else
<unmatchedType>
<theType>
@@ -57,7 +57,7 @@ trait XMLRenderer {
null,
field.getName,
null,
- TopScope,
+ TopScope,
fldValXML
)
}
@@ -73,7 +73,7 @@ trait XMLRenderer {
null,
TopScope,
progeny.asInstanceOf[Array[scala.xml.Node]] : _*
- )
+ )
}
}
diff --git a/test/files/pos/bug757.scala b/test/files/pos/bug757.scala
index cc6527f3f2..f722128acc 100644
--- a/test/files/pos/bug757.scala
+++ b/test/files/pos/bug757.scala
@@ -1,4 +1,4 @@
-package foo {
+package foo {
object C {
def foo {
Console.println("foo")
@@ -6,7 +6,7 @@ package foo {
}
}
-package bar {
+package bar {
object Main extends Application {
foo.C.foo
}
diff --git a/test/files/pos/bug758.scala b/test/files/pos/bug758.scala
index 160bf37172..44769d54f1 100644
--- a/test/files/pos/bug758.scala
+++ b/test/files/pos/bug758.scala
@@ -1,7 +1,7 @@
trait A { type T; type M >: T }
-trait B extends A {
- val x : String;
- val u : A { type T = B.this.T } ;
- type T = x.type;
- type M = u.M
+trait B extends A {
+ val x : String;
+ val u : A { type T = B.this.T } ;
+ type T = x.type;
+ type M = u.M
}
diff --git a/test/files/pos/bug767.scala b/test/files/pos/bug767.scala
index 0c4067f022..d4d7eae870 100644
--- a/test/files/pos/bug767.scala
+++ b/test/files/pos/bug767.scala
@@ -4,7 +4,7 @@ abstract class AbsCell {
private var value: T = init
def get: T = value
def set (x: T) { value = x }
-
+
class Node {
val foo = 1
}
diff --git a/test/files/pos/bug788.scala b/test/files/pos/bug788.scala
index 19638dd170..3da88a2d26 100644
--- a/test/files/pos/bug788.scala
+++ b/test/files/pos/bug788.scala
@@ -4,7 +4,7 @@ trait Test {
type Node <: NodeImpl;
trait NodeImpl;
type Expression <: Node with ExpressionImpl;
- trait ExpressionImpl extends NodeImpl {
+ trait ExpressionImpl extends NodeImpl {
def self : Expression;
}
type Named <: Node with NamedImpl;
diff --git a/test/files/pos/bug802.scala b/test/files/pos/bug802.scala
index 2dea7036d6..124d4915bc 100644
--- a/test/files/pos/bug802.scala
+++ b/test/files/pos/bug802.scala
@@ -1,17 +1,17 @@
package test;
trait Test {
- abstract class BracesImpl {
+ abstract class BracesImpl {
type Singleton;
type Brace <: Singleton with BraceImpl;
- trait BraceImpl;
+ trait BraceImpl;
trait ForFile;
}
- abstract class ParensImpl extends BracesImpl {
+ abstract class ParensImpl extends BracesImpl {
type Brace <: Singleton with BraceImpl;
trait BraceImpl extends super.BraceImpl;
}
val parens : ParensImpl;
- abstract class BracksImpl extends BracesImpl {
+ abstract class BracksImpl extends BracesImpl {
type Brace <: Singleton with BraceImpl;
trait BraceImpl extends super.BraceImpl;
}
diff --git a/test/files/pos/bug807.scala b/test/files/pos/bug807.scala
index 0eeb92ea24..ed73fe3f97 100644
--- a/test/files/pos/bug807.scala
+++ b/test/files/pos/bug807.scala
@@ -6,7 +6,7 @@ trait Matcher {
trait HasLinks {
def link(b : Boolean) : Link = null;
}
-
+
}
trait BraceMatcher extends Matcher {
trait BracePair {
diff --git a/test/files/pos/bug927.scala b/test/files/pos/bug927.scala
index 7d4c59d94c..8f3cdac20f 100644
--- a/test/files/pos/bug927.scala
+++ b/test/files/pos/bug927.scala
@@ -7,5 +7,5 @@ object Test {
}
val str: Stream[Int] = Stream.fromIterator(List(1,2,3).iterator)
assert(sum(str) == 6)
-
+
}
diff --git a/test/files/pos/bug946.scala b/test/files/pos/bug946.scala
index c4bd6e9ba4..9f4cdbc043 100644
--- a/test/files/pos/bug946.scala
+++ b/test/files/pos/bug946.scala
@@ -1,7 +1,7 @@
object pmbugbounds {
trait Bar
class Foo[t <: Bar] {}
-
+
(new Foo[Bar]) match {
case _ : Foo[x] => null
}
diff --git a/test/files/pos/builders.scala b/test/files/pos/builders.scala
index 0b620769c0..51d8af88f8 100644
--- a/test/files/pos/builders.scala
+++ b/test/files/pos/builders.scala
@@ -18,16 +18,16 @@ object builders {
def += (elem: B) { buf += elem }
def result: List[B] = buf.toList
}
-/*
+/*
def fill[A, Dim1, Dim2, Coll](n1: Int, n2: Int, elem: A)(implicit b1: Builder[Coll, Dim1, A], b2: Builder[Coll, Dim2, Dim1]) = {
for (i <- 0 until n1) {
for (j <- 0 until n2) {
b1 += elem
- }
+ }
b2 += b1.result
}
b2.result
- }
+ }
*/
/*
implicit def arrayBuilder[A, B] = new Builder[Array[A], Array[B], B] {
@@ -35,7 +35,7 @@ object builders {
private val buf = new scala.collection.mutable.ListBuffer[B]
def += (elem: B) { buf += elem }
def result: Array[B] = buf.toArray
- }
+ }
*/
class Iter[A, C](elems: List[A]) {
def ++ [B >: A, D](xs: Iterable[B])(implicit b: Builder[C, D, B]): D = {
@@ -48,7 +48,7 @@ object builders {
b.result
}
}
-
+
def main(args : Array[String]) : Unit = {
val x1 = new Iter[Int, List[Int]](List(1, 2, 3))
// val x2 = new Iter[Int, Array[Int]](List(1, 2, 3))
diff --git a/test/files/pos/caseClassInMethod.scala b/test/files/pos/caseClassInMethod.scala
new file mode 100644
index 0000000000..958e5dd473
--- /dev/null
+++ b/test/files/pos/caseClassInMethod.scala
@@ -0,0 +1,5 @@
+object t {
+ def f = { object C; case class C(); 1 }
+ // pending: def g = { case class D(x: Int); object D; 2 }
+ def h = { case class E(y: Int = 10); 3 }
+}
diff --git a/test/files/pos/channels.scala b/test/files/pos/channels.scala
index 4c7be2cc82..ce6b79feaf 100644
--- a/test/files/pos/channels.scala
+++ b/test/files/pos/channels.scala
@@ -6,7 +6,7 @@ case class ![a](chan: Channel[a], data: a)
/*
object Bang {
- def unapply[a](x: ![a]): Option[{Channel[a], a}] =
+ def unapply[a](x: ![a]): Option[{Channel[a], a}] =
Some(x.chan, x.data)
}
@@ -14,7 +14,7 @@ object Bang {
object Test extends Application {
object IC extends Channel[Int]
def f[b](x: ![b]): Int = x match {
- case send: ![c] =>
+ case send: ![c] =>
send.chan match {
case IC => send.data
}
@@ -27,4 +27,4 @@ object Test2 extends Application {
case IC ! x => x
}
}
-
+
diff --git a/test/files/pos/clsrefine.scala b/test/files/pos/clsrefine.scala
index 0a016dec07..b29c01db8b 100644
--- a/test/files/pos/clsrefine.scala
+++ b/test/files/pos/clsrefine.scala
@@ -25,9 +25,9 @@ object test {
val y1, y2 = 1;
}
val a: A { type X1 = Int; type X2 = Int } = b;
- val a1 = new A {
+ val a1 = new A {
type X1 = Int;
- type X2 = String;
+ type X2 = String;
val x1 = 1;
val x2 = "hello"
}
diff --git a/test/files/pos/collectGenericCC.scala b/test/files/pos/collectGenericCC.scala
index 099a53d3f5..750475207f 100644
--- a/test/files/pos/collectGenericCC.scala
+++ b/test/files/pos/collectGenericCC.scala
@@ -7,8 +7,8 @@ object Test {
for (a <- r) b += a
b.result
}
-
- collect[Int, Vector[Int]](List(1,2,3,4))
+
+ collect[Int, Vector[Int]](List(1,2,3,4))
collect[Char, String](List('1','2','3','4'))
- collect[Char, Array[Char]](List('1','2','3','4'))
+ collect[Char, Array[Char]](List('1','2','3','4'))
}
\ No newline at end of file
diff --git a/test/files/pos/collections.scala b/test/files/pos/collections.scala
index 61a25528c7..23b23d016e 100644
--- a/test/files/pos/collections.scala
+++ b/test/files/pos/collections.scala
@@ -2,7 +2,7 @@ package mixins;
import scala.collection.mutable._;
-class Collections extends HashSet[Int] with ObservableSet[Int,Collections] {
+class Collections extends HashSet[Int] with ObservableSet[Int] {
override def +=(elem: Int): this.type = super.+=(elem);
override def -=(elem: Int): this.type = super.-=(elem);
override def clear: Unit = super.clear;
diff --git a/test/files/pos/context.scala b/test/files/pos/context.scala
index 4e11d07eb4..13f6bb43c5 100644
--- a/test/files/pos/context.scala
+++ b/test/files/pos/context.scala
@@ -12,10 +12,10 @@ class Context {
abstract class SymbolWrapper {
val context: Context;
import context._;
-
+
class Symbols {
self: context.symbols.type =>
-
+
abstract class Symbol {
def typ: types.Type;
def sym: Symbol = typ.sym;
@@ -29,7 +29,7 @@ abstract class TypeWrapper {
class Types {
self: context.types.type =>
-
+
abstract class Type {
def sym: symbols.Symbol;
def typ: Type = sym.typ;
diff --git a/test/files/pos/cyclics.scala b/test/files/pos/cyclics.scala
index 395e88815a..051bdd6ed8 100644
--- a/test/files/pos/cyclics.scala
+++ b/test/files/pos/cyclics.scala
@@ -1,26 +1,26 @@
trait Param[T]
trait Abs { type T }
trait Cyclic1[A <: Param[A]] // works
-trait Cyclic2[A <: Abs { type T <: A }]
-trait Cyclic3 { type A <: Abs { type T = A } }
+trait Cyclic2[A <: Abs { type T <: A }]
+trait Cyclic3 { type A <: Abs { type T = A } }
trait Cyclic4 { type A <: Param[A] } // works
-trait Cyclic5 { type AA <: Abs; type A <: AA { type T = A } }
+trait Cyclic5 { type AA <: Abs; type A <: AA { type T = A } }
trait IterableTemplate {
type Elem
type Constr <: IterableTemplate
type ConstrOf[A] = Constr { type Elem = A }
-
+
def iterator: Iterator[Elem]
-
+
def map [B] (f: Elem => B): ConstrOf[B]
-
+
def foreach(f: Elem => Unit) = iterator.foreach(f)
}
trait Iterable[A] extends IterableTemplate { self =>
- type Elem
+ type Elem
type Constr <: Iterable[A] { type Constr <: Iterable.this.Constr }
}
diff --git a/test/files/pos/depexists.scala b/test/files/pos/depexists.scala
new file mode 100644
index 0000000000..d539c844c1
--- /dev/null
+++ b/test/files/pos/depexists.scala
@@ -0,0 +1,5 @@
+object depexists {
+
+ val c: Cell[(a, b)] forSome { type a <: Number; type b <: (a, a) } = null
+ val d = c
+}
diff --git a/test/files/pos/depmet_1.flags b/test/files/pos/depmet_1.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_1.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_1.scala b/test/files/pos/depmet_1.scala
new file mode 100644
index 0000000000..166e991817
--- /dev/null
+++ b/test/files/pos/depmet_1.scala
@@ -0,0 +1,6 @@
+object Test {
+ def precise(x: String)(y: x.type): x.type = y
+ val foo = "foo"
+ val fun : foo.type => foo.type = precise(foo)
+ val bar : foo.type = precise(foo)(foo)
+}
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_chaining_zw.flags b/test/files/pos/depmet_implicit_chaining_zw.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_implicit_chaining_zw.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_chaining_zw.scala b/test/files/pos/depmet_implicit_chaining_zw.scala
new file mode 100644
index 0000000000..93da3b0f8e
--- /dev/null
+++ b/test/files/pos/depmet_implicit_chaining_zw.scala
@@ -0,0 +1,28 @@
+trait Zero
+trait Succ[N]
+
+trait ZipWith[N, S] {
+ type T
+ val x: T = error("")
+}
+
+object ZipWith {
+ implicit def ZeroZipWith[S] = new ZipWith[Zero, S] {
+ type T = Stream[S]
+ }
+
+ implicit def SuccZipWith[N, S, R](implicit zWith : ZipWith[N, R]) = new ZipWith[Succ[N], S => R] {
+ type T = Stream[S] => zWith.T // dependent types replace the associated types functionality
+ }
+
+ // can't use implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]],
+ // since that will chop of the {type T = ... } refinement in adapt (pt = ZipWith[Succ[Succ[Zero]], Int => String => Boolean])
+ // this works
+ // def zipWith(implicit zw: ZipWith[Succ[Succ[Zero]], Int => String => Boolean]): zw.T = zw.x
+ // thus, I present ?: implicitly on steroids!
+ def ?[T <: AnyRef](implicit w: T): w.type = w
+
+ type _2 = Succ[Succ[Zero]]
+ val zw = ?[ZipWith[_2, Int => String => Boolean]].x // : Stream[Int] => Stream[String] => Stream[Boolean]
+ // val zw = implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]{type T = Stream[Int] => Stream[String] => Stream[Boolean]}].x
+}
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_norm_ret.flags b/test/files/pos/depmet_implicit_norm_ret.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_implicit_norm_ret.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_norm_ret.scala b/test/files/pos/depmet_implicit_norm_ret.scala
new file mode 100644
index 0000000000..bafd2f7c51
--- /dev/null
+++ b/test/files/pos/depmet_implicit_norm_ret.scala
@@ -0,0 +1,29 @@
+object Test{
+ def ?[S <: AnyRef](implicit w : S) : w.type = w
+
+ // fallback, lower priority (overloading rules apply: pick alternative in subclass lowest in subtyping lattice)
+ class ZipWithDefault {
+ implicit def ZeroZipWith[S] = new ZipWith[S] {
+ type T = Stream[S]
+ }
+ }
+
+ object ZipWith extends ZipWithDefault {
+ // def apply[S: ZipWith](s : S) = ?[ZipWith[S]].zipWith(s) // TODO: bug return type should be inferred
+ def apply[S](s : S)(implicit zw: ZipWith[S]): zw.T = zw.zipWith(s)
+
+ implicit def SuccZipWith[S,R](implicit zWith : ZipWith[R]) = new ZipWith[S => R] {
+ type T = Stream[S] => zWith.T // dependent types replace the associated types functionality
+ }
+ }
+
+ trait ZipWith[S] {
+ type T
+ def zipWith : S => T = error("")
+ }
+
+ // bug: inferred return type = (Stream[A]) => java.lang.Object with Test.ZipWith[B]{type T = Stream[B]}#T
+ // this seems incompatible with vvvvvvvvvvvvvvvvvvvvvv -- #3731
+ def map[A,B](f : A => B) /* : Stream[A] => Stream[B]*/ = ZipWith(f)
+ val tst: Stream[Int] = map{x: String => x.length}(Stream("a"))
+}
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session.flags b/test/files/pos/depmet_implicit_oopsla_session.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_session.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session.scala b/test/files/pos/depmet_implicit_oopsla_session.scala
new file mode 100644
index 0000000000..e2c67d7c32
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_session.scala
@@ -0,0 +1,63 @@
+object Sessions {
+ trait Session[This] {
+ type Dual
+ type HasDual[D] = Session[This]{type Dual=D}
+ def run(p: This, dp: Dual): Unit
+ }
+
+ implicit object StopSession extends Session[Stop] {
+ type Dual = Stop
+
+ def run(p: Stop, dp: Stop): Unit = {}
+ }
+
+ implicit def InDual[A, B](implicit sessionDIn: Session[B]) =
+ new Session[In[A, B]] {
+ type Dual = Out[A, sessionDIn.Dual]
+
+ def run(p: In[A, B], dp: Dual): Unit =
+ sessionDIn.run(p.func(dp.x), dp.y)
+ }
+
+ implicit def OutDual[A, B](implicit sessionDOut: Session[B]) =
+ new Session[Out[A, B]] {
+ type Dual = In[A, sessionDOut.Dual]
+
+ def run(p: Out[A, B], dp: Dual): Unit =
+ sessionDOut.run(p.y, dp.func(p.x))
+ }
+
+ sealed case class Stop()
+ sealed case class In[-A, +B](func: A => B)
+ sealed case class Out[+A, +B](x: A, y: B)
+
+ def addServer =
+ In{x: Int =>
+ In{y: Int => System.out.println("Thinking")
+ Out(x+y,
+ Stop())}}
+
+ def addClient =
+ Out(3,
+ Out(4, { System.out.println("Waiting")
+ In{z: Int => System.out.println(z)
+ Stop()}}))
+
+ def runSession[S, D: Session[S]#HasDual](p: S, dp: D) =
+ implicitly[Session[S]#HasDual[D]].run(p, dp)
+
+ // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]#HasDual[D]) =
+ // s.run(p, dp)
+ //
+ // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]{type Dual=D}) =
+ // s.run(p, dp)
+
+ // TODO: can we relax the ordering restrictions on dependencies so that we can use
+ // def runSession[S](p: S, dp: s.Dual)(implicit s: Session[S]) =
+ // s.run(p, dp)
+ // to emphasise similarity of type parameters and implicit arguments:
+ // def runSession[S][val s: Session[S]](p: S, dp: s.Dual) =
+ // s.run(p, dp)
+
+ def myRun = runSession(addServer, addClient)
+}
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session_2.flags b/test/files/pos/depmet_implicit_oopsla_session_2.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_session_2.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session_2.scala b/test/files/pos/depmet_implicit_oopsla_session_2.scala
new file mode 100644
index 0000000000..8d7daa6dc6
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_session_2.scala
@@ -0,0 +1,87 @@
+object Sessions {
+ def ?[T <: AnyRef](implicit w: T): w.type = w
+
+ // session states
+ sealed case class Stop()
+ sealed case class In[-Data, +Cont](recv: Data => Cont)
+ sealed case class Out[+Data, +Cont](data: Data, cont: Cont)
+
+ // the type theory of communicating sessions:
+
+ // an instance of type Session[S]{type Dual=D} is evidence that S and D are duals
+ // such a value witnesses this fact by describing how to compose an instance of S with an instance of D (through the run method)
+ trait Session[S] { type Self = S
+ type Dual
+ type HasDual[D] = Session[Self]{type Dual=D}
+ def run(self: Self, dual: Dual): Unit
+ }
+
+ // friendly interface to the theory
+ def runSession[S, D: Session[S]#HasDual](session: S, dual: D) =
+ ?[Session[S]#HasDual[D]].run(session, dual)
+
+ // facts in the theory:
+
+ // ------------------------[StopDual]
+ // Stop is the dual of Stop
+ implicit object StopDual extends Session[Stop] {
+ type Dual = Stop
+
+ def run(self: Self, dual: Dual): Unit = {}
+ }
+
+ // CD is the dual of Cont
+ // -------------------------------------------[InDual]
+ // Out[Data, CD] is the dual of In[Data, Cont]
+ implicit def InDual[Data, Cont](implicit cont: Session[Cont]) = new Session[In[Data, Cont]] {
+ type Dual = Out[Data, cont.Dual]
+
+ def run(self: Self, dual: Dual): Unit =
+ cont.run(self.recv(dual.data), dual.cont)
+ }
+
+ // CD is the dual of Cont
+ // -------------------------------------------[OutDual]
+ // In[Data, CD] is the dual of Out[Data, Cont]
+ implicit def OutDual[Data, Cont](implicit cont: Session[Cont]) = new Session[Out[Data, Cont]] {
+ type Dual = In[Data, cont.Dual]
+
+ def run(self: Self, dual: Dual): Unit =
+ cont.run(self.cont, dual.recv(self.data))
+ }
+
+ // a concrete session
+ def addServer =
+ In{x: Int =>
+ In{y: Int => System.out.println("Thinking")
+ Out(x+y,
+ Stop())}}
+
+ def addClient =
+ Out(3,
+ Out(4, { System.out.println("Waiting")
+ In{z: Int => System.out.println(z)
+ Stop()}}))
+
+ def myRun = runSession(addServer, addClient)
+}
+
+/* future improvements:
+
+
+ // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]#HasDual[D]) =
+ // s.run(p, dp)
+ //
+ // def runSession[S, D](p: S, dp: D)(implicit s: Session[S]{type Dual=D}) =
+ // s.run(p, dp)
+
+ // TODO: can we relax the ordering restrictions on dependencies so that we can write
+ // one possibility: graph of dependencies between arguments must be acyclic
+ // def runSession[S](p: S, dp: s.Dual)(implicit s: Session[S]) =
+ // s.run(p, dp)
+ // to emphasise similarity of type parameters and implicit arguments:
+ // def runSession[S][val s: Session[S]](p: S, dp: s.Dual) =
+ // s.run(p, dp)
+
+
+*/
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session_simpler.flags b/test/files/pos/depmet_implicit_oopsla_session_simpler.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_session_simpler.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session_simpler.scala b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
new file mode 100644
index 0000000000..d2986ef56f
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
@@ -0,0 +1,44 @@
+object Sessions {
+ trait Session {
+ type Dual <: Session
+
+ def run(dp: Dual): Unit
+ }
+
+ sealed case class Stop extends Session {
+ type Dual = Stop
+
+ def run(dp: Dual): Unit = {}
+ }
+
+ // can't write B <: Session{type Dual = BDual} due to limitations in type inference algorithm
+ // (type variables cannot occur on both sides of <:)
+ // using B#Dual instead of BDual is too imprecise, since it is disconnected from the actual argument that is passed for B
+ // would be nice if we could introduce a universal quantification over BDual that is not part of the
+ // type parameter list
+ sealed case class In[A, B <: Session, BDual <: Session](recv: A => B)(implicit dual: B <:< Session{type Dual=BDual}) extends Session {
+ type Dual = Out[A, BDual]
+
+ def run(dp: Dual): Unit = recv(dp.data) run dp.cont
+ }
+
+ sealed case class Out[A, B <: Session](data: A, cont: B) extends Session {
+ type Dual = In[A, cont.Dual, cont.Dual#Dual]
+
+ def run(dp: Dual): Unit = cont run dp.recv(data)
+ }
+
+ def addServer =
+ In{x: Int =>
+ In{y: Int => System.out.println("Thinking")
+ Out(x+y,
+ Stop())}}
+
+ def addClient =
+ Out(3,
+ Out(4, { System.out.println("Waiting")
+ In{z: Int => System.out.println(z)
+ Stop()}}))
+
+ def myRun = addServer run addClient
+}
diff --git a/test/files/pos/depmet_implicit_oopsla_zipwith.flags b/test/files/pos/depmet_implicit_oopsla_zipwith.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_zipwith.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_zipwith.scala b/test/files/pos/depmet_implicit_oopsla_zipwith.scala
new file mode 100644
index 0000000000..fe69802d26
--- /dev/null
+++ b/test/files/pos/depmet_implicit_oopsla_zipwith.scala
@@ -0,0 +1,44 @@
+case class Zero()
+case class Succ[N](x: N)
+import Stream.{cons, continually}
+
+trait ZipWith[N, S] {
+ type T
+
+ def manyApp: N => Stream[S] => T
+ def zipWith: N => S => T = n => f => manyApp(n)(continually(f))
+}
+object ZipWith {
+ implicit def ZeroZipWith[S] = new ZipWith[Zero, S] {
+ type T = Stream[S]
+
+ def manyApp = n => xs => xs
+ }
+
+ implicit def SuccZipWith[N, S, R](implicit zw: ZipWith[N, R]) =
+ new ZipWith[Succ[N],S => R] {
+ type T = Stream[S] => zw.T
+
+ def zapp[A, B](xs: Stream[A => B], ys: Stream[A]): Stream[B] = (xs, ys) match {
+ case (cons(f, fs), cons(s, ss)) => cons(f(s),zapp(fs, ss))
+ case (_, _) => Stream.empty
+ }
+
+ def manyApp = n => xs => ss => n match {
+ case Succ(i) => zw.manyApp(i)(zapp(xs, ss))
+ }
+ }
+}
+
+object Test {
+ def zWith[N, S](n: N, s: S)(implicit zw: ZipWith[N, S]): zw.T = zw.zipWith(n)(s)
+
+ def zipWith0: Stream[Int] = zWith(Zero(),0)
+
+// (Stream[A]) => java.lang.Object with ZipWith[Zero,B]{type T = Stream[B]}#T
+// should normalise to: Stream[A] => Stream[B]
+ def map[A, B](f: A => B) = zWith(Succ(Zero()),f)
+
+ def zipWith3[A, B, C, D](f: A => B => C => D) = //: Stream[A] => Stream[B] => Stream[C] => Stream[D] = // BUG why do we need a return type?
+ zWith(Succ(Succ(Succ(Zero()))),f)
+}
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_tpbetareduce.flags b/test/files/pos/depmet_implicit_tpbetareduce.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/depmet_implicit_tpbetareduce.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_tpbetareduce.scala b/test/files/pos/depmet_implicit_tpbetareduce.scala
new file mode 100644
index 0000000000..c0b9b4e8e4
--- /dev/null
+++ b/test/files/pos/depmet_implicit_tpbetareduce.scala
@@ -0,0 +1,12 @@
+trait HOSeq {
+ trait Accumulator[+coll[x], elT]
+ trait Iterable[+t] {
+ type m[+x]
+ def accumulator[t]: Accumulator[m, t]
+ }
+ implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {}
+ trait List[+t] extends Iterable[t] {
+ type m[+x] = List[x]
+ def accumulator[t]: Accumulator[List, t] = listAccumulator[t]
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/gadt-gilles.scala b/test/files/pos/gadt-gilles.scala
index 662be9017d..309168c1e0 100644
--- a/test/files/pos/gadt-gilles.scala
+++ b/test/files/pos/gadt-gilles.scala
@@ -6,7 +6,7 @@ object Test {
val x: A[C with D] = new B[C, D] {}
val y: A[C with D] = x match { case b: B[u, v] => (new B[u, v] {}): A[u with v] } // OK
-
+
def f[T, U](p: A[T with U]): A[T with U] = p match { case b: B[u, v] => new A[u with v] {} } // Not OK
}
diff --git a/test/files/pos/gadts2.scala b/test/files/pos/gadts2.scala
index fc2a7e4333..2263cf13a6 100644
--- a/test/files/pos/gadts2.scala
+++ b/test/files/pos/gadts2.scala
@@ -9,8 +9,8 @@ object Test {
final case class NumTerm(val n: Number) extends Term[Number]
def f[a](t: Term[a], c: Cell[a]) {
- t match {
- case NumTerm(n) => c.x = MyDouble(1.0)
+ t match {
+ case NumTerm(n) => c.x = MyDouble(1.0)
}
}
diff --git a/test/files/pos/gosh.scala b/test/files/pos/gosh.scala
index 183ce9df1d..af49cbf16b 100644
--- a/test/files/pos/gosh.scala
+++ b/test/files/pos/gosh.scala
@@ -7,35 +7,35 @@ object ShapeTest extends Application {
abstract class Shape {
def draw(): Unit
}
-
+
class Line(s: Point, e: Point) extends Shape {
def draw() { Console.println("draw line " + s + "," + e) }
}
-
+
abstract class Foo {
type T <: Object
-
+
def show(o: T): Unit
def print() { Console.println("in Foo") }
}
-
+
abstract class ShapeFoo extends Foo {
type T <: Shape
def show(o: T) { o.draw() }
override def print() { Console.println("in ShapeFoo") }
}
-
+
class LineFoo extends ShapeFoo {
type T = Line
override def print() { Console.println("in LineFoo") }
}
-
+
val p1 = new Point(1,4)
val p2 = new Point(12, 28)
-
+
val l1 = new Line(p1, p2)
-
+
val l = new ShapeFoo { // ** //
type T = Line // ** //
override def print() { Console.println("in LineFoo") } // ** //
diff --git a/test/files/pos/gui.scala b/test/files/pos/gui.scala
index 9d87d1b869..5070339f47 100644
--- a/test/files/pos/gui.scala
+++ b/test/files/pos/gui.scala
@@ -2,7 +2,7 @@ object Geom {
trait Shape
case class Point(x: Int, y: Int) extends Shape
case class Rectangle(ll: Point, ur: Point) extends Shape {
- def inset(delta: Int) =
+ def inset(delta: Int) =
Rectangle(Point(ll.x - delta, ll.y - delta), Point(ur.x + delta, ur.y + delta));
}
}
@@ -20,7 +20,7 @@ trait Screen {
}
object DummyScreen extends Screen {
- def drawRect(r: Geom.Rectangle, c: Color) {
+ def drawRect(r: Geom.Rectangle, c: Color) {
Console.println("draw " + r + " with " + c)
}
def fillRect(r: Geom.Rectangle, c: Color) {
@@ -55,7 +55,7 @@ object GUI {
def mouseDown(p: Geom.Point): Unit
}
- abstract class Button(scr: Screen, p: Geom.Point, name: String)
+ abstract class Button(scr: Screen, p: Geom.Point, name: String)
extends Glyph with MouseCtl {
var enabled: Boolean = false
val label = new Label(scr, p, name)
@@ -86,7 +86,7 @@ object GUIClient {
def quit() { Console.println("application exited") }
}
- class QuitButton (scr: Screen, p: Geom.Point, name: String, a: Application)
+ class QuitButton (scr: Screen, p: Geom.Point, name: String, a: Application)
extends GUI.Button(scr, p, name) {
def doit() { a.quit() }
}
diff --git a/test/files/pos/imp2.scala b/test/files/pos/imp2.scala
index 5460c60015..407b07f3fd 100644
--- a/test/files/pos/imp2.scala
+++ b/test/files/pos/imp2.scala
@@ -1,5 +1,5 @@
object Test {
- import collection.mutable._
+ import collection.mutable._
import collection.mutable._
val x = new HashMap
}
diff --git a/test/files/pos/implicits.scala b/test/files/pos/implicits.scala
index aeb6591507..2c01dd0ba8 100644
--- a/test/files/pos/implicits.scala
+++ b/test/files/pos/implicits.scala
@@ -1,3 +1,28 @@
+// #1435
+object t1435 {
+ implicit def a(s:String):String = error("")
+ implicit def a(i:Int):String = error("")
+ implicit def b(i:Int):String = error("")
+}
+
+class C1435 {
+ val v:String = {
+ import t1435.a
+ 2
+ }
+}
+
+// #1492
+class C1492 {
+
+ class X
+
+ def foo(x: X => X) {}
+
+ foo ( implicit x => implicitly[X] )
+ foo { implicit x => implicitly[X] }
+}
+
// #1579
object Test1579 {
class Column
@@ -20,7 +45,7 @@ object Test1625 {
implicit def byName[A](x: =>A) = new Wrapped(x)
implicit def byVal[A](x: A) = x
-
+
def main(args: Array[String]) = {
// val res:Wrapped = 7 // works
@@ -32,7 +57,33 @@ object Test1625 {
}
object Test2188 {
- implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*)
+ implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*)
val x: java.util.List[String] = List("foo")
}
+
+object TestNumericWidening {
+ val y = 1
+ val x: java.lang.Long = y
+}
+
+// #2709
+package foo2709 {
+ class A
+ class B
+
+ package object bar {
+ implicit def a2b(a: A): B = new B
+ }
+
+ package bar {
+ object test {
+ new A: B
+ }
+ }
+}
+
+// Problem with specs
+object specsProblem {
+ println(implicitly[Manifest[Class[_]]])
+}
diff --git a/test/files/pos/imports.scala b/test/files/pos/imports.scala
index f6a55e5e07..5f70a5d2da 100644
--- a/test/files/pos/imports.scala
+++ b/test/files/pos/imports.scala
@@ -7,10 +7,10 @@ object test {
val foo = 1;
- p("hello"); print("world"); S.out.println("!");
+ p("hello"); print("world"); S.out.println("!");
S.out.flush();
}
object test1 {
import test._;
foo
-}
+}
diff --git a/test/files/pos/infer2.scala b/test/files/pos/infer2.scala
index e065219a80..06d0f5814f 100644
--- a/test/files/pos/infer2.scala
+++ b/test/files/pos/infer2.scala
@@ -5,4 +5,4 @@ case class nil[T] extends Lst[T]
object test {
Console.println(cons(1, nil()))
}
-
+
diff --git a/test/files/pos/inferbroadtype.scala b/test/files/pos/inferbroadtype.scala
index de8f7aa184..467bd0f186 100644
--- a/test/files/pos/inferbroadtype.scala
+++ b/test/files/pos/inferbroadtype.scala
@@ -2,7 +2,7 @@ object Test {
abstract class Base { val changesBaseClasses: Boolean }
class Concrete extends Base { val changesBaseClasses = true }
def getBase : Base = new Concrete
-
+
var c = new Base { val changesBaseClasses = true }
c = getBase
}
diff --git a/test/files/pos/java-access-pos/J.java b/test/files/pos/java-access-pos/J.java
new file mode 100644
index 0000000000..4f2024673c
--- /dev/null
+++ b/test/files/pos/java-access-pos/J.java
@@ -0,0 +1,15 @@
+package a.b;
+
+public abstract class J {
+ public J() { }
+ J(int x1) { }
+ protected J(int x1, int x2) { }
+
+ abstract void packageAbstract();
+ protected abstract void protectedAbstract();
+ public abstract void publicAbstract();
+
+ void packageConcrete() { return; }
+ protected void protectedConcrete() { return; }
+ public void publicConcrete() { return; }
+}
diff --git a/test/files/pos/java-access-pos/S1.scala b/test/files/pos/java-access-pos/S1.scala
new file mode 100644
index 0000000000..cc739d9555
--- /dev/null
+++ b/test/files/pos/java-access-pos/S1.scala
@@ -0,0 +1,67 @@
+package a.b
+
+/** Declaring "override" all the time.
+ */
+class S1 extends J {
+ override private[b] def packageAbstract() = ()
+ override protected[b] def protectedAbstract() = ()
+ override def publicAbstract() = ()
+
+ override private[b] def packageConcrete() = ()
+ override protected[b] def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
+
+/** Implementing abstracts.
+ */
+class S2 extends J {
+ private[b] def packageAbstract() = ()
+ protected[b] def protectedAbstract() = ()
+ def publicAbstract() = ()
+}
+
+/** Widening access.
+ */
+class S3 extends J {
+ protected[b] def packageAbstract() = ()
+ protected[b] def protectedAbstract() = ()
+ def publicAbstract() = ()
+
+ override protected[b] def packageConcrete() = ()
+ override protected[b] def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
+/** More widening.
+ */
+class S4 extends J {
+ private[a] def packageAbstract() = ()
+ protected[a] def protectedAbstract() = ()
+ def publicAbstract() = ()
+
+ override private[a] def packageConcrete() = ()
+ override protected[a] def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
+/** Yet more widening.
+ */
+class S5 extends J {
+ def packageAbstract() = ()
+ def protectedAbstract() = ()
+ def publicAbstract() = ()
+
+ override def packageConcrete() = ()
+ override def protectedConcrete() = ()
+ override def publicConcrete() = ()
+}
+/** Constructors.
+ */
+class S6 extends J(1) {
+ def packageAbstract() = ()
+ def protectedAbstract() = ()
+ def publicAbstract() = ()
+}
+class S7 extends J(1, 2) {
+ def packageAbstract() = ()
+ def protectedAbstract() = ()
+ def publicAbstract() = ()
+}
\ No newline at end of file
diff --git a/test/files/pos/lambdalift.scala b/test/files/pos/lambdalift.scala
index bc997d6f81..10bbf3ac12 100644
--- a/test/files/pos/lambdalift.scala
+++ b/test/files/pos/lambdalift.scala
@@ -11,5 +11,5 @@ object test {
def h() = x;
}
g() + new inner().g();
- }
+ }
}
diff --git a/test/files/pos/listpattern.scala b/test/files/pos/listpattern.scala
index 47145bf037..b64068474c 100644
--- a/test/files/pos/listpattern.scala
+++ b/test/files/pos/listpattern.scala
@@ -1,7 +1,7 @@
trait Value {}
case class FloatValue(x: Double) extends Value
object Test {
- def applyNumeric(op: (Double, Double) => Double):
+ def applyNumeric(op: (Double, Double) => Double):
PartialFunction[List[Value], Value] = {
case List(FloatValue(x), FloatValue(y)) => FloatValue(op(x, y))
}
diff --git a/test/files/pos/looping-jsig.scala b/test/files/pos/looping-jsig.scala
index 6e3313c463..e2d9e76229 100644
--- a/test/files/pos/looping-jsig.scala
+++ b/test/files/pos/looping-jsig.scala
@@ -1,15 +1,15 @@
import scala.collection.mutable._
trait BugTrack {
- trait B[+T]
+ trait B[+T]
val cache : HashMap[A[_], B[_]] = HashMap.empty
- def A[T](f: Int => B[T]): A[T]
+ def A[T](f: Int => B[T]): A[T]
= new A[T]{def apply(in: Int) = f(in)}
-
+
abstract class A[+T] extends (Int => B[T]) {
def giveMeSame = this
- }
+ }
def amethod[T](p: =>A[T]): A[T] = A(in => cache.get(p) match {
case Some(res) => res
diff --git a/test/files/pos/manifest1.scala b/test/files/pos/manifest1.scala
index 4d3b3bfa48..8901aa7437 100644
--- a/test/files/pos/manifest1.scala
+++ b/test/files/pos/manifest1.scala
@@ -13,8 +13,9 @@ object Test {
abstract class C { type T = String; val x: T }
val c = new C { val x = "abc" }
foo(c.x)
- abstract class D { type T; val x: T }
- val d: D = new D { type T = String; val x = "x" }
+ abstract class D { type T; implicit val m: Manifest[T]; val x: T }
+ val stringm = implicitly[Manifest[String]]
+ val d: D = new D { type T = String; val m = stringm; val x = "x" }
+ import d.m
foo(d.x)
-
}
diff --git a/test/files/pos/michel6.scala b/test/files/pos/michel6.scala
index b32e8bed75..f312bf1796 100644
--- a/test/files/pos/michel6.scala
+++ b/test/files/pos/michel6.scala
@@ -1,6 +1,6 @@
object M {
def f(x: Int): Unit = {}
-
+
def g(): Int => Unit =
if (0 == 0) f else g()
}
diff --git a/test/files/pos/needstypeearly.scala b/test/files/pos/needstypeearly.scala
index a90c2575f2..bd93b5c45d 100644
--- a/test/files/pos/needstypeearly.scala
+++ b/test/files/pos/needstypeearly.scala
@@ -1,4 +1,4 @@
-abstract class NeedsXEarly {
- val x: Int
+abstract class NeedsXEarly {
+ val x: Int
}
class Foo extends { val x = 1 } with NeedsXEarly
diff --git a/test/files/pos/nested2.scala b/test/files/pos/nested2.scala
index 302688a0ef..421ea6facf 100644
--- a/test/files/pos/nested2.scala
+++ b/test/files/pos/nested2.scala
@@ -5,5 +5,5 @@ class C[A] {
object Test {
val x = new C[String]
- val y: C[String]#D[int] = new x.D[int]
+ val y: C[String]#D[Int] = new x.D[Int]
}
diff --git a/test/files/pos/nothing_manifest_disambig.scala b/test/files/pos/nothing_manifest_disambig.scala
new file mode 100644
index 0000000000..076742033f
--- /dev/null
+++ b/test/files/pos/nothing_manifest_disambig.scala
@@ -0,0 +1,10 @@
+object Test {
+ def mani[T: Manifest](xs: T) = xs
+ mani(List())
+
+ def listElMani[T: Manifest](xs: List[T]) = xs
+ listElMani(List())
+
+ def foo[A, C](m : C)(implicit ev: C <:< Traversable[A], mani: Manifest[A]): (C, A, Manifest[A]) = (m, m.head, mani)
+ foo(List(1,2,3))
+}
\ No newline at end of file
diff --git a/test/files/pos/nullary.scala b/test/files/pos/nullary.scala
index 614fcdf480..8e5a834012 100644
--- a/test/files/pos/nullary.scala
+++ b/test/files/pos/nullary.scala
@@ -2,7 +2,7 @@ abstract class NullaryTest[T, m[s]] {
def nullary: String = "a"
val x = nullary
- def nullary2: T
+ def nullary2: T
val x2 = nullary2
def nullary3: m[T]
@@ -16,5 +16,5 @@ class Concrete extends NullaryTest[Int, List] {
object test {
(new Concrete).nullary2
- (new Concrete).nullary3
+ (new Concrete).nullary3
}
diff --git a/test/files/pos/nullary_poly.scala b/test/files/pos/nullary_poly.scala
index d2e1e127a1..4de7235a1c 100644
--- a/test/files/pos/nullary_poly.scala
+++ b/test/files/pos/nullary_poly.scala
@@ -2,9 +2,9 @@
class A {
// built-in
synchronized {}
-
+
val x: String = "a".asInstanceOf[String]
-
+
// user-defined:
def polyNullary[T]: List[T] = Nil
}
diff --git a/test/files/pos/partialfun.scala b/test/files/pos/partialfun.scala
index 9f32a22023..d8971e56fb 100644
--- a/test/files/pos/partialfun.scala
+++ b/test/files/pos/partialfun.scala
@@ -1,6 +1,6 @@
object partialfun {
- def applyPartial[b](f: PartialFunction[Option[String], b])(x: Option[String]) =
+ def applyPartial[b](f: PartialFunction[Option[String], b])(x: Option[String]) =
if (f.isDefinedAt(x)) f(x) else "<undefined>";
applyPartial {
diff --git a/test/files/pos/pat_gilles.scala b/test/files/pos/pat_gilles.scala
index 704d5b9c00..567d700eba 100644
--- a/test/files/pos/pat_gilles.scala
+++ b/test/files/pos/pat_gilles.scala
@@ -1,7 +1,7 @@
abstract class Table2 {
- val x: Any => Unit = { zz:Any =>
+ val x: Any => Unit = { zz:Any =>
zz match {
case Table2.CellUpdated(row, column) =>
val foo = Table2.CellUpdated(2,2)
diff --git a/test/files/pos/propagate.scala b/test/files/pos/propagate.scala
index 84f4f5d6d2..a7f9d6ce6d 100644
--- a/test/files/pos/propagate.scala
+++ b/test/files/pos/propagate.scala
@@ -14,4 +14,4 @@ class C {
-
+
diff --git a/test/files/pos/relax_implicit_divergence.scala b/test/files/pos/relax_implicit_divergence.scala
new file mode 100644
index 0000000000..8525c84bab
--- /dev/null
+++ b/test/files/pos/relax_implicit_divergence.scala
@@ -0,0 +1,7 @@
+class A(val options: Seq[String])
+
+object Test {
+ implicit def ss: Equiv[Seq[String]] = error("dummy")
+ implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = error("dummy")
+ implicitly[Equiv[A]]
+}
\ No newline at end of file
diff --git a/test/files/pos/return_thistype.scala b/test/files/pos/return_thistype.scala
index c0736c0ad9..f164e06cc0 100644
--- a/test/files/pos/return_thistype.scala
+++ b/test/files/pos/return_thistype.scala
@@ -1,8 +1,8 @@
// tests transformation of return type in typedTypeApply (see also tcpoly_gm.scala)
-class As {
- class A {
+class As {
+ class A {
def foo: A.this.type = bar.asInstanceOf[A.this.type]
def foo2: this.type = bar.asInstanceOf[this.type]
- def bar: A = null
+ def bar: A = null
}
}
diff --git a/test/files/pos/scala-singleton.scala b/test/files/pos/scala-singleton.scala
new file mode 100644
index 0000000000..5e0baa0cb2
--- /dev/null
+++ b/test/files/pos/scala-singleton.scala
@@ -0,0 +1,55 @@
+// A bunch of ridiculous seeming tests until you realize much
+// of this didn't work until the commit which accompanies this.
+object Test {
+ def f1(x: AnyRef with Singleton): AnyRef with Singleton = x
+ def f2[T <: AnyRef with Singleton](x: T): T = x
+
+ val x1: AnyRef with Singleton = "abc"
+ val x2 = "def"
+ final val x3 = "ghi"
+ val x4: String = "jkl"
+
+ // compiles...
+ def narrow1(x: AnyRef): AnyRef with Singleton = x
+
+ // compiles, still doesn't help.
+ def narrow2(x: AnyRef): AnyRef with Singleton = x.asInstanceOf[x.type]
+
+ // fails, wait, what? This fails and narrow1 compiles?
+ def narrow3(x: AnyRef): AnyRef with Singleton = x.asInstanceOf[AnyRef with Singleton]
+
+ // ok
+ def narrow4[T <: AnyRef](x: T): AnyRef with Singleton = x
+
+ object imp {
+ implicit def narrow4[T <: AnyRef](x: T): AnyRef with Singleton = x
+ val x5: String = "mno"
+ def imp1 = f1(x5)
+
+ // f2(x5) // doesn't work but I think it should
+ def imp2 = f2(narrow4(x5))
+ }
+
+ def main(args: Array[String]): Unit = {
+ // compiles
+ f1(x1)
+ f1(x2)
+ f1(x3)
+ f1(x4)
+
+ f2(x1)
+ // f2(x2)
+ // f2(x3) // maybe this one should work
+ // f2(x4)
+
+ f1(narrow1(x4))
+ f1(narrow2(x4))
+ f1(narrow3(x4))
+ f1(narrow4(x4))
+ f2(narrow1(x4))
+ f2(narrow2(x4))
+ f2(narrow3(x4))
+ f2(narrow4(x4))
+ }
+}
+
diff --git a/test/files/pos/scan.scala b/test/files/pos/scan.scala
new file mode 100644
index 0000000000..f056c77ba1
--- /dev/null
+++ b/test/files/pos/scan.scala
@@ -0,0 +1,23 @@
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val lst = List(1, 2, 3, 4, 5)
+
+ assert(lst.scanLeft(0)(_ + _) == List(0, 1, 3, 6, 10, 15))
+ assert(lst.scanRight(0)(_ + _) == List(15, 14, 12, 9, 5, 0))
+
+ val emp = List[Int]()
+ assert(emp.scanLeft(0)(_ + _) == List(0))
+ assert(emp.scanRight(0)(_ + _) == List(0))
+
+ val stream = Stream(1, 2, 3, 4, 5)
+ assert(stream.scanLeft(0)(_ + _) == Stream(0, 1, 3, 6, 10, 15))
+
+ assert(Stream.from(1).scanLeft(0)(_ + _).take(5) == Stream(0, 1, 3, 6, 10))
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/pos/scoping1.scala b/test/files/pos/scoping1.scala
index c9c0489e69..e695110c13 100644
--- a/test/files/pos/scoping1.scala
+++ b/test/files/pos/scoping1.scala
@@ -9,4 +9,4 @@ object This extends Application {
def foo() = ()
}
val c: C = new D
-}
+}
diff --git a/test/files/pos/selftails.scala b/test/files/pos/selftails.scala
new file mode 100644
index 0000000000..7c58543747
--- /dev/null
+++ b/test/files/pos/selftails.scala
@@ -0,0 +1,23 @@
+package net.liftweb.util
+
+/**
+* This trait adds functionality to Scala standard types
+*/
+trait BasicTypesHelpers { self: StringHelpers with ControlHelpers =>
+
+ /**
+ * Compare two arrays of Byte for byte equality.
+ * @return true if two Byte arrays contain the same bytes
+ */
+ def isEq(a: Array[Byte], b: Array[Byte]) = {
+ def eq(a: Array[Byte], b: Array[Byte], pos: Int, len: Int): Boolean = {
+ if (pos == len) true
+ else if (a(pos) != b(pos)) false
+ else eq(a , b, pos + 1, len)
+ }
+ a.length == b.length && eq(a, b, 0, a.length)
+ }
+}
+
+trait StringHelpers
+trait ControlHelpers
diff --git a/test/files/pos/signatures/Test.java b/test/files/pos/signatures/Test.java
index 3d1e3756a7..78c196526b 100644
--- a/test/files/pos/signatures/Test.java
+++ b/test/files/pos/signatures/Test.java
@@ -3,7 +3,7 @@ import test.Outer;
/* Test correct generation of java signatures. The Outer class should not
* have a Java signature attribute for the inner method definition. Trait
- * Mutable should have one, even though it is also a nested definition.
+ * Mutable should have one, even though it is also a nested definition.
* (but for classes there is a way to tell about nesting to the JVM).
*/
class Test {
diff --git a/test/files/pos/signatures/sig.scala b/test/files/pos/signatures/sig.scala
index 4236f27bed..3feb9c456a 100644
--- a/test/files/pos/signatures/sig.scala
+++ b/test/files/pos/signatures/sig.scala
@@ -1,7 +1,7 @@
package test
/* Tests correct generation of Java signatures. The local method 'bar' should
- * not get a generic signature, as it may refer to type parameters of the enclosing
+ * not get a generic signature, as it may refer to type parameters of the enclosing
* method, and the JVM does not know about nested methods.
*/
class Outer {
diff --git a/test/files/pos/spec-Function1.flags b/test/files/pos/spec-Function1.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-Function1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-Function1.scala b/test/files/pos/spec-Function1.scala
index 2baa22f269..24ee6c0de8 100644
--- a/test/files/pos/spec-Function1.scala
+++ b/test/files/pos/spec-Function1.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -36,11 +36,11 @@ package scala
trait Function1[@specialized -T1, @specialized +R] extends AnyRef { self =>
def apply(v1:T1): R
override def toString() = "<function>"
-
+
/** (f compose g)(x) == f(g(x))
*/
def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
-
+
/** (f andThen g)(x) == g(f(x))
*/
def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
diff --git a/test/files/pos/spec-List.flags b/test/files/pos/spec-List.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-List.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-List.scala b/test/files/pos/spec-List.scala
index 17291fae56..17acb40de2 100644
--- a/test/files/pos/spec-List.scala
+++ b/test/files/pos/spec-List.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,10 +23,10 @@ import annotation.tailrec
* @author Martin Odersky and others
* @version 2.8
*/
-sealed abstract class List[@specialized +A] extends LinearSeq[A]
- with Product
+sealed trait List[@specialized +A] extends LinearSeq[A]
+ with Product
with GenericTraversableTemplate[A, List]
- with LinearSeqLike[A, List[A]] {
+ with LinearSeqOptimized[A, List[A]] {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq}
@@ -65,7 +65,7 @@ sealed abstract class List[@specialized +A] extends LinearSeq[A]
/** <p>
* Returns a list resulting from the concatenation of the given
- * list <code>prefix</code> and this list.
+ * list <code>prefix</code> and this list.
* </p>
*
* @param prefix the list to concatenate at the beginning of this list.
@@ -140,29 +140,23 @@ sealed abstract class List[@specialized +A] extends LinearSeq[A]
}
// Overridden methods from IterableLike or overloaded variants of such methods
-
+
/** Create a new list which contains all elements of this list
* followed by all elements of Traversable `that'
*/
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
- if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.toList).asInstanceOf[That]
- else super.++(that)
+ if (b.isInstanceOf[ListBuffer[_]]) (this ::: xs.toList).asInstanceOf[That]
+ else super.++(xs)
}
- /** Create a new list which contains all elements of this list
- * followed by all elements of Iterator `that'
- */
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[List[A], B, That]): That =
- this ++ that.toList
-
/** Overrides the method in Iterable for efficiency.
*
* @return the list itself
*/
override def toList: List[A] = this
- /** Returns the <code>n</code> first elements of this list, or else the whole
+ /** Returns the <code>n</code> first elements of this list, or else the whole
* list, if it has less than <code>n</code> elements.
* @param n the number of elements to take.
@@ -222,7 +216,7 @@ sealed abstract class List[@specialized +A] extends LinearSeq[A]
}
loop(drop(n), this)
}
-
+
// dropRight is inherited from Stream
/** Split the list at a given point and return the two parts thus
@@ -308,7 +302,7 @@ sealed abstract class List[@specialized +A] extends LinearSeq[A]
override def stringPrefix = "List"
- override def toStream : Stream[A] =
+ override def toStream : Stream[A] =
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
@@ -493,10 +487,10 @@ final case class ::[@specialized B](private var hd: B, private[scala] var tl: Li
* @version 2.8
*/
object List extends SeqFactory[List] {
-
+
import collection.{Iterable, Seq}
- implicit def builderFactory[A]: CanBuildFrom[Coll, A, List[A]] =
+ implicit def builderFactory[A]: CanBuildFrom[Coll, A, List[A]] =
new GenericCanBuildFrom[A] {
override def apply() = newBuilder[A]
}
@@ -558,8 +552,8 @@ object List extends SeqFactory[List] {
* @return the concatenation of all the lists
*/
@deprecated("use `xss.flatten' instead")
- def flatten[A](xss: List[List[A]]): List[A] = {
- val b = new ListBuffer[A]
+ def flatten[A](xss: List[List[A]]): List[A] = {
+ val b = new ListBuffer[A]
for (xs <- xss) {
var xc = xs
while (!xc.isEmpty) {
@@ -594,7 +588,7 @@ object List extends SeqFactory[List] {
* @return a pair of lists.
*/
@deprecated("use `xs.unzip' instead")
- def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
+ def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
}
@@ -604,17 +598,17 @@ object List extends SeqFactory[List] {
* of <code>Either</code>s.
*/
@deprecated("use `Either.lefts' instead")
- def lefts[A, B](es: Iterable[Either[A, B]]) =
+ def lefts[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[A]](Nil)((e, as) => e match {
case Left(a) => a :: as
case Right(_) => as
- })
-
+ })
+
/**
* Returns the <code>Right</code> values in the given<code>Iterable</code> of <code>Either</code>s.
*/
@deprecated("use `Either.rights' instead")
- def rights[A, B](es: Iterable[Either[A, B]]) =
+ def rights[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[B]](Nil)((e, bs) => e match {
case Left(_) => bs
case Right(b) => b :: bs
@@ -654,7 +648,7 @@ object List extends SeqFactory[List] {
*
* @param arr the array to convert
* @param start the first index to consider
- * @param len the lenght of the range to convert
+ * @param len the length of the range to convert
* @return a list that contains the same elements than <code>arr</code>
* in the same order
*/
@@ -745,7 +739,7 @@ object List extends SeqFactory[List] {
/** Returns the list resulting from applying the given function <code>f</code>
* to corresponding elements of the argument lists.
* @param f function to apply to each pair of elements.
- * @return <code>[f(a0,b0), ..., f(an,bn)]</code> if the lists are
+ * @return <code>[f(a0,b0), ..., f(an,bn)]</code> if the lists are
* <code>[a0, ..., ak]</code>, <code>[b0, ..., bl]</code> and
* <code>n = min(k,l)</code>
*/
@@ -788,7 +782,7 @@ object List extends SeqFactory[List] {
b.toList
}
- /** Tests whether the given predicate <code>p</code> holds
+ /** Tests whether the given predicate <code>p</code> holds
* for all corresponding elements of the argument lists.
*
* @param p function to apply to each pair of elements.
diff --git a/test/files/pos/spec-annotations.flags b/test/files/pos/spec-annotations.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-annotations.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-arrays.flags b/test/files/pos/spec-arrays.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-arrays.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-arrays.scala b/test/files/pos/spec-arrays.scala
index c1f253a39d..84f6eef071 100644
--- a/test/files/pos/spec-arrays.scala
+++ b/test/files/pos/spec-arrays.scala
@@ -20,7 +20,7 @@ abstract class AbsArray[T] {
def updateBoolean(idx: Int, elem: Boolean) = update(idx, elem.asInstanceOf[T])
def applyObject(idx: Int): Object = apply(idx).asInstanceOf[Object]
def updateObject(idx: Int, elem: Object) = update(idx, elem.asInstanceOf[T])
-}
+}
final class IntArray(arr: Array[Int]) extends AbsArray[Int] {
def apply(idx: Int): Int = applyInt(idx)
@@ -30,7 +30,7 @@ final class IntArray(arr: Array[Int]) extends AbsArray[Int] {
def length: Int = arr.length
}
-final class GenericArray[T](arr: Array[T]) extends AbsArray[T] {
+final class ArraySeq[T](arr: Array[T]) extends AbsArray[T] {
def apply(idx: Int): T = arr(idx)
def update(idx: Int, elem: T) = arr(idx) = elem
def length: Int = arr.length
@@ -64,7 +64,7 @@ class ScalaSpecTest extends Test {
while (i < arr.length) { acc = acc + arr.applyInt(i); i += 1 }
acc
}
-
+
def modify(j: Int) = {
val base = j * 100 % 1000
var i = 0
@@ -74,7 +74,7 @@ class ScalaSpecTest extends Test {
}
}
}
-
+
class ScalaSpec2Test extends Test {
val arr: AbsArray[Int] = new IntArray(new Array[Int](1000))
@@ -84,7 +84,7 @@ class ScalaSpec2Test extends Test {
while (i < arr.length) { acc = acc + arr.applyInt(i); i += 1 }
acc
}
-
+
def modify(j: Int) = {
val base = j * 100 % 1000
var i = 0
@@ -94,9 +94,9 @@ class ScalaSpec2Test extends Test {
}
}
}
-
+
class ScalaWrapTest extends Test {
- val arr: AbsArray[Int] = new GenericArray(new Array[Int](1000))
+ val arr: AbsArray[Int] = new ArraySeq(new Array[Int](1000))
def sum(): Int = {
var acc = 0
@@ -104,7 +104,7 @@ class ScalaWrapTest extends Test {
while (i < arr.length) { acc = acc + arr.applyInt(i); i += 1 }
acc
}
-
+
def modify(j: Int) = {
val base = j * 100 % 1000
var i = 0
@@ -114,9 +114,9 @@ class ScalaWrapTest extends Test {
}
}
}
-
+
class ScalaGenTest extends Test {
- val arr: AbsArray[Integer] = new GenericArray(new Array[Integer](1000))
+ val arr: AbsArray[Integer] = new ArraySeq(new Array[Integer](1000))
for (i <- 0 until arr.length) arr(i) = new Integer(0)
def sum(): Int = {
@@ -125,7 +125,7 @@ class ScalaGenTest extends Test {
while (i < arr.length) { acc = acc + arr.apply(i).intValue; i += 1 }
acc
}
-
+
def modify(j: Int) = {
val base = j * 100 % 1000
var i = 0
@@ -135,7 +135,7 @@ class ScalaGenTest extends Test {
}
}
}
-
+
class JavaTest extends Test {
val arr = new Array[Int](1000)
@@ -145,7 +145,7 @@ class JavaTest extends Test {
while (i < arr.length) { acc = acc + arr(i); i += 1 }
acc
}
-
+
def modify(j: Int) = {
val base = j * 100 % 1000
var i = 0
@@ -166,7 +166,7 @@ class ScalaSpec3Test extends Test {
while (i < arr.length) { acc = acc + arr(i); i += 1 }
acc
}
-
+
def modify(j: Int) = {
val base = j * 100 % 1000
var i = 0
@@ -188,25 +188,25 @@ object TestSpec extends scala.testing.Benchmark {
(new ScalaSpecTest).run()
}
}
-
+
object TestSpec2 extends scala.testing.Benchmark {
def run() {
(new ScalaSpec2Test).run()
}
}
-
+
object TestGen extends scala.testing.Benchmark {
def run() {
(new ScalaGenTest).run()
}
}
-
+
object TestWrap extends scala.testing.Benchmark {
def run() {
(new ScalaWrapTest).run()
}
}
-
+
object TestSpec3 extends scala.testing.Benchmark {
def run() {
(new ScalaSpec3Test).run()
diff --git a/test/files/pos/spec-asseenfrom.scala b/test/files/pos/spec-asseenfrom.scala
new file mode 100644
index 0000000000..cf20fc5ffa
--- /dev/null
+++ b/test/files/pos/spec-asseenfrom.scala
@@ -0,0 +1,29 @@
+class Automaton[@specialized(Double) W,State] {
+
+ def finalWeight(s: State): W = error("todo");
+
+ def allStates: Set[State] = error("toodo");
+
+ /**
+ * Returns a map from states to its final weight. may expand all nodes.
+ */
+ def finalStateWeights = Map.empty ++ allStates.map { s => (s,finalWeight(s)) }
+
+ // This works fine:
+ /*
+ def finalStateWeights() = {
+ val it = allStates.iterator;
+ while(it.hasNext) {
+ finalWeight(it.next);
+ }
+ }
+ */
+
+}
+
+abstract class Automaton2[@specialized T1, T2] {
+ def finalWeight(s: T2): T1
+ def allStates: Set[T2]
+
+ def f = allStates map finalWeight
+}
diff --git a/test/files/pos/spec-constr.scala b/test/files/pos/spec-constr.scala
new file mode 100644
index 0000000000..e908b65a41
--- /dev/null
+++ b/test/files/pos/spec-constr.scala
@@ -0,0 +1,7 @@
+class SparseArray2[@specialized(Int) T:ClassManifest](val maxSize: Int, initialLength:Int = 3) {
+ private var data = new Array[T](initialLength);
+ private var index = new Array[Int](initialLength);
+
+ // comment out to compile correctly
+ data.length + 3;
+}
diff --git a/test/files/pos/spec-cyclic.flags b/test/files/pos/spec-cyclic.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-cyclic.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-cyclic.scala b/test/files/pos/spec-cyclic.scala
index 65da297989..b983caa6db 100644
--- a/test/files/pos/spec-cyclic.scala
+++ b/test/files/pos/spec-cyclic.scala
@@ -6,18 +6,18 @@ trait MyPartialFunction[-A, +B] extends AnyRef with AbsFun[A, B]
trait ColMap[A, +B] extends MyPartialFunction[A, B] /*with Collection[(A, B)] */
-trait ColSorted[K,+A] extends ColRanged[K,A]
+trait ColSorted[K,+A] extends ColRanged[K,A]
-trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]]
+trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]]
trait MutMap[A, B] extends AnyRef
with ColMap[A, B]
-trait ColRanged[K, +A] //extends Iterable[A]
+trait ColRanged[K, +A] //extends Iterable[A]
trait JclRanged[K,A] extends ColRanged[K,A] //with MutableIterable[A] {
-trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E]
+trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E]
trait JclSorted[K,A] extends ColSorted[K,A] with JclRanged[K,A]
diff --git a/test/files/pos/spec-doubledef.scala b/test/files/pos/spec-doubledef.scala
new file mode 100644
index 0000000000..86b0d857d3
--- /dev/null
+++ b/test/files/pos/spec-doubledef.scala
@@ -0,0 +1,28 @@
+object Test {
+ def fn[@specialized T, @specialized U](t : T => Int, u : U => Int) : T =
+ null.asInstanceOf[T]
+}
+
+trait A[@specialized(Int) T] {
+ var value: T
+ def getWith[@specialized(Int) Z](f: T => Z) = f(value)
+}
+
+class C extends A[Int] {
+ var value = 10
+ override def getWith[@specialized(Int) Z](f: Int => Z) = f(value)
+}
+
+abstract class B[T, @specialized(scala.Int) U : Manifest, @specialized(scala.Int) V <% Ordered[V]] {
+ val u: U
+ val v: V
+
+ def f(t: T, v2: V): Pair[U, V] = {
+ val m: Array[U] = null
+ if (m.isEmpty) {
+ Pair(u, v)
+ } else {
+ Pair(u, v2)
+ }
+ }
+}
diff --git a/test/files/pos/spec-example1.flags b/test/files/pos/spec-example1.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-example1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-fields.flags b/test/files/pos/spec-fields.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-fields.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-foo.flags b/test/files/pos/spec-foo.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-foo.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-funs.flags b/test/files/pos/spec-funs.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-funs.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-funs.scala b/test/files/pos/spec-funs.scala
index 5fea5e9560..9acc5054c9 100644
--- a/test/files/pos/spec-funs.scala
+++ b/test/files/pos/spec-funs.scala
@@ -19,7 +19,7 @@ final class IntTest {
val xs = new Array[Int](10000)
val f = new AbsFunction1[Int, Int] {
def apply(x: Int): Int = x * x
- }
+ }
for (j <- 0 until niters) {
transF(xs, f)
}
diff --git a/test/files/pos/spec-lists.flags b/test/files/pos/spec-lists.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-lists.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-localdefs.flags b/test/files/pos/spec-localdefs.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-localdefs.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-maps.flags b/test/files/pos/spec-maps.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-maps.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-multiplectors.scala b/test/files/pos/spec-multiplectors.scala
new file mode 100644
index 0000000000..8434a13936
--- /dev/null
+++ b/test/files/pos/spec-multiplectors.scala
@@ -0,0 +1,3 @@
+class Spec[@specialized(Int) A]() {
+ def this(n: Int) = this()
+}
diff --git a/test/files/pos/spec-params.flags b/test/files/pos/spec-params.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-params.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-params.scala b/test/files/pos/spec-params.scala
index 33a252120c..f522512846 100644
--- a/test/files/pos/spec-params.scala
+++ b/test/files/pos/spec-params.scala
@@ -10,7 +10,7 @@ class Foo[@specialized A: ClassManifest] {
def m2[@specialized B <: String](x: B) = x.concat("a")
// conflicting in bounds, no mention of other spec members
- // expect an overload here plus implementation in
+ // expect an overload here plus implementation in
// compatible specialized subclasses
def m3[@specialized B >: A](x: B) = ()
@@ -19,10 +19,10 @@ class Foo[@specialized A: ClassManifest] {
// non-conflicting, expect a normalized overload implementation here
def m5[@specialized B](x: B) = x
-
+
// non-conflicting, expect a normalized implementation here
// and specialized implementations for all expansions in specialized subclasses
- def m6[@specialized B](x: B, y: A) =
+ def m6[@specialized B](x: B, y: A) =
goal(y)
def goal(x: A) = {
diff --git a/test/files/pos/spec-partially.scala b/test/files/pos/spec-partially.scala
new file mode 100644
index 0000000000..90778e42a8
--- /dev/null
+++ b/test/files/pos/spec-partially.scala
@@ -0,0 +1,5 @@
+/** Test case for partially specialized classes. see #2880. */
+
+class Arc[State, @specialized T](label: T, to: State)
+
+
diff --git a/test/files/pos/spec-partialmap.scala b/test/files/pos/spec-partialmap.scala
new file mode 100644
index 0000000000..09684e0242
--- /dev/null
+++ b/test/files/pos/spec-partialmap.scala
@@ -0,0 +1,17 @@
+
+// ticket #3378, overloaded specialized variants
+import scala.collection.{Traversable,TraversableLike};
+import scala.collection.generic.CanBuildFrom;
+
+trait PartialMap[@specialized A,@specialized B]
+extends PartialFunction[A,B] with Iterable[(A,B)] {
+
+ // commenting out this declaration gives a different exception.
+ /** Getter for all values for which the given key function returns true. */
+ def apply(f : (A => Boolean)) : Iterator[B] =
+ for ((k,v) <- iterator; if f(k)) yield v;
+
+ // if this is commented, it compiles fine:
+ def apply[This <: Traversable[A], That](keys : TraversableLike[A,This])
+ (implicit bf: CanBuildFrom[This, B, That]) : That = keys.map(apply);
+}
diff --git a/test/files/pos/spec-polymeth.flags b/test/files/pos/spec-polymeth.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-polymeth.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-private.scala b/test/files/pos/spec-private.scala
new file mode 100644
index 0000000000..7d53bad955
--- /dev/null
+++ b/test/files/pos/spec-private.scala
@@ -0,0 +1,10 @@
+class Foo {
+
+ def foo[@specialized(Int) T](x: T) = new Object {
+ private final val myEdges = List(1, 2 , 3)
+
+ def boo {
+ myEdges
+ }
+ }
+}
diff --git a/test/files/pos/spec-sealed.flags b/test/files/pos/spec-sealed.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-sealed.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-sealed.scala b/test/files/pos/spec-sealed.scala
index 8c06148d3e..73256ac906 100644
--- a/test/files/pos/spec-sealed.scala
+++ b/test/files/pos/spec-sealed.scala
@@ -2,7 +2,7 @@ sealed abstract class MyList[@specialized +A] {
def head: A
def tail: MyList[A]
- def ::[@specialized B >: A](x: B): MyList[B] =
+ def ::[@specialized B >: A](x: B): MyList[B] =
new Cons[B](x, this)
}
@@ -19,7 +19,7 @@ case class Cons[@specialized a](private val hd: a, tl: MyList[a]) extends MyList
abstract class IntList extends MyList[Int]
object Main extends Application {
- val xs = 1 :: 2 :: 3 :: MyNil
+ val xs = 1 :: 2 :: 3 :: MyNil
println(xs)
}
diff --git a/test/files/pos/spec-short.flags b/test/files/pos/spec-short.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-short.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-short.scala b/test/files/pos/spec-short.scala
index 71e56a485a..94a8007c6e 100644
--- a/test/files/pos/spec-short.scala
+++ b/test/files/pos/spec-short.scala
@@ -5,7 +5,7 @@ abstract class AbsFun[@specialized T, @specialized U] {
// abstract function, specialized
def sum(xs: List[T]): Int
- def prod(xs: List[T], mul: (Int, T) => Int): Int =
+ def prod(xs: List[T], mul: (Int, T) => Int): Int =
(1 /: xs)(mul)
// concrete function, not specialized
@@ -18,9 +18,9 @@ abstract class AbsFun[@specialized T, @specialized U] {
class Square extends AbsFun[Int, Int] {
def apply(x: Int): Int = x * x
- def sum(xs: List[Int]): Int =
+ def sum(xs: List[Int]): Int =
(0 /: xs) (_ + _)
- def abs(m: Int): Int =
+ def abs(m: Int): Int =
sum(List(1, 2, 3))
}
diff --git a/test/files/pos/spec-simple.flags b/test/files/pos/spec-simple.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-simple.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-sparsearray.scala b/test/files/pos/spec-sparsearray.scala
new file mode 100644
index 0000000000..ea7710a785
--- /dev/null
+++ b/test/files/pos/spec-sparsearray.scala
@@ -0,0 +1,24 @@
+import scala.collection.mutable.MapLike
+
+class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
+ override def get(x: Int) = {
+ val ind = findOffset(x)
+ if(ind < 0) None else Some(error("ignore"))
+ }
+
+ /**
+ * Returns the offset into index and data for the requested vector
+ * index. If the requested index is not found, the return value is
+ * negative and can be converted into an insertion point with -(rv+1).
+ */
+ private def findOffset(i : Int) : Int = {
+ error("impl doesn't matter")
+ }
+
+ override def apply(i : Int) : T = { error("ignore") }
+ override def update(i : Int, value : T) = error("ignore")
+ override def empty = new SparseArray[T]
+ def -=(ind: Int) = error("ignore")
+ def +=(kv: (Int,T)) = error("ignore")
+ override final def iterator = error("ignore")
+}
diff --git a/test/files/pos/spec-super.flags b/test/files/pos/spec-super.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-super.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-t3497.scala b/test/files/pos/spec-t3497.scala
new file mode 100644
index 0000000000..6cc0e24f89
--- /dev/null
+++ b/test/files/pos/spec-t3497.scala
@@ -0,0 +1,16 @@
+abstract class A[T, @specialized U] {
+ def score(state: T): U
+}
+
+object B extends A[ Array[Byte], Int ] {
+ def score(state: Array[Byte]): Int = {
+ var index = 0
+ while (index < state.length) { // (index < 2) leads to the #2755 NullPointerException
+ if (state(index) == 0) {
+ return -1
+ }
+ }
+
+ return 0
+ }
+}
diff --git a/test/files/pos/spec-tailcall.flags b/test/files/pos/spec-tailcall.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-tailcall.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-tailcall.scala b/test/files/pos/spec-tailcall.scala
index 703ec011ad..d91e2f59ce 100644
--- a/test/files/pos/spec-tailcall.scala
+++ b/test/files/pos/spec-tailcall.scala
@@ -1,5 +1,5 @@
class TailCall[@specialized T] {
- final def dropLeft(n: Int, xs: List[T]): List[T] =
+ final def dropLeft(n: Int, xs: List[T]): List[T] =
if (n == 0) xs
else dropLeft(n - 1, xs.tail)
/*
@@ -7,7 +7,7 @@ class TailCall[@specialized T] {
def crash(o: Option[String]) = filter {
case None if {
- def dropLeft[T](n: Int, xs: List[T]): List[T] =
+ def dropLeft[T](n: Int, xs: List[T]): List[T] =
if (n == 0) xs
else dropLeft(n - 1, xs.tail)
dropLeft(2, List(1, 2, 3)).isEmpty
diff --git a/test/files/pos/spec-thistype.flags b/test/files/pos/spec-thistype.flags
deleted file mode 100644
index 973517e1c9..0000000000
--- a/test/files/pos/spec-thistype.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yspecialize
diff --git a/test/files/pos/spec-vector.scala b/test/files/pos/spec-vector.scala
new file mode 100644
index 0000000000..06e49b521f
--- /dev/null
+++ b/test/files/pos/spec-vector.scala
@@ -0,0 +1,4 @@
+// ticket #3379, abstract overrides
+trait Vector extends (Int=>Double) {
+ override def apply(i: Int): Double
+}
diff --git a/test/files/pos/strings.scala b/test/files/pos/strings.scala
index 9fe8cfd94b..83b8179706 100644
--- a/test/files/pos/strings.scala
+++ b/test/files/pos/strings.scala
@@ -6,5 +6,5 @@ object test {
}
// #1000
object A {
- println("""This a "raw" string ending with a "double quote"""")
+ println("""This a "raw" string ending with a "double quote"""")
}
diff --git a/test/files/pos/sudoku.scala b/test/files/pos/sudoku.scala
index 9875b22cfd..be05665809 100644
--- a/test/files/pos/sudoku.scala
+++ b/test/files/pos/sudoku.scala
@@ -3,22 +3,22 @@ object SudokuSolver extends Application {
// held in a global variable m. The program begins by reading 9 lines
// of input to fill the board
var m: Array[Array[Char]] = Array.tabulate(9)((x: Int) => readLine.toArray)
-
+
// For printing m, a method print is defined
def print = { println(""); m map (carr => println(new String(carr))) }
-
+
// The test for validity is performed by looping over i=0..8 and
// testing the row, column and 3x3 square containing the given
// coordinate
def invalid(i: Int, x: Int, y: Int, n: Char): Boolean =
i<9 && (m(y)(i) == n || m(i)(x) == n ||
m(y/3*3 + i/3)(x/3*3 + i % 3) == n || invalid(i+1, x, y, n))
-
+
// Looping over a half-closed range of consecutive integers [l..u)
// is factored out into a higher-order function
def fold(f: (Int, Int) => Int, accu: Int, l: Int, u: Int): Int =
if(l==u) accu else fold(f, f(accu, l), l+1, u)
-
+
// The search function examines each position on the board in turn,
// trying the numbers 1..9 in each unfilled position
// The function is itself a higher-order fold, accumulating the value
@@ -34,7 +34,7 @@ object SudokuSolver extends Application {
val newaccu = search(x+1, y, f, accu);
m(y)(x) = '0';
newaccu}, accu, 1, 10)}
-
+
// The main part of the program uses the search function to accumulate
// the total number of solutions
println("\n"+search(0,0,i => {print; i+1},0)+" solution(s)")
diff --git a/test/files/pos/super.cmds b/test/files/pos/super.cmds
new file mode 100644
index 0000000000..8f3f8a4172
--- /dev/null
+++ b/test/files/pos/super.cmds
@@ -0,0 +1,2 @@
+javac Super_1.java
+scalac Super_2.scala
diff --git a/test/files/pos/super/Super_1.java b/test/files/pos/super/Super_1.java
index 9acbba0ec4..418ae96bc0 100644
--- a/test/files/pos/super/Super_1.java
+++ b/test/files/pos/super/Super_1.java
@@ -1,6 +1,6 @@
// A.java
interface Inter<T> { }
-class Super implements Inter<Super.Inner> {
+class Super implements Inter<Super.Inner> {
public class Inner { };
}
diff --git a/test/files/pos/switchUnbox.flags b/test/files/pos/switchUnbox.flags
index ec1ad20e3a..cdf5f84ede 100644
--- a/test/files/pos/switchUnbox.flags
+++ b/test/files/pos/switchUnbox.flags
@@ -1 +1 @@
--Xsqueeze:on
+-Ysqueeze:on
diff --git a/test/files/pos/switchUnbox.scala b/test/files/pos/switchUnbox.scala
index a97bff5521..b8942e2559 100644
--- a/test/files/pos/switchUnbox.scala
+++ b/test/files/pos/switchUnbox.scala
@@ -1,8 +1,8 @@
-// this test has accompanying .flags file
+// this test has accompanying .flags file
// that contains -Xsqueeze:on
-//
+//
object Foo {
- var xyz: (int, String) = (1, "abc")
+ var xyz: (Int, String) = (1, "abc")
xyz._1 match {
case 1 => Console.println("OK")
case 2 => Console.println("OK")
diff --git a/test/files/pos/t0227.scala b/test/files/pos/t0227.scala
index a52a9798fc..8650350c4a 100644
--- a/test/files/pos/t0227.scala
+++ b/test/files/pos/t0227.scala
@@ -19,7 +19,7 @@ class SA(val settings: Settings) extends Base {
SD
) ::: settings.f(
SC
- )
+ )
}
object SC extends Factory {
diff --git a/test/files/pos/t0288/Foo.scala b/test/files/pos/t0288/Foo.scala
index 778ba65f58..1f7d81327d 100644
--- a/test/files/pos/t0288/Foo.scala
+++ b/test/files/pos/t0288/Foo.scala
@@ -6,4 +6,4 @@ class Foo extends Outer{
val bar = new Inner(); // Shouldn't this work?
-}
+}
diff --git a/test/files/pos/t0288/Outer.java b/test/files/pos/t0288/Outer.java
index bea3e3f8d0..7a3754fb5d 100644
--- a/test/files/pos/t0288/Outer.java
+++ b/test/files/pos/t0288/Outer.java
@@ -6,4 +6,4 @@ public class Outer{
}
-}
+}
diff --git a/test/files/pos/t0438.scala b/test/files/pos/t0438.scala
index fa5b7711ff..33b7efeaac 100644
--- a/test/files/pos/t0438.scala
+++ b/test/files/pos/t0438.scala
@@ -1,9 +1,9 @@
class Foo {
- implicit def pair2fun2[A, B, C](f: (A, B) => C) =
+ implicit def pair2fun2[A, B, C](f: (A, B) => C) =
{p: (A, B) => f(p._1, p._2) }
def foo(f: ((Int, Int)) => Int) = f
- def bar(x: Int, y: Int) = x + y
+ def bar(x: Int, y: Int) = x + y
foo({ (x: Int, y: Int) => x + y }) // works
foo(pair2fun2(bar _)) // works
diff --git a/test/files/pos/t0453.scala b/test/files/pos/t0453.scala
index dfacc5eed7..d59a3d22f2 100644
--- a/test/files/pos/t0453.scala
+++ b/test/files/pos/t0453.scala
@@ -1,5 +1,5 @@
object Test {
- val foo = new {
+ val foo = new {
trait Bar
def l () : Bar = { new Bar {} }
}
diff --git a/test/files/pos/t0770.scala b/test/files/pos/t0770.scala
index 7a0a2bf9bb..bb438f1918 100644
--- a/test/files/pos/t0770.scala
+++ b/test/files/pos/t0770.scala
@@ -1,7 +1,7 @@
trait A
{
private[this] val p = 5
-
+
def f = (b: Byte) => p
}
diff --git a/test/files/pos/t0774/unrelated.scala b/test/files/pos/t0774/unrelated.scala
index 1efdb2505e..483f836d0c 100644
--- a/test/files/pos/t0774/unrelated.scala
+++ b/test/files/pos/t0774/unrelated.scala
@@ -1,8 +1,8 @@
object Outer {
import Inner._
-
+
deathname
-
+
object Inner {
def deathname: Int = 1
}
diff --git a/test/files/pos/t0786.scala b/test/files/pos/t0786.scala
index f40cf7d2e1..4d9f1d0dc9 100644
--- a/test/files/pos/t0786.scala
+++ b/test/files/pos/t0786.scala
@@ -2,15 +2,15 @@ object ImplicitProblem {
class M[T]
def nullval[T] = null.asInstanceOf[T];
-
+
trait Rep[T] {
def eval: Int
}
-
+
implicit def toRep0(n: Int) = new Rep[Int] {
def eval = 0
}
-
+
implicit def toRepN[T](n: M[T])(implicit f: T => Rep[T]) = new Rep[M[T]] {
def eval = f(nullval[T]).eval + 1
}
@@ -18,11 +18,11 @@ object ImplicitProblem {
def depth[T <% Rep[T]](n: T) = n.eval
def main(args: Array[String]) {
- println(depth(nullval[M[Int]])) // (1) this works
+ println(depth(nullval[M[Int]])) // (1) this works
println(nullval[M[Int]].eval) // (2) this works
-
+
type m = M[Int]
- println(depth(nullval[m])) // (3) this doesn't compile on 2.7.RC1
+ println(depth(nullval[m])) // (3) this doesn't compile on 2.7.RC1
println(nullval[m].eval) // (4) this works
}
diff --git a/test/pending/pos/t0816.scala b/test/files/pos/t0816.scala
index 44282ea872..738a634166 100644
--- a/test/pending/pos/t0816.scala
+++ b/test/files/pos/t0816.scala
@@ -1,12 +1,12 @@
abstract class Atest(val data: String)
-case class Btest(override val data: String, val b: boolean) extends Atest(data)
+case class Btest(override val data: String, val b: Boolean) extends Atest(data)
case class Ctest(override val data: String) extends Btest(data, true)
class testCaseClass {
def test(x: Atest) = x match {
- case Ctest(data) => Console.println("C")
+ case Ctest(data) => Console.println("C")
case Btest(data, b) => Console.println("B")
}
}
diff --git a/test/files/pos/t0971.java b/test/files/pos/t0971.java
deleted file mode 100644
index 160dc2c5af..0000000000
--- a/test/files/pos/t0971.java
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
- int y = 1, z;
- static Object x = new java.util.HashMap<Object , Object > () ;
-}
diff --git a/test/files/pos/t0999.scala b/test/files/pos/t0999.scala
deleted file mode 100644
index c384820af1..0000000000
--- a/test/files/pos/t0999.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object A {
- val d: Double = Math.sqrt(5 match {
- case x: Double => x
- })
-}
diff --git a/test/files/pos/t1000.scala b/test/files/pos/t1000.scala
index 38f71863d8..31d7c7e902 100644
--- a/test/files/pos/t1000.scala
+++ b/test/files/pos/t1000.scala
@@ -1,3 +1,3 @@
object A {
- println("""This a "raw" string ending with a "double quote"""")
+ println("""This a "raw" string ending with a "double quote"""")
}
diff --git a/test/files/pos/t1029.cmds b/test/files/pos/t1029.cmds
new file mode 100644
index 0000000000..06b863dc03
--- /dev/null
+++ b/test/files/pos/t1029.cmds
@@ -0,0 +1,2 @@
+scalac Test_1.scala
+scalac Test_2.scala
diff --git a/test/pending/pos/t1035.scala b/test/files/pos/t1035.scala
index 2485c10018..5c8670aae3 100644
--- a/test/pending/pos/t1035.scala
+++ b/test/files/pos/t1035.scala
@@ -1,5 +1,5 @@
//A fatal error or Scala compiler
-// Scala compiler version 2.7.1-final -- (c) 2002-2009 LAMP/EPFL
+// Scala compiler version 2.7.1-final -- (c) 2002-2010 LAMP/EPFL
// Carlos Loria cloria@artinsoft.com
// 7/10/2008
@@ -7,7 +7,7 @@ class A {
var name:String = _
def getName() = name
def this(name:String, age:Int){this();this.name=name}
-
+
}
class B(name:String) extends A(name,0){
@@ -18,15 +18,15 @@ class D {
object A {
def unapply(p:A) = Some(p.getName)
}
-
+
object B {
def unapply(p:B) = Some(p.getName)
}
def foo(p:Any) = p match {
- case B(n) => println("B")
- case A(n) => println("A")
-
-
+ case B(n) => println("B")
+ case A(n) => println("A")
+
+
}
}
diff --git a/test/pending/pos/t1053.scala b/test/files/pos/t1053.scala
index 1d4dfb637e..1d4dfb637e 100644
--- a/test/pending/pos/t1053.scala
+++ b/test/files/pos/t1053.scala
diff --git a/test/files/pos/t1059.scala b/test/files/pos/t1059.scala
index 659bf375ca..bcd8f0374f 100644
--- a/test/files/pos/t1059.scala
+++ b/test/files/pos/t1059.scala
@@ -25,4 +25,4 @@ object SafeNodeSeq {
})) case _ => None }
-}
+}
diff --git a/test/files/pos/t1107/O.scala b/test/files/pos/t1107/O.scala
index 0198867704..aa605a6d09 100644
--- a/test/files/pos/t1107/O.scala
+++ b/test/files/pos/t1107/O.scala
@@ -4,10 +4,10 @@ object O
case s: Sub => true
case _ => false
}
-
+
def main(args: Array[String]): Unit = {
val c = new AnyRef with C
c.bob.toString + c.bob2.toString
- }
+ }
}
diff --git a/test/files/pos/t1107/T.scala b/test/files/pos/t1107/T.scala
index 0dff0b94fd..1f3712d529 100644
--- a/test/files/pos/t1107/T.scala
+++ b/test/files/pos/t1107/T.scala
@@ -1,6 +1,6 @@
sealed trait Top
sealed trait Sub extends Top
-trait C {
+trait C {
private object P extends Sub
def bob() = P.getClass
def bob2() = O.d(P)
diff --git a/test/files/pos/t1164.scala b/test/files/pos/t1164.scala
index 3acda88ba9..307ca92c85 100644
--- a/test/files/pos/t1164.scala
+++ b/test/files/pos/t1164.scala
@@ -1,29 +1,29 @@
-object test {
+object test {
- class Foo[a](val arg : a)
-
- object Foo {
- def apply [a](arg : a, right :a) = new Foo[a](arg)
- def unapply [a](m : Foo[a]) = Some (m.arg)
- }
+ class Foo[a](val arg : a)
+ object Foo {
+ def apply [a](arg : a, right :a) = new Foo[a](arg)
+ def unapply [a](m : Foo[a]) = Some (m.arg)
+ }
+
def matchAndGetArgFromFoo[a]( e:Foo[a]):a = {e match { case Foo(x) => x }}
-
-
- // Try the same thing as above but use function as arguemnt to Bar
+
+
+ // Try the same thing as above but use function as argument to Bar
// constructor
-
- type FunIntToA [a] = (int) => a
- class Bar[a] (var f: FunIntToA[a])
-
+
+ type FunIntToA [a] = (Int) => a
+ class Bar[a] (var f: FunIntToA[a])
+
object Bar {
def apply[a](f: FunIntToA[a]) = new Bar[a](f)
def unapply[a](m: Bar[a]) = Some (m.f)
}
-
+
def matchAndGetFunFromBar[a](b:Bar[a]) : FunIntToA[a] = { b match { case Bar(x) => x}}
-
+
}
diff --git a/test/files/pos/t1226.scala b/test/files/pos/t1226.scala
new file mode 100644
index 0000000000..0af21cbb61
--- /dev/null
+++ b/test/files/pos/t1226.scala
@@ -0,0 +1,8 @@
+package graphs;
+
+abstract class Graph (private[graphs] val mappings : Any){
+}
+
+class Nodes (mappings : Any) extends Graph(mappings) {
+ mappings.toString;
+}
diff --git a/test/files/pos/t1236.scala b/test/files/pos/t1236.scala
new file mode 100644
index 0000000000..7028162ee0
--- /dev/null
+++ b/test/files/pos/t1236.scala
@@ -0,0 +1,14 @@
+trait Empty[E[_]] {
+ def e[A]: E[A]
+}
+
+object T {
+ val ListEmpty = new Empty[List] {
+ def e[A] = Nil
+ }
+
+ def foo[F[_]](q:(String,String)) = "hello"
+ def foo[F[_]](e: Empty[F]) = "world"
+
+ val x = foo[List](ListEmpty)
+}
\ No newline at end of file
diff --git a/test/files/pos/t1254/t1254.java b/test/files/pos/t1254/t1254.java
index 25b733cf28..17dc391672 100644
--- a/test/files/pos/t1254/t1254.java
+++ b/test/files/pos/t1254/t1254.java
@@ -11,7 +11,7 @@ class NothingBug3 {
scala.Option<?> o = scala.None$.MODULE$;
test(o);
- None.toLeft(new scala.Function0<Integer>() {
+ None.toLeft(new scala.runtime.AbstractFunction0<Integer>() {
public Integer apply() { return 0; }
});
}
diff --git a/test/files/pos/t1263/test.scala b/test/files/pos/t1263/test.scala
index 92d8c1cdfa..7ced59083a 100644
--- a/test/files/pos/t1263/test.scala
+++ b/test/files/pos/t1263/test.scala
@@ -2,7 +2,7 @@ package test
trait Map[A, +B] {
def plus(key: A): MapTo = new MapTo(key)
-
+
class MapTo(key: A) {
def arrow [B1 >: B](value: B1) = null
}
diff --git a/test/files/pos/t1380.flags b/test/files/pos/t1380.flags
deleted file mode 100644
index f0b0ef7f51..0000000000
--- a/test/files/pos/t1380.flags
+++ /dev/null
@@ -1 +0,0 @@
--cp pending/pos/t1380/gnujaxp.jar
diff --git a/test/files/pos/t1380/hallo.scala b/test/files/pos/t1380/hallo.scala
deleted file mode 100644
index 27ecd9fb8b..0000000000
--- a/test/files/pos/t1380/hallo.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object hallo {
- def main(args:Array[String]) = println("hallo")
-}
diff --git a/test/files/pos/t1422.scala b/test/files/pos/t1422.scala
new file mode 100644
index 0000000000..658f5c730d
--- /dev/null
+++ b/test/files/pos/t1422.scala
@@ -0,0 +1,2 @@
+case class A(private val foo:String)
+case class B(protected[this] val foo:String)
diff --git a/test/files/pos/t1459/AbstractBase.java b/test/files/pos/t1459/AbstractBase.java
new file mode 100755
index 0000000000..492419416c
--- /dev/null
+++ b/test/files/pos/t1459/AbstractBase.java
@@ -0,0 +1,5 @@
+package base;
+
+public abstract class AbstractBase {
+ public abstract void doStuff(String... params); // !!! was Object..
+}
\ No newline at end of file
diff --git a/test/files/pos/t1459/App.scala b/test/files/pos/t1459/App.scala
new file mode 100755
index 0000000000..1152fcb0c3
--- /dev/null
+++ b/test/files/pos/t1459/App.scala
@@ -0,0 +1,18 @@
+package foo
+import base._
+
+object App extends Application {
+ class Concrete extends AbstractBase {
+ override def doStuff(params:java.lang.String*): Unit = println("doStuff invoked")
+ }
+
+ val impl = new Concrete
+
+ //succeeds
+ impl.doStuff(null)
+
+ val caller = new Caller
+
+ // fails with AbstractMethodError
+ caller.callDoStuff(impl)
+}
diff --git a/test/files/pos/t1459/Caller.java b/test/files/pos/t1459/Caller.java
new file mode 100755
index 0000000000..4ae51d8c57
--- /dev/null
+++ b/test/files/pos/t1459/Caller.java
@@ -0,0 +1,7 @@
+package base;
+
+public class Caller {
+ public void callDoStuff(AbstractBase impl) {
+ impl.doStuff("abc"); // was new Object());
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t1480.scala b/test/files/pos/t1480.scala
index 1d9f94d2e9..3dc3062ca0 100644
--- a/test/files/pos/t1480.scala
+++ b/test/files/pos/t1480.scala
@@ -1,6 +1,6 @@
class Foo{
def compare(newP : Any, oldP : Any) : Boolean = (newP,oldP) match {
- case (newP : AnyRef, oldP : AnyRef) if newP == oldP => newP == oldP
- case (newS : Symbol, oldS: Symbol) if newS == oldS => newS == oldS
+ case (newP : AnyRef, oldP : AnyRef) if newP == oldP => newP == oldP
+ case (newS : Symbol, oldS: Symbol) if newS == oldS => newS == oldS
}
}
diff --git a/test/pending/neg/t1545.scala b/test/files/pos/t1545.scala
index d7c0245725..d52cfa1659 100755
--- a/test/pending/neg/t1545.scala
+++ b/test/files/pos/t1545.scala
@@ -9,8 +9,8 @@ object Main extends Application {
case None => 99
}
}
-
+
println (x (Foo (None))) // prints 99
println (x (Foo (Some ("foo")))) // prints 42
-
+
}
diff --git a/test/files/pos/t1560.scala b/test/files/pos/t1560.scala
index 49cdaaed6a..c1d8a8164c 100644
--- a/test/files/pos/t1560.scala
+++ b/test/files/pos/t1560.scala
@@ -1,13 +1,13 @@
object Test extends Application {
-
+
trait C[T] {
def t: T
}
-
+
def b: Option[C[x] forSome { type x }] = null
-
+
def c = b match {
case Some(b) => b.t
}
-
+
}
diff --git a/test/files/pos/t1569.flags b/test/files/pos/t1569.flags
new file mode 100644
index 0000000000..1c26b24745
--- /dev/null
+++ b/test/files/pos/t1569.flags
@@ -0,0 +1 @@
+-Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/t1569.scala b/test/files/pos/t1569.scala
new file mode 100644
index 0000000000..e5f9553268
--- /dev/null
+++ b/test/files/pos/t1569.scala
@@ -0,0 +1,5 @@
+object Bug {
+ class C { type T }
+ def foo(x: Int)(y: C)(z: y.T) {}
+ foo(3)(new C { type T = String })("hello")
+}
\ No newline at end of file
diff --git a/test/files/pos/t1591.scala b/test/files/pos/t1591.scala
new file mode 100644
index 0000000000..4f55d7ce19
--- /dev/null
+++ b/test/files/pos/t1591.scala
@@ -0,0 +1,7 @@
+trait A
+
+object Test {
+ lazy val a = new A {
+ object Zenek
+ }
+}
diff --git a/test/files/pos/t1591_pos.scala b/test/files/pos/t1591_pos.scala
new file mode 100644
index 0000000000..4f55d7ce19
--- /dev/null
+++ b/test/files/pos/t1591_pos.scala
@@ -0,0 +1,7 @@
+trait A
+
+object Test {
+ lazy val a = new A {
+ object Zenek
+ }
+}
diff --git a/test/files/pos/t1591b.scala b/test/files/pos/t1591b.scala
new file mode 100644
index 0000000000..84372bb084
--- /dev/null
+++ b/test/files/pos/t1591b.scala
@@ -0,0 +1,13 @@
+import scala.tools.nsc._
+
+class SemanticTokens(val compiler: Global) {
+ import compiler._
+
+ def build() = ErrorType
+
+ class Process {
+ def f() = analyzer
+ // or to crash the compiler instead of a nice message,
+ // def f() = analyzer underlying _
+ }
+}
diff --git a/test/files/pos/t1693.scala b/test/files/pos/t1693.scala
new file mode 100644
index 0000000000..f3615f4756
--- /dev/null
+++ b/test/files/pos/t1693.scala
@@ -0,0 +1,9 @@
+object Test {
+ class Foo
+ class SomeOps(x : Foo) { def foo(x: String) = 1 }
+ class OtherOps(x : Foo) { def foo(x: Int) = 1 }
+ implicit def mkSomeOps(x: Foo) : SomeOps = new SomeOps(x)
+ implicit def mkOtherOps(x: Foo) : OtherOps = new OtherOps(x)
+
+ (new Foo).foo(1)
+}
\ No newline at end of file
diff --git a/test/files/pos/t1711/Seq.scala b/test/files/pos/t1711/Seq.scala
index c18f05cd73..5f426ea0f7 100644
--- a/test/files/pos/t1711/Seq.scala
+++ b/test/files/pos/t1711/Seq.scala
@@ -3,7 +3,7 @@ package com
object Sequence {
def filteringFunction[V](filter: V => Boolean): List[V] => List[V] = {
- def include(v: V) =
+ def include(v: V) =
filter(v)
(l: List[V]) => l.filter(include)
}
diff --git a/test/files/pos/t1722-A.scala b/test/files/pos/t1722-A.scala
index d059bf22f8..9e522a5059 100644
--- a/test/files/pos/t1722-A.scala
+++ b/test/files/pos/t1722-A.scala
@@ -1,8 +1,8 @@
sealed trait Top
trait C {
private object P extends Top
-}
-/*
+}
+/*
$ scala -e 'new AnyRef with C'
error: error while loading Top, class file '/private/tmp/bobobo/./Top.class' is broken
(error reading Scala signature of /private/tmp/bobobo/./Top.class: malformed Scala signature of Top at 185; reference value P of trait C refers to nonexisting symbol.)
diff --git a/test/files/pos/t1722/Test.scala b/test/files/pos/t1722/Test.scala
index f236d3fdc4..5685d8f40a 100755
--- a/test/files/pos/t1722/Test.scala
+++ b/test/files/pos/t1722/Test.scala
@@ -1,5 +1,5 @@
package t1722
object Test {
- val x = new AnyRef with C
+ val x = new AnyRef with C
}
diff --git a/test/files/pos/t1722/Top.scala b/test/files/pos/t1722/Top.scala
index 4ac52412aa..cec4c531f9 100755
--- a/test/files/pos/t1722/Top.scala
+++ b/test/files/pos/t1722/Top.scala
@@ -3,8 +3,8 @@ package t1722
sealed trait Top
trait C {
private object P extends Top
-}
-/*
+}
+/*
$ scala -e 'new AnyRef with C'
error: error while loading Top, class file '/private/tmp/bobobo/./Top.class' is broken
(error reading Scala signature of /private/tmp/bobobo/./Top.class: malformed Scala signature of Top at 185; reference value P of trait C refers to nonexisting symbol.)
diff --git a/test/files/pos/t1745/J.java b/test/files/pos/t1745/J.java
index 8444eabb24..d95efe8e6f 100644
--- a/test/files/pos/t1745/J.java
+++ b/test/files/pos/t1745/J.java
@@ -1,9 +1,9 @@
-class J {
+class J {
S1 s1;
S2 s2;
-
+
String s = bar(S3.foo(), S3.bar("def"));
-
+
private String bar(String s1, String s2) {
return s1 + s2;
}
diff --git a/test/files/pos/t1751.cmds b/test/files/pos/t1751.cmds
new file mode 100644
index 0000000000..d4a4898ffd
--- /dev/null
+++ b/test/files/pos/t1751.cmds
@@ -0,0 +1,3 @@
+javac SuiteClasses.java
+scalac A2_1.scala
+scalac A1_2.scala
diff --git a/test/files/pos/t1756.scala b/test/files/pos/t1756.scala
index 4f7202114c..7aea525f64 100755
--- a/test/files/pos/t1756.scala
+++ b/test/files/pos/t1756.scala
@@ -1,5 +1,5 @@
-/**
+/**
This is a tricky issue which has to do with the fact that too much conflicting
type information is propagated into a single implicit search, where the intended
solution applies two implicit searches.
@@ -15,7 +15,7 @@ expected type from x+, because the lhs x is still typed as a Poly[A].
This means that the argument of the implicit conversion is typechecked
with expected type A with Poly[A]. And no solution is found.
-To solve this, I added a fallback scheme similar to implicit arguents:
+To solve this, I added a fallback scheme similar to implicit arguments:
When an implicit view that adds a method matching given arguments and result
type fails, try again without the result type.
*/
@@ -35,20 +35,20 @@ class Poly[C <: Ring[C]](val c: C) extends Ring[Poly[C]] {
}
object Test extends Application {
-
+
implicit def coef2poly[C <: Ring[C]](c: C): Poly[C] = new Poly(c)
val a = new A
val x = new Poly(new A)
-
+
println(x+a) // works
println(a+x) // works
-
+
val y = new Poly(new Poly(new A))
-
+
println(x+y*x) // works
println(x*y+x) // works
println(y*x+x) // works
-
+
println(x+x*y) // failed before
}
diff --git a/test/files/pos/t1761.scala b/test/files/pos/t1761.scala
index a3ceeea815..2af7280734 100644
--- a/test/files/pos/t1761.scala
+++ b/test/files/pos/t1761.scala
@@ -3,7 +3,7 @@ import scala.xml._
class Foo {
val elements: Seq[Node] = Nil
val innerTransform: PartialFunction[Elem, String] = {
- case Elem(_, l: String, _, _, _ @ _*) if elements.exists(_.label == l) =>
+ case Elem(_, l: String, _, _, _ @ _*) if elements.exists(_.label == l) =>
l
}
}
diff --git a/test/files/pos/t1782.cmds b/test/files/pos/t1782.cmds
new file mode 100644
index 0000000000..61f3d3788e
--- /dev/null
+++ b/test/files/pos/t1782.cmds
@@ -0,0 +1,2 @@
+javac Ann.java Days.java ImplementedBy.java
+scalac Test_1.scala
diff --git a/test/files/pos/t1798.scala b/test/files/pos/t1798.scala
index 1624e3025e..93df61e844 100644
--- a/test/files/pos/t1798.scala
+++ b/test/files/pos/t1798.scala
@@ -2,7 +2,7 @@ object Foo { private def bar(): Int = 55 }
class Foo(x: Int) { def this() = this(Foo.bar()) }
/*
- * scalac28 a.scala
+ * scalac28 a.scala
a.scala:2: error: method bar cannot be accessed in object Foo
class Foo(x: Int) { def this() = this(Foo.bar()) }
^
diff --git a/test/pending/pos/t1836/J.java b/test/files/pos/t1836/J.java
index a009a59e21..a009a59e21 100644
--- a/test/pending/pos/t1836/J.java
+++ b/test/files/pos/t1836/J.java
diff --git a/test/pending/pos/t1836/S.scala b/test/files/pos/t1836/S.scala
index 88ce1063e9..88ce1063e9 100644
--- a/test/pending/pos/t1836/S.scala
+++ b/test/files/pos/t1836/S.scala
diff --git a/test/files/pos/t1840/J.java b/test/files/pos/t1840/J.java
index fd98b6c4a5..a697596fdd 100644
--- a/test/files/pos/t1840/J.java
+++ b/test/files/pos/t1840/J.java
@@ -1,4 +1,4 @@
package p;
-class J {
- J() {}
+class J {
+ J() {}
}
diff --git a/test/files/pos/t1942.cmds b/test/files/pos/t1942.cmds
new file mode 100644
index 0000000000..c14311042a
--- /dev/null
+++ b/test/files/pos/t1942.cmds
@@ -0,0 +1,2 @@
+scalac A_1.scala
+scalac Test_2.scala
diff --git a/test/pending/pos/t1996.scala b/test/files/pos/t1996.scala
index 2730128196..2730128196 100644
--- a/test/pending/pos/t1996.scala
+++ b/test/files/pos/t1996.scala
diff --git a/test/files/pos/t2023.scala b/test/files/pos/t2023.scala
index 21c6fc96a6..de3e848fbd 100644
--- a/test/files/pos/t2023.scala
+++ b/test/files/pos/t2023.scala
@@ -3,11 +3,11 @@ trait C[A]
object C {
implicit def ipl[A](implicit from: A => Ordered[A]): C[A] = null
}
-
+
object P {
def foo[A](i: A, j: A)(implicit c: C[A]): Unit = ()
}
-
+
class ImplicitChainTest {
def testTrivial: Unit = {
P.foo('0', '9')
diff --git a/test/files/pos/t2060.scala b/test/files/pos/t2060.scala
index 2c701150e4..cf7250f545 100755
--- a/test/files/pos/t2060.scala
+++ b/test/files/pos/t2060.scala
@@ -4,7 +4,7 @@
* line':
*
* val failure = 1.0 + new Op[Int]
- *
+ *
* we reduce the problem to finding a function from Double to
* {+: _ >: Op[Int] <: Any}, that is, a method which takes
* an argument which is an Op[Int] or a supertype thereof.
diff --git a/test/files/pos/t2082.scala b/test/files/pos/t2082.scala
index 3a160612fe..38937d78fb 100755
--- a/test/files/pos/t2082.scala
+++ b/test/files/pos/t2082.scala
@@ -1,10 +1,10 @@
trait Mapper[T <: Mapper[T]]
-trait KeyedMapper[KeyType, T <: KeyedMapper[KeyType, T]] extends Mapper[T]
+trait KeyedMapper[KeyType, T <: KeyedMapper[KeyType, T]] extends Mapper[T]
-trait KeyedMetaMapper[KeyType, T <: KeyedMapper[KeyType, T]]
+trait KeyedMetaMapper[KeyType, T <: KeyedMapper[KeyType, T]]
trait MappedForeignKey[KeyType, Owner <: Mapper[Owner], Other <: KeyedMapper[KeyType, Other]]
@@ -19,19 +19,19 @@ class TestRun extends KeyedMapper[Long, TestRun] with IdPK {
object TestRun extends TestRun with KeyedMetaMapper[Long, TestRun]
class MetaTestSubject extends TestSubject with KeyedMetaMapper[Long, TestSubject]
-object TestSubject extends MetaTestSubject
+object TestSubject extends MetaTestSubject
object Main {
-
+
def oneToOneJoin[PType <: KeyedMapper[Long, PType] with IdPK,
CType <: KeyedMapper[Long, CType] with IdPK,
- CMetaType <: CType with KeyedMetaMapper[Long, CType],
+ CMetaType <: CType with KeyedMetaMapper[Long, CType],
FKType <: MappedForeignKey[Long, PType, CType]]
- (parents: List[PType], metaMapper: CMetaType, keyGetter: (PType) => FKType ):
+ (parents: List[PType], metaMapper: CMetaType, keyGetter: (PType) => FKType ):
Map[Long, CType] = Map.empty
-
+
def callIt {
- oneToOneJoin[TestRun, TestSubject, MetaTestSubject,
+ oneToOneJoin[TestRun, TestSubject, MetaTestSubject,
MappedForeignKey[Long, TestRun, TestSubject]](
List(), TestSubject, (tr: TestRun) => tr.testSubject)
}
diff --git a/test/files/pos/t2133.scala b/test/files/pos/t2133.scala
new file mode 100644
index 0000000000..99bac5c38b
--- /dev/null
+++ b/test/files/pos/t2133.scala
@@ -0,0 +1,18 @@
+trait Foo {
+ object bar {
+ private[this] def fn() = 5
+ }
+}
+
+trait Foo2 {
+ object bip {
+ def fn() = 10
+ }
+}
+
+class Bob extends AnyRef with Foo with Foo2 {
+ import bip._
+ import bar._
+
+ def go() = fn()
+}
diff --git a/test/files/pos/t2261.scala b/test/files/pos/t2261.scala
index aac5c9e0fd..af24234235 100644
--- a/test/files/pos/t2261.scala
+++ b/test/files/pos/t2261.scala
@@ -5,5 +5,5 @@ object Test {
x = List(1,2,3)
// the problem here was that somehow the type variable that was used to infer the type argument for List.apply
// would accumulate several conflicting constraints
- // can't reproduce with
+ // can't reproduce with
 }
\ No newline at end of file
diff --git a/test/files/pos/t2305.scala b/test/files/pos/t2305.scala
new file mode 100644
index 0000000000..d0b103fdba
--- /dev/null
+++ b/test/files/pos/t2305.scala
@@ -0,0 +1,26 @@
+import java.util.ArrayList
+
+trait Bind[Z[_]]
+
+class MySerializable[X] extends java.io.Serializable
+
+object Bind {
+ implicit val JavaArrayListBind: Bind[ArrayList] = new Bind[ArrayList] {}
+ implicit val MySerializableBind: Bind[MySerializable] = new Bind[MySerializable] {}
+}
+
+object works {
+ // this works fine:
+ def runbind(implicit bind: Bind[MySerializable]) {}
+ runbind
+}
+
+object breaks {
+ def runbind(implicit bind: Bind[ArrayList]) {}
+ runbind
+ /*java.lang.AssertionError: assertion failed: java.io.Serializable
+ at scala.Predef$.assert(Predef.scala:107)
+ at scala.tools.nsc.symtab.Types$TypeRef.transform(Types.scala:1417)
+ at scala.tools.nsc.symtab.Types$TypeRef.baseType(Types.scala:1559)
+ */
+}
diff --git a/test/files/pos/t2331.scala b/test/files/pos/t2331.scala
new file mode 100644
index 0000000000..9a15b5c2a9
--- /dev/null
+++ b/test/files/pos/t2331.scala
@@ -0,0 +1,11 @@
+trait C {
+ def m[T]: T
+}
+
+object Test {
+ val o /*: C --> no crash*/ = new C {
+ def m[T]: Nothing /*: T --> no crash*/ = error("omitted")
+ }
+
+ o.m[Nothing]
+}
\ No newline at end of file
diff --git a/test/files/pos/t2413/TestJava.java b/test/files/pos/t2413/TestJava.java
new file mode 100644
index 0000000000..252c01fbc0
--- /dev/null
+++ b/test/files/pos/t2413/TestJava.java
@@ -0,0 +1,7 @@
+package pack;
+
+public class TestJava {
+ protected String repeatParam(String ... items) {
+ return "nothing";
+ }
+}
diff --git a/test/files/pos/t2413/TestScalac.scala b/test/files/pos/t2413/TestScalac.scala
new file mode 100644
index 0000000000..6992a30f2c
--- /dev/null
+++ b/test/files/pos/t2413/TestScalac.scala
@@ -0,0 +1,23 @@
+import pack.TestJava
+
+class Foo extends TestJava {
+
+ // THIS METHOD YIELDS TO CRASH
+/* def foomethod : Option[String] => Unit = {
+ case None =>
+ val path = repeatParam("s","a","b","c")
+ ()
+ case Some(error) =>
+ ()
+ }
+
+ // THIS IS OK
+ def foomethod2 : String = repeatParam("s","a");
+
+ // THIS IS OK
+ val aVal = repeatParam("1","2","3") */
+
+ // THIS YIELDS TO CRASH
+ for (a <- 1 to 4 ; anotherVal = repeatParam("1","2","3"))
+ yield anotherVal
+}
diff --git a/test/files/pos/t2421.scala b/test/files/pos/t2421.scala
index 0d01be29fc..26e485c160 100644
--- a/test/files/pos/t2421.scala
+++ b/test/files/pos/t2421.scala
@@ -7,8 +7,8 @@ object Test {
implicit val forcibleInt: (Int <~< Forcible[Int]) = error("")
def headProxy[P <: Forcible[Int]](implicit w: Int <~< P): P = error("")
-
- headProxy
- // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int,
+
+ headProxy
+ // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int,
// and headProxy's type parameter P cannot be instantiated to Int
} \ No newline at end of file
diff --git a/test/files/pos/t2421_delitedsl.scala b/test/files/pos/t2421_delitedsl.scala
index a05887023a..ad6afa7bd8 100644
--- a/test/files/pos/t2421_delitedsl.scala
+++ b/test/files/pos/t2421_delitedsl.scala
@@ -1,10 +1,10 @@
trait DeliteDSL {
abstract class <~<[-From, +To] extends (From => To)
- implicit def trivial[A]: A <~< A = new (A <~< A) {def apply(x: A) = x}
+ implicit def trivial[A]: A <~< A = new (A <~< A) {def apply(x: A) = x}
trait Forcible[T]
object Forcible {
- def factory[T](f: T => Forcible[T]) = new (T <~< Forcible[T]){def apply(x: T) = f(x)}
+ def factory[T](f: T => Forcible[T]) = new (T <~< Forcible[T]){def apply(x: T) = f(x)}
}
case class DeliteInt(x: Int) extends Forcible[Int]
@@ -22,16 +22,16 @@ trait DeliteDSL {
// If T is already a proxy (it is forcible), the compiler should use
// forcibleIdentity to deduce that P=T. If T is Int, the compiler
// should use intToForcible to deduce that P=DeliteInt.
- //
+ //
// Without this feature, the user must write 'xs.proxyOfFirst[DeliteInt]',
// with the feature they can write 'xs.proxyOfFirst', which is shorter and
// avoids exposing internal DELITE types to the world.
object Test {
- val x = new DeliteCollection(List(1,2,3)).headProxy
+ val x = new DeliteCollection(List(1,2,3)).headProxy
// inferred: val x: Forcible[Int] = new DeliteCollection[Int](List.apply[Int](1, 2, 3)).headProxy[Forcible[Int]](forcibleInt);
- val xAlready = new DeliteCollection(List(DeliteInt(1),DeliteInt(2),DeliteInt(3))).headProxy
+ val xAlready = new DeliteCollection(List(DeliteInt(1),DeliteInt(2),DeliteInt(3))).headProxy
// inferred: val xAlready: DeliteInt = new DeliteCollection[DeliteInt](List.apply[DeliteInt](DeliteInt(1), DeliteInt(2), DeliteInt(3))).headProxy[DeliteInt](trivial[DeliteInt]);
}
} \ No newline at end of file
diff --git a/test/files/pos/t2421b.scala b/test/files/pos/t2421b.scala
new file mode 100644
index 0000000000..8b848abb75
--- /dev/null
+++ b/test/files/pos/t2421b.scala
@@ -0,0 +1,19 @@
+object Test {
+ class A
+ class B
+ class C
+ class F[X]
+
+ def f(implicit aa: F[A]) = println(aa)
+
+ implicit def a : F[A] = new F[A]()
+ implicit def b[X <: B] = new F[X]()
+
+ f
+}
+/* bug:
+error: ambiguous implicit values:
+ both method b in object Test1 of type [X <: Test1.B]Test1.F[X]
+ and method a in object Test1 of type => Test1.F[Test1.A]
+ match expected type Test1.F[Test1.A]
+*/
diff --git a/test/files/pos/t2421c.scala b/test/files/pos/t2421c.scala
new file mode 100644
index 0000000000..755e6a39f0
--- /dev/null
+++ b/test/files/pos/t2421c.scala
@@ -0,0 +1,17 @@
+object Test {
+ class A
+ class B
+ class C
+ class F[X]
+
+ def f(implicit aa: F[A]) = println(aa)
+
+ implicit def a : F[A] = new F[A]()
+
+ // generalised from t2421b to verify we check enough
+ class G[X]
+ implicit def g[X] = new G[X]()
+ implicit def b[X <: B](implicit mx: G[X]) = new F[X]()
+
+ f
+} \ No newline at end of file
diff --git a/test/files/pos/t2429.scala b/test/files/pos/t2429.scala
index 9b9cb89de7..3ea3f9e2a5 100755
--- a/test/files/pos/t2429.scala
+++ b/test/files/pos/t2429.scala
@@ -1,8 +1,8 @@
object Msg {
trait T
-
+
trait TSeq
-
+
object TSeq {
implicit def fromSeq(s: Seq[T]): TSeq = error("stub")
}
diff --git a/test/files/pos/t2433/A.java b/test/files/pos/t2433/A.java
new file mode 100755
index 0000000000..8ae23ff816
--- /dev/null
+++ b/test/files/pos/t2433/A.java
@@ -0,0 +1,4 @@
+class A223 extends B223.Inner {
+ static class Inner {}
+ void foo() {}
+} \ No newline at end of file
diff --git a/test/files/pos/t2433/B.java b/test/files/pos/t2433/B.java
new file mode 100755
index 0000000000..d0d5580ffb
--- /dev/null
+++ b/test/files/pos/t2433/B.java
@@ -0,0 +1,4 @@
+class B223 {
+ static class Inner {}
+ void m(A223.Inner x) {}
+} \ No newline at end of file
diff --git a/test/files/pos/t2433/Test.scala b/test/files/pos/t2433/Test.scala
new file mode 100755
index 0000000000..0e07231e07
--- /dev/null
+++ b/test/files/pos/t2433/Test.scala
@@ -0,0 +1,3 @@
+object Test {
+ (new A223).foo()
+}
diff --git a/test/files/pos/t2444.scala b/test/files/pos/t2444.scala
index 6f07dcf92d..a052270196 100644
--- a/test/files/pos/t2444.scala
+++ b/test/files/pos/t2444.scala
@@ -2,14 +2,14 @@ object Test {
trait Foo
- class Bar {
+ class Bar {
object baz extends Foo
}
- def frob[P1, P2<:Foo](f:P1 => P2) = ()
+ def frob[P1, P2<:Foo](f:P1 => P2) = ()
def main(args:Array[String]) : Unit = {
- frob((p:Bar) => p.baz)
+ frob((p:Bar) => p.baz)
}
}
diff --git a/test/files/pos/t2454.scala b/test/files/pos/t2454.scala
new file mode 100644
index 0000000000..00f2e6f677
--- /dev/null
+++ b/test/files/pos/t2454.scala
@@ -0,0 +1,25 @@
+package am;
+
+trait One[M[_]] {
+ val x : Int
+}
+
+trait Two[M[_,_]] {
+ val x : Int
+}
+
+object Test {
+ // Works.
+ val x = new Two[Map] {
+ val x = 5
+ }
+
+ val o = new One[java.util.List] {
+ val x = 1
+ }
+
+ // Does not work
+ val y = new Two[java.util.concurrent.ConcurrentHashMap] {
+ val x = 3
+ }
+}
diff --git a/test/files/pos/t2464.cmds b/test/files/pos/t2464.cmds
new file mode 100644
index 0000000000..ca733ef23d
--- /dev/null
+++ b/test/files/pos/t2464.cmds
@@ -0,0 +1,3 @@
+javac JavaOne.java
+scalac ScalaOne_1.scala
+scalac t2464_2.scala
diff --git a/test/files/pos/t2464/JavaOne.java b/test/files/pos/t2464/JavaOne.java
new file mode 100644
index 0000000000..ff36868a0e
--- /dev/null
+++ b/test/files/pos/t2464/JavaOne.java
@@ -0,0 +1,5 @@
+class ClassTwo {
+ public static class Child {
+ public void func2() {return ;}
+ }
+}
diff --git a/test/files/pos/t2464/ScalaOne_1.scala b/test/files/pos/t2464/ScalaOne_1.scala
new file mode 100644
index 0000000000..0271b9ce72
--- /dev/null
+++ b/test/files/pos/t2464/ScalaOne_1.scala
@@ -0,0 +1,6 @@
+class ScalaClassOne extends ClassTwo.Child {
+ def func4() = {
+ func2
+ }
+}
+
diff --git a/test/files/pos/t2464/t2464_2.scala b/test/files/pos/t2464/t2464_2.scala
new file mode 100644
index 0000000000..13a52c952b
--- /dev/null
+++ b/test/files/pos/t2464/t2464_2.scala
@@ -0,0 +1,3 @@
+object Test {
+ val c1 = new ScalaClassOne
+}
diff --git a/test/files/pos/t2484.scala b/test/files/pos/t2484.scala
new file mode 100755
index 0000000000..20c51b09a1
--- /dev/null
+++ b/test/files/pos/t2484.scala
@@ -0,0 +1,17 @@
+class Admin extends javax.swing.JApplet {
+ val jScrollPane = new javax.swing.JScrollPane (null, 0, 0)
+ def bug2484: Unit = {
+ scala.concurrent.ops.spawn {jScrollPane.synchronized {
+ def someFunction () = {}
+ //scala.concurrent.ops.spawn {someFunction ()}
+ jScrollPane.addComponentListener (new java.awt.event.ComponentAdapter {override def componentShown (e: java.awt.event.ComponentEvent) = {
+ someFunction (); jScrollPane.removeComponentListener (this)}})
+ }}
+ }
+}
+// t2630.scala
+object Test {
+ def meh(xs: List[Any]) {
+ xs map { x => (new AnyRef {}) }
+ }
+}
diff --git a/test/files/pos/t2504.scala b/test/files/pos/t2504.scala
index 67f8226852..0abe7dd13e 100755
--- a/test/files/pos/t2504.scala
+++ b/test/files/pos/t2504.scala
@@ -1,5 +1,5 @@
object Test {
val ys: Iterable[_] = Array("abc")
- val xs = Array("abc")
+ val xs = Array("abc")
xs sameElements Array("abc")
}
diff --git a/test/files/pos/t2545.scala b/test/files/pos/t2545.scala
index 6ad994223c..b4238fb718 100755
--- a/test/files/pos/t2545.scala
+++ b/test/files/pos/t2545.scala
@@ -1,6 +1,6 @@
trait Frog[T] {
- def hello: T
- def size: Int
+ def hello: T
+ def size: Int
}
trait OnlyWithFrogs {
diff --git a/test/files/pos/t2569/Child.scala b/test/files/pos/t2569/Child.scala
index 64f4dc172f..3d7f4248b5 100644
--- a/test/files/pos/t2569/Child.scala
+++ b/test/files/pos/t2569/Child.scala
@@ -1,9 +1,9 @@
package varargs
-
+
class Child extends Parent {
-
+
override def concatenate(strings: String*): String =
strings map("\"" + _ + "\"") mkString("(", ", ", ")")
-
+
}
diff --git a/test/files/pos/t2569/Parent.java b/test/files/pos/t2569/Parent.java
index 89421becbd..133f2ee567 100644
--- a/test/files/pos/t2569/Parent.java
+++ b/test/files/pos/t2569/Parent.java
@@ -1,7 +1,7 @@
package varargs;
-
+
public class Parent {
-
+
public String concatenate(String... strings) {
StringBuilder builder = new StringBuilder();
for (String s : strings) {
@@ -9,5 +9,5 @@ package varargs;
}
return builder.toString();
}
-
+
}
diff --git a/test/files/pos/t2610.scala b/test/files/pos/t2610.scala
new file mode 100644
index 0000000000..8dd4cde66e
--- /dev/null
+++ b/test/files/pos/t2610.scala
@@ -0,0 +1,17 @@
+package mada; package defects; package tests
+
+package object bbb {
+ def bar = ()
+ aaa.foo // value foo is not a member of package mada.defects.tests.aaa
+}
+
+package object aaa {
+ def foo = ()
+}
+
+/* compiles successfully if placed here..
+package object bbb {
+ def bar = ()
+ aaa.foo // value foo is not a member of package mada.defects.tests.aaa
+}
+*/ \ No newline at end of file
diff --git a/test/files/pos/t2619.scala b/test/files/pos/t2619.scala
new file mode 100644
index 0000000000..565bc9572b
--- /dev/null
+++ b/test/files/pos/t2619.scala
@@ -0,0 +1,80 @@
+abstract class F {
+ final def apply(x: Int): AnyRef = null
+}
+abstract class AbstractModule {
+ def as: List[AnyRef]
+ def ms: List[AbstractModule]
+ def fs: List[F] = Nil
+ def rs(x: Int): List[AnyRef] = fs.map(_(x))
+}
+abstract class ModuleType1 extends AbstractModule {}
+abstract class ModuleType2 extends AbstractModule {}
+
+object ModuleAE extends ModuleType1 {
+ def as = Nil
+ def ms = Nil
+}
+object ModuleAF extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleAE)
+}
+object ModuleAG extends ModuleType1 {
+ def as = List("")
+ def ms = Nil
+}
+object ModuleAI extends ModuleType1 {
+ def as = Nil
+ def ms = List(ModuleAE)
+}
+object ModuleAK extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleAF)
+}
+object ModuleAL extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleAG,
+ ModuleAI
+ )
+}
+object ModuleAM extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleAL,
+ ModuleAE
+ ) ::: List(ModuleAK)
+}
+object ModuleBE extends ModuleType1 {
+ def as = Nil
+ def ms = Nil
+}
+object ModuleBF extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleBE)
+}
+object ModuleBG extends ModuleType1 {
+ def as = List("")
+ def ms = Nil
+}
+object ModuleBI extends ModuleType1 {
+ def as = Nil
+ def ms = List(ModuleBE)
+}
+object ModuleBK extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleBF)
+}
+object ModuleBL extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleBG,
+ ModuleBI
+ )
+}
+object ModuleBM extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleBL,
+ ModuleBE
+ ) ::: List(ModuleBK)
+} \ No newline at end of file
diff --git a/test/files/pos/t2624.scala b/test/files/pos/t2624.scala
new file mode 100644
index 0000000000..76f0e30369
--- /dev/null
+++ b/test/files/pos/t2624.scala
@@ -0,0 +1,4 @@
+object Test {
+ List(1).map(identity(_))
+ List(1).map(identity) // this didn't typecheck before the fix
+}
diff --git a/test/files/pos/t2635.scala b/test/files/pos/t2635.scala
new file mode 100755
index 0000000000..7cd5531356
--- /dev/null
+++ b/test/files/pos/t2635.scala
@@ -0,0 +1,16 @@
+abstract class Base
+
+object Test
+{
+ def run(c: Class[_ <: Base]): Unit = {
+ }
+
+ def main(args: Array[String]): Unit =
+ {
+ val sc: Option[Class[_ <: Base]] = Some(classOf[Base])
+ sc match {
+ case Some(c) => run(c)
+ case None =>
+ }
+ }
+}
diff --git a/test/files/pos/t2660.scala b/test/files/pos/t2660.scala
new file mode 100644
index 0000000000..b1908b201b
--- /dev/null
+++ b/test/files/pos/t2660.scala
@@ -0,0 +1,25 @@
+package hoho
+
+class G
+
+class H extends G
+
+class A[T](x: T) {
+
+ def this(y: G, z: T) = {
+ this(z)
+ print(1)
+ }
+
+ def this(z: H, h: T) = {
+ this(h)
+ print(2)
+ }
+}
+
+object T {
+ def main(args: Array[String]) {
+ implicit def g2h(g: G): H = new H
+ new A(new H, 23)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t2664.scala b/test/files/pos/t2664.scala
new file mode 100644
index 0000000000..7b667d0106
--- /dev/null
+++ b/test/files/pos/t2664.scala
@@ -0,0 +1,9 @@
+package pkg1 {
+ class C {
+ private[pkg1] def foo: Int = 1
+ }
+
+ trait T extends C {
+ private[pkg1] abstract override def foo = super.foo + 1
+ }
+}
diff --git a/test/files/pos/t2665.scala b/test/files/pos/t2665.scala
new file mode 100644
index 0000000000..108daf509a
--- /dev/null
+++ b/test/files/pos/t2665.scala
@@ -0,0 +1,3 @@
+object Test {
+ val x: Unit = Array("")
+} \ No newline at end of file
diff --git a/test/files/pos/t2667.scala b/test/files/pos/t2667.scala
new file mode 100644
index 0000000000..b214cc7f37
--- /dev/null
+++ b/test/files/pos/t2667.scala
@@ -0,0 +1,6 @@
+object A {
+ def foo(x: Int, y: Int*): Int = 45
+ def foo[T](x: T*): Int = 55
+
+ val x: Unit = foo(23, 23f)
+} \ No newline at end of file
diff --git a/test/files/pos/t2669.scala b/test/files/pos/t2669.scala
new file mode 100644
index 0000000000..e34f08f0f5
--- /dev/null
+++ b/test/files/pos/t2669.scala
@@ -0,0 +1,28 @@
+// #2629, #2639, #2669
+object Test2669 {
+
+ def test[T](l: java.util.ArrayList[_ <: T]) = 1
+ test(new java.util.ArrayList[String]())
+
+}
+
+import java.util.ArrayList
+
+object Test2629 {
+ def main(args: Array[String]): Unit = {
+ val l = new ArrayList[String](1)
+ val m = new ArrayList(l)
+
+ println(l.size)
+ println(m.size)
+ }
+}
+
+
+import java.util.Vector
+
+// scalac cannot detect lack of type params, but then throws AssertionError later:
+class TVector2639 {
+ val b = new Vector // this line passed without any error being detected
+ val a = new Vector(1) // this line caused scalac to throw an AssertionError
+}
diff --git a/test/files/pos/t2673.scala b/test/files/pos/t2673.scala
new file mode 100644
index 0000000000..7f232df3f5
--- /dev/null
+++ b/test/files/pos/t2673.scala
@@ -0,0 +1,4 @@
+object Test {
+ val xs = Array(1, 2, 3)
+ (xs, xs).zipped map (_ + _)
+} \ No newline at end of file
diff --git a/test/files/pos/t2683.scala b/test/files/pos/t2683.scala
new file mode 100755
index 0000000000..4ba34b554a
--- /dev/null
+++ b/test/files/pos/t2683.scala
@@ -0,0 +1,7 @@
+class A
+class B extends A
+
+object Test {
+ val c: Class[_ <: A] = Class.forName("B").asSubclass(classOf[A])
+ val x: Option[Class[_ <: A]] = Some(3).map { case _ => c }
+}
diff --git a/test/files/pos/t2691.scala b/test/files/pos/t2691.scala
new file mode 100644
index 0000000000..3c0d193e9a
--- /dev/null
+++ b/test/files/pos/t2691.scala
@@ -0,0 +1,9 @@
+object Breakdown {
+ def unapplySeq(x: Int): Some[List[String]] = Some(List("", "there"))
+}
+object Test {
+ 42 match {
+ case Breakdown("") => // needed to trigger bug
+ case Breakdown("", who) => println ("hello " + who)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t2698.scala b/test/files/pos/t2698.scala
new file mode 100644
index 0000000000..0e2662de61
--- /dev/null
+++ b/test/files/pos/t2698.scala
@@ -0,0 +1,10 @@
+import scala.collection._
+import scala.util.regexp._
+
+abstract class S2 {
+ val lang: WordExp
+ type __labelT = lang._labelT
+
+ var deltaq: Array[__labelT] = _
+ def delta1 = immutable.Map(deltaq.zipWithIndex: _*)
+}
diff --git a/test/files/pos/t2708.scala b/test/files/pos/t2708.scala
new file mode 100644
index 0000000000..19485bf4ce
--- /dev/null
+++ b/test/files/pos/t2708.scala
@@ -0,0 +1 @@
+class Foo(@volatile var v: Int)
diff --git a/test/files/pos/t2726.cmds b/test/files/pos/t2726.cmds
new file mode 100644
index 0000000000..5fcb18bfbb
--- /dev/null
+++ b/test/files/pos/t2726.cmds
@@ -0,0 +1,2 @@
+scalac SQLBuilder_1.scala
+scalac test_2.scala
diff --git a/test/files/pos/t2726/SQLBuilder_1.scala b/test/files/pos/t2726/SQLBuilder_1.scala
new file mode 100644
index 0000000000..7b3e3d8322
--- /dev/null
+++ b/test/files/pos/t2726/SQLBuilder_1.scala
@@ -0,0 +1,7 @@
+class SQLBuilder extends SQLBuilder.Segment
+
+object SQLBuilder {
+ trait Segment
+}
+
+
diff --git a/test/files/pos/t2726/test_2.scala b/test/files/pos/t2726/test_2.scala
new file mode 100644
index 0000000000..e738143aeb
--- /dev/null
+++ b/test/files/pos/t2726/test_2.scala
@@ -0,0 +1,3 @@
+object SQuery2Test {
+ new SQLBuilder
+}
diff --git a/test/files/pos/t2741/2741-1.scala b/test/files/pos/t2741/2741-1.scala
new file mode 100644
index 0000000000..7a7ef412ae
--- /dev/null
+++ b/test/files/pos/t2741/2741-1.scala
@@ -0,0 +1,13 @@
+sealed trait Kleisli[M[_], A, B]
+
+trait PartialApplyKA[T[_[_], _, _], M[_], A] {
+ type Apply[B] = T[M, A, B]
+}
+
+trait MA[M[_], A]
+
+trait MAs {
+ val a: MA[PartialApplyKA[Kleisli, List, String]#Apply, Int] = null
+}
+
+object Scalaz extends MAs
diff --git a/test/files/pos/t2741/2741-2.scala b/test/files/pos/t2741/2741-2.scala
new file mode 100644
index 0000000000..d17c76efb2
--- /dev/null
+++ b/test/files/pos/t2741/2741-2.scala
@@ -0,0 +1,5 @@
+// object Test compiles jointly, but not separately.
+object Test {
+ import Scalaz._
+ Scalaz.a
+} \ No newline at end of file
diff --git a/test/files/pos/t2794.scala b/test/files/pos/t2794.scala
new file mode 100644
index 0000000000..a17edf8cb3
--- /dev/null
+++ b/test/files/pos/t2794.scala
@@ -0,0 +1,9 @@
+class Key[T]
+
+class Entry[T](val k: Key[T], val v: T)
+
+object Entry {
+
+ def makeDefault[T >: Null <: AnyRef] = new Entry[T](new Key[T], null: T)
+
+}
diff --git a/test/files/pos/t2795.scala b/test/files/pos/t2795.scala
new file mode 100644
index 0000000000..a4e1b7db83
--- /dev/null
+++ b/test/files/pos/t2795.scala
@@ -0,0 +1,17 @@
+package bug1
+
+trait Element[T] {
+}
+
+trait Config {
+ type T <: Element[T]
+ implicit val m: ClassManifest[T]
+ // XXX Following works fine:
+ // type T <: Element[_]
+}
+
+trait Transform { self: Config =>
+ def processBlock(block: Array[T]): Unit = {
+ var X = new Array[T](1)
+ }
+}
diff --git a/test/files/pos/t2797.scala b/test/files/pos/t2797.scala
new file mode 100644
index 0000000000..4323664e91
--- /dev/null
+++ b/test/files/pos/t2797.scala
@@ -0,0 +1,9 @@
+class MyVector[A] {
+ def map[B](f: A => B): MyVector[B] = error("")
+}
+
+object Test {
+ def unzip[B, C](_this: MyVector[(B, C)]): (MyVector[B], MyVector[C]) = {
+ (_this.map{ bc => bc._1 }, _this.map{ bc => bc._2 })
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t2799.flags b/test/files/pos/t2799.flags
new file mode 100644
index 0000000000..d1b831ea87
--- /dev/null
+++ b/test/files/pos/t2799.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t2799.scala b/test/files/pos/t2799.scala
new file mode 100644
index 0000000000..fe93c0e301
--- /dev/null
+++ b/test/files/pos/t2799.scala
@@ -0,0 +1 @@
+@deprecated("hi mom") case class Bob ()
diff --git a/test/files/pos/t2809.scala b/test/files/pos/t2809.scala
new file mode 100644
index 0000000000..1f68b0b07a
--- /dev/null
+++ b/test/files/pos/t2809.scala
@@ -0,0 +1,20 @@
+package p1 {
+ abstract class T1 {
+ protected def bug(p: Int = 1): Int // without 'protected' compiles fine
+ }
+}
+package p2 { // all being in the same package compiles fine
+ import p1._
+ abstract class T2 extends T1 {
+ class A {
+ bug()
+ }
+ }
+
+ abstract class T3 extends T2 {
+ class A {
+ bug()
+ }
+ }
+}
+
diff --git a/test/files/pos/t2810.scala b/test/files/pos/t2810.scala
new file mode 100644
index 0000000000..c85eca164a
--- /dev/null
+++ b/test/files/pos/t2810.scala
@@ -0,0 +1,8 @@
+
+
+
+
+object Test {
+ val closeable1: { def close(): Unit } = new scala.io.Source { val iter: Iterator[Char] = "".iterator }
+ val closeable2: { def close(): Unit } = new java.io.Closeable { def close() = {} }
+}
diff --git a/test/files/pos/t2868.cmds b/test/files/pos/t2868.cmds
new file mode 100644
index 0000000000..ed8124a9e0
--- /dev/null
+++ b/test/files/pos/t2868.cmds
@@ -0,0 +1,3 @@
+javac Jann.java Nest.java
+scalac pick_1.scala
+scalac test_2.scala
diff --git a/test/files/pos/t2868/Jann.java b/test/files/pos/t2868/Jann.java
new file mode 100644
index 0000000000..f5b68de7b0
--- /dev/null
+++ b/test/files/pos/t2868/Jann.java
@@ -0,0 +1,5 @@
+public @interface Jann {
+ public String str();
+ public Nest inn();
+ public int[] arr();
+}
diff --git a/test/files/pos/t2868/Nest.java b/test/files/pos/t2868/Nest.java
new file mode 100644
index 0000000000..53652291ad
--- /dev/null
+++ b/test/files/pos/t2868/Nest.java
@@ -0,0 +1,3 @@
+public @interface Nest {
+ public int value();
+}
diff --git a/test/files/pos/t2868/pick_1.scala b/test/files/pos/t2868/pick_1.scala
new file mode 100644
index 0000000000..e91728ec2f
--- /dev/null
+++ b/test/files/pos/t2868/pick_1.scala
@@ -0,0 +1,7 @@
+class ann(s: String) extends StaticAnnotation
+class pick {
+ final val s = "bang!"
+ @ann("bang!") def foo = 1
+ @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def bar = 2
+ @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def baz = 3
+}
diff --git a/test/files/pos/t2868/test_2.scala b/test/files/pos/t2868/test_2.scala
new file mode 100644
index 0000000000..f11ef0fae2
--- /dev/null
+++ b/test/files/pos/t2868/test_2.scala
@@ -0,0 +1,6 @@
+class test {
+ val l = (new pick).s
+ val u = (new pick).foo
+ val c = (new pick).bar
+ val k = (new pick).baz
+}
diff --git a/test/files/pos/t2913.scala b/test/files/pos/t2913.scala
new file mode 100755
index 0000000000..ee86b9e402
--- /dev/null
+++ b/test/files/pos/t2913.scala
@@ -0,0 +1,53 @@
+class A {
+ def foo(a: Int) = 0
+}
+
+class RichA {
+ def foo(a: String) = 0
+ def foo(a: String, b: String) = 0
+ def foo() = 0
+}
+
+object Test {
+
+ implicit def AToRichA(a: A) = new RichA
+
+ val a = new A
+ a.foo()
+ a.foo(1)
+
+ a.foo("") // Without implicits, a type error regarding invalid argument types is generated at `""`. This is
+ // the same position as an argument, so the 'second try' typing with an Implicit View is tried,
+ // and AToRichA(a).foo("") is found.
+ //
+ // My reading of the spec "7.3 Views" is that `a.foo` denotes a member of `a`, so the view should
+ // not be triggered.
+ //
+ // But perhaps the implementation was changed to address this; see https://lampsvn.epfl.ch/trac/scala/ticket/1756
+
+ a.foo("a", "b") // Without implicits, a type error regarding invalid arity is generated at `foo(<error>"", "")`.
+ // Typers#tryTypedApply:3274 only checks whether the error is at the same position as `foo`, `"a"`, or `"b"`.
+ // None of these po
+}
+
+// t0851 is essentially the same:
+object test1 {
+ case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){
+ def apply(t : T) = (s:T2) => f(t,s)
+ def apply(p : (T,T2)) = f(p._1,p._2)
+ }
+ implicit def g[T](f : (T,String) => String) = Foo(f)
+ def main(args : Array[String]) : Unit = {
+ val f = (x:Int,s:String) => s + x
+ println(f(1))
+ ()
+ }
+}
+object Main {
+ def main(args : Array[String]) {
+ val fn = (a : Int, str : String) => "a: " + a + ", str: " + str
+ implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null)
+ println(fn(1))
+ ()
+ }
+}
diff --git a/test/files/pos/t294.cmds b/test/files/pos/t294.cmds
new file mode 100644
index 0000000000..62c9a5a068
--- /dev/null
+++ b/test/files/pos/t294.cmds
@@ -0,0 +1,3 @@
+javac Ann.java Ann2.java
+scalac Test_1.scala
+scalac Test_2.scala
diff --git a/test/files/pos/t2940/Cycle.java b/test/files/pos/t2940/Cycle.java
new file mode 100644
index 0000000000..eef6c23b5e
--- /dev/null
+++ b/test/files/pos/t2940/Cycle.java
@@ -0,0 +1,3 @@
+public interface Cycle<T extends Cycle<?>> {
+ void doStuff();
+} \ No newline at end of file
diff --git a/test/files/pos/t2940/Error.scala b/test/files/pos/t2940/Error.scala
new file mode 100644
index 0000000000..7c600667f3
--- /dev/null
+++ b/test/files/pos/t2940/Error.scala
@@ -0,0 +1,12 @@
+abstract class Error {
+ val c: Cycle[_]
+}
+
+object Test {
+ trait Quux[T] extends Cycle[Quux[T]]
+ val x = new Quux[Int] { def doStuff() { } }
+
+ def main(args: Array[String]): Unit = {
+
+ }
+}
diff --git a/test/files/pos/t2956/BeanDefinitionVisitor.java b/test/files/pos/t2956/BeanDefinitionVisitor.java
new file mode 100644
index 0000000000..2ff5daa253
--- /dev/null
+++ b/test/files/pos/t2956/BeanDefinitionVisitor.java
@@ -0,0 +1,6 @@
+import java.util.Map;
+public class BeanDefinitionVisitor {
+ @SuppressWarnings("unchecked")
+ protected void visitMap(Map<?, ?> mapVal) {
+ }
+}
diff --git a/test/files/pos/t2956/t2956.scala b/test/files/pos/t2956/t2956.scala
new file mode 100755
index 0000000000..eb6e817465
--- /dev/null
+++ b/test/files/pos/t2956/t2956.scala
@@ -0,0 +1,7 @@
+import scala.collection.JavaConversions._
+
+class Outer {
+ protected class Inner extends BeanDefinitionVisitor {
+ protected def visitMap(mapVal: Map[_, _]): Unit = ()
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t2994a.scala b/test/files/pos/t2994a.scala
new file mode 100644
index 0000000000..f2d57c34ca
--- /dev/null
+++ b/test/files/pos/t2994a.scala
@@ -0,0 +1,27 @@
+object Naturals {
+ trait NAT {
+ type a[s[_ <: NAT] <: NAT, z <: NAT] <: NAT
+ type v = a[SUCC, ZERO]
+ }
+ final class ZERO extends NAT {
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = z
+ }
+ final class SUCC[n <: NAT] extends NAT {
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = s[n#a[s, z]]
+ }
+ type _0 = ZERO
+ type _1 = SUCC[_0]
+ type _2 = SUCC[_1]
+ type _3 = SUCC[_2]
+ type _4 = SUCC[_3]
+ type _5 = SUCC[_4]
+ type _6 = SUCC[_5]
+
+
+ // crashes scala-2.8.0 beta1
+ trait MUL[n <: NAT, m <: NAT] extends NAT {
+ trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] }
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z]
+ }
+
+} \ No newline at end of file
diff --git a/test/files/pos/t2994b.scala b/test/files/pos/t2994b.scala
new file mode 100644
index 0000000000..c9d9cc812b
--- /dev/null
+++ b/test/files/pos/t2994b.scala
@@ -0,0 +1,7 @@
+object Test {
+ trait Bar[X[_]]
+ trait Baz[S[_] <: Bar[S]] {
+ type Apply[T]
+ }
+ trait Foo[V[_] <: Bar[V]] extends Bar[Baz[V]#Apply]
+} \ No newline at end of file
diff --git a/test/files/pos/t3037.scala b/test/files/pos/t3037.scala
new file mode 100644
index 0000000000..b71ffe0418
--- /dev/null
+++ b/test/files/pos/t3037.scala
@@ -0,0 +1,13 @@
+package test
+
+object A {
+ println(("a" match {
+ case "a" => 1
+ case _ => "a"
+ }).asInstanceOf[Object])
+ def foo[T](x: T) = x
+ var x: Int = 1
+ var y: Long = 1L
+ x = foo(x)
+ y = foo(y)
+}
diff --git a/test/files/pos/t3071.scala b/test/files/pos/t3071.scala
new file mode 100644
index 0000000000..7e14432941
--- /dev/null
+++ b/test/files/pos/t3071.scala
@@ -0,0 +1,7 @@
+class A (val i: Int) {
+ def copy (i: Int = this.i): A = new A(i)
+}
+
+class B (val j: Int) extends A(1) {
+ override def copy (j: Int = this.j): B = new B(j)
+}
diff --git a/test/files/pos/t3076/C2.scala b/test/files/pos/t3076/C2.scala
new file mode 100644
index 0000000000..d08f9ee81d
--- /dev/null
+++ b/test/files/pos/t3076/C2.scala
@@ -0,0 +1,4 @@
+class C2 {
+ def m1() { new T { } }
+ def m2() { new T { } }
+}
diff --git a/test/files/pos/t3076/T.scala b/test/files/pos/t3076/T.scala
new file mode 100644
index 0000000000..b710a29343
--- /dev/null
+++ b/test/files/pos/t3076/T.scala
@@ -0,0 +1,2 @@
+trait T { private val z = new C1 }
+private class C1
diff --git a/test/files/pos/t3079.scala b/test/files/pos/t3079.scala
new file mode 100644
index 0000000000..4bead34ff3
--- /dev/null
+++ b/test/files/pos/t3079.scala
@@ -0,0 +1,17 @@
+sealed trait Identity[A] {
+ val value: A
+}
+
+trait Coerce[A, B] {
+ def unwrap: (A => B)
+}
+
+object Coerce {
+ def IdentityCoerce[B] = new Coerce[Identity[B], B] {
+ // java.lang.Error: A in trait Identity cannot be instantiated from ?x$1.type
+ def unwrap = _.value
+
+ // Providing the type of _ works around the problem.
+ //def unwrap = (_: Identity[B]).value
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3108.scala b/test/files/pos/t3108.scala
new file mode 100644
index 0000000000..6a1da73220
--- /dev/null
+++ b/test/files/pos/t3108.scala
@@ -0,0 +1,5 @@
+object A {
+ val a: NotNull = ""
+ val b: NotNull = 41
+}
+
diff --git a/test/files/pos/t3152.scala b/test/files/pos/t3152.scala
new file mode 100644
index 0000000000..a20428dbee
--- /dev/null
+++ b/test/files/pos/t3152.scala
@@ -0,0 +1,20 @@
+trait Applicative[M[_]]
+
+sealed trait MA[M[_], A] {
+ def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
+ // def sequence3[N[_], B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
+}
+
+object test {
+ implicit def ListMA[A](l: List[A]): MA[List, A] = error("stub")
+ implicit val ao: Applicative[Option] = error("stub")
+
+ /* This compiles OK:
+ (Nil: List[Option[Int]]).sequence3(): Option[List[Int]]
+ */
+
+ // BUG: error: immutable is not an enclosing class
+ // !!! No line number is reported with the error
+ (Nil: List[Option[Int]]).sequence: Option[List[Int]]
+ (List[Option[Int]]()).sequence: Option[List[Int]]
+} \ No newline at end of file
diff --git a/test/files/pos/t3174.scala b/test/files/pos/t3174.scala
new file mode 100755
index 0000000000..c3d90a4946
--- /dev/null
+++ b/test/files/pos/t3174.scala
@@ -0,0 +1,14 @@
+object test {
+ def method() {
+ class Foo extends AnyRef {
+ object Color {
+ object Blue
+ }
+
+ class Board {
+ val grid = Color.Blue
+ }
+ }
+ new Foo
+ }
+ }
diff --git a/test/files/pos/t3174b.scala b/test/files/pos/t3174b.scala
new file mode 100755
index 0000000000..002c4f090f
--- /dev/null
+++ b/test/files/pos/t3174b.scala
@@ -0,0 +1,12 @@
+trait Foo[X] { def foo : Map[String,Foo[X]] }
+
+object Test {
+ def f[T]() : Foo[T] = {
+ class Anon extends Foo[T] {
+ var foo: Map[String, Foo[T]] = Map[String,Foo[T]]()
+ //def foo = Map[String,Foo[T]]()
+ //def foo_=(x: Map[String,Foo[T]]) {}
+ }
+ new Anon
+ }
+}
diff --git a/test/files/pos/t3177.scala b/test/files/pos/t3177.scala
new file mode 100644
index 0000000000..21893c9422
--- /dev/null
+++ b/test/files/pos/t3177.scala
@@ -0,0 +1,39 @@
+trait InvariantFunctor[F[_]] {
+ def xmap[A, B](ma: F[A], f: A => B, g: B => A): F[B]
+}
+
+object InvariantFunctor {
+ import Endo._
+
+ implicit val EndoInvariantFunctor = new InvariantFunctor[Endo] {
+ def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))
+ }
+
+ // The definition above fails with:
+ // anon-type.scala:9: error: not found: value b
+ // def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))
+ // ^
+ // anon-type.scala:8: error: not found: type $anon
+ // implicit val EndoInvariantFunctor = new InvariantFunctor[Endo] {
+ // ^
+
+
+ // These both work:
+ // implicit val EndoInvariantFunctorAscribed: InvariantFunctor[Endo] = new InvariantFunctor[Endo] {
+ // def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = (b: B) => f(ma(g(b)))
+ // }
+ //
+ // implicit val EndoInvariantFunctorStubbed = new InvariantFunctor[Endo] {
+ // def xmap[A, B](ma: Endo[A], f: A => B, g: B => A): Endo[B] = error("stub")
+ // }
+}
+
+trait Endo[X]
+
+object Endo {
+ implicit def EndoTo[A](f: A => A): Endo[A] = new Endo[A] {
+ def apply(a: A) = f(a)
+ }
+
+ implicit def EndoFrom[A](e: Endo[A]): A => A = e.apply(_)
+} \ No newline at end of file
diff --git a/test/files/pos/t3249/Test.java b/test/files/pos/t3249/Test.java
new file mode 100644
index 0000000000..4cc7cb2ab5
--- /dev/null
+++ b/test/files/pos/t3249/Test.java
@@ -0,0 +1,5 @@
+public class Test {
+ public static void meh() {
+ new A<Integer>().f();
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3249/a.scala b/test/files/pos/t3249/a.scala
new file mode 100644
index 0000000000..0394464549
--- /dev/null
+++ b/test/files/pos/t3249/a.scala
@@ -0,0 +1,11 @@
+class A[U] { def f[T] = { class X extends A[T] } }
+
+
+/*
+$ scalac a.scala
+$ javac -cp .:$SCALA_HOME/lib/scala-library.jar -Xprint 'A$X$1'
+
+ public class X$1 extends A<java.lang.Object> implements scala.ScalaObject {
+ public X$1(A<U> null);
+ }
+*/ \ No newline at end of file
diff --git a/test/files/pos/t3274.scala b/test/files/pos/t3274.scala
new file mode 100644
index 0000000000..dfa6a4ec01
--- /dev/null
+++ b/test/files/pos/t3274.scala
@@ -0,0 +1,9 @@
+trait A { this: B =>
+ trait X {
+ class Y1 extends Y
+ }
+}
+
+trait B extends A {
+ trait Y { def f {} }
+} \ No newline at end of file
diff --git a/test/files/pos/t3349/AbstractTupleSet.java b/test/files/pos/t3349/AbstractTupleSet.java
new file mode 100644
index 0000000000..47b440a589
--- /dev/null
+++ b/test/files/pos/t3349/AbstractTupleSet.java
@@ -0,0 +1,9 @@
+public abstract class AbstractTupleSet implements TupleSet {
+ public void addColumn(String name, Class type) {
+ throw new UnsupportedOperationException();
+ }
+
+ public void addColumn(String name, String expr) {
+ throw new UnsupportedOperationException();
+ }
+}
diff --git a/test/files/pos/t3349/Table.java b/test/files/pos/t3349/Table.java
new file mode 100644
index 0000000000..1609367623
--- /dev/null
+++ b/test/files/pos/t3349/Table.java
@@ -0,0 +1,9 @@
+public class Table extends AbstractTupleSet {
+ public void addColumn(String name, Class type) {
+ throw new UnsupportedOperationException();
+ }
+
+ public void addColumn(String name, String expr) {
+ throw new UnsupportedOperationException();
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3349/Test.scala b/test/files/pos/t3349/Test.scala
new file mode 100644
index 0000000000..8174e4c4f8
--- /dev/null
+++ b/test/files/pos/t3349/Test.scala
@@ -0,0 +1,5 @@
+object Test {
+ val label = "name"
+ val table: Table = error("")
+ table.addColumn( label, label.getClass )
+} \ No newline at end of file
diff --git a/test/files/pos/t3349/TupleSet.java b/test/files/pos/t3349/TupleSet.java
new file mode 100644
index 0000000000..14a073a950
--- /dev/null
+++ b/test/files/pos/t3349/TupleSet.java
@@ -0,0 +1,4 @@
+public interface TupleSet {
+ public void addColumn(String name, Class type);
+ public void addColumn(String name, String expr);
+} \ No newline at end of file
diff --git a/test/files/pos/t3363.scala b/test/files/pos/t3363.scala
new file mode 100755
index 0000000000..302f8c43cc
--- /dev/null
+++ b/test/files/pos/t3363.scala
@@ -0,0 +1,18 @@
+object TestCase {
+
+ //no matter whether you use an (abstract) class or a trait, it will fail in all cases
+ trait MapOps[T]
+
+ //if fs was reduced to List (generic type with one parameter) then the code compiles
+ //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
+ implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
+ //if you remove this line, then code compiles
+ lazy val m: Manifest[T] = error("just something to make it compile")
+ def is(xs: List[T]) = List(xs)
+ }
+
+ def main(args: Array[String]) {
+ println(Map(1 -> "2") is List(2))
+ }
+
+ }
diff --git a/test/files/pos/t3373.scala b/test/files/pos/t3373.scala
new file mode 100644
index 0000000000..b4af3610bb
--- /dev/null
+++ b/test/files/pos/t3373.scala
@@ -0,0 +1,11 @@
+class Entry(time: Long) {
+ def getTime: Long = time
+}
+
+object Test {
+ def extractTime(e: Entry) = e.getTime
+
+ implicit val orderEntries = new Ordering[Entry] {
+ def compare(first: Entry, second: Entry) = extractTime(first) compare extractTime(second)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3374.scala b/test/files/pos/t3374.scala
new file mode 100644
index 0000000000..4c0293181d
--- /dev/null
+++ b/test/files/pos/t3374.scala
@@ -0,0 +1,6 @@
+trait Parent {
+ type Test[A, H[B <: A]]
+}
+trait Sub extends Parent {
+ type Test[AS, HS[B <: AS]] = AS
+} \ No newline at end of file
diff --git a/test/files/pos/t3384.scala b/test/files/pos/t3384.scala
new file mode 100644
index 0000000000..4d4a81d69d
--- /dev/null
+++ b/test/files/pos/t3384.scala
@@ -0,0 +1,14 @@
+package test
+
+package p {
+ class A(a: String = "")
+}
+
+package object po {
+ type A = p.A
+}
+
+import po._
+class C {
+ val a = new A() //p.A.init$default$1)
+}
diff --git a/test/files/pos/t3404/Base.java b/test/files/pos/t3404/Base.java
new file mode 100644
index 0000000000..c5df18cc9f
--- /dev/null
+++ b/test/files/pos/t3404/Base.java
@@ -0,0 +1,3 @@
+abstract class Base {
+ abstract Class foo(Object o);
+} \ No newline at end of file
diff --git a/test/files/pos/t3404/Derived.scala b/test/files/pos/t3404/Derived.scala
new file mode 100644
index 0000000000..16976fe3d5
--- /dev/null
+++ b/test/files/pos/t3404/Derived.scala
@@ -0,0 +1,3 @@
+class Derived extends Base {
+ def foo(a: AnyRef) = classOf[String]
+} \ No newline at end of file
diff --git a/test/files/pos/t3417.scala b/test/files/pos/t3417.scala
new file mode 100644
index 0000000000..d2de1608aa
--- /dev/null
+++ b/test/files/pos/t3417.scala
@@ -0,0 +1,11 @@
+trait X extends NotNull {
+ def foo = 1
+}
+
+trait Y extends Object with NotNull {
+ def bar = 1
+}
+
+class Z extends NotNull
+
+class W extends Object with NotNull
diff --git a/test/files/pos/t3419/B_1.scala b/test/files/pos/t3419/B_1.scala
new file mode 100644
index 0000000000..a8ec7edba4
--- /dev/null
+++ b/test/files/pos/t3419/B_1.scala
@@ -0,0 +1,3 @@
+trait T[A,B] {
+ type X[a <: A, b <: B] <: B
+} \ No newline at end of file
diff --git a/test/files/pos/t3419/C_2.scala b/test/files/pos/t3419/C_2.scala
new file mode 100644
index 0000000000..da721d2c31
--- /dev/null
+++ b/test/files/pos/t3419/C_2.scala
@@ -0,0 +1,3 @@
+object F {
+ type S = T[Any,Int] {type X[N <: Int, Acc <: Int] = Acc}
+} \ No newline at end of file
diff --git a/test/files/pos/t3429/A.scala b/test/files/pos/t3429/A.scala
new file mode 100644
index 0000000000..4b705808c1
--- /dev/null
+++ b/test/files/pos/t3429/A.scala
@@ -0,0 +1,12 @@
+class A {
+ @Test(exc = classOf[Exception])
+ def myTestMethod = 0
+}
+// rytz@chara:~/scala/trunk/sandbox$ javac Test.java
+// rytz@chara:~/scala/trunk/sandbox$ ../build/pack/bin/scalac A.scala
+// A.scala:2: error: type mismatch;
+// found : java.lang.Class[Exception](classOf[java.lang.Exception])
+// required: java.lang.Class
+// @Test(exc = classOf[Exception])
+// ^
+// one error found \ No newline at end of file
diff --git a/test/files/pos/t3429/Test.java b/test/files/pos/t3429/Test.java
new file mode 100644
index 0000000000..e7c57c90c5
--- /dev/null
+++ b/test/files/pos/t3429/Test.java
@@ -0,0 +1,3 @@
+public @interface Test {
+ public Class exc() default Exception.class;
+} \ No newline at end of file
diff --git a/test/files/pos/t3477.scala b/test/files/pos/t3477.scala
new file mode 100644
index 0000000000..660aa55736
--- /dev/null
+++ b/test/files/pos/t3477.scala
@@ -0,0 +1,7 @@
+class J3 {
+ def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = error("")
+}
+
+object Test {
+ (new J3).f(Map[Int, Int]())
+} \ No newline at end of file
diff --git a/test/files/pos/t3486/JTest.java b/test/files/pos/t3486/JTest.java
new file mode 100644
index 0000000000..0bf388b72d
--- /dev/null
+++ b/test/files/pos/t3486/JTest.java
@@ -0,0 +1,3 @@
+public class JTest<A> extends T2<A> {
+ public A m( A a ) { return a; }
+} \ No newline at end of file
diff --git a/test/files/pos/t3486/test.scala b/test/files/pos/t3486/test.scala
new file mode 100644
index 0000000000..544232b0d1
--- /dev/null
+++ b/test/files/pos/t3486/test.scala
@@ -0,0 +1,6 @@
+trait Test[A] {
+ def m( a: A ): A
+ def specified(a:A):A = a
+}
+
+abstract class T2[A] extends Test[A] \ No newline at end of file
diff --git a/test/files/pos/t3494.scala b/test/files/pos/t3494.scala
new file mode 100644
index 0000000000..35a4bcde5d
--- /dev/null
+++ b/test/files/pos/t3494.scala
@@ -0,0 +1,7 @@
+object Test {
+ def f[T](xs: T*) = ()
+
+ val x = "abc"
+
+ f[x.type](x)
+} \ No newline at end of file
diff --git a/test/files/pos/t3560.scala b/test/files/pos/t3560.scala
new file mode 100644
index 0000000000..3cde9710dc
--- /dev/null
+++ b/test/files/pos/t3560.scala
@@ -0,0 +1,2 @@
+trait Foo[X] { def foo : Map[String,Foo[X]] }
+object T3560 { def f[T]() : Foo[T] = new Foo[T] { var foo = Map[String,Foo[T]]() } }
diff --git a/test/files/pos/t3568.scala b/test/files/pos/t3568.scala
new file mode 100755
index 0000000000..0f26e2fad3
--- /dev/null
+++ b/test/files/pos/t3568.scala
@@ -0,0 +1,46 @@
+import scala.annotation._
+import scala.annotation.unchecked._
+import scala.collection._
+
+
+package object buffer {
+ val broken = new ArrayVec2() // commenting out this line causes the file to compile.
+
+ val works = Class.forName("buffer.ArrayVec2").newInstance().asInstanceOf[ArrayVec2]
+}
+
+package buffer {
+ object Main {
+ // ArrayVec2 can be compiled, instantiated and used.
+ def main(args: Array[String]) { println(works) }
+ }
+
+ trait ElemType { type Element; type Component <: ElemType }
+ trait Float1 extends ElemType { type Element = Float; type Component = Float1}
+ class Vec2 extends ElemType { type Element = Vec2; type Component = Float1 }
+
+ abstract class BaseSeq[T <: ElemType, E]
+ extends IndexedSeq[E] with IndexedSeqOptimized[E, IndexedSeq[E]] {
+ def length = 1
+ def apply(i: Int) :E
+ }
+
+ abstract class GenericSeq[T <: ElemType] extends BaseSeq[T, T#Element]
+ trait DataArray[T <: ElemType] extends BaseSeq[T, T#Element]
+ trait DataView[T <: ElemType] extends BaseSeq[T, T#Element]
+ abstract class BaseFloat1 extends BaseSeq[Float1, Float]
+
+ class ArrayFloat1 extends BaseFloat1 with DataArray[Float1] {
+ def apply(i: Int) :Float = 0f
+ }
+
+ class ViewFloat1 extends BaseFloat1 with DataView[Float1] {
+ def apply(i: Int) :Float = 0f
+ }
+
+ class ArrayVec2(val backingSeq: ArrayFloat1)
+ extends GenericSeq[Vec2] with DataArray[Vec2] {
+ def this() = this(new ArrayFloat1)
+ def apply(i: Int) :Vec2 = null
+ }
+}
diff --git a/test/files/pos/t3582.scala b/test/files/pos/t3582.scala
new file mode 100644
index 0000000000..e20af5e61d
--- /dev/null
+++ b/test/files/pos/t3582.scala
@@ -0,0 +1,12 @@
+trait C[A]
+object Test {
+ def ImplicitParamCA[CC[A], A](implicit ev: C[A]) {implicitly[C[A]]} // must use this exact syntax...
+ // error: could not find implicit value for parameter e: C[A]
+}
+// [[syntax trees at end of typer]]
+// abstract trait C#5[A#9116 >: Nothing#5832 <: Any#52] extends scala#33.AnyRef#2780;
+// final object Test#15 extends java.lang.Object#2485 with ScalaObject#1913 {
+// def ImplicitParamCA#9123[CC#9124[A#10858 >: Nothing#5832 <: Any#52] >: [A#10858]Nothing#5832 <: [A#10858]Any#52,
+// A#9125 >: Nothing#5832 <: Any#52](implicit ev#10856: C#5[A#9127]): Unit#3818
+// = scala#34.this.Predef#1683.implicitly#8816[C#5[A#10858]]()
+// }
diff --git a/test/files/pos/t3582b.scala b/test/files/pos/t3582b.scala
new file mode 100644
index 0000000000..8f0bfb9b2a
--- /dev/null
+++ b/test/files/pos/t3582b.scala
@@ -0,0 +1,5 @@
+object ParamScoping {
+ // scoping worked fine in the result type, but was wrong in body
+ // reason: typedTypeDef needs new context, which was set up by typed1 but not by typedDefDef and typedClassDef
+ def noOverlapFOwithHO[T, G[T]]: G[T] = null.asInstanceOf[G[T]]
+} \ No newline at end of file
diff --git a/test/files/pos/t3612.scala b/test/files/pos/t3612.scala
new file mode 100644
index 0000000000..d3bcc373e3
--- /dev/null
+++ b/test/files/pos/t3612.scala
@@ -0,0 +1,6 @@
+trait C
+
+class Outer {
+ object O0 extends C {}
+ object O extends C { self => }
+} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/AsyncTask.java b/test/files/pos/t3622/test/AsyncTask.java
new file mode 100644
index 0000000000..cfcea3fe1a
--- /dev/null
+++ b/test/files/pos/t3622/test/AsyncTask.java
@@ -0,0 +1,5 @@
+package test;
+
+public abstract class AsyncTask<Params, Progress, Result> {
+ protected abstract Result doInBackground(Params... args);
+} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/MyAsyncTask.java b/test/files/pos/t3622/test/MyAsyncTask.java
new file mode 100644
index 0000000000..9ef4947052
--- /dev/null
+++ b/test/files/pos/t3622/test/MyAsyncTask.java
@@ -0,0 +1,9 @@
+package test;
+
+public abstract class MyAsyncTask extends AsyncTask<String, String, String> {
+ protected abstract String doInBackground1(String[] args);
+ @Override
+ protected String doInBackground(String... args) {
+ return doInBackground1(new String[]{"dummy"});
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/Test.scala b/test/files/pos/t3622/test/Test.scala
new file mode 100644
index 0000000000..fb82c581f9
--- /dev/null
+++ b/test/files/pos/t3622/test/Test.scala
@@ -0,0 +1,5 @@
+package test
+
+class Test extends MyAsyncTask {
+ protected[test] def doInBackground1(args: Array[String]): String = ""
+} \ No newline at end of file
diff --git a/test/files/pos/t3676.scala b/test/files/pos/t3676.scala
new file mode 100644
index 0000000000..60c0ceaec8
--- /dev/null
+++ b/test/files/pos/t3676.scala
@@ -0,0 +1,5 @@
+trait SeqLike[+Repr]
+trait Seq extends SeqLike[Seq]
+
+trait MySeq extends Seq with SeqLike[MySub]
+trait MySub extends MySeq
diff --git a/test/files/pos/t3688.scala b/test/files/pos/t3688.scala
new file mode 100644
index 0000000000..0ac1cfe514
--- /dev/null
+++ b/test/files/pos/t3688.scala
@@ -0,0 +1,9 @@
+import collection.mutable
+import collection.JavaConversions._
+import java.{util => ju}
+
+object Test {
+
+ implicitly[mutable.Map[Int, String] => ju.Dictionary[Int, String]]
+
+}
diff --git a/test/files/pos/t3731.scala b/test/files/pos/t3731.scala
new file mode 100644
index 0000000000..75938540c0
--- /dev/null
+++ b/test/files/pos/t3731.scala
@@ -0,0 +1,13 @@
+object Test{
+ trait ZW[S]{type T}
+ def ZipWith[S, M <: ZW[S]]: M#T = error("ZW")
+
+ // meh must be parameterised to force an asSeenFrom that
+ // duplicates the refinement in the TR's pre without updating its sym
+ def meh[A] = ZipWith[A, ZW[A]{type T=Stream[A]}]
+
+ meh[Int]: Stream[Int]
+}
+// debugging output in coevolveSym should say:
+// coevolved type T#11029 : Stream#3234[A#9228] to type T#11277 : Stream#3234[A#9227]
+// with Test.ZW#9219[A#9228]{type T#11029 = Stream#3234[A#9228]} -> Test.ZW#9219[A#9227]{type T#11277 = Stream#3234[A#9227]}
diff --git a/test/files/pos/t3777.scala b/test/files/pos/t3777.scala
new file mode 100644
index 0000000000..165eeebfdb
--- /dev/null
+++ b/test/files/pos/t3777.scala
@@ -0,0 +1,7 @@
+object Test {
+ type Point = Map[Symbol, String]
+ type Points = IndexedSeq[Point]
+
+ def makePoints2: Points = IndexedSeq[Point]()
+ val spoints2 = util.Random.shuffle(makePoints2)
+}
diff --git a/test/files/pos/t3859.scala b/test/files/pos/t3859.scala
new file mode 100644
index 0000000000..83d4c37b29
--- /dev/null
+++ b/test/files/pos/t3859.scala
@@ -0,0 +1,4 @@
+class Test {
+ def foo: Unit = bar(Array(): _*)
+ def bar(values: AnyRef*): Unit = ()
+} \ No newline at end of file
diff --git a/test/files/pos/t3864/scalaz_2.scala b/test/files/pos/t3864/scalaz_2.scala
new file mode 100644
index 0000000000..a3f5b69617
--- /dev/null
+++ b/test/files/pos/t3864/scalaz_2.scala
@@ -0,0 +1 @@
+object Scalaz extends Tuples \ No newline at end of file
diff --git a/test/files/pos/t3864/tuples_1.scala b/test/files/pos/t3864/tuples_1.scala
new file mode 100644
index 0000000000..1d19af6e41
--- /dev/null
+++ b/test/files/pos/t3864/tuples_1.scala
@@ -0,0 +1,78 @@
+trait PimpedType[X] {
+ val value: X
+}
+
+trait Tuples {
+
+
+trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends PimpedType[Tuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple15[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15))
+}
+
+implicit def ToTuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)): Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] = new { val value = t } with Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]
+
+
+trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends PimpedType[Tuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple16[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16))
+}
+
+implicit def ToTuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)): Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] = new { val value = t } with Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]
+
+
+trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends PimpedType[Tuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple17[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17))
+}
+
+implicit def ToTuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)): Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] = new { val value = t } with Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]
+
+
+trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends PimpedType[Tuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple18[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18))
+}
+
+implicit def ToTuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)): Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] = new { val value = t } with Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]
+
+
+trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends PimpedType[Tuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple19[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19))
+}
+
+implicit def ToTuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)): Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] = new { val value = t } with Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]
+
+
+trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] extends PimpedType[Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple20[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20))
+}
+
+implicit def ToTuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)): Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] = new { val value = t } with Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]
+
+
+trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] extends PimpedType[Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple21[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _, _21: (U => UU) = identity[U] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20), _21(value._21))
+}
+
+implicit def ToTuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)): Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] = new { val value = t } with Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]
+
+
+trait Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] extends PimpedType[Tuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]] {
+ def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)}
+ def toIndexedSeq[Z](implicit ev: value.type <:< Tuple22[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)}
+ def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU, VV](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _, _21: (U => UU) = identity[U] _, _22: (V => VV) = identity[V] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU, VV) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20), _21(value._21), _22(value._22))
+}
+
+implicit def ToTuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)): Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] = new { val value = t } with Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]
+} \ No newline at end of file
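
A minimal usage sketch (not part of the patch): the TupleNW wrappers above all follow the same shape, shown here at arity 3 on the assumption that the lower-arity wrappers defined earlier in this file look the same. `fold` applies a function to all elements at once; `toIndexedSeq` is available when every element has the same type.

// Hypothetical arity-3 analogue of the Tuple19W..Tuple22W wrappers above.
object TupleWSketch {
  trait PimpedType[X] { val value: X }

  trait Tuple3W[A, B, C] extends PimpedType[(A, B, C)] {
    def fold[Z](f: => (A, B, C) => Z): Z = { import value._; f(_1, _2, _3) }
    def toIndexedSeq[Z](implicit ev: value.type <:< (Z, Z, Z)): IndexedSeq[Z] = {
      val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3)
    }
  }

  implicit def ToTuple3W[A, B, C](t: (A, B, C)): Tuple3W[A, B, C] =
    new { val value = t } with Tuple3W[A, B, C]

  val folded = (1, 2, 3).fold(_ + _ + _)      // 6
  val seq    = ("a", "b", "c").toIndexedSeq   // IndexedSeq("a", "b", "c")
}
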
diff --git a/test/files/pos/t3946/A.java b/test/files/pos/t3946/A.java
new file mode 100644
index 0000000000..70265229df
--- /dev/null
+++ b/test/files/pos/t3946/A.java
@@ -0,0 +1,5 @@
+package p;
+
+public class A {
+ protected void f() {}
+}
diff --git a/test/files/pos/t3946/Test_1.scala b/test/files/pos/t3946/Test_1.scala
new file mode 100644
index 0000000000..0cceff3aab
--- /dev/null
+++ b/test/files/pos/t3946/Test_1.scala
@@ -0,0 +1,12 @@
+package q {
+ class B extends p.A {
+ override protected def f() { }
+ }
+}
+
+package p {
+ object T {
+ val a = new A()
+ a.f()
+ }
+}
diff --git a/test/pending/pos/t425.scala b/test/files/pos/t425.scala
index e50c50ac35..e50c50ac35 100644
--- a/test/pending/pos/t425.scala
+++ b/test/files/pos/t425.scala
diff --git a/test/files/pos/t5013/Bar_2.scala b/test/files/pos/t5013/Bar_2.scala
new file mode 100644
index 0000000000..9eac556a23
--- /dev/null
+++ b/test/files/pos/t5013/Bar_2.scala
@@ -0,0 +1,5 @@
+package b
+
+class Bar extends a.Foo {
+ println(x) // Error: Not found: value x
+}
diff --git a/test/files/pos/t5013/Foo_1.scala b/test/files/pos/t5013/Foo_1.scala
new file mode 100644
index 0000000000..ee21112a3e
--- /dev/null
+++ b/test/files/pos/t5013/Foo_1.scala
@@ -0,0 +1,5 @@
+package a
+
+class Foo {
+ protected[Foo] var x = 0
+}
diff --git a/test/files/pos/tcpoly_boundedmonad.scala b/test/files/pos/tcpoly_boundedmonad.scala
index ef02507b66..24a911769b 100644
--- a/test/files/pos/tcpoly_boundedmonad.scala
+++ b/test/files/pos/tcpoly_boundedmonad.scala
@@ -1,19 +1,19 @@
trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
- def map[S <: Bound[S]](f: T => S): MyType[S]
+ def map[S <: Bound[S]](f: T => S): MyType[S]
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S]): Result[S]
+ (f: T => Result[S]): Result[S]
def filter(p: T => Boolean): MyType[T]
}
class Set[T <: Ordered[T]] extends Monad[T, Set, Ordered] {
- def map[S <: Ordered[S]](f: T => S): Set[S] = error("TODO")
-
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def map[S <: Ordered[S]](f: T => S): Set[S] = error("TODO")
+
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S]): Result[S] = error("TODO")
-
- def filter(p: T => Boolean): Set[T] = error("TODO")
+ (f: T => Result[S]): Result[S] = error("TODO")
+
+ def filter(p: T => Boolean): Set[T] = error("TODO")
}
diff --git a/test/files/pos/tcpoly_bounds1.scala b/test/files/pos/tcpoly_bounds1.scala
index 5874cc664d..142c0b7b7f 100644
--- a/test/files/pos/tcpoly_bounds1.scala
+++ b/test/files/pos/tcpoly_bounds1.scala
@@ -1,6 +1,6 @@
-class Foo[t[x]<: Pair[Int, x]]
+class Foo[t[x]<: Pair[Int, x]]
-//
+//
class MyPair[z](a: Int, b: z) extends Pair[Int, z](a,b)
object foo extends Foo[MyPair]
diff --git a/test/files/pos/tcpoly_checkkinds_mix.scala b/test/files/pos/tcpoly_checkkinds_mix.scala
index 3734405f8b..2d265da6b9 100644
--- a/test/files/pos/tcpoly_checkkinds_mix.scala
+++ b/test/files/pos/tcpoly_checkkinds_mix.scala
@@ -2,9 +2,9 @@ trait Iterable[A <: Bound[A], Bound[_]] {
type MyType[x <: Bound[x]] <: Iterable[x, Bound]
def map[B <: Bound[B]](f: A => B): MyType[B]
def flatMap[B <: Bound[B]](f: A => MyType[B]): MyType[B]
- def filter(p: A => Boolean): MyType[A]
+ def filter(p: A => Boolean): MyType[A]
}
-trait OrderedSet[T <: Ordered[T]] extends Iterable[T, Ordered] {
+trait OrderedSet[T <: Ordered[T]] extends Iterable[T, Ordered] {
type MyType[x <: Ordered[x]] = OrderedSet[x]
-}
+}
diff --git a/test/files/pos/tcpoly_gm.scala b/test/files/pos/tcpoly_gm.scala
index 95361e0dac..ecaeef9679 100644
--- a/test/files/pos/tcpoly_gm.scala
+++ b/test/files/pos/tcpoly_gm.scala
@@ -1,4 +1,4 @@
-trait Rep[a] {
+trait Rep[a] {
def rep[m[x]]: m[a] // typedTypeApply must use asSeenFrom to adapt the return type
// since rep is called on x: Rep[t]
// a must become t
@@ -9,7 +9,7 @@ case class ShowBin[b](app: b => String)
object foo {
def showBin[t](x: Rep[t], y: t): String = {
val r: ShowBin[t] = x.rep[ShowBin]
- r.app(y)
+ r.app(y)
}
}
-
+
diff --git a/test/files/pos/tcpoly_higherorder_bound_method.scala b/test/files/pos/tcpoly_higherorder_bound_method.scala
index 3905b3b96d..090bb8fcf3 100644
--- a/test/files/pos/tcpoly_higherorder_bound_method.scala
+++ b/test/files/pos/tcpoly_higherorder_bound_method.scala
@@ -1,3 +1,3 @@
trait SkolemisationOfHigherOrderBoundInMethod {
def method[A, N[X <: A], M[X <: N[A]]]: Unit
-}
+}
diff --git a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
index de31efd565..97594d506d 100644
--- a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
+++ b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
@@ -7,9 +7,9 @@ class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC],
object Test {
- implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2]))
+ implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2]))
= new IterableOps[CC, A1, A2](tuple)
-
+
val t = (List(1, 2, 3), List(6, 5, 4))
tupleOfIterableWrapper(t) unzip
diff --git a/test/files/pos/tcpoly_late_method_params.scala b/test/files/pos/tcpoly_late_method_params.scala
index e2f0bcffb3..c9298918a0 100644
--- a/test/files/pos/tcpoly_late_method_params.scala
+++ b/test/files/pos/tcpoly_late_method_params.scala
@@ -1,5 +1,5 @@
trait Foo {
- def flatMap[RT <: RBound[RT], RBound[_], Result[x <: RBound[x]]]: Result[RT]
+ def flatMap[RT <: RBound[RT], RBound[_], Result[x <: RBound[x]]]: Result[RT]
// bounds for RT& = >: scala.this.Nothing <: RBound&[RT&]
// bounds for x = >: scala.this.Nothing <: RBound&[x]
}
diff --git a/test/files/pos/tcpoly_method.scala b/test/files/pos/tcpoly_method.scala
index 294b53b915..80dc0482fd 100644
--- a/test/files/pos/tcpoly_method.scala
+++ b/test/files/pos/tcpoly_method.scala
@@ -1,6 +1,6 @@
trait Iterable[m[+x], +t] {
def flatMap[resColl[+x] <: Iterable[resColl, x], s](f: t => resColl[s]): resColl[s]
-
+
def foo[a[x]] = "a"
val x = foo[List]
}
diff --git a/test/files/pos/tcpoly_overloaded.scala b/test/files/pos/tcpoly_overloaded.scala
index f67e4a9fef..4240074d85 100644
--- a/test/files/pos/tcpoly_overloaded.scala
+++ b/test/files/pos/tcpoly_overloaded.scala
@@ -1,10 +1,10 @@
trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S]): Result[S]
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ (f: T => Result[S]): Result[S]
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S], foo: String): Result[S]
+ (f: T => Result[S], foo: String): Result[S]
def flatMap[S <: Bound[S]]
(f: T => MyType[S], foo: Int): MyType[S]
}
@@ -12,14 +12,14 @@ trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
trait Test {
def moo: MList[Int]
class MList[T](el: T) extends Monad[T, List, Any] {
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
(f: T => Result[S]): Result[S] = error("foo")
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
(f: T => Result[S], foo: String): Result[S] = error("foo")
def flatMap[S]
- (f: T => List[S], foo: Int): List[S] = error("foo")
+ (f: T => List[S], foo: Int): List[S] = error("foo")
}
val l: MList[String] = moo.flatMap[String, List, Any, MList]((x: Int) => new MList("String"))
}
diff --git a/test/files/pos/tcpoly_poly.scala b/test/files/pos/tcpoly_poly.scala
index 1ba04e29df..50ffc7837f 100644
--- a/test/files/pos/tcpoly_poly.scala
+++ b/test/files/pos/tcpoly_poly.scala
@@ -1,3 +1,3 @@
-class Monad[m[x]]
+class Monad[m[x]]
object ml extends Monad[List]
diff --git a/test/files/pos/tcpoly_return_overriding.scala b/test/files/pos/tcpoly_return_overriding.scala
index 57ec8da76c..0814e73fa4 100644
--- a/test/files/pos/tcpoly_return_overriding.scala
+++ b/test/files/pos/tcpoly_return_overriding.scala
@@ -2,7 +2,7 @@ trait Generic[g[x]] {
def unit: g[Unit]
}
-trait Rep[t] {
+trait Rep[t] {
def rep[m[x]](implicit gen: Generic[m]): m[t]
}
diff --git a/test/files/pos/tcpoly_seq.scala b/test/files/pos/tcpoly_seq.scala
index 48b3e1ce52..b5f46f6b6b 100644
--- a/test/files/pos/tcpoly_seq.scala
+++ b/test/files/pos/tcpoly_seq.scala
@@ -6,40 +6,40 @@ trait HOSeq {
// values implementing this interface, in order to provide more performant ways of building that structure
trait Accumulator[+coll[x], elT] {
def += (el: elT): Unit
- def result: coll[elT]
+ def result: coll[elT]
}
-
-
+
+
// Iterable abstracts over the type of its structure as well as its elements (see PolyP's Bifunctor)
- // m[x] is intentionally unbounded: fold can then be defined nicely
- // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
+ // m[x] is intentionally unbounded: fold can then be defined nicely
+ // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
// is an invariant position -- should probably rule that out?
trait Iterable[+m[+x], +t] {
//def unit[a](orig: a): m[a]
def iterator: Iterator[t]
-
+
// construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
def accumulator[t]: Accumulator[m, t]
-
+
def filter(p: t => Boolean): m[t] = {
val buf = accumulator[t]
val elems = iterator
while (elems.hasNext) { val x = elems.next; if (p(x)) buf += x }
buf.result
}
-
+
def map[s](f: t => s): m[s] = {
val buf = accumulator[s]
val elems = iterator
while (elems.hasNext) buf += f(elems.next)
buf.result
}
-
+
// flatMap is a more specialized map, it only works if the mapped function produces Iterable values,
// which are then added to the result one by one
// the compiler should be able to find the right accumulator (implicit buf) to build the result
// to get concat, resColl = SingletonIterable, f = unit for SingletonIterable
- def flatMap[resColl[+x] <: Iterable[resColl, x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
+ def flatMap[resColl[+x] <: Iterable[resColl, x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
// TODO: would a viewbound for resColl[x] be better?
// -- 2nd-order type params are not yet in scope in view bound
val elems = iterator
@@ -48,9 +48,9 @@ trait HOSeq {
while (elemss.hasNext) buf += elemss.next
}
buf.result
- }
+ }
}
-
+
final class ListBuffer[A] {
private var start: List[A] = Nil
private var last: ::[A] = _
@@ -78,7 +78,7 @@ trait HOSeq {
exported = !start.isEmpty
start
}
-
+
/** Clears the buffer contents.
*/
def clear {
@@ -97,13 +97,13 @@ trait HOSeq {
}
}
}
-
+
implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {
private[this] val buff = new ListBuffer[elT]
def += (el: elT): Unit = buff += el
def result: List[elT] = buff.toList
}
-
+
trait List[+t] extends Iterable[List, t] {
def head: t
def tail: List[t]
@@ -121,14 +121,14 @@ trait HOSeq {
// construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
def accumulator[t]: Accumulator[List, t] = listAccumulator[t]
}
-
+
// TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
def head = hd
def tail = if(tl==null) this else tl // hack
override def isEmpty: Boolean = false
}
-
+
case object Nil extends List[Nothing] {
def isEmpty = true
def head: Nothing =
@@ -157,18 +157,18 @@ trait HOSeq {
def filter(f: T=>Boolean): FilterResult
def subseq(from: Int, to: Int): Subseq
def flatMap[S <: Seq[K], K](f: T => S): S#Concat // legal?
- def concat(others: Seq[T]): Concat
+ def concat(others: Seq[T]): Concat
*/
-
+
/*trait Iterator[t] {
// @post hasAdvanced implies hasNext
// model def hasAdvanced: Boolean
-
+
def hasNext: Boolean // pure
-
+
// @pre hasAdvanced
def current: t // pure
-
+
// @pre hasNext
// @post hasAdvanced
def advance: Unit
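
A small sketch (not part of the test) of how the HOSeq members above fit together, assuming the trait needs nothing beyond what the hunks show: map and filter build their results through accumulator, which List wires to the implicit listAccumulator.

trait HOSeqDemo extends HOSeq {
  // build a small HOSeq.List and run the structure-preserving operations
  val xs: List[Int]      = ::(1, ::(2, ::(3, Nil)))
  val doubled: List[Int] = xs.map(_ * 2)       // m = List, so the result is again a List
  val odd: List[Int]     = xs.filter(_ % 2 == 1)
}
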
diff --git a/test/files/pos/tcpoly_seq_typealias.scala b/test/files/pos/tcpoly_seq_typealias.scala
index fb48126ce6..0651ad9760 100644
--- a/test/files/pos/tcpoly_seq_typealias.scala
+++ b/test/files/pos/tcpoly_seq_typealias.scala
@@ -6,42 +6,42 @@ trait HOSeq {
// values implementing this interface, in order to provide more performant ways of building that structure
trait Accumulator[+coll[x], elT] {
def += (el: elT): Unit
- def result: coll[elT]
+ def result: coll[elT]
}
-
-
+
+
// Iterable abstracts over the type of its structure as well as its elements (see PolyP's Bifunctor)
- // m[x] is intentionally unbounded: fold can then be defined nicely
- // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
+ // m[x] is intentionally unbounded: fold can then be defined nicely
+ // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
// is an invariant position -- should probably rule that out?
trait Iterable[+t] {
- type m[+x]
-
+ type m[+x]
+
//def unit[a](orig: a): m[a]
def iterator: Iterator[t]
-
+
// construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
def accumulator[t]: Accumulator[m, t]
-
+
def filter(p: t => Boolean): m[t] = {
val buf = accumulator[t]
val elems = iterator
while (elems.hasNext) { val x = elems.next; if (p(x)) buf += x }
buf.result
}
-
+
def map[s](f: t => s): m[s] = {
val buf = accumulator[s]
val elems = iterator
while (elems.hasNext) buf += f(elems.next)
buf.result
}
-
+
// flatMap is a more specialized map, it only works if the mapped function produces Iterable values,
// which are then added to the result one by one
// the compiler should be able to find the right accumulator (implicit buf) to build the result
// to get concat, resColl = SingletonIterable, f = unit for SingletonIterable
- def flatMap[resColl[+x] <: Iterable[x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
+ def flatMap[resColl[+x] <: Iterable[x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
// TODO: would a viewbound for resColl[x] be better?
// -- 2nd-order type params are not yet in scope in view bound
val elems = iterator
@@ -50,9 +50,9 @@ trait HOSeq {
while (elemss.hasNext) buf += elemss.next
}
buf.result
- }
+ }
}
-
+
final class ListBuffer[A] {
private var start: List[A] = Nil
private var last: ::[A] = _
@@ -80,7 +80,7 @@ trait HOSeq {
exported = !start.isEmpty
start
}
-
+
/** Clears the buffer contents.
*/
def clear {
@@ -99,16 +99,16 @@ trait HOSeq {
}
}
}
-
+
implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {
private[this] val buff = new ListBuffer[elT]
def += (el: elT): Unit = buff += el
def result: List[elT] = buff.toList
}
-
+
trait List[+t] extends Iterable[t] {
type m[+x] = List[x]
-
+
def head: t
def tail: List[t]
def isEmpty: Boolean
@@ -125,14 +125,14 @@ trait HOSeq {
// construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
def accumulator[t]: Accumulator[List, t] = listAccumulator[t]
}
-
+
// TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
def head = hd
def tail = if(tl==null) this else tl // hack
override def isEmpty: Boolean = false
}
-
+
case object Nil extends List[Nothing] {
def isEmpty = true
def head: Nothing =
diff --git a/test/files/pos/ted.scala b/test/files/pos/ted.scala
index 314f109328..d8ae64f29b 100644
--- a/test/files/pos/ted.scala
+++ b/test/files/pos/ted.scala
@@ -9,7 +9,7 @@ object App
case (b, e) => b * exponentiate(b, e - 1)
}
-
+
def main(args : Array[String]) =
System.out.println(exponentiate(2, 2))
diff --git a/test/files/pos/test5.scala b/test/files/pos/test5.scala
index 4dbafc9ac3..b04de5d613 100644
--- a/test/files/pos/test5.scala
+++ b/test/files/pos/test5.scala
@@ -53,7 +53,7 @@ object test {
// Check type j.P
j.chk_ip(val_mp);
- j.chk_ip(val_np);
+ j.chk_ip(val_np);
// Check type i.X
i.chk_ix(i.val_ix);
@@ -63,6 +63,6 @@ object test {
// Check j.X
j.chk_ix(j.val_ix);
j.chk_ix(j.val_jx);
- j.chk_ix(val_njx);
+ j.chk_ix(val_njx);
}
}
diff --git a/test/files/pos/test5refine.scala b/test/files/pos/test5refine.scala
index 5459b3b975..290449c3ed 100644
--- a/test/files/pos/test5refine.scala
+++ b/test/files/pos/test5refine.scala
@@ -60,7 +60,7 @@ object test {
// Check type j.P
j.chk_ip(val_mp);
- j.chk_ip(val_np);
+ j.chk_ip(val_np);
// Check type i.X
i.chk_ix(i.val_ix);
@@ -70,6 +70,6 @@ object test {
// Check j.X
j.chk_ix(j.val_ix);
j.chk_ix(j.val_jx);
- j.chk_ix(val_njx);
+ j.chk_ix(val_njx);
}
}
diff --git a/test/files/pos/thistypes.scala b/test/files/pos/thistypes.scala
index 7319cc1ecb..26339e07c4 100644
--- a/test/files/pos/thistypes.scala
+++ b/test/files/pos/thistypes.scala
@@ -5,4 +5,4 @@ trait B {
trait C extends B {
def foo: C.this.I;
-}
+}
diff --git a/test/files/pos/ticket0137.scala b/test/files/pos/ticket0137.scala
index 94ef8e49fc..72f955fe5e 100644
--- a/test/files/pos/ticket0137.scala
+++ b/test/files/pos/ticket0137.scala
@@ -1,7 +1,7 @@
-trait AbsM {
- abstract class MonadCompanion[M[_]]
+trait AbsM {
+ abstract class MonadCompanion[M[_]]
abstract class AbsMonadCompanion extends MonadCompanion[AM] {
- def newTag: Int
+ def newTag: Int
}
type AM[_] // to trigger the bug, this must be an abstract type member that comes after the reference to it
diff --git a/test/pending/pos/ticket2251.scala b/test/files/pos/ticket2251.scala
index 7b6efb0ea0..7b6efb0ea0 100644
--- a/test/pending/pos/ticket2251.scala
+++ b/test/files/pos/ticket2251.scala
diff --git a/test/files/pos/traits.scala b/test/files/pos/traits.scala
index 8dcd9c0b5f..bd64d7215a 100644
--- a/test/files/pos/traits.scala
+++ b/test/files/pos/traits.scala
@@ -20,8 +20,8 @@ object Test {
trait BorderedColoredShape extends Shape with Bordered with Colored {
override def equals(other: Any) = other match {
case that: BorderedColoredShape => (
- super.equals(that) &&
- super[Bordered].equals(that) &&
+ super.equals(that) &&
+ super[Bordered].equals(that) &&
super[Colored].equals(that))
case _ => false
}
diff --git a/test/files/pos/typealias_dubious.scala b/test/files/pos/typealias_dubious.scala
index de6bb67fe3..587453a037 100644
--- a/test/files/pos/typealias_dubious.scala
+++ b/test/files/pos/typealias_dubious.scala
@@ -1,8 +1,8 @@
class MailBox {
- //class Message
+ //class Message
type Message = AnyRef
-}
-
+}
+
abstract class Actor {
private val in = new MailBox
@@ -10,6 +10,6 @@ abstract class Actor {
def unstable: Actor = error("foo")
- def dubiousSend(msg: MailBox#Message) =
+ def dubiousSend(msg: MailBox#Message) =
unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
-}
+}
diff --git a/test/files/pos/typealiases.scala b/test/files/pos/typealiases.scala
index d03b521f77..5974921ed3 100644
--- a/test/files/pos/typealiases.scala
+++ b/test/files/pos/typealiases.scala
@@ -3,18 +3,18 @@ package foo
trait Test[T] {
type Check[T] = Array[T] => Unit;
type MyPair[S] = (T, S)
-
+
val pair1: (T, Int)
val pair: MyPair[Int] = pair1
-
+
def check(xs: Array[T], c: Check[T]) = c(xs)
- def check2[S](xs: Array[S], c: Check[S]) = c(xs)
-}
+ def check2[S](xs: Array[S], c: Check[S]) = c(xs)
+}
-object main extends Test[Int] {
- val pair1 = (1,1)
+object main extends Test[Int] {
+ val pair1 = (1,1)
implicit def topair(x: Int): Pair[Int, Int] = (x,x)
- val pair2: MyPair[Int] = 1
+ val pair2: MyPair[Int] = 1
val x: Short = 1
}
diff --git a/test/files/pos/unapplyNeedsMemberType.scala b/test/files/pos/unapplyNeedsMemberType.scala
index b423257e04..2581512a7b 100644
--- a/test/files/pos/unapplyNeedsMemberType.scala
+++ b/test/files/pos/unapplyNeedsMemberType.scala
@@ -8,7 +8,7 @@ trait Gunk[a] {
def unapply(s: Seq) = unapply_Cons(s)
}
def unapply_Cons(s: Any): Option[Tuple2[a, Seq]]
-}
+}
class Join[a] extends Gunk[a] {
type Seq = JoinSeq
diff --git a/test/files/pos/unapplySeq.scala b/test/files/pos/unapplySeq.scala
index 6d13cc8b52..4d5409c6f7 100644
--- a/test/files/pos/unapplySeq.scala
+++ b/test/files/pos/unapplySeq.scala
@@ -4,7 +4,7 @@ object FooSeq {
val y = x.asInstanceOf[Bar]
Some(y.size, y.name)
} else None
- }
+ }
def main(args:Array[String]) = {
val b = new Bar
diff --git a/test/files/pos/unapplyVal.scala b/test/files/pos/unapplyVal.scala
index 338cdb1839..10a6ba7c2c 100644
--- a/test/files/pos/unapplyVal.scala
+++ b/test/files/pos/unapplyVal.scala
@@ -10,7 +10,7 @@ class Buffer {
def joinPat(x: Any): Unit = {
x match {
- case Put =>
+ case Put =>
case Put(y) =>
println("returning "+y)
}
@@ -29,7 +29,7 @@ object unapplyJoins extends Application { // bug #1257
object Get extends Sync
val jp: PartialFunction[Any, Any] = {
- case Get() =>
+ case Get() =>
}
}
diff --git a/test/files/positions/Anon.scala b/test/files/positions/Anon.scala
index 940fff035a..65eb7ae477 100644
--- a/test/files/positions/Anon.scala
+++ b/test/files/positions/Anon.scala
@@ -2,7 +2,7 @@ object Anon {
trait Foo {
val bar : Int
}
-
+
def foo = new Foo {
override val bar = 23
}
diff --git a/test/files/positions/Enclosing1.scala b/test/files/positions/Enclosing1.scala
index e170187d60..7c8fbaf4a0 100644
--- a/test/files/positions/Enclosing1.scala
+++ b/test/files/positions/Enclosing1.scala
@@ -1,5 +1,5 @@
object Enclosing1 {
do {
-
+
} while (true)
}
diff --git a/test/files/positions/ExcludedPrefix1.scala b/test/files/positions/ExcludedPrefix1.scala
index f3562c37f0..72d9756b2a 100644
--- a/test/files/positions/ExcludedPrefix1.scala
+++ b/test/files/positions/ExcludedPrefix1.scala
@@ -5,16 +5,16 @@ object ExcludedPrefix1 {
case
object
BLAH
-
+
val
a = 1
-
+
var
b = 2
-
+
def
c = 23
-
+
private
def
d = 23
@@ -22,19 +22,19 @@ object ExcludedPrefix1 {
lazy
val
e = 23
-
+
private
type
f = Int
-
+
val
g,
h = 23
-
+
val
(i,
j) = (0, 0)
-
+
val Pair(
k,
l) = (0, 0)
diff --git a/test/files/positions/New1.scala b/test/files/positions/New1.scala
new file mode 100644
index 0000000000..bd17c0d523
--- /dev/null
+++ b/test/files/positions/New1.scala
@@ -0,0 +1,3 @@
+class New1 {
+ val a = new
+}
diff --git a/test/files/positions/Overlap3.scala b/test/files/positions/Overlap3.scala
index 4c5f8af9d1..657c12d4fd 100644
--- a/test/files/positions/Overlap3.scala
+++ b/test/files/positions/Overlap3.scala
@@ -1,3 +1,3 @@
object Overlap3 {
- val (a, b) = (0, 0)
+ val (a, b) = (0, 0)
}
diff --git a/test/files/positions/Overlap7.scala b/test/files/positions/Overlap7.scala
new file mode 100644
index 0000000000..b3dc0d10e2
--- /dev/null
+++ b/test/files/positions/Overlap7.scala
@@ -0,0 +1,3 @@
+import java.lang.String, java.lang.Object
+
+class Overlap7
diff --git a/test/files/positions/Scaladoc2.scala b/test/files/positions/Scaladoc2.scala
index e52263d86c..78bc4acb28 100644
--- a/test/files/positions/Scaladoc2.scala
+++ b/test/files/positions/Scaladoc2.scala
@@ -4,13 +4,13 @@ object Scaladoc2 {
* Foo
*/
def g {}
-
+
/*
* Blah blah
*/
def h{}
h
}
-
+
def h {}
}
diff --git a/test/files/positions/Scaladoc3.scala b/test/files/positions/Scaladoc3.scala
index c331b7e396..bb9d66fdb4 100644
--- a/test/files/positions/Scaladoc3.scala
+++ b/test/files/positions/Scaladoc3.scala
@@ -3,6 +3,6 @@ object Scaladoc3 {
* Foo
*/
import scala.collection.mutable.ArrayBuffer
-
+
def f {}
}
diff --git a/test/files/positions/Scaladoc4.scala b/test/files/positions/Scaladoc4.scala
index 133cde1c85..f613dda7f5 100644
--- a/test/files/positions/Scaladoc4.scala
+++ b/test/files/positions/Scaladoc4.scala
@@ -3,6 +3,6 @@ object Scaladoc4 {
* Foo
*/
2+2
-
+
def f {}
}
diff --git a/test/files/positions/Scaladoc6.scala b/test/files/positions/Scaladoc6.scala
new file mode 100644
index 0000000000..5c230edeb2
--- /dev/null
+++ b/test/files/positions/Scaladoc6.scala
@@ -0,0 +1,10 @@
+object Scaladoc6 {
+ {
+ /**
+ * Foo
+ */
+ val i = 23
+ }
+
+ def f {}
+}
diff --git a/test/files/positions/Scaladoc7.scala b/test/files/positions/Scaladoc7.scala
new file mode 100644
index 0000000000..6175222e3f
--- /dev/null
+++ b/test/files/positions/Scaladoc7.scala
@@ -0,0 +1,6 @@
+object Scaladoc7 {
+ /**
+ * Foo
+ */
+ val Pair(i, j) = (1, 2)
+}
diff --git a/test/files/positions/Scaladoc8.scala b/test/files/positions/Scaladoc8.scala
new file mode 100644
index 0000000000..519d6ca06c
--- /dev/null
+++ b/test/files/positions/Scaladoc8.scala
@@ -0,0 +1,6 @@
+/**
+ * Foo
+ */
+object Scaladoc8 {
+
+}
diff --git a/test/files/positions/Unsupported2.scala b/test/files/positions/Unsupported2.scala
new file mode 100644
index 0000000000..e21df5ef0e
--- /dev/null
+++ b/test/files/positions/Unsupported2.scala
@@ -0,0 +1,5 @@
+object Unsupported2 {
+ for (k <- 0 until xs.xize)
+ for(i = 0 until (xs.size)) {
+ }
+}
diff --git a/test/files/res/bug597/Test.scala b/test/files/res/bug597/Test.scala
index 2f63f46c3d..45b90bb17d 100644
--- a/test/files/res/bug597/Test.scala
+++ b/test/files/res/bug597/Test.scala
@@ -2,7 +2,7 @@ package test;
abstract class Base {
type A <: Ax;
-
+
abstract class Ax {
def a = null;
def string = "A";
diff --git a/test/files/res/bug722/Parser.scala b/test/files/res/bug722/Parser.scala
index 6d9739ed04..d36a20410b 100644
--- a/test/files/res/bug722/Parser.scala
+++ b/test/files/res/bug722/Parser.scala
@@ -2,7 +2,7 @@
package bug722;
trait Parser {
trait Link {
- def foo = {}
+ def foo = {}
}
}
diff --git a/test/files/res/bug735/ScalaExpressions.scala b/test/files/res/bug735/ScalaExpressions.scala
index aacc412b68..eb00f49cb2 100644
--- a/test/files/res/bug735/ScalaExpressions.scala
+++ b/test/files/res/bug735/ScalaExpressions.scala
@@ -1,6 +1,6 @@
package bug735;
trait ScalaExpressions {
- trait ExpressionFactory {
+ trait ExpressionFactory {
def foo = 10;
def bar : Int;
}
diff --git a/test/files/res/bug743/BracesXXX.scala b/test/files/res/bug743/BracesXXX.scala
index 8a2416caa8..545618daad 100644
--- a/test/files/res/bug743/BracesXXX.scala
+++ b/test/files/res/bug743/BracesXXX.scala
@@ -1,6 +1,6 @@
package bug743;
trait BracesXXX extends ParserXXX {
- trait Matchable extends IsLinked {
+ trait Matchable extends IsLinked {
def foo : NodeImpl = null;
}
}
diff --git a/test/files/res/bug743/ParserXXX.scala b/test/files/res/bug743/ParserXXX.scala
index 6ecf13c1e1..449dd71a50 100644
--- a/test/files/res/bug743/ParserXXX.scala
+++ b/test/files/res/bug743/ParserXXX.scala
@@ -1,9 +1,9 @@
package bug743;
trait ParserXXX {
- val foo = null;
- trait NodeImpl {
+ val foo = null;
+ trait NodeImpl {
trait Link extends ParserXXX.this.Link {
- val from = null;
+ val from = null;
}
}
trait Link {
diff --git a/test/files/res/bug785/ScalaNewTyper.scala b/test/files/res/bug785/ScalaNewTyper.scala
index fb955572ed..582538e41e 100644
--- a/test/files/res/bug785/ScalaNewTyper.scala
+++ b/test/files/res/bug785/ScalaNewTyper.scala
@@ -1,7 +1,7 @@
package bug785;
trait ScalaNewTyper {
private var typed : String = null;
- trait HasSymbol {
+ trait HasSymbol {
protected def foo : Unit = {}
}
trait HasArgsTypeParametersImpl extends HasSymbol {
diff --git a/test/files/res/bug831/NewScalaParserXXX.scala b/test/files/res/bug831/NewScalaParserXXX.scala
index 88c81637f0..48f78a7b4b 100644
--- a/test/files/res/bug831/NewScalaParserXXX.scala
+++ b/test/files/res/bug831/NewScalaParserXXX.scala
@@ -10,33 +10,33 @@ trait ScalaNodeScannerXXX {
//for (val ns <-n; val i <- 0.until(ns)) yield f;
-trait NewScalaScannerXXX extends ScalaNodeScannerXXX {
+trait NewScalaScannerXXX extends ScalaNodeScannerXXX {
type Unfixed <: Node with UnfixedImpl;
trait UnfixedImpl extends super.UnfixedImpl with NodeImpl;
type Statement <: Unfixed with StatementImpl;
trait StatementImpl extends UnfixedImpl { def self : Statement; }
type NewLine <: Statement with NewLineImpl;
- trait NewLineImpl extends StatementImpl {
- def self : NewLine;
+ trait NewLineImpl extends StatementImpl {
+ def self : NewLine;
def isActive : Boolean = true;
}
object ArrowMode extends Enumeration { val Def, Case, Expr = Value }
}
-trait ScalaPrecedenceXXX extends NewScalaScannerXXX {
+trait ScalaPrecedenceXXX extends NewScalaScannerXXX {
type NewLine <: Statement with NewLineImpl;
- trait NewLineImpl extends super.NewLineImpl with StatementImpl {
- def self : NewLine;
+ trait NewLineImpl extends super.NewLineImpl with StatementImpl {
+ def self : NewLine;
override def isActive = super[NewLineImpl].isActive;
}
}
trait NewScalaParserXXX extends NewScalaScannerXXX with ScalaPrecedenceXXX {
type NewLine <: Statement with NewLineImpl;
trait MyNewLine extends super[NewScalaScannerXXX].NewLineImpl;
- trait NewLineImpl extends MyNewLine with
+ trait NewLineImpl extends MyNewLine with
super[ScalaPrecedenceXXX].NewLineImpl with
- StatementImpl {
- def self : NewLine;
+ StatementImpl {
+ def self : NewLine;
override def isActive = super[MyNewLine].isActive;
}
}
diff --git a/test/files/run/Course-2002-02.scala b/test/files/run/Course-2002-02.scala
index b8650108ed..56d7298aaf 100644
--- a/test/files/run/Course-2002-02.scala
+++ b/test/files/run/Course-2002-02.scala
@@ -100,7 +100,7 @@ object M4 {
def sumInts = sum(x => x)
def sumCubes = sum(x => x * x * x)
- def sumReciprocals = sum(1.0/_)
+ def sumReciprocals = sum(1.0/_)
def sumPi = { n: Int => 4 + sum(x => 4.0/(4*x+1) - 4.0/(4*x-1))(1, n) }
Console.println(sumInts(1,4))
@@ -194,7 +194,7 @@ object M8 {
//############################################################################
object M9 {
- def accumulate[t](combiner: (t, t) => t, nullValue: t, f: Int => t,
+ def accumulate[t](combiner: (t, t) => t, nullValue: t, f: Int => t,
next: Int => Int)(a: Int, b: Int): t =
if (a > b) nullValue
else combiner(f(a), accumulate(combiner, nullValue, f, next)(next(a), b))
@@ -328,9 +328,9 @@ object MD {
iter(a, zero)
}
- def plus (x:Double,y:Double) = x+y;
+ def plus (x:Double,y:Double) = x+y;
val sum: (Int => Double) => (Int, Int) => Double = reduce(plus , 0);
- def times(x:Double,y:Double) = x*y;
+ def times(x:Double,y:Double) = x*y;
val product: (Int => Double) => (Int, Int) => Double = reduce(times, 1);
def factorial(n: Int) = product(x => x)(1 , n)
diff --git a/test/files/run/Course-2002-05.scala b/test/files/run/Course-2002-05.scala
index a1d71e2818..8ed8e041a7 100644
--- a/test/files/run/Course-2002-05.scala
+++ b/test/files/run/Course-2002-05.scala
@@ -129,7 +129,7 @@ object M3 {
else {
def isSafe(column: Int, placement: Placement): Boolean =
placement forall {
- pos => (pos._2 != column &&
+ pos => (pos._2 != column &&
abs(pos._2 - column) != row - pos._1)
}
diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala
index 2e4b5111a9..85b0bad527 100644
--- a/test/files/run/Course-2002-08.scala
+++ b/test/files/run/Course-2002-08.scala
@@ -520,7 +520,7 @@ abstract class CircuitSimulator() extends BasicCircuitSimulator() {
val w1 = new Wire();
val w2 = new Wire();
val w3 = new Wire();
-
+
andGate(in, ctrl(1), w3);
andGate(in, ctrl(1), w2);
andGate(in, ctrlN(1), w1);
diff --git a/test/files/run/Course-2002-09.scala b/test/files/run/Course-2002-09.scala
index fac39e0841..384a91efd8 100644
--- a/test/files/run/Course-2002-09.scala
+++ b/test/files/run/Course-2002-09.scala
@@ -81,7 +81,7 @@ class Constant(q: Quantity, v: Double) extends Constraint {
class Probe(name: String, q: Quantity) extends Constraint {
def newValue: Unit = printProbe(q.getValue);
def dropValue: Unit = printProbe(None);
- private def printProbe(v: Option[double]) {
+ private def printProbe(v: Option[Double]) {
val vstr = v match {
case Some(x) => x.toString()
case None => "?"
@@ -103,7 +103,7 @@ class Quantity() {
if (v != v1) error("Error! contradiction: " + v + " and " + v1);
case None =>
informant = setter; value = Some(v);
- for (val c <- constraints; !(c == informant)) {
+ for (c <- constraints; if !(c == informant)) {
c.newValue;
}
}
@@ -112,7 +112,7 @@ class Quantity() {
def forgetValue(retractor: Constraint): Unit = {
if (retractor == informant) {
value = None;
- for (val c <- constraints; !(c == informant)) c.dropValue;
+ for (c <- constraints; if !(c == informant)) c.dropValue;
}
}
def forgetValue: Unit = forgetValue(NoConstraint);
@@ -258,7 +258,7 @@ object M2 {
};
}
- def show(x: Option[int], y: Option[Int], z: Option[int]) = {
+ def show(x: Option[Int], y: Option[Int], z: Option[Int]) = {
Console.print("a = " +set(a,x)+ ", b = " +set(b,y)+ ", c = " +set(c,z));
Console.println(" => " + a.str + " * " + b.str + " = " + c.str);
a.forgetValue; b.forgetValue; c.forgetValue;
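
Beyond the whitespace cleanup, the Course-2002-09 hunks above migrate old syntax: `Option[double]`/`Option[int]` become `Option[Double]`/`Option[Int]`, and for-comprehensions drop the `val` keyword while bare boolean filters gain an explicit `if`. A minimal sketch (hypothetical names) of the new guard form:

object GuardSketch {
  val constraints = List(1, 2, 3)
  val informant   = 2
  // old form (removed above): for (val c <- constraints; !(c == informant)) ...
  for (c <- constraints; if !(c == informant)) println(c)   // prints 1 and 3
}
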
diff --git a/test/files/run/Course-2002-13.scala b/test/files/run/Course-2002-13.scala
index 27551b735b..c016d41a90 100644
--- a/test/files/run/Course-2002-13.scala
+++ b/test/files/run/Course-2002-13.scala
@@ -66,7 +66,7 @@ object Terms {
override def toString() =
a + (if (ts.isEmpty) "" else ts.mkString("(", ",", ")"));
def map(s: Subst): Term = Con(a, ts map (t => t map s));
- def tyvars = (ts flatMap (t => t.tyvars)).removeDuplicates;
+ def tyvars = (ts flatMap (t => t.tyvars)).distinct;
}
private var count = 0;
@@ -113,7 +113,7 @@ object Programs {
case class Clause(lhs: Term, rhs: List[Term]) {
def tyvars =
- (lhs.tyvars ::: (rhs flatMap (t => t.tyvars))).removeDuplicates;
+ (lhs.tyvars ::: (rhs flatMap (t => t.tyvars))).distinct;
def newInstance = {
var s: Subst = List();
for (val a <- tyvars) { s = Binding(a, newVar(a)) :: s }
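
Similarly, the Course-2002-13 hunks above swap `removeDuplicates` for `distinct`, its replacement in the 2.8 collections API. A one-line sketch:

val tyvars = List("a", "b", "a", "c")
val unique = tyvars.distinct   // List("a", "b", "c"), formerly tyvars.removeDuplicates
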
diff --git a/test/files/run/MutableListTest.scala b/test/files/run/MutableListTest.scala
index 322a368b16..70431570e9 100644
--- a/test/files/run/MutableListTest.scala
+++ b/test/files/run/MutableListTest.scala
@@ -1,126 +1,126 @@
-
-
-
-import scala.collection.mutable.MutableList
-
-
-
-class ExtList[T] extends MutableList[T] {
- def printState {
- println("Length: " + len)
- println("Last elem: " + last0.elem)
- println("First elem: " + first0.elem)
- println("After first elem: " + first0.next.elem)
- println("After first: " + first0.next)
- println("Last: " + last0)
- }
-}
-
-object Test {
-
- def main(args: Array[String]) {
- testEmpty
- testAddElement
- testAddFewElements
- testAddMoreElements
- testTraversables
- }
-
- def testEmpty {
- val mlist = new MutableList[Int]
- assert(mlist.isEmpty)
- assert(mlist.get(0) == None)
- }
-
- def testAddElement {
- val mlist = new MutableList[Int]
- mlist += 17
- assert(mlist.nonEmpty)
- assert(mlist.length == 1)
- assert(mlist.head == 17)
- assert(mlist.last == 17)
- assert(mlist(0) == 17)
- assert(mlist.tail.isEmpty)
- assert(mlist.tail.length == 0)
- mlist(0) = 101
- assert(mlist(0) == 101)
- assert(mlist.toList == List(101))
- assert(mlist.tail.get(0) == None)
- assert((mlist.tail += 19).head == 19)
- assert(mlist.tail.length == 0)
- }
-
- def testAddFewElements {
- val mlist = new MutableList[Int]
- for (i <- 0 until 2) mlist += i
-// mlist.printState
- for (i <- 0 until 2) assert(mlist(i) == i)
- assert(mlist.length == 2)
- assert(mlist.nonEmpty)
- assert(mlist.tail.length == 1)
- assert(mlist.tail.tail.length == 0)
- assert(mlist.tail.tail.isEmpty)
- }
-
- def testAddMoreElements {
- val mlist = new MutableList[Int]
- for (i <- 0 until 10) mlist += i * i
- assert(mlist.nonEmpty)
- assert(mlist.length == 10)
- for (i <- 0 until 10) assert(mlist(i) == i * i)
- assert(mlist(5) == 5 * 5)
- assert(mlist(9) == 9 * 9)
- var sometail = mlist
- for (i <- 0 until 10) {
- assert(sometail.head == i * i)
- sometail = sometail.tail
- }
- mlist(5) = -25
- assert(mlist(5) == -25)
- mlist(0) = -1
- assert(mlist(0) == -1)
- mlist(9) = -81
- assert(mlist(9) == -81)
- assert(mlist(5) == -25)
- assert(mlist(0) == -1)
- assert(mlist.last == -81)
- mlist.clear
- assert(mlist.isEmpty)
- mlist += 1001
- assert(mlist.head == 1001)
- mlist += 9999
- assert(mlist.tail.head == 9999)
- assert(mlist.last == 9999)
- }
-
- def testTraversables {
- val mlist = new MutableList[Int]
- for (i <- 0 until 10) mlist += i * i
- var lst = mlist.drop(5)
- for (i <- 0 until 5) assert(lst(i) == (i + 5) * (i + 5))
- lst = lst.take(3)
- for (i <- 0 until 3) assert(lst(i) == (i + 5) * (i + 5))
- lst += 129
- assert(lst(3) == 129)
- assert(lst.last == 129)
- assert(lst.length == 4)
- lst += 7
- assert(lst.init.last == 129)
- assert(lst.length == 5)
- lst.clear
- assert(lst.length == 0)
- for (i <- 0 until 5) lst += i
- assert(lst.reduceLeft(_ + _) == 10)
- }
-
-}
-
-
-
-
-
-
-
-
-
-
+
+
+
+import scala.collection.mutable.MutableList
+
+
+
+class ExtList[T] extends MutableList[T] {
+ def printState {
+ println("Length: " + len)
+ println("Last elem: " + last0.elem)
+ println("First elem: " + first0.elem)
+ println("After first elem: " + first0.next.elem)
+ println("After first: " + first0.next)
+ println("Last: " + last0)
+ }
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ testEmpty
+ testAddElement
+ testAddFewElements
+ testAddMoreElements
+ testTraversables
+ }
+
+ def testEmpty {
+ val mlist = new MutableList[Int]
+ assert(mlist.isEmpty)
+ assert(mlist.get(0) == None)
+ }
+
+ def testAddElement {
+ val mlist = new MutableList[Int]
+ mlist += 17
+ assert(mlist.nonEmpty)
+ assert(mlist.length == 1)
+ assert(mlist.head == 17)
+ assert(mlist.last == 17)
+ assert(mlist(0) == 17)
+ assert(mlist.tail.isEmpty)
+ assert(mlist.tail.length == 0)
+ mlist(0) = 101
+ assert(mlist(0) == 101)
+ assert(mlist.toList == List(101))
+ assert(mlist.tail.get(0) == None)
+ assert((mlist.tail += 19).head == 19)
+ assert(mlist.tail.length == 0)
+ }
+
+ def testAddFewElements {
+ val mlist = new MutableList[Int]
+ for (i <- 0 until 2) mlist += i
+// mlist.printState
+ for (i <- 0 until 2) assert(mlist(i) == i)
+ assert(mlist.length == 2)
+ assert(mlist.nonEmpty)
+ assert(mlist.tail.length == 1)
+ assert(mlist.tail.tail.length == 0)
+ assert(mlist.tail.tail.isEmpty)
+ }
+
+ def testAddMoreElements {
+ val mlist = new MutableList[Int]
+ for (i <- 0 until 10) mlist += i * i
+ assert(mlist.nonEmpty)
+ assert(mlist.length == 10)
+ for (i <- 0 until 10) assert(mlist(i) == i * i)
+ assert(mlist(5) == 5 * 5)
+ assert(mlist(9) == 9 * 9)
+ var sometail = mlist
+ for (i <- 0 until 10) {
+ assert(sometail.head == i * i)
+ sometail = sometail.tail
+ }
+ mlist(5) = -25
+ assert(mlist(5) == -25)
+ mlist(0) = -1
+ assert(mlist(0) == -1)
+ mlist(9) = -81
+ assert(mlist(9) == -81)
+ assert(mlist(5) == -25)
+ assert(mlist(0) == -1)
+ assert(mlist.last == -81)
+ mlist.clear
+ assert(mlist.isEmpty)
+ mlist += 1001
+ assert(mlist.head == 1001)
+ mlist += 9999
+ assert(mlist.tail.head == 9999)
+ assert(mlist.last == 9999)
+ }
+
+ def testTraversables {
+ val mlist = new MutableList[Int]
+ for (i <- 0 until 10) mlist += i * i
+ var lst = mlist.drop(5)
+ for (i <- 0 until 5) assert(lst(i) == (i + 5) * (i + 5))
+ lst = lst.take(3)
+ for (i <- 0 until 3) assert(lst(i) == (i + 5) * (i + 5))
+ lst += 129
+ assert(lst(3) == 129)
+ assert(lst.last == 129)
+ assert(lst.length == 4)
+ lst += 7
+ assert(lst.init.last == 129)
+ assert(lst.length == 5)
+ lst.clear
+ assert(lst.length == 0)
+ for (i <- 0 until 5) lst += i
+ assert(lst.reduceLeft(_ + _) == 10)
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/OrderingTest.scala b/test/files/run/OrderingTest.scala
index fee15fc2f9..a3eca5f860 100644
--- a/test/files/run/OrderingTest.scala
+++ b/test/files/run/OrderingTest.scala
@@ -6,7 +6,7 @@ object Test extends Application {
assert((cmp == 0) == (cmp2 == 0))
assert((cmp > 0) == (cmp2 < 0))
assert((cmp < 0) == (cmp2 > 0))
- }
+ }
def testAll[T](t1 : T, t2 : T)(implicit ord : Ordering[T]) = {
assert(ord.compare(t1, t2) < 0)
@@ -16,8 +16,8 @@ object Test extends Application {
}
assert(Ordering[String].compare("australopithecus", "brontausaurus") < 0)
- // assert(Ordering[Unit].compare((), ()) == 0)
-
+ // assert(Ordering[Unit].compare((), ()) == 0)
+
testAll("bar", "foo");
testAll[Byte](0, 1);
testAll(false, true)
@@ -28,7 +28,7 @@ object Test extends Application {
testAll[Iterable[Int]](List(1, 2), List(2));
testAll((1, "bar"), (1, "foo"))
testAll((1, "foo"), (2, "bar"))
-
+
// sortBy
val words = "The quick brown fox jumped over the lazy dog".split(' ')
val result = words.sortBy(x => (x.length, x.head))
diff --git a/test/files/run/QueueTest.scala b/test/files/run/QueueTest.scala
index 859ce2071a..2f8ecaed4b 100644
--- a/test/files/run/QueueTest.scala
+++ b/test/files/run/QueueTest.scala
@@ -1,297 +1,297 @@
-
-
-import scala.collection.mutable.Queue
-
-
-
-
-class ExtQueue[T] extends Queue[T] {
- def printState {
- println("-------------------")
- println("Length: " + len)
- println("First: " + first0)
- println("First elem: " + first0.elem)
- println("After first: " + first0.next)
- }
-}
-
-object Test {
-
- def main(args: Array[String]) {
- testEmpty
- testEnqueue
- testTwoEnqueues
- testFewEnqueues
- testMoreEnqueues
- }
-
- def testEmpty {
- val queue = new Queue[Int]
-
- assert(queue.isEmpty)
- assert(queue.size == 0)
- assert(queue.length == 0)
- assert(queue.dequeueFirst(_ > 500) == None)
- assert(queue.dequeueAll(_ > 500).isEmpty)
-
- queue.clear
- assert(queue.isEmpty)
- assert(queue.size == 0)
- assert(queue.length == 0)
- assert(queue.dequeueFirst(_ > 500) == None)
- assert(queue.dequeueAll(_ > 500).isEmpty)
- }
-
- def testEnqueue {
- val queue = new Queue[Int]
-
- queue.enqueue(10)
- assert(queue.nonEmpty)
- assert(queue.size == 1)
- assert(queue.length == 1)
- assert(queue.head == 10)
- assert(queue(0) == 10)
- assert(queue.init.isEmpty)
- assert(queue.tail.isEmpty)
-
- queue.clear
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- queue.enqueue(11)
- assert(queue.nonEmpty)
- assert(queue.length == 1)
- assert(queue.head == 11)
- assert(queue.front == 11)
-
- val deq = queue.dequeue
- assert(deq == 11)
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- queue.enqueue(12)
- val pdopt = queue.dequeueFirst(_ > 999)
- assert(pdopt == None)
- assert(queue.nonEmpty && queue.length == 1)
-
- val somepd = queue.dequeueFirst(_ >= 1)
- assert(somepd == Some(12))
- assert(queue.isEmpty && queue.length == 0)
- }
-
- def testTwoEnqueues {
- val queue = new ExtQueue[Int]
- queue.enqueue(30)
- queue.enqueue(40)
-
- assert(queue.length == 2)
- assert(queue.size == 2)
- assert(queue.nonEmpty)
- assert(queue.front == 30)
-// queue.printState
-
- val all = queue.dequeueAll(_ > 20)
- assert(all.size == 2)
- assert(all.contains(30))
- assert(all.contains(40))
- assert(queue.size == 0)
- assert(queue.isEmpty)
- }
-
- def testFewEnqueues {
- val queue = new ExtQueue[Int]
- queue.enqueue(10)
- queue.enqueue(20)
-
- assert(queue.length == 2)
- assert(queue.nonEmpty)
- assert(queue.head == 10)
- assert(queue.last == 20)
- assert(queue.front == 10)
-// queue.printState
-
- val ten = queue.dequeue
- assert(ten == 10)
- assert(queue.length == 1)
-// queue.printState
-
- queue.enqueue(30)
-// queue.printState
- val gt25 = queue.dequeueFirst(_ > 25)
- assert(gt25 == Some(30))
- assert(queue.nonEmpty)
- assert(queue.length == 1)
- assert(queue.head == 20)
- assert(queue.front == 20)
-// queue.printState
-
- queue.enqueue(30)
-// queue.printState
- val lt25 = queue.dequeueFirst(_ < 25)
- assert(lt25 == Some(20))
- assert(queue.nonEmpty)
- assert(queue.length == 1)
-// queue.printState
-
- queue.enqueue(40)
-// queue.printState
- val all = queue.dequeueAll(_ > 20)
-// queue.printState
- assert(all.size == 2)
- assert(all.contains(30))
- assert(all.contains(40))
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- queue.enqueue(50)
- queue.enqueue(60)
-// queue.printState
- val allgt55 = queue.dequeueAll(_ > 55)
-// println(allgt55)
-// queue.printState
- assert(allgt55.size == 1)
- assert(allgt55.contains(60))
- assert(queue.length == 1)
-
- queue.enqueue(70)
- queue.enqueue(80)
-// queue.printState
- val alllt75 = queue.dequeueAll(_ < 75)
-// queue.printState
- assert(alllt75.size == 2)
- assert(alllt75.contains(70))
- assert(alllt75.contains(50))
- assert(queue.length == 1)
- assert(queue.head == 80)
- assert(queue.last == 80)
- assert(queue.front == 80)
- }
-
- def testMoreEnqueues {
- val queue = new ExtQueue[Int]
- for (i <- 0 until 10) queue.enqueue(i * 2)
-
- for (i <- 0 until 10) {
- val top = queue.dequeue
- assert(top == i * 2)
- assert(queue.length == 10 - i - 1)
- }
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- for (i <- 0 until 10) queue.enqueue(i * i)
- assert(queue.length == 10)
- assert(queue.nonEmpty)
-
- //queue.printState
- val gt5 = queue.dequeueAll(_ > 4)
- //queue.printState
- //println(gt5)
- assert(gt5.size == 7)
- assert(queue.length == 3)
- assert(queue.nonEmpty)
-
- queue.clear
- assert(queue.length == 0)
- assert(queue.isEmpty)
-
- for (i <- 0 until 10) queue.enqueue(i)
- assert(queue.length == 10)
-
- val even = queue.dequeueAll(_ % 2 == 0)
- assert(even.size == 5)
- assert(even.sameElements(List(0, 2, 4, 6, 8)))
- assert(queue.length == 5)
- assert(queue.head == 1)
- assert(queue.last == 9)
-
- val odd = queue.dequeueAll(_ %2 == 1)
- assert(odd.size == 5)
- assert(queue.length == 0)
- assert(queue.isEmpty)
- assert(odd.sameElements(List(1, 3, 5, 7, 9)))
-
- for (i <- 0 until 10) queue.enqueue(i * i)
- assert(queue.last == 81)
- assert(queue.head == 0)
- assert(queue.length == 10)
-
- val foddgt25 = queue.dequeueFirst(num => num > 25 && num % 2 == 1)
- assert(foddgt25 == Some(49))
- assert(queue.length == 9)
- assert(queue.nonEmpty)
-
- //queue.printState
- val lt30 = queue.dequeueAll(_ < 30)
- //println(lt30)
- //queue.printState
- assert(lt30.size == 6)
- assert(queue.length == 3)
-
- val fgt60 = queue.dequeueFirst(_ > 60)
- assert(fgt60 == Some(64))
- assert(queue.length == 2)
- assert(queue.head == 36)
- assert(queue.last == 81)
-
- val sgt60 = queue.dequeueFirst(_ > 60)
- assert(sgt60 == Some(81))
- assert(queue.length == 1)
- assert(queue.head == 36)
- assert(queue.last == 36)
-
- val nogt60 = queue.dequeueFirst(_ > 60)
- assert(nogt60 == None)
- assert(queue.length == 1)
- assert(queue.nonEmpty)
- assert(queue.head == 36)
-
- val gt0 = queue.dequeueFirst(_ > 0)
- assert(gt0 == Some(36))
- assert(queue.length == 0)
- assert(queue.isEmpty)
-
- for (i <- 0 until 4) queue.enqueue(i)
- val interv = queue.dequeueAll(n => n > 0 && n < 3)
- assert(interv.sameElements(List(1, 2)))
- assert(queue.length == 2)
- assert(queue.head == 0)
- assert(queue.last == 3)
-
- queue.dequeue
- assert(queue.head == 3)
-
- queue.enqueue(9)
- val three = queue.dequeueFirst(_ < 5)
- assert(three == Some(3))
- assert(queue.length == 1)
- assert(queue.head == 9)
-
- queue.clear
- for (i <- -100 until 100) queue.enqueue(i * i + i % 7 + 5)
- assert(queue.length == 200)
-
- val manyodds = queue.dequeueAll(_ % 2 == 1)
- assert((manyodds.size + queue.length) == 200)
-
- queue.dequeueAll(_ > -10000)
- assert(queue.isEmpty)
-
- for (i <- 0 until 100) queue.enqueue(i)
- val multof3 = queue.dequeueAll(_ % 3 == 0)
- assert(multof3.size == 34)
- assert(queue.size == 66)
-
- val n98 = queue.dequeueFirst(_ == 98)
- assert(n98 == Some(98))
- assert(queue.size == 65)
- assert(queue.last == 97)
- assert(queue.head == 1)
- // well... seems to work
- }
-
-}
-
-
-
-
+
+
+import scala.collection.mutable.Queue
+
+
+
+
+class ExtQueue[T] extends Queue[T] {
+ def printState {
+ println("-------------------")
+ println("Length: " + len)
+ println("First: " + first0)
+ println("First elem: " + first0.elem)
+ println("After first: " + first0.next)
+ }
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ testEmpty
+ testEnqueue
+ testTwoEnqueues
+ testFewEnqueues
+ testMoreEnqueues
+ }
+
+ def testEmpty {
+ val queue = new Queue[Int]
+
+ assert(queue.isEmpty)
+ assert(queue.size == 0)
+ assert(queue.length == 0)
+ assert(queue.dequeueFirst(_ > 500) == None)
+ assert(queue.dequeueAll(_ > 500).isEmpty)
+
+ queue.clear
+ assert(queue.isEmpty)
+ assert(queue.size == 0)
+ assert(queue.length == 0)
+ assert(queue.dequeueFirst(_ > 500) == None)
+ assert(queue.dequeueAll(_ > 500).isEmpty)
+ }
+
+ def testEnqueue {
+ val queue = new Queue[Int]
+
+ queue.enqueue(10)
+ assert(queue.nonEmpty)
+ assert(queue.size == 1)
+ assert(queue.length == 1)
+ assert(queue.head == 10)
+ assert(queue(0) == 10)
+ assert(queue.init.isEmpty)
+ assert(queue.tail.isEmpty)
+
+ queue.clear
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ queue.enqueue(11)
+ assert(queue.nonEmpty)
+ assert(queue.length == 1)
+ assert(queue.head == 11)
+ assert(queue.front == 11)
+
+ val deq = queue.dequeue
+ assert(deq == 11)
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ queue.enqueue(12)
+ val pdopt = queue.dequeueFirst(_ > 999)
+ assert(pdopt == None)
+ assert(queue.nonEmpty && queue.length == 1)
+
+ val somepd = queue.dequeueFirst(_ >= 1)
+ assert(somepd == Some(12))
+ assert(queue.isEmpty && queue.length == 0)
+ }
+
+ def testTwoEnqueues {
+ val queue = new ExtQueue[Int]
+ queue.enqueue(30)
+ queue.enqueue(40)
+
+ assert(queue.length == 2)
+ assert(queue.size == 2)
+ assert(queue.nonEmpty)
+ assert(queue.front == 30)
+// queue.printState
+
+ val all = queue.dequeueAll(_ > 20)
+ assert(all.size == 2)
+ assert(all.contains(30))
+ assert(all.contains(40))
+ assert(queue.size == 0)
+ assert(queue.isEmpty)
+ }
+
+ def testFewEnqueues {
+ val queue = new ExtQueue[Int]
+ queue.enqueue(10)
+ queue.enqueue(20)
+
+ assert(queue.length == 2)
+ assert(queue.nonEmpty)
+ assert(queue.head == 10)
+ assert(queue.last == 20)
+ assert(queue.front == 10)
+// queue.printState
+
+ val ten = queue.dequeue
+ assert(ten == 10)
+ assert(queue.length == 1)
+// queue.printState
+
+ queue.enqueue(30)
+// queue.printState
+ val gt25 = queue.dequeueFirst(_ > 25)
+ assert(gt25 == Some(30))
+ assert(queue.nonEmpty)
+ assert(queue.length == 1)
+ assert(queue.head == 20)
+ assert(queue.front == 20)
+// queue.printState
+
+ queue.enqueue(30)
+// queue.printState
+ val lt25 = queue.dequeueFirst(_ < 25)
+ assert(lt25 == Some(20))
+ assert(queue.nonEmpty)
+ assert(queue.length == 1)
+// queue.printState
+
+ queue.enqueue(40)
+// queue.printState
+ val all = queue.dequeueAll(_ > 20)
+// queue.printState
+ assert(all.size == 2)
+ assert(all.contains(30))
+ assert(all.contains(40))
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ queue.enqueue(50)
+ queue.enqueue(60)
+// queue.printState
+ val allgt55 = queue.dequeueAll(_ > 55)
+// println(allgt55)
+// queue.printState
+ assert(allgt55.size == 1)
+ assert(allgt55.contains(60))
+ assert(queue.length == 1)
+
+ queue.enqueue(70)
+ queue.enqueue(80)
+// queue.printState
+ val alllt75 = queue.dequeueAll(_ < 75)
+// queue.printState
+ assert(alllt75.size == 2)
+ assert(alllt75.contains(70))
+ assert(alllt75.contains(50))
+ assert(queue.length == 1)
+ assert(queue.head == 80)
+ assert(queue.last == 80)
+ assert(queue.front == 80)
+ }
+
+ def testMoreEnqueues {
+ val queue = new ExtQueue[Int]
+ for (i <- 0 until 10) queue.enqueue(i * 2)
+
+ for (i <- 0 until 10) {
+ val top = queue.dequeue
+ assert(top == i * 2)
+ assert(queue.length == 10 - i - 1)
+ }
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ for (i <- 0 until 10) queue.enqueue(i * i)
+ assert(queue.length == 10)
+ assert(queue.nonEmpty)
+
+ //queue.printState
+ val gt5 = queue.dequeueAll(_ > 4)
+ //queue.printState
+ //println(gt5)
+ assert(gt5.size == 7)
+ assert(queue.length == 3)
+ assert(queue.nonEmpty)
+
+ queue.clear
+ assert(queue.length == 0)
+ assert(queue.isEmpty)
+
+ for (i <- 0 until 10) queue.enqueue(i)
+ assert(queue.length == 10)
+
+ val even = queue.dequeueAll(_ % 2 == 0)
+ assert(even.size == 5)
+ assert(even.sameElements(List(0, 2, 4, 6, 8)))
+ assert(queue.length == 5)
+ assert(queue.head == 1)
+ assert(queue.last == 9)
+
+ val odd = queue.dequeueAll(_ %2 == 1)
+ assert(odd.size == 5)
+ assert(queue.length == 0)
+ assert(queue.isEmpty)
+ assert(odd.sameElements(List(1, 3, 5, 7, 9)))
+
+ for (i <- 0 until 10) queue.enqueue(i * i)
+ assert(queue.last == 81)
+ assert(queue.head == 0)
+ assert(queue.length == 10)
+
+ val foddgt25 = queue.dequeueFirst(num => num > 25 && num % 2 == 1)
+ assert(foddgt25 == Some(49))
+ assert(queue.length == 9)
+ assert(queue.nonEmpty)
+
+ //queue.printState
+ val lt30 = queue.dequeueAll(_ < 30)
+ //println(lt30)
+ //queue.printState
+ assert(lt30.size == 6)
+ assert(queue.length == 3)
+
+ val fgt60 = queue.dequeueFirst(_ > 60)
+ assert(fgt60 == Some(64))
+ assert(queue.length == 2)
+ assert(queue.head == 36)
+ assert(queue.last == 81)
+
+ val sgt60 = queue.dequeueFirst(_ > 60)
+ assert(sgt60 == Some(81))
+ assert(queue.length == 1)
+ assert(queue.head == 36)
+ assert(queue.last == 36)
+
+ val nogt60 = queue.dequeueFirst(_ > 60)
+ assert(nogt60 == None)
+ assert(queue.length == 1)
+ assert(queue.nonEmpty)
+ assert(queue.head == 36)
+
+ val gt0 = queue.dequeueFirst(_ > 0)
+ assert(gt0 == Some(36))
+ assert(queue.length == 0)
+ assert(queue.isEmpty)
+
+ for (i <- 0 until 4) queue.enqueue(i)
+ val interv = queue.dequeueAll(n => n > 0 && n < 3)
+ assert(interv.sameElements(List(1, 2)))
+ assert(queue.length == 2)
+ assert(queue.head == 0)
+ assert(queue.last == 3)
+
+ queue.dequeue
+ assert(queue.head == 3)
+
+ queue.enqueue(9)
+ val three = queue.dequeueFirst(_ < 5)
+ assert(three == Some(3))
+ assert(queue.length == 1)
+ assert(queue.head == 9)
+
+ queue.clear
+ for (i <- -100 until 100) queue.enqueue(i * i + i % 7 + 5)
+ assert(queue.length == 200)
+
+ val manyodds = queue.dequeueAll(_ % 2 == 1)
+ assert((manyodds.size + queue.length) == 200)
+
+ queue.dequeueAll(_ > -10000)
+ assert(queue.isEmpty)
+
+ for (i <- 0 until 100) queue.enqueue(i)
+ val multof3 = queue.dequeueAll(_ % 3 == 0)
+ assert(multof3.size == 34)
+ assert(queue.size == 66)
+
+ val n98 = queue.dequeueFirst(_ == 98)
+ assert(n98 == Some(98))
+ assert(queue.size == 65)
+ assert(queue.last == 97)
+ assert(queue.head == 1)
+ // well... seems to work
+ }
+
+}
+
+
+
+
diff --git a/test/files/run/ReplacementMatching.scala b/test/files/run/ReplacementMatching.scala
new file mode 100644
index 0000000000..faa46419dc
--- /dev/null
+++ b/test/files/run/ReplacementMatching.scala
@@ -0,0 +1,47 @@
+
+
+
+import util.matching._
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ replacementMatching
+ groupsMatching
+ }
+
+ def replacementMatching {
+ val regex = """\$\{(.+?)\}""".r
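+    // the pattern matches ${...} placeholders and captures the identifier between the braces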
+ val replaced = regex.replaceAllIn("Replacing: ${main}. And another method: ${foo}.",
+ (m: util.matching.Regex.Match) => {
+ val identifier = m.group(1)
+ identifier
+ })
+ assert(replaced == "Replacing: main. And another method: foo.")
+
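+    // replaceSomeIn rewrites only the matches for which the function returns Some; all other matches are left untouched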
+ val regex3 = """\$\{(.+?)\}""".r
+ val replaced3 = regex3.replaceSomeIn("Replacing: ${main}. And another: ${foo}.", (m: util.matching.Regex.Match) => {
+ val id = m.group(1)
+ if (id.startsWith("m")) Some(id) else None
+ })
+ assert(replaced3 == "Replacing: main. And another: ${foo}.")
+ }
+
+ def groupsMatching {
+ val Date = """(\d+)/(\d+)/(\d+)""".r
+    for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") {
+ assert(a == "1")
+ assert(b == "1")
+ assert(c == "2001")
+ }
+    for (Regex.Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.").matchData) {
+ assert(a == "1" || a == "31")
+ assert(b == "1" || b == "12")
+ assert(c == "2001" || c == "2000")
+ }
+ }
+
+}
diff --git a/test/files/run/ReverseSeqView.scala b/test/files/run/ReverseSeqView.scala
new file mode 100644
index 0000000000..88394064c5
--- /dev/null
+++ b/test/files/run/ReverseSeqView.scala
@@ -0,0 +1,25 @@
+
+
+
+
+
+
+object Test extends Application {
+
+ val lstv = List(1, 2, 3).view
+ val lstvr = lstv.reverse
+ assert(lstvr.iterator.toList == List(3, 2, 1))
+ assert(lstvr.reverse == List(1, 2, 3))
+ assert(lstvr.reverseIterator.toList == List(1, 2, 3))
+ assert(lstvr.reverseMap(_ + 1) == List(2, 3, 4))
+
+}
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala
new file mode 100644
index 0000000000..cdd68ab13e
--- /dev/null
+++ b/test/files/run/SymbolsTest.scala
@@ -0,0 +1,283 @@
+
+
+
+
+class Slazz {
+ val s1 = 'myFirstSymbol
+ val s2 = 'mySecondSymbol
+ def s3 = 'myThirdSymbol
+ var s4: Symbol = null
+
+ s4 = 'myFourthSymbol
+}
+
+class Base {
+ val basesymbol = 'symbase
+}
+
+class Sub extends Base {
+ val subsymbol = 'symsub
+}
+
+trait Signs {
+ val ind = 'indication
+ val trace = 'trace
+}
+
+trait Lazy1 {
+ lazy val v1 = "lazy v1"
+ lazy val s1 = 'lazySymbol1
+}
+
+trait Lazy2 {
+ lazy val v2 = "lazy v2"
+ lazy val s2 = 'lazySymbol2
+}
+
+trait Lazy3 {
+ lazy val v3 = "lazy v3"
+ lazy val s3 = 'lazySymbol3
+}
+
+object SingletonOfLazyness {
+ lazy val lazysym = 'lazySymbol
+ lazy val another = 'another
+ lazy val lastone = 'lastone
+}
+
+/*
+ * Tests symbols to see if they work correctly.
+ */
+object Test {
+ class Inner {
+ val simba = 'smba
+ var mfs: Symbol = null
+ mfs = Symbol("mfsa")
+ }
+
+ object InnerObject {
+ val o1 = 'aaa
+ val o2 = 'ddd
+ }
+
+ def aSymbol = 'myFirstSymbol
+ val anotherSymbol = 'mySecondSymbol
+
+ def main(args: Array[String]) {
+ testLiterals
+ testForLoop
+ testInnerClasses
+ testInnerObjects
+ testWithHashMaps
+ testLists
+ testAnonymous
+ testNestedObject
+ testInheritance
+ testTraits
+ testLazyTraits
+ testLazyObjects
+ }
+
+ def testLiterals {
+ val scl = new Slazz
+ assert(scl.s1 == aSymbol)
+ assert(scl.s2 == anotherSymbol)
+ assert(scl.s3 == 'myThirdSymbol)
+ assert(scl.s4 == Symbol.apply("myFourthSymbol"))
+ assert(scl.s1 == Symbol("myFirstSymbol"))
+ }
+
+ def testForLoop {
+ for (i <- 0 until 100) List("Val" + i)
+ }
+
+ def testInnerClasses {
+ val innerPower = new Inner
+ assert(innerPower.simba == 'smba)
+ assert(innerPower.mfs == 'mfsa)
+ }
+
+ def testInnerObjects {
+ assert(InnerObject.o1 == 'aaa)
+ assert(InnerObject.o2 == 'ddd)
+ }
+
+ def testWithHashMaps {
+ val map = new collection.mutable.HashMap[Symbol, Symbol]
+ map.put(InnerObject.o1, 'smba)
+ map.put(InnerObject.o2, 'mfsa)
+ map.put(Symbol("WeirdKey" + 1), Symbol("Weird" + "Val" + 1))
+ assert(map('aaa) == 'smba)
+ assert(map('ddd) == 'mfsa)
+ assert(map('WeirdKey1) == Symbol("WeirdVal1"))
+
+ map.clear
+ for (i <- 0 until 100) map.put(Symbol("symKey" + i), Symbol("symVal" + i))
+ assert(map(Symbol("symKey15")) == Symbol("symVal15"))
+ assert(map('symKey22) == 'symVal22)
+ assert(map('symKey73) == 'symVal73)
+ assert(map('symKey56) == 'symVal56)
+ assert(map('symKey91) == 'symVal91)
+ }
+
+ def testLists {
+ var lst: List[Symbol] = Nil
+ for (i <- 0 until 100) lst ::= Symbol("lsym" + (99 - i))
+ assert(lst(0) == 'lsym0)
+ assert(lst(10) == 'lsym10)
+ assert(lst(30) == 'lsym30)
+ assert(lst(40) == 'lsym40)
+ assert(lst(65) == 'lsym65)
+ assert(lst(90) == 'lsym90)
+ }
+
+  def testAnonymous { // TODO: compiler complains that the classdef can't be found for some reason; runs fine locally
+ // val anon = () => {
+ // val simba = 'smba
+ // simba
+ // }
+ // val an2 = () => {
+ // object nested {
+ // val m = 'mfsa
+ // }
+ // nested.m
+ // }
+ // val an3 = () => {
+ // object nested {
+ // val f = () => {
+ // 'layered
+ // }
+ // def gets = f()
+ // }
+ // nested.gets
+ // }
+ // val inner = new Inner
+ // assert(anon() == inner.simba)
+ // assert(anon().toString == "'smba")
+ // assert(an2() == 'mfsa)
+ // assert(an3() == Symbol("layered" + ""))
+ }
+
+ def testNestedObject {
+ object nested {
+ def sign = 'sign
+ def insignia = 'insignia
+ }
+ assert(nested.sign == 'sign)
+ assert(nested.insignia == 'insignia)
+ assert(('insignia).toString == "'insignia")
+ }
+
+ def testInheritance {
+ val base = new Base
+ val sub = new Sub
+ assert(base.basesymbol == 'symbase)
+ assert(sub.subsymbol == 'symsub)
+ assert(sub.basesymbol == 'symbase)
+
+ val anon = new Sub {
+ def subsubsymbol = 'symsubsub
+ }
+ assert(anon.subsubsymbol == 'symsubsub)
+ assert(anon.subsymbol == 'symsub)
+ assert(anon.basesymbol == 'symbase)
+
+ object nested extends Sub {
+ def objsymbol = 'symobj
+ }
+ assert(nested.objsymbol == 'symobj)
+ assert(nested.subsymbol == 'symsub)
+ assert(nested.basesymbol == 'symbase)
+ assert(('symbase).toString == "'symbase")
+ }
+
+ def testTraits {
+ val fromTrait = new AnyRef with Signs {
+ def traitsymbol = 'traitSymbol
+ }
+
+ assert(fromTrait.traitsymbol == 'traitSymbol)
+ assert(fromTrait.ind == 'indication)
+ assert(fromTrait.trace == 'trace)
+ assert(('trace).toString == "'trace")
+
+ trait Compl {
+ val s1 = 's1
+ def s2 = 's2
+ object inner {
+ val s3 = 's3
+ val s4 = 's4
+ }
+ }
+
+ val compl = new Sub with Signs with Compl
+ assert(compl.s1 == 's1)
+ assert(compl.s2 == 's2)
+ assert(compl.inner.s3 == 's3)
+ assert(compl.inner.s4 == 's4)
+ assert(compl.ind == 'indication)
+ assert(compl.trace == 'trace)
+ assert(compl.subsymbol == 'symsub)
+ assert(compl.basesymbol == 'symbase)
+
+ object Local extends Signs with Compl {
+ val s5 = 's5
+ def s6 = 's6
+ object inner2 {
+ val s7 = 's7
+ def s8 = 's8
+ }
+ }
+ assert(Local.s5 == 's5)
+ assert(Local.s6 == 's6)
+ assert(Local.inner2.s7 == 's7)
+ assert(Local.inner2.s8 == 's8)
+ assert(Local.inner.s3 == 's3)
+ assert(Local.inner.s4 == 's4)
+ assert(Local.s1 == 's1)
+ assert(Local.s2 == 's2)
+ assert(Local.trace == 'trace)
+ assert(Local.ind == 'indication)
+ assert(('s8).toString == "'s8")
+ }
+
+ def testLazyTraits {
+ val l1 = new AnyRef with Lazy1
+ val l2 = new AnyRef with Lazy2
+ val l3 = new AnyRef with Lazy3
+
+ l1.v1
+ l2.v2
+ l3.v3
+ assert((l1.s1).toString == "'lazySymbol1")
+ assert(l2.s2 == Symbol("lazySymbol" + 2))
+ assert(l3.s3 == 'lazySymbol3)
+ }
+
+ def testLazyObjects {
+ assert(SingletonOfLazyness.lazysym == 'lazySymbol)
+ assert(SingletonOfLazyness.another == Symbol("ano" + "ther"))
+ assert((SingletonOfLazyness.lastone).toString == "'lastone")
+
+ object nested {
+ lazy val sym1 = 'snested1
+ lazy val sym2 = 'snested2
+ }
+
+ assert(nested.sym1 == 'snested1)
+ assert(nested.sym2 == Symbol("snested" + "2"))
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/absoverride.scala b/test/files/run/absoverride.scala
index a3c03df0d7..8c6de09d2a 100644
--- a/test/files/run/absoverride.scala
+++ b/test/files/run/absoverride.scala
@@ -26,16 +26,16 @@ trait SyncIterator extends AbsIterator {
}
}
trait LoggedIterator extends AbsIterator {
- abstract override def next: T = {
- val x = super.next; println("log: " + x); x
+ abstract override def next: T = {
+ val x = super.next; println("log: " + x); x
}
}
-class Iter2(s: String) extends StringIterator(s)
- with SyncIterator with LoggedIterator;
+class Iter2(s: String) extends StringIterator(s)
+ with SyncIterator with LoggedIterator;
object Test {
def main(args: Array[String]) {
class Iter extends StringIterator(args(0)) with RichIterator with SyncIterator with LoggedIterator
val iter = new Iter
- iter foreach Console.println
+ iter foreach Console.println
}
}
diff --git a/test/files/run/adding-growing-set.scala b/test/files/run/adding-growing-set.scala
new file mode 100644
index 0000000000..5903813ed1
--- /dev/null
+++ b/test/files/run/adding-growing-set.scala
@@ -0,0 +1,11 @@
+/** This will run a loooong time if Set's builder copies a
+ * complete new Set for every element.
+ */
+object Test {
+ def main(args: Array[String]): Unit = {
+ val a = new Array[Long](1000000)
+ (1 to 10000) foreach (i => a(i) = i)
+ val s = collection.mutable.Set(a: _*)
+ assert(s.sum > 0)
+ }
+}
diff --git a/test/files/run/arrayclone.scala b/test/files/run/arrayclone.scala
new file mode 100644
index 0000000000..55d8fc395d
--- /dev/null
+++ b/test/files/run/arrayclone.scala
@@ -0,0 +1,106 @@
+object Test extends Application{
+ BooleanArrayClone;
+ ByteArrayClone;
+ ShortArrayClone;
+ CharArrayClone;
+ IntArrayClone;
+ LongArrayClone;
+ FloatArrayClone;
+ DoubleArrayClone;
+ ObjectArrayClone;
+ PolymorphicArrayClone;
+}
+
+object BooleanArrayClone{
+ val it : Array[Boolean] = Array(true, false);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = false;
+ assert(it(0) == true)
+}
+
+object ByteArrayClone{
+ val it : Array[Byte] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object ShortArrayClone{
+ val it : Array[Short] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object CharArrayClone{
+ val it : Array[Char] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object IntArrayClone{
+ val it : Array[Int] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object LongArrayClone{
+ val it : Array[Long] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object FloatArrayClone{
+ val it : Array[Float] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object DoubleArrayClone{
+ val it : Array[Double] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object ObjectArrayClone{
+ val it : Array[String] = Array("1", "0");
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = "0";
+ assert(it(0) == "1")
+}
+
+object PolymorphicArrayClone{
+ def testIt[T](it : Array[T], one : T, zero : T) = {
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = zero;
+ assert(it(0) == one)
+ }
+
+ testIt(Array("one", "two"), "one", "two");
+
+ class Mangler[T: Manifest](ts : T*){
+ // this will always be a BoxedAnyArray even after we've unboxed its contents.
+ val it = ts.toArray[T];
+ }
+
+ val mangled = new Mangler[Int](0, 1);
+
+ val y : Array[Int] = mangled.it; // make sure it's unboxed
+
+ testIt(mangled.it, 0, 1);
+}
diff --git a/test/files/run/arraycopy.scala b/test/files/run/arraycopy.scala
new file mode 100644
index 0000000000..82c34c23e7
--- /dev/null
+++ b/test/files/run/arraycopy.scala
@@ -0,0 +1,31 @@
+
+
+object Test {
+ def main(args: Array[String]) {
+ val a = new Array[Int](10)
+ val b = new Array[Any](10)
+ for (i <- 0 until 10) b(i) = i
+
+ Array.copy(b, 3, a, 3, 7)
+ assert(a.toSeq == List(0, 0, 0, 3, 4, 5, 6, 7, 8, 9))
+ }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/arybufgrow.scala b/test/files/run/arybufgrow.scala
index 4dccd962f2..9e18435243 100644
--- a/test/files/run/arybufgrow.scala
+++ b/test/files/run/arybufgrow.scala
@@ -2,8 +2,8 @@ import scala.collection.mutable._;
object Test extends Application {
val buf = new ArrayBuffer[String];
- for(val i <- List.range(0,1000)) {
- buf + "hello";
+ for (i <- List.range(0,1000)) {
+ buf += "hello";
}
Console.println("1000 = " + buf.length);
diff --git a/test/files/run/bigDecimalCache.scala b/test/files/run/bigDecimalCache.scala
new file mode 100644
index 0000000000..e8ebefee78
--- /dev/null
+++ b/test/files/run/bigDecimalCache.scala
@@ -0,0 +1,9 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val bd5a = BigDecimal(5)
+ val mc = java.math.MathContext.DECIMAL32
+ val bd5b = BigDecimal(5,mc)
+
+ assert(bd5b.mc == mc)
+ }
+}
diff --git a/test/files/run/bitsets-msil.check b/test/files/run/bitsets-msil.check
index 9fefa3125e..b187571bff 100644
--- a/test/files/run/bitsets-msil.check
+++ b/test/files/run/bitsets-msil.check
@@ -1,23 +1,23 @@
-ms0 = Set(2)
-ms1 = Set(2)
-ms2 = Set(2)
+ms0 = BitSet(2)
+ms1 = BitSet(2)
+ms2 = BitSet(2)
mb0 = False
mb1 = True
mb2 = False
xs0 = List(2)
xs1 = List(2)
xs2 = List(2)
-ma0 = List(4)
-ma1 = List(4)
-ma2 = List(4)
-mi0 = Set(2)
-mi1 = Set(2)
-mi2 = Set(2)
+ma0 = List(2)
+ma1 = List(2)
+ma2 = List(2)
+mi0 = BitSet(2)
+mi1 = BitSet(2)
+mi2 = BitSet(2)
-is0 = Set()
-is1 = Set()
-is2 = Set(2)
-is3 = Set()
+is0 = BitSet()
+is1 = BitSet()
+is2 = BitSet(2)
+is3 = BitSet()
ib0 = False
ib1 = False
ib2 = True
@@ -26,8 +26,8 @@ ys0 = List()
ys1 = List()
ys2 = List(2)
ys3 = List()
-ia0 = List(0)
-ia1 = List(0)
-ia2 = List(4)
+ia0 = List()
+ia1 = List()
+ia2 = List(2)
ia3 = List()
diff --git a/test/files/run/boolexprs.scala b/test/files/run/boolexprs.scala
index b9b4faea9c..4f1c4b161a 100644
--- a/test/files/run/boolexprs.scala
+++ b/test/files/run/boolexprs.scala
@@ -10,7 +10,7 @@ class Counter {
object Test1 {
var flag = false;
- def flip: Boolean = { val tmp = flag; flag = !flag; tmp }
+ def flip: Boolean = { val tmp = flag; flag = !flag; tmp }
def run: Int = {
val c = new Counter;
c.incrThen(flip || flip);
diff --git a/test/files/run/bug0325.scala b/test/files/run/bug0325.scala
index 236f1b101f..92331ab05f 100644
--- a/test/files/run/bug0325.scala
+++ b/test/files/run/bug0325.scala
@@ -7,7 +7,7 @@ case class RS(self: String) {
}
def split(separator: Char): Array[String] = self.split(escape(separator))
-
+
def split(separators: Array[Char]): Array[String] = {
val re = separators.foldLeft("[")(_+escape(_)) + "]"
self.split(re)
@@ -27,7 +27,7 @@ object Test {
case e@_ => println(which + " failed with " + e.getClass)
}
}
-
+
def main(args: Array[String]) {
val badChars = "?*{+([\\^.$"
@@ -46,8 +46,8 @@ object Test {
for ((c,str) <- badCases)
test(("a"+c+"b").split(str.toArray),"RichString split(\""+ str + "\")")
println
-
+
for ((c,str) <- badCases)
- test(RS("a"+c+"b").split(str.toArray),"RS split(\""+ str + "\")")
+ test(RS("a"+c+"b").split(str.toArray),"RS split(\""+ str + "\")")
}
}
diff --git a/test/files/run/bug1005.scala b/test/files/run/bug1005.scala
index 5ccd89dd85..60129bcc51 100644
--- a/test/files/run/bug1005.scala
+++ b/test/files/run/bug1005.scala
@@ -10,7 +10,7 @@ object Test
object FromPoly{
def main(args : Array[String]) = (new Bar[AnyRef](Array[AnyRef]("Halp!"))).bar
}
-
+
def main(args: Array[String]): Unit = {
println(FromMono main null mkString)
println(FromPoly main null mkString)
diff --git a/test/files/run/bug1074.check b/test/files/run/bug1074.check
index ecff8510ed..ccf1cb1551 100644
--- a/test/files/run/bug1074.check
+++ b/test/files/run/bug1074.check
@@ -1,3 +1,3 @@
-q0 = Set(kl, jk, cd, fg, a, ef, gh, de, hj, b, lm, mn)
+q0 = Set(kl, jk, cd, fg, ef, gh, a, de, hj, b, lm, mn)
q1 = Set() 0
q2 = Set() 0
diff --git a/test/files/run/bug1141.scala b/test/files/run/bug1141.scala
index a98e3cf204..7de031366c 100644
--- a/test/files/run/bug1141.scala
+++ b/test/files/run/bug1141.scala
@@ -2,6 +2,6 @@ object Test extends Application {
val foo = new {
def apply(args : String*) = args foreach println
}
-
+
foo("var", "args")
}
diff --git a/test/files/run/bug1220.scala b/test/files/run/bug1220.scala
index 88baa980f5..165a1c0eca 100644
--- a/test/files/run/bug1220.scala
+++ b/test/files/run/bug1220.scala
@@ -1,7 +1,7 @@
object Test extends Application {
class QSRichIterable[A](self: Iterable[A]) {
- def filterMap[R](f: PartialFunction[A,R]) =
+ def filterMap[R](f: PartialFunction[A,R]) =
self filter (f.isDefinedAt) map f
}
diff --git a/test/files/run/bug1300.scala b/test/files/run/bug1300.scala
index 1a759f4e1e..7b2fd79cec 100644
--- a/test/files/run/bug1300.scala
+++ b/test/files/run/bug1300.scala
@@ -4,10 +4,10 @@ object Test extends Application
// val a1 = x1.toArray[Any]
val a2 = Array('a','b','c','d').toArray[Any]
val a3 = Array("e","f","g","h").toArray[Any]
-
+
Array.copy(a3, 0, a1, 0, 4)
Array.copy(a2, 0, a3, 0, 4)
Array.copy(a2, 0, a1, 0, 4)
-
+
println(a1.mkString + a2.mkString + a3.mkString)
}
diff --git a/test/files/run/bug1309.scala b/test/files/run/bug1309.scala
index d753f4d96c..6b5167eb56 100644
--- a/test/files/run/bug1309.scala
+++ b/test/files/run/bug1309.scala
@@ -1,6 +1,6 @@
object Test {
def f(ras: => RandomAccessSeq[Byte]): RandomAccessSeq[Byte] = ras
-
+
def main(args: Array[String]): Unit = {
f(new Array[Byte](0))
}
diff --git a/test/files/run/bug1766.scala b/test/files/run/bug1766.scala
new file mode 100644
index 0000000000..94f69356e1
--- /dev/null
+++ b/test/files/run/bug1766.scala
@@ -0,0 +1,16 @@
+object Test extends Application {
+
+ class C(s: String) {
+
+ def this(i: Int) = this("bar")
+
+ def f = {
+ val v: { def n: Int } = new { val n = 3 }
+ v.n
+ }
+
+ }
+
+ new C("foo").f
+
+}
diff --git a/test/files/run/bug2029.scala b/test/files/run/bug2029.scala
index 32b04f0b47..1cbe97a350 100644
--- a/test/files/run/bug2029.scala
+++ b/test/files/run/bug2029.scala
@@ -3,10 +3,10 @@ object Test{
import scala.collection.immutable.TreeSet;
val mainSet = TreeSet(1 to 5 :_*)
-
+
var compareCalled = false;
val smallerSet = TreeSet(2 to 4 :_*)(Ordering[Int].reverse)
-
+
println(mainSet.mkString(","))
println(smallerSet.mkString(","))
println(smallerSet.subsetOf(mainSet));
diff --git a/test/files/run/bug2124.scala b/test/files/run/bug2124.scala
index e72deb09bc..a4fd654d76 100644
--- a/test/files/run/bug2124.scala
+++ b/test/files/run/bug2124.scala
@@ -9,7 +9,7 @@ object Test {
println(new RuleTransformer(new RewriteRule {
- override def transform(n: Node): NodeSeq = {
+ override def transform(n: Node): NodeSeq = {
val result = n match {
case <t>{_*}</t> => <q/>
diff --git a/test/files/run/bug2125.scala b/test/files/run/bug2125.scala
index 8314e4f019..a10ed9827b 100644
--- a/test/files/run/bug2125.scala
+++ b/test/files/run/bug2125.scala
@@ -5,11 +5,11 @@ import scala.xml.transform._
object Test {
val sampleXml = <xml:group><p><lost/><t><s><r></r></s></t></p></xml:group>
-
+
def main(args: scala.Array[String]) {
println(new RuleTransformer(new RewriteRule {
- override def transform(n: Node): NodeSeq = {
+ override def transform(n: Node): NodeSeq = {
val result = n match {
diff --git a/test/files/run/bug2276.scala b/test/files/run/bug2276.scala
index 25c30d9e96..68b9976355 100644
--- a/test/files/run/bug2276.scala
+++ b/test/files/run/bug2276.scala
@@ -2,7 +2,7 @@ import scala.xml._
import scala.xml.transform._
object Test extends Application {
- val inputXml : Node =
+ val inputXml : Node =
<root>
<subnode>
<version>1</version>
diff --git a/test/files/run/bug2354.scala b/test/files/run/bug2354.scala
new file mode 100644
index 0000000000..5419911ac3
--- /dev/null
+++ b/test/files/run/bug2354.scala
@@ -0,0 +1,17 @@
+import scala.xml.parsing._
+import scala.io.Source
+
+object Test
+{
+ val xml_good = "<title><![CDATA[Hello [tag]]]></title>"
+ val xml_bad = "<title><![CDATA[Hello [tag] ]]></title>"
+
+ val parser1 = ConstructingParser.fromSource(Source.fromString(xml_good),false)
+ val parser2 = ConstructingParser.fromSource(Source.fromString(xml_bad),false)
+
+ def main(args: Array[String]): Unit = {
+ parser1.document
+ parser2.document
+ }
+}
+
diff --git a/test/files/run/bug2378.scala b/test/files/run/bug2378.scala
new file mode 100644
index 0000000000..f696a78b4c
--- /dev/null
+++ b/test/files/run/bug2378.scala
@@ -0,0 +1,9 @@
+object Test
+{
+ val f1 = -0.0
+ val f2 = -(0.0)
+ def main(args: Array[String]): Unit = {
+ assert(f1.toString startsWith "-")
+ assert(f2.toString startsWith "-")
+ }
+}
diff --git a/test/files/run/bug2512.scala b/test/files/run/bug2512.scala
index 514b26650f..c2dfda6c1f 100644
--- a/test/files/run/bug2512.scala
+++ b/test/files/run/bug2512.scala
@@ -4,11 +4,11 @@ object Test
{
val runs = 10000
class Bop
-
+
def main(args: Array[String]): Unit = {
val set: HashSet[Bop] = new HashSet("Bop", 16)
(1 to runs).toList foreach (_ => set addEntry new Bop)
-
+
assert(runs == set.size && set.size == set.iterator.length)
}
}
diff --git a/test/files/run/bug2514.scala b/test/files/run/bug2514.scala
index e23b441ecf..21c4afb472 100644
--- a/test/files/run/bug2514.scala
+++ b/test/files/run/bug2514.scala
@@ -1,7 +1,7 @@
object Test
{
implicit def x[A](a: A) = new { def xx = a }
-
+
def main(args: Array[String]): Unit = {
val r1 = 12 xx;
val r2 = 12.xx
@@ -9,7 +9,7 @@ object Test
val r4 = 12.xx + 12.xx
val r5 = 12.`xx` + 12.xx
val r6 = 12.3.`xx` + 12.xx
-
+
assert(r5 == 24)
}
}
\ No newline at end of file
diff --git a/test/files/run/bug2552.check b/test/files/run/bug2552.check
new file mode 100644
index 0000000000..1deeae772f
--- /dev/null
+++ b/test/files/run/bug2552.check
@@ -0,0 +1,48 @@
+p(0)
+0
+p(1)
+1
+p(2)
+2
+p(3)
+3
+p(4)
+4
+p(5)
+5
+p(6)
+6
+p(7)
+7
+p(8)
+8
+p(9)
+9
+p(10)
+p(0)
+true
+true
+0
+p(1)
+true
+1
+p(2)
+false
+false
+p(0)
+true
+true
+0
+p(1)
+p(2)
+2
+p(3)
+p(4)
+4
+p(5)
+p(6)
+6
+p(7)
+p(8)
+8
+p(9)
diff --git a/test/files/run/bug2552.scala b/test/files/run/bug2552.scala
new file mode 100644
index 0000000000..34fe25084e
--- /dev/null
+++ b/test/files/run/bug2552.scala
@@ -0,0 +1,34 @@
+object Test extends Application {
+ def testTakeWhile = {
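+    // the println inside each predicate records every evaluation, so the check file pins down how lazily takeWhile and filter run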
+ val numbers = Iterator.range(0, 50)
+ val zeroTo9 = numbers.takeWhile(x => { println("p(" + x + ")"); x < 10 } )
+
+ zeroTo9.foreach(println _)
+
+ val zeroTo1 = Iterator.range(0, 20).takeWhile(x => { println("p(" + x + ")"); x < 2 } )
+
+ println(zeroTo1.hasNext)
+ println(zeroTo1.hasNext)
+ println(zeroTo1.next)
+ println(zeroTo1.hasNext)
+ println(zeroTo1.next)
+ println(zeroTo1.hasNext)
+ println(zeroTo1.hasNext)
+ }
+
+ def testFilter = {
+ val predicate = (x: Int) => { println("p(" + x + ")"); x % 2 == 0 }
+
+ val evens = Iterator.range(0, 10).filter(predicate)
+
+ println(evens.hasNext)
+ println(evens.hasNext)
+ println(evens.next)
+
+ evens.foreach(println _)
+ }
+
+ testTakeWhile
+ testFilter
+}
+
diff --git a/test/files/run/bug2636.scala b/test/files/run/bug2636.scala
new file mode 100644
index 0000000000..3271f79ffc
--- /dev/null
+++ b/test/files/run/bug2636.scala
@@ -0,0 +1,35 @@
+object Test
+{
+ type Foo = { def update(x: Int, value: String): Unit }
+ type Foo2 = { def update(x: Int, value: String): Int }
+ type Foo3 = { def update(x: Int, value: String): Array[Int] }
+
+ def alen() = {
+ type L1 = { def length: Int }
+ def len(p: L1) = p.length
+ val x: L1 = Array(1,2,3)
+ len(x)
+ }
+
+ type A1 = { def apply(x: Int): String }
+ def arrApply(a: A1, x: Int) = a(x)
+
+ def main(args: Array[String]): Unit = {
+ val arr = new Array[String](3)
+ val p1: Foo = arr
+ def a1 = p1(0) = "b"
+
+ val p2: Foo2 = new { def update(x: Int, value: String) = { p1(1) = "o" ; 1 } }
+ def a2 = p2(0) = "c"
+
+ val p3: Foo3 = new { def update(x: Int, value: String) = { p1(2) = "b" ; Array(1) } }
+ def a3 = p3(10) = "hi mom"
+
+ a1 ; a2 ; a3 ;
+
+ assert(arr.mkString == "bob")
+ assert(alen() == 3)
+ assert(arrApply(arr, 1) == "o")
+ assert(arrApply(new { def apply(x: Int) = "tom" }, -100) == "tom")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/bug2721.check b/test/files/run/bug2721.check
new file mode 100644
index 0000000000..2bd7656b36
--- /dev/null
+++ b/test/files/run/bug2721.check
@@ -0,0 +1,2 @@
+root:-rootVal-sub:-subVal-
+root:-rootVal-sub:-subVal-
diff --git a/test/files/run/bug2721.scala b/test/files/run/bug2721.scala
new file mode 100644
index 0000000000..93af884a60
--- /dev/null
+++ b/test/files/run/bug2721.scala
@@ -0,0 +1,12 @@
+object Test
+{
+ val xml1 = <root xmlns:ns="nsUri" ns:at="rootVal"><sub ns:at="subVal"/></root>
+ val xml2= scala.xml.XML.loadString("""<root xmlns:ns="nsUri" ns:at="rootVal"><sub ns:at="subVal"/></root>""")
+
+ def backslashSearch(x: xml.Elem) = "root:-"+(x \ "@{nsUri}at") +"-sub:-"+(x \ "sub" \ "@{nsUri}at") +"-"
+
+ def main(args: Array[String]): Unit = {
+ println(backslashSearch(xml1))
+ println(backslashSearch(xml2))
+ }
+}
diff --git a/test/files/run/bug2876.scala b/test/files/run/bug2876.scala
new file mode 100644
index 0000000000..f71879ebff
--- /dev/null
+++ b/test/files/run/bug2876.scala
@@ -0,0 +1,7 @@
+object Test
+{
+ def main(args: Array[String]): Unit = {
+ "x".view.filter(_ => true).take(1)
+ }
+}
+
diff --git a/test/files/run/bug2958.scala b/test/files/run/bug2958.scala
new file mode 100644
index 0000000000..b9563a1b77
--- /dev/null
+++ b/test/files/run/bug2958.scala
@@ -0,0 +1,16 @@
+object Test {
+ def f(args: Array[String]) = args match {
+ case Array("-p", prefix, from, to) =>
+ prefix + from + to
+
+ case Array(from, to) =>
+ from + to
+
+ case _ =>
+ "default"
+ }
+
+ def main(args: Array[String]) {
+ assert(f(Array("1", "2")) == "12")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/bug3004.scala b/test/files/run/bug3004.scala
new file mode 100644
index 0000000000..a1e9c6c72f
--- /dev/null
+++ b/test/files/run/bug3004.scala
@@ -0,0 +1,14 @@
+object MyClass {
+ val duplicate: Int = 10
+}
+
+class MyClass {
+ private val duplicate = MyClass.duplicate
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val x = new MyClass
+ ()
+ }
+}
diff --git a/test/files/run/bug3126.scala b/test/files/run/bug3126.scala
new file mode 100644
index 0000000000..36322bf896
--- /dev/null
+++ b/test/files/run/bug3126.scala
@@ -0,0 +1,9 @@
+object Test {
+ case class C(x: Int)
+ val v: Some[Int] = null
+
+ def main(args: Array[String]): Unit = {
+ try C.unapply(null) catch { case _: MatchError => }
+ try v match { case Some(1) => } catch { case _: MatchError => }
+ }
+}
diff --git a/test/files/run/bug3175.check b/test/files/run/bug3175.check
new file mode 100644
index 0000000000..12246140f4
--- /dev/null
+++ b/test/files/run/bug3175.check
@@ -0,0 +1,11 @@
+10
+15
+3
+3
+3
+5
+5
+5
+100
+jabooboo
+hi mom
diff --git a/test/files/run/bug3175.scala b/test/files/run/bug3175.scala
new file mode 100644
index 0000000000..aff2e67d0d
--- /dev/null
+++ b/test/files/run/bug3175.scala
@@ -0,0 +1,55 @@
+/** A bit down the road this test will examine
+ * the bytecode.
+ */
+object Test {
+ def len(x:{ def length: Int }) = x.length
+ def f1(x:{ def apply(x: Int): Long }) = x(0)
+ def f2(x:{ def apply(x: Int): Byte }) = x(0)
+ def f3(x:{ def apply(x: Int): String }) = x(0).length
+
+ def f4(x:{ def update(x: Int, y: Long): Unit }, y: Long) = x(0) = y
+ def f5(x:{ def update(x: Int, y: Byte): Unit }, y: Byte) = x(0) = y
+ def f6(x:{ def update(x: Int, y: String): Unit }, y: String) = x(0) = y
+
+ def f7(x: { def length: Any }) = x.length
+
+ def f8(x: { def apply(x: Int): Any }) = x(0)
+ def f9(x: { def apply(x: Int): Int }) = x(0)
+ def f10(x: { def apply(x: Int): Long }) = x(0)
+
+ // update has some interesting special cases
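+  // (the value of x(0) = y is discarded, so the declared result type may be Any, AnyVal or AnyRef)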
+ def f11(x:{ def update(x: Int, y: Long): Any }, y: Long) = x(0) = y
+ def f12(x:{ def update(x: Int, y: String): AnyVal }, y: String) = x(0) = y
+ def f13(x:{ def update(x: Int, y: String): AnyRef }, y: String) = x(0) = y
+
+ // doesn't work yet, see #3197
+ // def fclone(x:{ def clone(): AnyRef }) = x.clone()
+
+ def main(args: Array[String]): Unit = {
+ val longs = Array(5L)
+ val bytes = Array(5: Byte)
+ val strs = Array("abcde", "fghjij")
+
+ println(len(Array(1,2,3)) + len(Array(4.0,5.0f)) + len(Array("abc", 5)) + len("bop"))
+ println(f1(longs) + f2(bytes) + f3(strs))
+
+ f4(longs, 1)
+ f5(bytes, 1)
+ f6(strs, "a")
+
+ println(f1(longs) + f2(bytes) + f3(strs))
+
+ println(f7(Array(1,2,3)))
+ println(f7("def"))
+
+ println(f8(Array(5)))
+ println(f9(Array(5)))
+ println(f10(Array(5)))
+
+ f11(longs, 100L)
+ f12(strs, "jabooboo")
+ println(longs(0))
+ println(strs(0))
+ f13(new { def update(x: Int, y: String): List[Int] = { println("hi mom") ; Nil } }, "irrelevant")
+ }
+}
diff --git a/test/files/run/bug3269.check b/test/files/run/bug3269.check
new file mode 100644
index 0000000000..c25611c15c
--- /dev/null
+++ b/test/files/run/bug3269.check
@@ -0,0 +1,2 @@
+1
+Hello
diff --git a/test/files/run/bug3269.scala b/test/files/run/bug3269.scala
new file mode 100644
index 0000000000..6fe727111c
--- /dev/null
+++ b/test/files/run/bug3269.scala
@@ -0,0 +1,9 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val it = List(1).iterator ++ { println("Hello"); Iterator.empty }
+ println(it.next)
+ it.hasNext
+ it.hasNext
+ it.hasNext
+ }
+}
diff --git a/test/files/run/bug3327.check b/test/files/run/bug3327.check
new file mode 100644
index 0000000000..980a0d5f19
--- /dev/null
+++ b/test/files/run/bug3327.check
@@ -0,0 +1 @@
+Hello World!
diff --git a/test/files/run/bug3327.scala b/test/files/run/bug3327.scala
new file mode 100644
index 0000000000..7e6d3fc210
--- /dev/null
+++ b/test/files/run/bug3327.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main (args : Array[String]) {
+ val b = new StringBuilder
+ b.append ("Hello World!")
+ b.lastIndexOf ('e')
+ println (b.toString)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/bug3395.check b/test/files/run/bug3395.check
new file mode 100644
index 0000000000..5f5521fae2
--- /dev/null
+++ b/test/files/run/bug3395.check
@@ -0,0 +1,2 @@
+abc
+def
diff --git a/test/files/run/bug3395.scala b/test/files/run/bug3395.scala
new file mode 100644
index 0000000000..01cc431871
--- /dev/null
+++ b/test/files/run/bug3395.scala
@@ -0,0 +1,13 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ Seq("") match {
+ case Seq("") => println("abc")
+ case Seq(_, _, x) => println(x)
+ }
+
+ Seq(1, 2, "def") match {
+ case Seq("") => println("abc")
+ case Seq(_, _, x) => println(x)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/bug3397.scala b/test/files/run/bug3397.scala
new file mode 100644
index 0000000000..243fe766de
--- /dev/null
+++ b/test/files/run/bug3397.scala
@@ -0,0 +1,7 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val x = Seq(Set(1,2,3),Set(4,5,6),Set(7,8,9)).transpose
+
+ ()
+ }
+}
diff --git a/test/files/run/bug3516.check b/test/files/run/bug3516.check
new file mode 100644
index 0000000000..d0d10d82fa
--- /dev/null
+++ b/test/files/run/bug3516.check
@@ -0,0 +1,3 @@
+1
+1
+21
diff --git a/test/files/run/bug3516.scala b/test/files/run/bug3516.scala
new file mode 100644
index 0000000000..82a97f27de
--- /dev/null
+++ b/test/files/run/bug3516.scala
@@ -0,0 +1,13 @@
+object Test {
+ def mkIterator = (1 to 5).iterator map (x => { println(x) ; x })
+ def mkInfinite = Iterator continually { println(1) ; 1 }
+
+ def main(args: Array[String]): Unit = {
+ // Stream is strict in its head so we should see 1 from each of them.
+ val s1 = mkIterator.toStream
+ val s2 = mkInfinite.toStream
+ // back and forth without slipping into nontermination.
+ println((Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next)
+ ()
+ }
+}
diff --git a/test/files/run/bug3529.scala b/test/files/run/bug3529.scala
new file mode 100644
index 0000000000..bb82424bf6
--- /dev/null
+++ b/test/files/run/bug3529.scala
@@ -0,0 +1,14 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(1 to 10 drop 10 isEmpty)
+ assert(1 until 10 drop 9 isEmpty)
+ assert(1 to 10 by 2 drop 5 isEmpty)
+ assert(10 to 1 by -1 drop 10 isEmpty)
+ assert((10 to 1 by -1 drop 9) == Seq(1))
+
+ assert((1 to 10 drop 9) == Seq(10))
+ assert((1 until 10 drop 9) == Nil)
+
+ assert(Stream(1 to 10).flatten.toList == Stream(1 until 11).flatten.toList)
+ }
+}
diff --git a/test/files/run/bug3540.scala b/test/files/run/bug3540.scala
new file mode 100644
index 0000000000..4eb3de780b
--- /dev/null
+++ b/test/files/run/bug3540.scala
@@ -0,0 +1,7 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(List.iterate(List(1,2,3), 4)(_.tail).last.isEmpty)
+ assert(Stream.iterate(Stream(1,2,3), 4)(_.tail).last.isEmpty)
+ assert(Array.iterate(Array(1,2,3), 4)(_.tail).last.isEmpty)
+ }
+}
diff --git a/test/files/run/bug3563.scala b/test/files/run/bug3563.scala
new file mode 100644
index 0000000000..2a80ef412a
--- /dev/null
+++ b/test/files/run/bug3563.scala
@@ -0,0 +1,21 @@
+
+
+
+
+
+// ticket #3563
+object Test {
+
+ def main(args: Array[String]) {
+ var sum = 0
+ val setseq = Set(1, 2, 3, 4).toSeq
+ setseq.map( n => { sum += n; n * n }).head
+ assert(sum == 10)
+
+ sum = 0
+ val mapseq = Map(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4).toSeq
+ mapseq.map( n => { sum += n._1; (n._1 + n._1, n._2 * n._2) }).head
+ assert(sum == 10)
+ }
+
+}
diff --git a/test/files/run/bug3616.check b/test/files/run/bug3616.check
new file mode 100644
index 0000000000..f31e21baff
--- /dev/null
+++ b/test/files/run/bug3616.check
@@ -0,0 +1 @@
+Fruit.ValueSet(A, B, C)
diff --git a/test/files/run/bug3616.scala b/test/files/run/bug3616.scala
new file mode 100644
index 0000000000..777b97f9ab
--- /dev/null
+++ b/test/files/run/bug3616.scala
@@ -0,0 +1,12 @@
+object X extends Enumeration {
+ val Y = Value
+}
+object Fruit extends Enumeration {
+ val x = X.Y
+ val A,B,C = Value
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(Fruit.values)
+ }
+}
diff --git a/test/files/run/bug363.scala b/test/files/run/bug363.scala
index c747be2ec9..5f3f30a098 100644
--- a/test/files/run/bug363.scala
+++ b/test/files/run/bug363.scala
@@ -3,7 +3,7 @@ object Test {
println("I love the smell of (Array[String])Unit in the morning.")
}
}
-
+
class Test {
def kurtz() = "We must kill them. We must incinerate them."
}
diff --git a/test/files/run/bug408.scala b/test/files/run/bug408.scala
new file mode 100644
index 0000000000..4d3dcbcb84
--- /dev/null
+++ b/test/files/run/bug408.scala
@@ -0,0 +1,12 @@
+object Test
+{
+ val a = scala.collection.immutable.Set.empty ++ (0 to 100000)
+ val b = scala.collection.immutable.Set.empty ++ (0 to 100000)
+
+ def main(args: Array[String]): Unit = {
+ a -- b
+ a -- b
+ a -- b
+ a -- b
+ }
+}
diff --git a/test/files/run/bug4238/J.java b/test/files/run/bug4238/J.java
new file mode 100644
index 0000000000..948989b4e7
--- /dev/null
+++ b/test/files/run/bug4238/J.java
@@ -0,0 +1,4 @@
+class J {
+ scala.collection.mutable.HashMap<String, String> x =
+ new scala.collection.mutable.HashMap<String, String>();
+}
diff --git a/test/files/run/bug594.scala b/test/files/run/bug594.scala
index 0c3be3d5de..f923a3cd2a 100644
--- a/test/files/run/bug594.scala
+++ b/test/files/run/bug594.scala
@@ -2,7 +2,7 @@ object Test {
def main(args: Array[String]): Unit = {
val array = Array("one", "two", "three")
val firstTwo: Array[String] = array.slice(0,2)
- for(val x <- firstTwo)
+ for (x <- firstTwo)
Console.println(x)
}
}
diff --git a/test/files/run/bug603.scala b/test/files/run/bug603.scala
index 361cef1f41..b8825c933b 100644
--- a/test/files/run/bug603.scala
+++ b/test/files/run/bug603.scala
@@ -22,7 +22,7 @@ object forceDelay {
object Test {
import forceDelay._
-
+
def main(args: Array[String]) = {
val s: Susp[Int] = delay { Console.println("evaluating..."); 3 }
Console.println("s = " + s)
diff --git a/test/files/run/bug627.scala b/test/files/run/bug627.scala
index 6415694ffe..ecaf150741 100644
--- a/test/files/run/bug627.scala
+++ b/test/files/run/bug627.scala
@@ -1,6 +1,6 @@
object Test {
def main(args: Array[String]) {
- val s: Seq[int] = Array(1, 2, 3, 4)
+ val s: Seq[Int] = Array(1, 2, 3, 4)
println(s)
}
}
diff --git a/test/files/run/bug744.scala b/test/files/run/bug744.scala
index 4895e9baa0..d5e9f6df34 100644
--- a/test/files/run/bug744.scala
+++ b/test/files/run/bug744.scala
@@ -5,7 +5,7 @@ trait Linked {
}
}
object Test {
- class Test extends Linked {
+ class Test extends Linked {
trait FileImpl extends super.FileImpl {
// val x: int = 1
}
diff --git a/test/files/run/bug751.scala b/test/files/run/bug751.scala
new file mode 100644
index 0000000000..294d3af5c2
--- /dev/null
+++ b/test/files/run/bug751.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val map = Map(1 -> "a", 2 -> "b", 3 -> "c")
+ assert(map.filterKeys(_ % 2 == 0).isInstanceOf[scala.collection.immutable.Map[_,_]])
+ }
+}
diff --git a/test/files/run/bug920.scala b/test/files/run/bug920.scala
index 6a7f122d55..1e12e6ba87 100644
--- a/test/files/run/bug920.scala
+++ b/test/files/run/bug920.scala
@@ -7,7 +7,7 @@ object Test {
trait Foo extends Test.Foo0 {
def foo : B.this.type = B.this;
}
- class baz extends Baz with Foo {
+ class baz extends Baz with Foo {
override def toString = "baz"
}
Console.println(new baz);
diff --git a/test/files/run/bugs.scala b/test/files/run/bugs.scala
index bbf2a0fb39..d5905af76c 100644
--- a/test/files/run/bugs.scala
+++ b/test/files/run/bugs.scala
@@ -304,7 +304,7 @@ object Bug250Test {
// Bug 257
object Bug257Test {
- def sayhello(): Unit = { Console.println("I should come 1st and 2nd"); };
+ def sayhello(): Unit = { Console.println("I should come 1st and 2nd"); };
def sayhi(): Unit = { Console.println("I should come last"); };
def f1(x: Unit): Unit = ();
diff --git a/test/files/run/bugs2087-and-2400.scala b/test/files/run/bugs2087-and-2400.scala
new file mode 100644
index 0000000000..93cd633ffb
--- /dev/null
+++ b/test/files/run/bugs2087-and-2400.scala
@@ -0,0 +1,20 @@
+object Test
+{
+ def negativeCharMaker = new (Short => Char) { def apply(x: Short) = x.toChar }
+ def main(args: Array[String]): Unit = {
+ // throws exception if -100 gets to Character.valueOf
+ val x = negativeCharMaker(-100)
+
+ // chars are unsigned, they should never be equal to negative values
+ assert((-100).toShort != (-100).toChar)
+ assert((-100).toChar != (-100).toShort)
+ assert((-100).toChar != (-100).toByte)
+ assert((-100).toByte != (-100).toChar)
+
+ // BoxesRunTime must agree as well
+ assert(((-100).toShort: Any) != (-100).toChar)
+ assert(((-100).toChar: Any) != (-100).toShort)
+ assert(((-100).toChar: Any) != (-100).toByte)
+ assert(((-100).toByte: Any) != (-100).toChar)
+ }
+}
diff --git a/test/files/run/bytecodecs.scala b/test/files/run/bytecodecs.scala
new file mode 100644
index 0000000000..bf8a0f8ed3
--- /dev/null
+++ b/test/files/run/bytecodecs.scala
@@ -0,0 +1,39 @@
+import scala.reflect.generic.ByteCodecs._
+
+object Test {
+
+ def test8to7(xs: Array[Byte]) {
+ val ys = encode8to7(xs)
+ decode7to8(ys, ys.length)
+ assert(ys.take(xs.length).deep == xs.deep,
+ "test8to7("+xs.deep+") failed, result = "+ys.take(xs.length).deep)
+ }
+
+ def testAll(xs: Array[Byte]) {
+ val ys = encode(xs)
+ decode(ys)
+ assert(ys.take(xs.length).deep == xs.deep,
+ "testAll("+xs.deep+") failed, result = "+ys.take(xs.length).deep)
+ }
+
+ def test(inputs: Array[Byte]*) {
+ for (input <- inputs) {
+ test8to7(input)
+ testAll(input)
+ }
+ }
+
+ def main(args: Array[String]) {
+ test(
+ Array(1, 2, 3),
+ Array(1, 2, 3, 4, 5, 6, 7),
+ Array(1, -2, 0, -3, -5, -6, -7),
+ Array(1, 3, -1, -128, 0, 0, -128, 1, 2, 3))
+ val rand = new scala.util.Random()
+ for (i <- 1 until 5000) {
+ var xs = new Array[Byte](i)
+ rand.nextBytes(xs)
+ test(xs)
+ }
+ }
+}
diff --git a/test/files/run/caseClassEquality.scala b/test/files/run/caseClassEquality.scala
new file mode 100644
index 0000000000..4940d80951
--- /dev/null
+++ b/test/files/run/caseClassEquality.scala
@@ -0,0 +1,36 @@
+object Test {
+ abstract class A1
+ case class C1(x: Int) extends A1
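+  // C2 inherits C1's equals and canEqual, so it still compares equal to C1; C3 overrides canEqual and does not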
+ class C2(x: Int) extends C1(x) {
+ override def productPrefix = "Shazbot!"
+ }
+ class C3(x: Int) extends C1(x) {
+ override def canEqual(other: Any) = other.isInstanceOf[C3]
+ override def equals(other: Any) = other match {
+ case ob: C3 => x == ob.x
+ case _ => false
+ }
+ }
+
+ case class CS1(xs: Any*)
+ class CS2(xs: Seq[_]*) extends CS1(xs: _*)
+ class CS3(xs: IndexedSeq[Int]*) extends CS2(xs: _*)
+
+ case class H1(x: Int, y: Double)
+ class H2(x: Double, y: Int) extends H1(y, x)
+
+ def main(args: Array[String]): Unit = {
+ assert(C1(5) == new C2(5))
+ assert(new C2(5) == C1(5))
+ assert(C1(5).hashCode == new C2(5).hashCode)
+ assert(new C2(5).hashCode == C1(5).hashCode)
+
+ assert(C1(5) != new C3(5))
+ assert(new C3(5) != C1(5))
+
+ assert(CS1(List(1d,2d), Seq[Float](3f, 4f)) == new CS3(IndexedSeq(1,2), IndexedSeq(3, 4)))
+
+ assert(H1(5, 10d) == new H2(10d, 5))
+ assert(H1(5, 10d).hashCode == new H2(10d, 5).hashCode)
+ }
+}
diff --git a/test/files/run/castsingleton.scala b/test/files/run/castsingleton.scala
index f907467741..3921696efa 100644
--- a/test/files/run/castsingleton.scala
+++ b/test/files/run/castsingleton.scala
@@ -8,4 +8,4 @@ object Test extends Application {
}
empty(L())
-}
+}
diff --git a/test/files/run/checked.scala b/test/files/run/checked.scala
index adb0c509a5..4a79aa2b9c 100644
--- a/test/files/run/checked.scala
+++ b/test/files/run/checked.scala
@@ -23,9 +23,9 @@ trait T {
// Should not throw
class D extends B with T {
val sum = x + y + z + b1 + b2 + t1 + t2
- override def toString =
+ override def toString =
"sum = " + sum
-
+
}
abstract class NeedsXEarly {
@@ -91,7 +91,7 @@ class TestInterference extends {
object Test extends Application {
-
+
def shouldThrow(t: => Unit) = try {
t
println("[FAIL]: No UFE thrown")
diff --git a/test/files/run/classof.scala b/test/files/run/classof.scala
index b50facc1e9..10c07d2e51 100644
--- a/test/files/run/classof.scala
+++ b/test/files/run/classof.scala
@@ -13,14 +13,14 @@ object Test {
println(classOf[Long])
println(classOf[Float])
println(classOf[Double])
-
+
println("Class types")
println(classOf[SomeClass])
println(classOf[List[Array[Float]]])
println(classOf[(String, Map[Int, String])])
println("Arrays:")
- println(classOf[Array[Unit]])
+ println(classOf[Array[Unit]])
println(classOf[Array[Int]])
println(classOf[Array[Double]])
println(classOf[Array[List[String]]])
diff --git a/test/files/run/collections.scala b/test/files/run/collections.scala
index f286123322..2bbeed00d1 100644
--- a/test/files/run/collections.scala
+++ b/test/files/run/collections.scala
@@ -61,7 +61,7 @@ object Test extends Application {
}
time {
var x = 0
- for (i <- 0 to 10000)
+ for (i <- 0 to 10000)
s get i match {
case Some(i) => x += i
case None =>
@@ -96,7 +96,7 @@ object Test extends Application {
}
time {
var x = 0
- for (i <- 0 to 10000)
+ for (i <- 0 to 10000)
s get i match {
case Some(i) => x += i
case None =>
diff --git a/test/files/run/colltest1.check b/test/files/run/colltest1.check
index b49d328be4..7377174281 100644
--- a/test/files/run/colltest1.check
+++ b/test/files/run/colltest1.check
@@ -72,11 +72,11 @@ new test starting with List()
9: List(2, 3, 4, 5, 6, 7, 8, 9, 10)
1
List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
-new test starting with IndexedSeq()
-10: IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
-9: IndexedSeq(2, 3, 4, 5, 6, 7, 8, 9, 10)
+new test starting with Vector()
+10: Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+9: Vector(2, 3, 4, 5, 6, 7, 8, 9, 10)
1
-IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
new test starting with Vector()
10: Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
9: Vector(2, 3, 4, 5, 6, 7, 8, 9, 10)
@@ -95,15 +95,15 @@ ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
true
false
true
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(A -> A, B -> B, C -> C, D -> D, E -> E, F -> F, G -> G, H -> H, I -> I, J -> J, K -> K, L -> L, M -> M, N -> N, O -> O, P -> P, Q -> Q, R -> R, S -> S, T -> T, U -> U, V -> V, W -> W, X -> X, Y -> Y, Z -> Z)
-Map(A -> A, B -> B, C -> C, D -> D, E -> E, F -> F, G -> G, H -> H, I -> I, J -> J, K -> K, L -> L, M -> M, N -> N, O -> O, P -> P, Q -> Q, R -> R, S -> S, T -> T, U -> U, V -> V, W -> W, X -> X, Y -> Y, Z -> Z)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index 081522d3f6..a505847058 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -2,8 +2,7 @@ import collection._
object Test extends Application {
- def orderedTraversableTest(empty: Traversable[Int])
- {
+ def orderedTraversableTest(empty: Traversable[Int]) {
println("new test starting with "+empty)
assert(empty.isEmpty)
val ten = empty ++ List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
@@ -25,13 +24,13 @@ object Test extends Application {
val secondFive = empty ++ (6 to 10)
assert(firstFive ++ secondFive == ten, firstFive ++ secondFive)
val odds = ten filter (_ % 2 != 0)
- val evens = ten remove (_ % 2 != 0)
+ val evens = ten filterNot (_ % 2 != 0)
assert(odds.size == evens.size)
val (o, e) = ten.partition(_ % 2 == 0)
assert(o.size == e.size)
val gs = ten groupBy (x => x / 4)
- val vs1 = (for (k <- gs.keysIterator; v <- gs(k).toIterable.iterator) yield v).toList
- val vs2 = gs.values.toList.flatten
+ val vs1 = (for (k <- gs.keysIterator; v <- gs(k).toIterable.iterator) yield v).toList.sorted
+ val vs2 = gs.values.toList.flatten.sorted
// val vs2 = gs.values.toList flatMap (xs => xs)
assert(ten.head == 1)
assert(ten.tail.head == 2)
@@ -62,7 +61,7 @@ object Test extends Application {
assert(ten.toStream == ten)
assert(ten.toString endsWith "(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)")
assert(ten.mkString("[", "; ", "]") endsWith "[1; 2; 3; 4; 5; 6; 7; 8; 9; 10]")
- }
+ }
def orderedIterableTest(empty: Iterable[Int]) {
orderedTraversableTest(empty)
@@ -80,12 +79,12 @@ object Test extends Application {
val tenPlus = ten map (_ + 1)
assert((ten zip tenPlus) forall { case (x, y) => x + 1 == y })
val dble = ten flatMap (x => List(x, x))
- assert(dble.removeDuplicates == ten)
+ assert(dble.distinct == ten)
assert(ten.length == 10)
assert(ten(0) == 1 && ten(9) == 10)
assert((ten lengthCompare 10) == 0 && (ten lengthCompare 1) > 0 && (ten lengthCompare 11) < 0)
assert((ten isDefinedAt 0) && (ten isDefinedAt 9))
- assert(!(ten isDefinedAt -1));
+ assert(!(ten isDefinedAt -1));
assert(!(ten isDefinedAt 10))
val tenten = ten zip ten
assert((tenten map (_._1)) == ten)
@@ -112,17 +111,20 @@ object Test extends Application {
assert(ten.startsWith(List(3, 4), 2))
assert(ten.endsWith(List(9, 10)))
assert(ten.endsWith(List()))
- assert(ten.indexOfSeq(List(3, 4, 5)) == 2, ten.indexOfSeq(List(3, 4, 5)))
+ assert(ten.indexOfSlice(List(3, 4, 5)) == 2, ten.indexOfSlice(List(3, 4, 5)))
+ assert(ten.lastIndexOfSlice(List(8, 9, 10)) == 7)
+ assert(ten.lastIndexOfSlice(List(1, 2, 3)) == 0)
+ assert(ten.lastIndexOfSlice(List(9, 10, 11)) == -1)
assert(ten contains 1)
assert(ten contains 10)
assert(!(ten contains 0))
assert((empty ++ (1 to 7) union empty ++ (3 to 10)) == List(1, 2, 3, 4, 5, 6, 7, 3, 4, 5, 6, 7, 8, 9, 10))
assert((ten diff ten).isEmpty)
assert((ten diff List()) == ten)
- assert((ten diff (ten filter (_ % 2 == 0))) == (ten remove (_ % 2 == 0)))
+ assert((ten diff (ten filter (_ % 2 == 0))) == (ten filterNot (_ % 2 == 0)))
assert((ten intersect ten) == ten)
assert((ten intersect List(5)) == List(5))
- assert((ten ++ ten).removeDuplicates == ten)
+ assert((ten ++ ten).distinct == ten)
assert(ten.patch(3, List(4, 5, 6, 7), 4) == ten)
assert(ten.patch(0, List(1, 2, 3), 9) == List(1, 2, 3, 10))
assert(empty.padTo(10, 7) == Array.fill(10)(7).toSeq)
@@ -170,10 +172,10 @@ object Test extends Application {
m += (("D" -> "D"), ("E" -> "E"), ("F" -> "F"))
m ++= List(("G" -> "G"), ("H" -> "H"), ("I" -> "I"))
m ++= ('J' to 'Z') map (x => (x.toString -> x.toString))
- println(m)
+ println(m.toList.sorted)
assert(!m.isEmpty)
- assert(m.keySet forall (k => (m get k) == Some(k)))
- assert(m.keySet forall (k => (m apply k) == k))
+ assert(m.keySet forall (k => (m get k) == Some(k)))
+ assert(m.keySet forall (k => (m apply k) == k))
assert(m.keySet forall (m contains))
assert(m.getOrElse("7", "@") == "@")
assert(m.keySet.size == 26)
@@ -185,8 +187,8 @@ object Test extends Application {
assert(mm.isEmpty, mm)
def m3 = empty ++ m1
assert(m1 == m3)
- println(m3)
- val m4 = m3.remove { case (k, v) => k != "A" }
+ println(m3.toList.sorted)
+ val m4 = m3 filterNot { case (k, v) => k != "A" }
assert(m4.size == 1, m4)
}
diff --git a/test/files/run/concurrent-stream.check b/test/files/run/concurrent-stream.check
new file mode 100644
index 0000000000..d4adf84490
--- /dev/null
+++ b/test/files/run/concurrent-stream.check
@@ -0,0 +1,3 @@
+Testing standard cons.
+Evaluation 0: List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+Evaluation 1: List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
diff --git a/test/files/run/concurrent-stream.scala b/test/files/run/concurrent-stream.scala
new file mode 100644
index 0000000000..42c695964e
--- /dev/null
+++ b/test/files/run/concurrent-stream.scala
@@ -0,0 +1,36 @@
+// test concurrent calls to Stream.tail
+object Test {
+
+def slowRange(from: Int, until: Int, cons: (Int, => Stream[Int]) => Stream[Int]): Stream[Int] = {
+ var current = from
+ def next: Stream[Int] = {
+ Thread.sleep(100)
+ if (current >= until) Stream.empty
+ else {
+ val stream = cons(current, next)
+ current += 1
+ stream
+ }
+ }
+ next
+}
+
+def testCons(cons: (Int, => Stream[Int]) => Stream[Int]): Unit = {
+ import scala.actors.Actor._
+
+ val stream = slowRange(0, 10, cons)
+ val main = self
+ actor { main ! stream.toList }
+ actor { main ! stream.toList }
+ val eval0 = receive { case list: List[Int] => list }
+ val eval1 = receive { case list: List[Int] => list }
+ println("Evaluation 0: " + eval0)
+ println("Evaluation 1: " + eval1)
+}
+
+ def main(args: Array[String]) {
+ println("Testing standard cons.")
+ testCons(Stream.cons.apply(_, _))
+ }
+}
+
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index f18ff6e3c9..8050017659 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -52,8 +52,8 @@ val y = b.y // should keep the annotation
y: Int @Annot(Stuff.x) = 10
-----
-def m(x: String): String @Annot(x) = x // m should be annotated with a debruijn
-m: (x$0:String)String @Annot(x)
+def m(x: String): String @Annot(x) = x
+m: (x: String)String @Annot(x)
-----
val three = "three"
@@ -121,7 +121,7 @@ y: java.lang.String = hello
-----
val x = 3 : Int @Annot(e+f+g+h) //should have a graceful error message
-<console>:5: error: not found: value e
+<console>:6: error: not found: value e
val x = 3 : Int @Annot(e+f+g+h) //should have a graceful error message
^
diff --git a/test/files/run/constrained-types.scala b/test/files/run/constrained-types.scala
index 59fd0b1b8c..b3d5254f69 100644
--- a/test/files/run/constrained-types.scala
+++ b/test/files/run/constrained-types.scala
@@ -21,7 +21,7 @@ object Test {
"val a = new A",
-
+
"""val y = a.y // should rewrite "this.x" to "a.x" """,
@@ -45,7 +45,7 @@ object Test {
"val y = b.y // should keep the annotation",
- "def m(x: String): String @Annot(x) = x // m should be annotated with a debruijn",
+ "def m(x: String): String @Annot(x) = x",
"val three = \"three\"",
"val three2 = m(three:three.type) // should change x to three",
"var four = \"four\"",
@@ -77,7 +77,7 @@ object Test {
"""object A { val x = "hello" : String @ rep }""",
"val y = a.x // should drop the annotation",
- "val x = 3 : Int @Annot(e+f+g+h) //should have a graceful error message",
+ "val x = 3 : Int @Annot(e+f+g+h) //should have a graceful error message",
"class Where(condition: Boolean) extends Annotation",
"val x : Int @Where(self > 0 && self < 100) = 3"
diff --git a/test/files/run/ctor-order.scala b/test/files/run/ctor-order.scala
index 44e981e413..b32f005d70 100644
--- a/test/files/run/ctor-order.scala
+++ b/test/files/run/ctor-order.scala
@@ -8,7 +8,7 @@ class Outer {
class X extends {
/* The constructor of X should set this.$outer to the outer instance
- * *before* calling the super constructors. This is tested by
+ * *before* calling the super constructors. This is tested by
* mixin M1, which tries to access global from the enclosing class.
*/
val outer = Outer.this
diff --git a/test/files/run/distinct.check b/test/files/run/distinct.check
new file mode 100644
index 0000000000..b0883f382e
--- /dev/null
+++ b/test/files/run/distinct.check
@@ -0,0 +1 @@
+abcdefghijklmnopqrstuvwxyz
diff --git a/test/files/run/distinct.scala b/test/files/run/distinct.scala
new file mode 100644
index 0000000000..09e5a0734a
--- /dev/null
+++ b/test/files/run/distinct.scala
@@ -0,0 +1,15 @@
+/** This is a test to make sure distinct always
+ * returns the first of any duplicated element.
+ */
+object Test {
+ val alphabet = 'a' to 'z' mkString ""
+ val alphaList = 'a' to 'z' toList
+ def shuffled = util.Random.shuffle(alphaList)
+
+ def main(args: Array[String]): Unit = {
+ val longList = alphaList ++ (1 to 9 flatMap (_ => shuffled))
+ val result = longList.distinct mkString ""
+
+ println(result)
+ }
+}
diff --git a/test/files/run/elidable.check b/test/files/run/elidable.check
new file mode 100644
index 0000000000..4ce04f0040
--- /dev/null
+++ b/test/files/run/elidable.check
@@ -0,0 +1 @@
+Good for me, I was not elided.
diff --git a/test/files/run/elidable.flags b/test/files/run/elidable.flags
new file mode 100644
index 0000000000..93fd3d5317
--- /dev/null
+++ b/test/files/run/elidable.flags
@@ -0,0 +1 @@
+-Xelide-below 900
diff --git a/test/files/run/elidable.scala b/test/files/run/elidable.scala
new file mode 100644
index 0000000000..264efbad59
--- /dev/null
+++ b/test/files/run/elidable.scala
@@ -0,0 +1,16 @@
+import annotation._
+import elidable._
+
+object Test {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided.")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+
+ def main(args: Array[String]): Unit = {
+ f1()
+ f2()
+ f3()
+ f4
+ }
+}
diff --git a/test/files/run/equality.scala b/test/files/run/equality.scala
new file mode 100644
index 0000000000..68055fd012
--- /dev/null
+++ b/test/files/run/equality.scala
@@ -0,0 +1,40 @@
+// a quickly assembled test of equality. Needs work.
+object Test
+{
+ import scala.runtime.ScalaRunTime.hash
+
+ def makeFromInt(x: Int) = List(
+ x.toByte, x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x)
+ ) ::: (
+ if (x < 0) Nil else List(x.toChar)
+ )
+ def makeFromDouble(x: Double) = List(
+ x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x.toInt), BigDecimal(x)
+ )
+
+ def main(args: Array[String]): Unit = {
+ var xs = makeFromInt(5)
+ for (x <- xs ; y <- xs) {
+ assert(x == y, x + " == " + y)
+ assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y))
+ }
+
+ xs = makeFromInt(-5)
+ for (x <- xs ; y <- xs) {
+ assert(x == y, x + " == " + y)
+ assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y))
+ }
+
+ xs = makeFromDouble(500.0)
+ for (x <- xs ; y <- xs) {
+ assert(x == y, x + " == " + y)
+ assert(hash(x) == hash(y), "hash(%s) == hash(%s)".format(x, y))
+ }
+
+ // negatives
+ val bigLong = new java.util.concurrent.atomic.AtomicLong(Long.MaxValue)
+ assert(-1 != bigLong && bigLong != -1) // bigLong.intValue() == -1
+ assert(BigDecimal(1.1) != 1L)
+ assert(1L != BigDecimal(1.1))
+ }
+}
diff --git a/test/files/run/exceptions-2.scala b/test/files/run/exceptions-2.scala
index 820ab3634d..d0312a49b2 100644
--- a/test/files/run/exceptions-2.scala
+++ b/test/files/run/exceptions-2.scala
@@ -68,7 +68,7 @@ object Test {
Console.println("Outermost finally");
}
- def mixed =
+ def mixed =
try {
if (10 > 0)
throw Leaf(10);
@@ -107,7 +107,7 @@ object Test {
case Leaf(a) => Console.println(a);
}
} catch {
- case npe: NullPointerException =>
+ case npe: NullPointerException =>
Console.println("Caught an NPE");
}
@@ -141,14 +141,14 @@ object Test {
}
}
- def valInFinally: Unit =
- try {
+ def valInFinally: Unit =
+ try {
} finally {
val fin = "Abc";
Console.println(fin);
};
- def tryAndValInFinally: Unit =
+ def tryAndValInFinally: Unit =
try {
} finally {
val fin = "Abc";
@@ -157,51 +157,51 @@ object Test {
} catch { case _ => () }
};
- def returnInBody: Unit = try {
+ def returnInBody: Unit = try {
try {
Console.println("Normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
Console.println("inner finally");
}
- } finally {
+ } finally {
Console.println("Outer finally");
}
- def returnInBodySynch: Unit = try {
+ def returnInBodySynch: Unit = try {
synchronized {
try {
Console.println("Synchronized normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
Console.println("inner finally");
}
}
- } finally {
+ } finally {
Console.println("Outer finally");
}
- def returnInBodyAndInFinally: Unit = try {
+ def returnInBodyAndInFinally: Unit = try {
try {
Console.println("Normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
Console.println("inner finally");
return
}
- } finally {
+ } finally {
Console.println("Outer finally");
return
}
- def returnInBodyAndInFinally2: Unit = try {
+ def returnInBodyAndInFinally2: Unit = try {
try {
Console.println("Normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
try {
@@ -211,7 +211,7 @@ object Test {
Console.println("finally inside finally");
}
}
- } finally {
+ } finally {
Console.println("Outer finally");
return
}
@@ -253,7 +253,7 @@ object Test {
}
- def returnWithFinallyClean: Int = try {
+ def returnWithFinallyClean: Int = try {
try {
Console.println("Normal execution...");
return 10
@@ -262,11 +262,28 @@ object Test {
} finally {
Console.println("inner finally");
}
- } finally {
+ } finally {
Console.println("Outer finally");
try { 1 } catch { case e: java.io.IOException => () }
}
+ /** Test that empty finally clauses containing while are correctly emitted.
+ */
+ class Issue {
+ var b = 0
+ try {
+ // println("abc")
+ } finally {
+ while (b == -1) {b = 0}
+ }
+ }
+
+ /* Tests that class Issue passes verification. */
+ def whileInFinally = {
+ new Issue
+ }
+
+
def main(args: Array[String]): Unit = {
Console.println("nested1: ");
@@ -277,7 +294,7 @@ object Test {
Console.println("mixed: ");
execute(mixed);
-
+
Console.println("withValue1:");
execute(withValue1);
@@ -305,7 +322,7 @@ object Test {
Console.println("NoExcep.method3:");
execute(NoExcep.method3);
-
+
Console.println("NoExcep.method4:");
execute(NoExcep.method4);
@@ -326,5 +343,7 @@ object Test {
Console.println("Return with finally clause that cleans the stack")
returnWithFinallyClean
+
+ whileInFinally
}
}
diff --git a/test/files/run/exceptions-nest.check b/test/files/run/exceptions-nest.check
new file mode 100644
index 0000000000..48725e4d27
--- /dev/null
+++ b/test/files/run/exceptions-nest.check
@@ -0,0 +1,13 @@
+2
+23
+2
+5
+2
+4
+OK
+4
+OK
+10
+1
+()
+10
diff --git a/test/files/run/exceptions-nest.scala b/test/files/run/exceptions-nest.scala
new file mode 100644
index 0000000000..d3f37452b5
--- /dev/null
+++ b/test/files/run/exceptions-nest.scala
@@ -0,0 +1,157 @@
+object Test extends Application {
+
+ println(test1)
+ println(test2)
+ println(test3)
+ println(test4)
+ println(test5)
+ try { println(test6) } catch { case _ => println("OK") }
+ println(test7)
+ try { println(test8) } catch { case _ => println("OK") }
+ println(test9)
+ println(test10)
+ println(test11)
+ println(test12)
+
+ def test1 = {
+ var x = 1
+ try {
+ x = 2
+ } catch {
+ case _: NullPointerException => x = 3
+ case _ => x = 4
+ }
+ x
+ }
+
+ def test2 = {
+ var x = 1
+ try {
+ x = 2
+ try {
+ x = 21
+ } catch {
+ case _ => x = 22
+ }
+ x = 23
+ } catch {
+ case _: NullPointerException => x = 3
+ case _ => x = 4
+ }
+ x
+ }
+
+ def test3 = {
+ var x = 1
+ try {
+ try{x = 2} catch { case _ => x = 4 }
+ } catch {
+ case _: NullPointerException => x = 3
+ case _ => x = 4
+ }
+ x
+ }
+
+ def test4 = {
+ var x = 1
+ try {
+ x = 2
+ } catch {
+ case _: NullPointerException => x = 3
+ case _ => x = 4
+ }
+ try {
+ x = 5
+ } catch {
+ case _: NullPointerException => x = 6
+ }
+ x
+ }
+
+ def test5 = {
+ var x = 1
+ try {
+ x = 2
+ } catch {
+ case _: NullPointerException => try { x = 3 } catch { case f => throw f }
+ case _ => x = 4; try { x = 41 } catch { case _: Exception => x = 42 }; x = 43
+ }
+ x
+ }
+
+ def test6: Int = {
+ var x = 1
+ try {
+ x = 2
+ (null: String).toString
+ } catch {
+ case e: NullPointerException =>
+ throw e
+ case _ =>
+ x = 3
+ return 1000
+ } finally {
+ x = 4
+ println(x)
+ }
+ x
+ }
+
+ def test7 = {
+ var x = 1
+ try {
+ x = 2
+ } finally {
+ try {
+ x = 4
+ } catch {
+ case _ => x = 5
+ }
+ }
+ x
+ }
+
+ def test8 = {
+ var x = 1
+ try {
+ throw new NullPointerException
+ } catch {
+ case e => throw e
+ }
+ x
+ }
+
+ def test9 = {
+ try { "" match {
+ case s: String => 10
+ }} catch { case _ => 20 }
+ }
+
+ var x10 = 1
+ def test10: Int = {
+ try { 1 }
+ catch { case e if (x10 == 1) => 1 }
+ }
+
+ def test11 {
+ try { () }
+ catch { case e => () }
+ }
+
+ class E1 extends Exception
+ class E2 extends Exception
+ class E3 extends Exception
+
+ def test12_impl(op: => Int) = try {
+ op
+ } catch {
+ case e: E1 => 2
+ case e: E2 => 3
+ case e: E3 => 4
+ }
+ def test12 =
+ test12_impl(1) +
+ test12_impl(throw new E1) +
+ test12_impl(throw new E2) +
+ test12_impl(throw new E3)
+}
diff --git a/test/files/run/existentials.scala b/test/files/run/existentials.scala
index 471f8fe779..78980df1b0 100644
--- a/test/files/run/existentials.scala
+++ b/test/files/run/existentials.scala
@@ -2,7 +2,7 @@ class Foo {
class Line {
case class Cell[T](var x: T)
def f[T](x: Any): Cell[t1] forSome { type t1 } = x match { case y: Cell[t] => y }
-
+
var x: Cell[T] forSome { type T } = new Cell(1)
println({ x = new Cell("abc"); x })
}
@@ -12,7 +12,7 @@ class FooW {
class Line {
case class Cell[T](var x: T)
def f[T](x: Any): Cell[ _ ] = x match { case y: Cell[t] => y }
-
+
var x: Cell[_] = new Cell(1)
println({ x = new Cell("abc"); x })
}
diff --git a/test/files/run/forvaleq.scala b/test/files/run/forvaleq.scala
index 8c1824a769..2a958802b6 100644
--- a/test/files/run/forvaleq.scala
+++ b/test/files/run/forvaleq.scala
@@ -2,7 +2,7 @@
import scala.collection.immutable.Queue
import scala.{List=>L}
-
+
object Test {
// redefine some symbols to make it extra hard
class List
@@ -16,11 +16,11 @@ object Test {
case _ if (x<10) => x
case _ => firstDigit(x / 10)
}
-
-
+
+
{
- // a basic test case
-
+ // a basic test case
+
val input = L.range(0,20)
val oddFirstTimesTwo =
for {x <- input
@@ -32,7 +32,7 @@ object Test {
{
// a test case with patterns
-
+
val input = L.range(0, 20)
val oddFirstTimesTwo =
for {x <- input
@@ -43,10 +43,10 @@ object Test {
yield a + b
println(oddFirstTimesTwo)
}
-
+
{
// make sure it works on non-Ls
-
+
// val input: Queue = Queue.Empty[int].incl(L.range(0,20))
val input = L.range(0, 20).iterator
val oddFirstTimesTwo =
@@ -54,36 +54,36 @@ object Test {
xf = firstDigit(x)
if xf % 2 == 1}
yield x*2
- println(oddFirstTimesTwo.toList)
+ println(oddFirstTimesTwo.toList)
}
-
+
{
// yield the computed value
-
+
val input = L.range(0,20)
val oddFirstTimesTwo =
for {x <- input
xf = firstDigit(x)
if xf % 2 == 1}
yield xf*2
- println(oddFirstTimesTwo)
+ println(oddFirstTimesTwo)
}
{
// make sure the function is only called once
var count: Int = 0
-
+
def fdct(x: Int) = {
count += 1
firstDigit(x)
}
-
+
val input = L.range(0,20)
for {x <- input
xf = fdct(x)
if xf % 2 == 1}
yield xf
-
+
println("called " + count + " times")
}
diff --git a/test/files/run/gadts.scala b/test/files/run/gadts.scala
index 11b094d99a..697df4c1b4 100644
--- a/test/files/run/gadts.scala
+++ b/test/files/run/gadts.scala
@@ -2,8 +2,8 @@ abstract class Term[T]
case class Lit(x: Int) extends Term[Int]
case class Succ(t: Term[Int]) extends Term[Int]
case class IsZero(t: Term[Int]) extends Term[Boolean]
-case class If[T](c: Term[Boolean],
- t1: Term[T],
+case class If[T](c: Term[Boolean],
+ t1: Term[T],
t2: Term[T]) extends Term[T]
object Test extends Application {
diff --git a/test/files/run/groupby.scala b/test/files/run/groupby.scala
new file mode 100644
index 0000000000..fe08f52812
--- /dev/null
+++ b/test/files/run/groupby.scala
@@ -0,0 +1,18 @@
+
+
+
+// Fixes #3422
+object Test {
+
+ def main(args: Array[String]) {
+ val arr = Array.range(0,10)
+ val map = arr groupBy (_%2)
+ val v1 = map(0)
+ val v2 = map(0)
+ // this should hold, of course, assuming also that group by returns a regular
+ // map implementation, and does nothing fancy - and it should return just a
+ // hash map by default.
+ assert(v1 eq v2)
+ }
+
+}
diff --git a/test/files/run/hashCodeBoxesRunTime.scala b/test/files/run/hashCodeBoxesRunTime.scala
index 3eacacb663..081a73376e 100644
--- a/test/files/run/hashCodeBoxesRunTime.scala
+++ b/test/files/run/hashCodeBoxesRunTime.scala
@@ -4,16 +4,16 @@ object Test
{
import java.{ lang => jl }
import scala.runtime.BoxesRunTime.{ hashFromNumber, hashFromObject }
-
- def allSame[T](xs: List[T]) = assert(xs.removeDuplicates.size == 1, "failed: " + xs)
-
+
+ def allSame[T](xs: List[T]) = assert(xs.distinct.size == 1, "failed: " + xs)
+
def mkNumbers(x: Int): List[Number] =
List(x.toByte, x.toShort, x, x.toLong, x.toFloat, x.toDouble)
-
+
def testLDF(x: Long) = allSame(List[Number](x, x.toDouble, x.toFloat) map hashFromNumber)
-
+
def main(args: Array[String]): Unit = {
- List(Byte.MinValue, -1, 0, 1, Byte.MaxValue) foreach { n =>
+ List(Byte.MinValue, -1, 0, 1, Byte.MaxValue) foreach { n =>
val hashes = mkNumbers(n) map hashFromNumber
allSame(hashes)
if (n >= 0) {
@@ -21,7 +21,7 @@ object Test
assert(charCode == hashes.head)
}
}
-
+
testLDF(Short.MaxValue.toLong)
testLDF(Short.MinValue.toLong)
}
diff --git a/test/files/run/hashhash.scala b/test/files/run/hashhash.scala
new file mode 100644
index 0000000000..5a7859357f
--- /dev/null
+++ b/test/files/run/hashhash.scala
@@ -0,0 +1,15 @@
+object Test
+{
+ class A { val x1 = this.## ; val x2 = super.## }
+ val myA = new A
+ assert(myA.x1 == myA.x2)
+
+ def confirmSame(x: Any) = assert(x.## == x.hashCode, "%s.## != %s.hashCode".format(x, x))
+ def confirmDifferent(x: Any) = assert(x.## != x.hashCode, "%s.## == %s.hashCode (but should not)".format(x, x))
+
+ def main(args: Array[String]): Unit = {
+ /** Just a little sanity check, not to be confused with a unit test. */
+ List(5, 5.5f, "abc", new AnyRef, new A, ()) foreach confirmSame
+ List(5.0f, 1.0d, -(5.0f), (-1.0d)) foreach confirmDifferent
+ }
+}
diff --git a/test/files/run/infix.scala b/test/files/run/infix.scala
index 60a844acad..66a41f66f1 100644
--- a/test/files/run/infix.scala
+++ b/test/files/run/infix.scala
@@ -9,4 +9,4 @@ object Test extends Application {
case null op (0, 0) op (1, 1) op (2, 2) => Console.println("OK")
}
}
-
+
diff --git a/test/files/run/inliner-infer.scala b/test/files/run/inliner-infer.scala
index 107b9508ee..2c818c4a43 100644
--- a/test/files/run/inliner-infer.scala
+++ b/test/files/run/inliner-infer.scala
@@ -7,8 +7,8 @@ object Test extends Application {
@annotation.tailrec
def walk(xs: MyList): Unit = {
- if (xs.isEmpty)
- println("empty")
+ if (xs.isEmpty)
+ println("empty")
else {
println("non-empty")
walk(MyNil)
@@ -26,4 +26,4 @@ object MyNil extends MyList {
override def isEmpty = true
}
-
+
diff --git a/test/files/run/iq.scala b/test/files/run/iq.scala
index 88a2884fcc..59baee1270 100644
--- a/test/files/run/iq.scala
+++ b/test/files/run/iq.scala
@@ -9,8 +9,8 @@ object iq {
/* Create an empty queue. */
val q: Queue[Int] = Queue.Empty
- /* Test isEmpty.
- * Expected: Empty
+ /* Test isEmpty.
+ * Expected: Empty
*/
if (q.isEmpty) {
Console.println("Empty")
@@ -20,14 +20,14 @@ object iq {
//val q2 = q + 42 + 0 // deprecated
val q2 = q.enqueue(42).enqueue(0)
- /* Test is empty and dequeue.
+ /* Test is empty and dequeue.
* Expected: Head: 42
*/
val q4 =
if (q2.isEmpty) {
Console.println("Empty")
q2
- }
+ }
else {
val (head, q3) = q2.dequeue
Console.println("Head: " + head)
@@ -36,8 +36,8 @@ object iq {
/* Test sequence enqueing. */
val q5: Queue[Any] = q4.enqueue(List(1,2,3,4,5,6,7,8,9))
- /* Test toString.
- * Expected: Head: q5: Queue(0,1,2,3,4,5,6,7,8,9)
+ /* Test toString.
+ * Expected: Head: q5: Queue(0,1,2,3,4,5,6,7,8,9)
*/
Console.println("q5: " + q5)
/* Test apply
@@ -59,7 +59,7 @@ object iq {
//val q8 = q7 + 10 + 11 //deprecated
val q8 = q7.enqueue(10).enqueue(11)
/* Test dequeu
- * Expected: q8: Queue(2,3,4,5,6,7,8,9,10,11)
+ * Expected: q8: Queue(2,3,4,5,6,7,8,9,10,11)
*/
Console.println("q8: " + q8)
val q9 = Queue(2,3,4,5,6,7,8,9,10,11)
@@ -70,14 +70,14 @@ object iq {
Console.println("q8 == q9: " + (q8 == q9))
/* Testing elements
- * Expected: Elements: 1 2 3 4 5 6 7 8 9
+ * Expected: Elements: 1 2 3 4 5 6 7 8 9
*/
- Console.print("Elements: ");
+ Console.print("Elements: ");
q6.iterator.foreach(e => Console.print(" "+ e + " "))
- Console.println;
+ Console.println;
/* Testing mkString
- * Expected: String: <1-2-3-4-5-6-7-8-9>
+ * Expected: String: <1-2-3-4-5-6-7-8-9>
*/
Console.println("String: " + q6.mkString("<","-",">"))
@@ -89,7 +89,7 @@ object iq {
/* Testing front
* Expected: Front: 1
*/
- Console.println("Front: " + q6.front);
+ Console.println("Front: " + q6.front);
}
}
diff --git a/test/files/run/issue192.scala b/test/files/run/issue192.scala
index 55130072b7..32e203a4e1 100644
--- a/test/files/run/issue192.scala
+++ b/test/files/run/issue192.scala
@@ -1,16 +1,16 @@
object Test extends Application {
-
+
def f1(p: Any{def unary_+ : Int}) = +p
def f2(p: Any{def unary_- : Int}) = -p
def f3(p: Any{def unary_~ : Int}) = ~p
def f4(p: Any{def unary_! : Boolean}) = !p
-
+
def f5(p: Any{def +(q: Int): Int}) = p + 7
def f6(p: Any{def -(q: Int): Int}) = p - 7
def f7(p: Any{def *(q: Int): Int}) = p * 7
def f8(p: Any{def /(q: Int): Int}) = p / 7
def f9(p: Any{def %(q: Int): Int}) = p % 7
-
+
def f10(p: Any{def |(q: Int): Int}) = p | 7
def f11(p: Any{def |(q: Boolean): Boolean}) = p | true
def f12(p: Any{def ^(q: Int): Int}) = p ^ 7
@@ -19,11 +19,11 @@ object Test extends Application {
def f15(p: Any{def &(q: Boolean): Boolean}) = p & true
def f16(p: Any{def ||(q: Boolean): Boolean}) = p || true
def f17(p: Any{def &&(q: Boolean): Boolean}) = p && true
-
+
def f18(p: Any{def <<(q: Int): Int}) = p << 7
def f19(p: Any{def >>(q: Int): Int}) = p >> 7
def f20(p: Any{def >>>(q: Int): Int}) = p >>> 7
-
+
def f21(p: Any{def toByte: Byte}) = p.toByte
def f22(p: Any{def toShort: Short}) = p.toShort
def f23(p: Any{def toChar: Char}) = p.toChar
@@ -31,28 +31,28 @@ object Test extends Application {
def f25(p: Any{def toLong: Long}) = p.toLong
def f26(p: Any{def toFloat: Float}) = p.toFloat
def f27(p: Any{def toDouble: Double}) = p.toDouble
-
+
def f28(p: Any{def ==(q: Int): Boolean}) = p == 7
def f29(p: Any{def !=(q: Int): Boolean}) = p != 7
def f30(p: Any{def ==(q: Boolean): Boolean}) = p == true
def f31(p: Any{def !=(q: Boolean): Boolean}) = p != true
-
+
def f32(p: Any{def <(q: Int): Boolean}) = p < 7
def f33(p: Any{def <=(q: Int): Boolean}) = p <= 7
def f34(p: Any{def >=(q: Int): Boolean}) = p >= 7
def f35(p: Any{def >(q: Int): Boolean}) = p > 7
-
+
print("f1 = "); println(f1(1) == +1)
print("f2 = "); println(f2(1) == -1)
print("f3 = "); println(f3(1) == ~1)
print("f4 = "); println(f4(true) == !true)
-
+
print("f5 = "); println(f5(4) == (4 + 7))
print("f6 = "); println(f6(4) == (4 - 7))
print("f7 = "); println(f7(4) == (4 * 7))
print("f8 = "); println(f8(4) == (4 / 7))
print("f9 = "); println(f9(4) == (4 % 7))
-
+
print("f10 = "); println(f10(4) == (4 | 7))
print("f11 = "); println(f11(false) == (false | true))
print("f12 = "); println(f12(4) == (4 ^ 7))
@@ -61,11 +61,11 @@ object Test extends Application {
print("f15 = "); println(f15(false) == (false & true))
print("f16 = "); println(f16(false) == (false || true))
print("f17 = "); println(f17(false) == (false && true))
-
+
print("f18 = "); println(f18(4) == (4 << 7))
print("f19 = "); println(f19(-4) == (-4 >> 7))
print("f20 = "); println(f20(-4) == (-4 >>> 7))
-
+
print("f21 = "); println(f21(4.2) == (4.2.toByte))
print("f22 = "); println(f22(4.2) == (4.2.toShort))
print("f23 = "); println(f23(4.2) == (4.2.toChar))
@@ -73,17 +73,17 @@ object Test extends Application {
print("f25 = "); println(f25(4.2) == (4.2.toLong))
print("f26 = "); println(f26(4.2) == (4.2.toFloat))
print("f27 = "); println(f27(4.2) == (4.2.toDouble))
-
+
print("f28 = "); println(f28(4) == (4 == 7))
print("f29 = "); println(f29(4) == (4 != 7))
print("f30 = "); println(f30(false) == (false == true))
print("f31 = "); println(f31(false) == (false != true))
-
+
print("f32 = "); println(f32(4) == (4 < 7))
print("f33 = "); println(f33(4) == (4 <= 7))
print("f34 = "); println(f34(4) == (4 >= 7))
print("f35 = "); println(f35(4) == (4 > 7))
-
+
println("ok")
-
+
}
diff --git a/test/files/run/iterator-iterate-lazy.scala b/test/files/run/iterator-iterate-lazy.scala
new file mode 100644
index 0000000000..73886f192b
--- /dev/null
+++ b/test/files/run/iterator-iterate-lazy.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ Iterator.iterate(1 to 5 toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
+ }
+}
diff --git a/test/files/run/iterator3444.scala b/test/files/run/iterator3444.scala
new file mode 100644
index 0000000000..2d0643bd56
--- /dev/null
+++ b/test/files/run/iterator3444.scala
@@ -0,0 +1,23 @@
+
+
+// ticket #3444
+object Test {
+
+ def main(args: Array[String]) {
+ val it = (1 to 12).toSeq.iterator
+
+ assert(it.next == 1)
+ assert(it.take(2).toList == List(2, 3))
+
+ val jt = (4 to 12).toSeq.iterator
+ assert(jt.next == 4)
+ assert(jt.drop(5).toList == List(10, 11, 12))
+
+ val kt = (1 until 10).toSeq.iterator
+ assert(kt.drop(50).toList == Nil)
+
+ val mt = (1 until 5).toSeq.iterator
+ assert(mt.take(50).toList == List(1, 2, 3, 4))
+ }
+
+}
diff --git a/test/files/run/iterators.check b/test/files/run/iterators.check
index cd1b79144a..bb139c1610 100644
--- a/test/files/run/iterators.check
+++ b/test/files/run/iterators.check
@@ -7,7 +7,7 @@ test check_drop was successful
test check_foreach was successful
test check_forall was successful
test check_fromArray was successful
-test check_collect was successful
+test check_toSeq was successful
test check_indexOf was successful
test check_findIndexOf was successful
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
index 5879d38df3..f0f93f0683 100644
--- a/test/files/run/iterators.scala
+++ b/test/files/run/iterators.scala
@@ -82,19 +82,19 @@ object Test {
var xs4 = Iterator.fromArray(a, 0, 4).toList;
xs0.length + xs1.length + xs2.length + xs3.length + xs4.length
}
-
- def check_collect: String =
- List(1, 2, 3, 4, 5).iterator.collect.mkString("x")
-
+
+ def check_toSeq: String =
+ List(1, 2, 3, 4, 5).iterator.toSeq.mkString("x")
+
def check_indexOf: String = {
val i = List(1, 2, 3, 4, 5).indexOf(4)
val j = List(1, 2, 3, 4, 5).indexOf(16)
"" + i + "x" + j
}
-
+
def check_findIndexOf: String = {
- val i = List(1, 2, 3, 4, 5).findIndexOf { x: Int => x >= 4 }
- val j = List(1, 2, 3, 4, 5).findIndexOf { x: Int => x >= 16 }
+ val i = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 4 }
+ val j = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 16 }
"" + i + "x" + j
}
@@ -124,7 +124,7 @@ object Test {
check_success("check_foreach", check_foreach, 190)
check_success("check_forall", check_forall, 0)
check_success("check_fromArray",check_fromArray, 14)
- check_success("check_collect", check_collect, "1x2x3x4x5")
+ check_success("check_toSeq", check_toSeq, "1x2x3x4x5")
check_success("check_indexOf", check_indexOf, "3x-1")
check_success("check_findIndexOf", check_findIndexOf, "3x-1")
println()
diff --git a/test/files/run/json.check b/test/files/run/json.check
index a735624221..021214beaa 100644
--- a/test/files/run/json.check
+++ b/test/files/run/json.check
@@ -1,12 +1,17 @@
-Some(List((name,value)))
-Some(List((name,va1ue)))
-Some(List((name,List((name1,va1ue1), (name2,va1ue2)))))
-Some(List((name,")))
-Some(List((age,0.0)))
+Passed: Map(name -> value)
+Passed: Map(name -> va1ue)
+Passed: Map(name -> Map(name1 -> va1ue1, name2 -> va1ue2))
+Passed: Map(name -> ")
+Passed: Map(function -> add_symbol)
+Passed: List(Map(a -> team), Map(b -> 52.0))
+Passed: Map()
+Passed: List()
+Passed: List(4.0, 1.0, 3.0, 2.0, 6.0, 5.0, 8.0, 7.0)
+Passed: Map(age -> 0.0)
-Some(List((firstName,John), (lastName,Smith), (address,List((streetAddress,21 2nd Street), (city,New York), (state,NY), (postalCode,10021.0))), (phoneNumbers,List(212 732-1234, 646 123-4567))))
+Passed: Map(firstName -> John, lastName -> Smith, address -> Map(streetAddress -> 21 2nd Street, city -> New York, state -> NY, postalCode -> 10021.0), phoneNumbers -> List(212 732-1234, 646 123-4567))
-Some(List((fullname,Sean Kelly), (org,SK Consulting), (emailaddrs,List(List((type,work), (value,kelly@seankelly.biz)), List((type,home), (pref,1.0), (value,kelly@seankelly.tv)))), (telephones,List(List((type,work), (pref,1.0), (value,+1 214 555 1212)), List((type,fax), (value,+1 214 555 1213)), List((type,mobile), (value,+1 214 555 1214)))), (addresses,List(List((type,work), (format,us), (value,1234 Main StnSpringfield, TX 78080-1216)), List((type,home), (format,us), (value,5678 Main StnSpringfield, TX 78080-1316)))), (urls,List(List((type,work), (value,http://seankelly.biz/)), List((type,home), (value,http://seankelly.tv/))))))
+Passed: Map(addresses -> List(Map(format -> us, type -> work, value -> 1234 Main StnSpringfield, TX 78080-1216), Map(format -> us, type -> home, value -> 5678 Main StnSpringfield, TX 78080-1316)), emailaddrs -> List(Map(type -> work, value -> kelly@seankelly.biz), Map(pref -> 1.0, type -> home, value -> kelly@seankelly.tv)), fullname -> Sean Kelly, org -> SK Consulting, telephones -> List(Map(pref -> 1.0, type -> work, value -> +1 214 555 1212), Map(type -> fax, value -> +1 214 555 1213), Map(type -> mobile, value -> +1 214 555 1214)), urls -> List(Map(type -> work, value -> http://seankelly.biz/), Map(type -> home, value -> http://seankelly.tv/)))
-Some(List((web-app,List((servlet,List(List((servlet-name,cofaxCDS), (servlet-class,org.cofax.cds.CDSServlet), (init-param,List((configGlossary:installationAt,Philadelphia, PA), (configGlossary:adminEmail,ksm@pobox.com), (configGlossary:poweredBy,Cofax), (configGlossary:poweredByIcon,/images/cofax.gif), (configGlossary:staticPath,/content/static), (templateProcessorClass,org.cofax.WysiwygTemplate), (templateLoaderClass,org.cofax.FilesTemplateLoader), (templatePath,templates), (templateOverridePath,), (defaultListTemplate,listTemplate.htm), (defaultFileTemplate,articleTemplate.htm), (useJSP,false), (jspListTemplate,listTemplate.jsp), (jspFileTemplate,articleTemplate.jsp), (cachePackageTagsTrack,200.0), (cachePackageTagsStore,200.0), (cachePackageTagsRefresh,60.0), (cacheTemplatesTrack,100.0), (cacheTemplatesStore,50.0), (cacheTemplatesRefresh,15.0), (cachePagesTrack,200.0), (cachePagesStore,100.0), (cachePagesRefresh,10.0), (cachePagesDirtyRead,10.0), (searchEngineListTemplate,forSearchEnginesList.htm), (searchEngineFileTemplate,forSearchEngines.htm), (searchEngineRobotsDb,WEB-INF/robots.db), (useDataStore,true), (dataStoreClass,org.cofax.SqlDataStore), (redirectionClass,org.cofax.SqlRedirection), (dataStoreName,cofax), (dataStoreDriver,com.microsoft.jdbc.sqlserver.SQLServerDriver), (dataStoreUrl,jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon), (dataStoreUser,sa), (dataStorePassword,dataStoreTestQuery), (dataStoreTestQuery,SET NOCOUNT ON;select test='test';), (dataStoreLogFile,/usr/local/tomcat/logs/datastore.log), (dataStoreInitConns,10.0), (dataStoreMaxConns,100.0), (dataStoreConnUsageLimit,100.0), (dataStoreLogLevel,debug), (maxUrlLength,500.0)))), List((servlet-name,cofaxEmail), (servlet-class,org.cofax.cds.EmailServlet), (init-param,List((mailHost,mail1), (mailHostOverride,mail2)))), List((servlet-name,cofaxAdmin), (servlet-class,org.cofax.cds.AdminServlet)), List((servlet-name,fileServlet), (servlet-class,org.cofax.cds.FileServlet)), List((servlet-name,cofaxTools), (servlet-class,org.cofax.cms.CofaxToolsServlet), (init-param,List((templatePath,toolstemplates/), (log,1.0), (logLocation,/usr/local/tomcat/logs/CofaxTools.log), (logMaxSize,), (dataLog,1.0), (dataLogLocation,/usr/local/tomcat/logs/dataLog.log), (dataLogMaxSize,), (removePageCache,/content/admin/remove?cache=pages&id=), (removeTemplateCache,/content/admin/remove?cache=templates&id=), (fileTransferFolder,/usr/local/tomcat/webapps/content/fileTransferFolder), (lookInContext,1.0), (adminGroupID,4.0), (betaServer,true)))))), (servlet-mapping,List((cofaxCDS,/), (cofaxEmail,/cofaxutil/aemail/*), (cofaxAdmin,/admin/*), (fileServlet,/static/*), (cofaxTools,/tools/*))), (taglib,List((taglib-uri,cofax.tld), (taglib-location,/WEB-INF/tlds/cofax.tld)))))))
+Passed: Map(web-app -> Map(servlet -> List(Map(init-param -> Map(cachePackageTagsRefresh -> 60.0, cachePackageTagsStore -> 200.0, cachePackageTagsTrack -> 200.0, cachePagesDirtyRead -> 10.0, cachePagesRefresh -> 10.0, cachePagesStore -> 100.0, cachePagesTrack -> 200.0, cacheTemplatesRefresh -> 15.0, cacheTemplatesStore -> 50.0, cacheTemplatesTrack -> 100.0, configGlossary:adminEmail -> ksm@pobox.com, configGlossary:installationAt -> Philadelphia, PA, configGlossary:poweredBy -> Cofax, configGlossary:poweredByIcon -> /images/cofax.gif, configGlossary:staticPath -> /content/static, dataStoreClass -> org.cofax.SqlDataStore, dataStoreConnUsageLimit -> 100.0, dataStoreDriver -> com.microsoft.jdbc.sqlserver.SQLServerDriver, dataStoreInitConns -> 10.0, dataStoreLogFile -> /usr/local/tomcat/logs/datastore.log, dataStoreLogLevel -> debug, dataStoreMaxConns -> 100.0, dataStoreName -> cofax, dataStorePassword -> dataStoreTestQuery, dataStoreTestQuery -> SET NOCOUNT ON;select test='test';, dataStoreUrl -> jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon, dataStoreUser -> sa, defaultFileTemplate -> articleTemplate.htm, defaultListTemplate -> listTemplate.htm, jspFileTemplate -> articleTemplate.jsp, jspListTemplate -> listTemplate.jsp, maxUrlLength -> 500.0, redirectionClass -> org.cofax.SqlRedirection, searchEngineFileTemplate -> forSearchEngines.htm, searchEngineListTemplate -> forSearchEnginesList.htm, searchEngineRobotsDb -> WEB-INF/robots.db, templateLoaderClass -> org.cofax.FilesTemplateLoader, templateOverridePath -> , templatePath -> templates, templateProcessorClass -> org.cofax.WysiwygTemplate, useDataStore -> true, useJSP -> false), servlet-class -> org.cofax.cds.CDSServlet, servlet-name -> cofaxCDS), Map(init-param -> Map(mailHost -> mail1, mailHostOverride -> mail2), servlet-class -> org.cofax.cds.EmailServlet, servlet-name -> cofaxEmail), Map(servlet-class -> org.cofax.cds.AdminServlet, servlet-name -> cofaxAdmin), Map(servlet-class -> org.cofax.cds.FileServlet, servlet-name -> fileServlet), Map(init-param -> Map(adminGroupID -> 4.0, betaServer -> true, dataLog -> 1.0, dataLogLocation -> /usr/local/tomcat/logs/dataLog.log, dataLogMaxSize -> , fileTransferFolder -> /usr/local/tomcat/webapps/content/fileTransferFolder, log -> 1.0, logLocation -> /usr/local/tomcat/logs/CofaxTools.log, logMaxSize -> , lookInContext -> 1.0, removePageCache -> /content/admin/remove?cache=pages&id=, removeTemplateCache -> /content/admin/remove?cache=templates&id=, templatePath -> toolstemplates/), servlet-class -> org.cofax.cms.CofaxToolsServlet, servlet-name -> cofaxTools)), servlet-mapping -> Map(cofaxAdmin -> /admin/*, cofaxCDS -> /, cofaxEmail -> /cofaxutil/aemail/*, cofaxTools -> /tools/*, fileServlet -> /static/*), taglib -> Map(taglib-location -> /WEB-INF/tlds/cofax.tld, taglib-uri -> cofax.tld)))
diff --git a/test/files/run/json.scala b/test/files/run/json.scala
index ec0bad7ebe..6d25999e9c 100644
--- a/test/files/run/json.scala
+++ b/test/files/run/json.scala
@@ -1,14 +1,80 @@
import scala.util.parsing.json._
+import scala.collection.immutable.TreeMap
object Test extends Application {
- def printJSON(s: String) {
- println(JSON parse s)
+ /* This method converts parsed JSON back into real JSON notation with objects in
+   * sorted-key order. Not required by the spec, but it allows us to do a stable
+ * toString comparison. */
+ def jsonToString(in : Any) : String = in match {
+ case l : List[_] => "[" + l.map(jsonToString).mkString(", ") + "]"
+ case m : Map[String,_] => "{" + m.elements.toList
+ .sort({ (x,y) => x._1 < y._1 })
+ .map({ case (k,v) => "\"" + k + "\": " + jsonToString(v) })
+ .mkString(", ") + "}"
+ case s : String => "\"" + s + "\""
+ case x => x.toString
}
- printJSON("{\"name\": \"value\"}")
- printJSON("{\"name\": \"va1ue\"}") // ticket #136
- printJSON("{\"name\": { \"name1\": \"va1ue1\", \"name2\": \"va1ue2\" } }")
+
+ def sortJSON(in : Any) : Any = in match {
+ case l : List[_] => l.map(sortJSON)
+ case m : Map[String,_] => TreeMap(m.mapElements(sortJSON).elements.toSeq : _*)
+ case x => x
+ }
+
+ // For this one, just parsing should be considered a pass
+ def printJSON(given : String) {
+ JSON parseFull given match {
+ case None => println("Parse failed for \"%s\"".format(given))
+ case Some(parsed) => println("Passed: " + sortJSON(parsed))
+ }
+ }
+
+ def printJSON(given : String, expected : Any) {
+ JSON parseFull given match {
+ case None => println("Parse failed for \"%s\"".format(given))
+ case Some(parsed) => if (parsed == expected) {
+ println("Passed: " + parsed)
+ } else {
+ val eStr = sortJSON(expected).toString
+ val pStr = sortJSON(parsed).toString
+
+ // Figure out where the Strings differ and generate a marker
+ val mismatchPosition = eStr.toList.zip(pStr.toList).findIndexOf({case (a,b) => a != b}) match {
+ case -1 => Math.min(eStr.length, pStr.length)
+ case x => x
+ }
+ val reason = (" " * mismatchPosition) + "^"
+ println("Expected, got:\n %s\n %s (from \"%s\")\n %s".format(eStr, pStr, given, reason))
+ }
+ }
+ }
+
+ // The library should differentiate between lower case "l" and number "1" (ticket #136)
+ printJSON("{\"name\": \"value\"}", Map("name" -> "value"))
+ printJSON("{\"name\": \"va1ue\"}", Map("name" -> "va1ue"))
+ printJSON("{\"name\": { \"name1\": \"va1ue1\", \"name2\": \"va1ue2\" } }",
+ Map("name" -> Map("name1" -> "va1ue1", "name2" -> "va1ue2")))
+
+ // Unicode escapes should be handled properly
printJSON("{\"name\": \"\\u0022\"}")
+
+ // The library should return a map for JSON objects (ticket #873)
+ printJSON("""{"function":"add_symbol"}""", Map("function" -> "add_symbol"))
+
+ // The library should recurse into arrays to find objects (ticket #2207)
+ printJSON("""[{"a": "team"},{"b": 52}]""", List(Map("a" -> "team"), Map("b" -> 52.0)))
+
+ // The library should differentiate between empty maps and lists (ticket #3284)
+ printJSON("{}", Map())
+ printJSON("[]", List())
+
+ // Lists should be returned in the same order as specified
+ printJSON("[4,1,3,2,6,5,8,7]", List[Double](4,1,3,2,6,5,8,7))
+
+ // Additional tests
printJSON("{\"age\": 0}")
+
+
println
// from http://en.wikipedia.org/wiki/JSON
@@ -27,8 +93,25 @@ object Test extends Application {
"646 123-4567"
]
}"""
- //println(sample1)
- printJSON(sample1)
+
+ // Should be equivalent to:
+ val sample1Obj = Map(
+ "firstName" -> "John",
+ "lastName" -> "Smith",
+ "address" -> Map(
+ "streetAddress" -> "21 2nd Street",
+ "city" -> "New York",
+ "state" -> "NY",
+ "postalCode" -> 10021
+ ),
+ "phoneNumbers"-> List(
+ "212 732-1234",
+ "646 123-4567"
+ )
+ )
+
+
+ printJSON(sample1, sample1Obj)
println
// from http://www.developer.com/lang/jscript/article.php/3596836
@@ -63,7 +146,7 @@ object Test extends Application {
// from http://json.org/example.html
val sample3 = """
{"web-app": {
- "servlet": [
+ "servlet": [
{
"servlet-name": "cofaxCDS",
"servlet-class": "org.cofax.cds.CDSServlet",
@@ -119,7 +202,7 @@ object Test extends Application {
{
"servlet-name": "cofaxAdmin",
"servlet-class": "org.cofax.cds.AdminServlet"},
-
+
{
"servlet-name": "fileServlet",
"servlet-class": "org.cofax.cds.FileServlet"},
@@ -146,7 +229,7 @@ object Test extends Application {
"cofaxAdmin": "/admin/*",
"fileServlet": "/static/*",
"cofaxTools": "/tools/*"},
-
+
"taglib": {
"taglib-uri": "cofax.tld",
"taglib-location": "/WEB-INF/tlds/cofax.tld"}
diff --git a/test/files/run/lazy-concurrent.check b/test/files/run/lazy-concurrent.check
new file mode 100644
index 0000000000..33cff9d4f8
--- /dev/null
+++ b/test/files/run/lazy-concurrent.check
@@ -0,0 +1 @@
+Initializing singleton.
diff --git a/test/files/run/lazy-concurrent.scala b/test/files/run/lazy-concurrent.scala
new file mode 100644
index 0000000000..4699ed6a15
--- /dev/null
+++ b/test/files/run/lazy-concurrent.scala
@@ -0,0 +1,17 @@
+object Test {
+ def main(args: Array[String]) {
+ class Singleton {
+ val field = ()
+ println("Initializing singleton.")
+ }
+ lazy val Singleton = new Singleton
+
+ var i = 0
+ while (i < 4) {
+ new Thread(new Runnable {
+ def run = Singleton.field
+ }).start
+ i += 1
+ }
+ }
+}
diff --git a/test/files/run/lazy-exprs.scala b/test/files/run/lazy-exprs.scala
index 8a80423646..d4913c78d5 100644
--- a/test/files/run/lazy-exprs.scala
+++ b/test/files/run/lazy-exprs.scala
@@ -2,7 +2,7 @@ object TestExpressions {
def patmatchScrut {
lazy val z1: Option[String] = { println("forced <z1>"); Some("lazy z1") }
-
+
val res = z1 match {
case Some(msg) => msg
case None => "failed"
@@ -17,10 +17,10 @@ object TestExpressions {
def patmatchCase {
val t: Option[String] = Some("test")
val res = t match {
- case Some(msg) =>
+ case Some(msg) =>
lazy val z1 = { println("forced <z1>"); "lazy z1" }
z1
-
+
case None => "failed"
}
print("lazy val in case: ")
@@ -36,9 +36,9 @@ object TestExpressions {
print("lazy val in case: ")
val t: Option[String] = Some("lazy Z1")
t match {
- case Some(Z1) =>
+ case Some(Z1) =>
println("ok")
-
+
case None =>
println("failed")
}
@@ -60,13 +60,13 @@ object TestExpressions {
print("lazy val in pattern: ")
val t: Option[String] = Some("LazyField")
t match {
- case Some(LazyField) =>
+ case Some(LazyField) =>
println("ok")
-
+
case None =>
println("failed")
}
- }
+ }
lazy val (x, y) = ({print("x"); "x"}, {print("y"); "y"})
def testPatLazyVal {
diff --git a/test/files/run/lazy-locals.scala b/test/files/run/lazy-locals.scala
index be738a0f70..5755007c55 100644
--- a/test/files/run/lazy-locals.scala
+++ b/test/files/run/lazy-locals.scala
@@ -163,18 +163,18 @@ object Test extends Application {
// see #1589
object NestedLazyVals extends Application {
- lazy val x = {
+ lazy val x = {
lazy val y = { println("forcing y"); 42; }
println("forcing x")
- y
+ y
}
-
+
val x1 = 5 + { lazy val y = 10 ; y }
-
+
println(x)
println(x1)
}
-
+
trait TNestedLazyVals {
lazy val x = { lazy val y = 42; y }
}
diff --git a/test/files/run/lazy-override.scala b/test/files/run/lazy-override.scala
index e5884257a8..fe2c85bda4 100644
--- a/test/files/run/lazy-override.scala
+++ b/test/files/run/lazy-override.scala
@@ -20,7 +20,7 @@ object Test extends Application {
val b = new B
print("b.x=")
- println(b.x)
+ println(b.x)
print("b.z=")
- println(b.z)
+ println(b.z)
}
diff --git a/test/files/run/lazy-traits.scala b/test/files/run/lazy-traits.scala
index 5a8d3f3db7..e3fb37af84 100644
--- a/test/files/run/lazy-traits.scala
+++ b/test/files/run/lazy-traits.scala
@@ -1,5 +1,5 @@
trait A {
- lazy val z1 = {
+ lazy val z1 = {
println("<forced z1>")
"lazy z1"
}
@@ -7,7 +7,7 @@ trait A {
/** Simple class which mixes in one lazy val. */
class Cls extends AnyRef with A {
- override def toString =
+ override def toString =
"z1 = " + z1
}
@@ -18,7 +18,7 @@ class Cls2 extends AnyRef with A {
"lazy z2"
}
- override def toString =
+ override def toString =
"z1 = " + z1 + " z2 = " + z2
}
@@ -34,7 +34,7 @@ class ClsB extends Object with B {
println("<forced zc1>")
"lazy zc1"
}
- override def toString =
+ override def toString =
"z1 = " + z1 + " zb1 = " + zb1 + " zc1 = " + zc1
}
@@ -73,39 +73,39 @@ class OverflownLazyFields extends Object with A {
lazy val zc30 = { println("<forced zc30>"); "lazy zc30" }
lazy val zc31 = { println("<forced zc31>"); "lazy zc31" }
- override def toString =
- "\nzc00 = " + zc00 +
- "\nzc01 = " + zc01 +
- "\nzc02 = " + zc02 +
- "\nzc03 = " + zc03 +
- "\nzc04 = " + zc04 +
- "\nzc05 = " + zc05 +
- "\nzc06 = " + zc06 +
- "\nzc07 = " + zc07 +
- "\nzc08 = " + zc08 +
- "\nzc09 = " + zc09 +
- "\nzc10 = " + zc10 +
- "\nzc11 = " + zc11 +
- "\nzc12 = " + zc12 +
- "\nzc13 = " + zc13 +
- "\nzc14 = " + zc14 +
- "\nzc15 = " + zc15 +
- "\nzc16 = " + zc16 +
- "\nzc17 = " + zc17 +
- "\nzc18 = " + zc18 +
- "\nzc19 = " + zc19 +
- "\nzc20 = " + zc20 +
- "\nzc21 = " + zc21 +
- "\nzc22 = " + zc22 +
- "\nzc23 = " + zc23 +
- "\nzc24 = " + zc24 +
- "\nzc25 = " + zc25 +
- "\nzc26 = " + zc26 +
- "\nzc27 = " + zc27 +
- "\nzc28 = " + zc28 +
- "\nzc29 = " + zc29 +
- "\nzc30 = " + zc30 +
- "\nzc31 = " + zc31 +
+ override def toString =
+ "\nzc00 = " + zc00 +
+ "\nzc01 = " + zc01 +
+ "\nzc02 = " + zc02 +
+ "\nzc03 = " + zc03 +
+ "\nzc04 = " + zc04 +
+ "\nzc05 = " + zc05 +
+ "\nzc06 = " + zc06 +
+ "\nzc07 = " + zc07 +
+ "\nzc08 = " + zc08 +
+ "\nzc09 = " + zc09 +
+ "\nzc10 = " + zc10 +
+ "\nzc11 = " + zc11 +
+ "\nzc12 = " + zc12 +
+ "\nzc13 = " + zc13 +
+ "\nzc14 = " + zc14 +
+ "\nzc15 = " + zc15 +
+ "\nzc16 = " + zc16 +
+ "\nzc17 = " + zc17 +
+ "\nzc18 = " + zc18 +
+ "\nzc19 = " + zc19 +
+ "\nzc20 = " + zc20 +
+ "\nzc21 = " + zc21 +
+ "\nzc22 = " + zc22 +
+ "\nzc23 = " + zc23 +
+ "\nzc24 = " + zc24 +
+ "\nzc25 = " + zc25 +
+ "\nzc26 = " + zc26 +
+ "\nzc27 = " + zc27 +
+ "\nzc28 = " + zc28 +
+ "\nzc29 = " + zc29 +
+ "\nzc30 = " + zc30 +
+ "\nzc31 = " + zc31 +
"\nz1 = " + z1
}
diff --git a/test/files/run/lisp.scala b/test/files/run/lisp.scala
index fe3941d0db..63b615804b 100644
--- a/test/files/run/lisp.scala
+++ b/test/files/run/lisp.scala
@@ -12,11 +12,11 @@ class LispTokenizer(s: String) extends Iterator[String] {
while (i < s.length() && s.charAt(i) <= ' ') i += 1
i < s.length()
}
- def next: String =
+ def next: String =
if (hasNext) {
val start = i
if (isDelimiter(s charAt i)) i += 1
- else
+ else
do i = i + 1
while (!isDelimiter(s charAt i))
s.substring(start, i)
diff --git a/test/files/run/lists.scala b/test/files/run/lists.scala
index 695d5a0336..8a319c2fed 100644
--- a/test/files/run/lists.scala
+++ b/test/files/run/lists.scala
@@ -12,7 +12,7 @@ import testing.SUnit._
*/
object Test extends TestConsoleMain {
def suite = new TestSuite(
- Test_multiset, // multiset operations: union, intersect, diff
+ Test_multiset, // multiset operations: union, intersect, diff
Test1, //count, exists, filter, ..
Test2, //#468
Test3, //#1691
@@ -63,7 +63,7 @@ object Test_multiset extends TestCase("multiset") with Assert {
assertEquals("vs_diff_xs", List(3, 2), vs diff xs)
assertTrue("xs_subset_vs", isSubListOf(xs -- vs, xs diff vs))
- // tests adapted from Thomas Jung
+ // tests adapted from Thomas Jung
assertTrue(
"be symmetric after sorting", {
def sort(zs: List[Int]) = zs sort ( _ > _ )
@@ -73,7 +73,7 @@ object Test_multiset extends TestCase("multiset") with Assert {
"obey min cardinality", {
def cardinality[A](zs: List[A], e: A): Int = zs count (e == _)
val intersection = xs intersect ys
- xs forall (e => cardinality(intersection, e) == (cardinality(xs, e)
+ xs forall (e => cardinality(intersection, e) == (cardinality(xs, e)
min cardinality(ys, e)))
})
assertTrue(
@@ -126,8 +126,8 @@ object Test1 extends TestCase("ctor") with Assert {
assertEquals("check_forall", true, b1 & b2)
}
{
- val ys1 = xs1 remove { e => e % 2 != 0 }
- val ys2 = xs4 remove { e => e < 5 }
+ val ys1 = xs1 filterNot { e => e % 2 != 0 }
+ val ys2 = xs4 filterNot { e => e < 5 }
assertEquals("check_remove", 3, ys1.length + ys2.length)
}
{
@@ -149,7 +149,7 @@ object Test2 extends TestCase("t0468") with Assert {
override def runTest {
val xs1 = List(1, 2, 3)
val xs2 = List(0)
-
+
val ys1 = xs1 ::: List(4)
assertEquals("check_:::", List(1, 2, 3, 4), ys1)
diff --git a/test/files/run/manifests.scala b/test/files/run/manifests.scala
new file mode 100644
index 0000000000..1da06b8aee
--- /dev/null
+++ b/test/files/run/manifests.scala
@@ -0,0 +1,147 @@
+object Test
+{
+ object Variances extends Enumeration {
+ val CO, IN, CONTRA = Value
+ }
+ import Variances.{ CO, IN, CONTRA }
+
+ object SubtypeRelationship extends Enumeration {
+ val NONE, SAME, SUB, SUPER = Value
+ }
+ import SubtypeRelationship.{ NONE, SAME, SUB, SUPER }
+
+ class VarianceTester[T, U, CC[_]](expected: Variances.Value)(
+ implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) {
+
+ def elements = List(ev1 <:< ev2, ev2 <:< ev1)
+ def containers = List(ev3 <:< ev4, ev4 <:< ev3)
+
+ def isUnrelated = typeCompare[T, U] == NONE
+ def isSame = typeCompare[T, U] == SAME
+ def isSub = typeCompare[T, U] == SUB
+ def isSuper = typeCompare[T, U] == SUPER
+
+ def showsCovariance = (elements == containers)
+ def showsContravariance = (elements == containers.reverse)
+ def showsInvariance = containers forall (_ == isSame)
+
+ def allContainerVariances = List(showsCovariance, showsInvariance, showsContravariance)
+
+ def showsExpectedVariance =
+ if (isUnrelated) allContainerVariances forall (_ == false)
+ else if (isSame) allContainerVariances forall (_ == true)
+ else expected match {
+ case CO => showsCovariance && !showsContravariance && !showsInvariance
+ case IN => showsInvariance && !showsCovariance && !showsContravariance
+ case CONTRA => showsContravariance && !showsCovariance && !showsInvariance
+ }
+ }
+
+ def showsCovariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
+ new VarianceTester[T, U, CC](CO) showsExpectedVariance
+
+ def showsInvariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
+ new VarianceTester[T, U, CC](IN) showsExpectedVariance
+
+ def showsContravariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
+ new VarianceTester[T, U, CC](CONTRA) showsExpectedVariance
+
+ def typeCompare[T, U](implicit ev1: Manifest[T], ev2: Manifest[U]) = (ev1 <:< ev2, ev2 <:< ev1) match {
+ case (true, true) => SAME
+ case (true, false) => SUB
+ case (false, true) => SUPER
+ case (false, false) => NONE
+ }
+
+ def assertAnyRef[T: Manifest] = List(
+ manifest[T] <:< manifest[Any],
+ manifest[T] <:< manifest[AnyRef],
+ !(manifest[T] <:< manifest[AnyVal])
+ ) foreach (assert(_, "assertAnyRef"))
+
+ def assertAnyVal[T: Manifest] = List(
+ manifest[T] <:< manifest[Any],
+ !(manifest[T] <:< manifest[AnyRef]),
+ manifest[T] <:< manifest[AnyVal]
+ ) foreach (assert(_, "assertAnyVal"))
+
+ def assertSameType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SAME, "assertSameType")
+ def assertSuperType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SUPER, "assertSuperType")
+ def assertSubType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SUB, "assertSubType")
+ def assertNoRelationship[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == NONE, "assertNoRelationship")
+
+ def testVariancesVia[T: Manifest, U: Manifest] = assert(
+ typeCompare[T, U] == SUB &&
+ showsCovariance[T, U, List] &&
+ showsInvariance[T, U, Set],
+ "testVariancesVia"
+ )
+
+ def runAllTests = {
+ assertAnyVal[AnyVal]
+ assertAnyVal[Unit]
+ assertAnyVal[Int]
+ assertAnyVal[Double]
+ assertAnyVal[Boolean]
+ assertAnyVal[Char]
+
+ assertAnyRef[AnyRef]
+ assertAnyRef[java.lang.Object]
+ assertAnyRef[java.lang.Integer]
+ assertAnyRef[java.lang.Double]
+ assertAnyRef[java.lang.Boolean]
+ assertAnyRef[java.lang.Character]
+ assertAnyRef[String]
+ assertAnyRef[scala.List[String]]
+ assertAnyRef[scala.List[_]]
+
+ // variance doesn't work yet
+ // testVariancesVia[String, Any]
+ // testVariancesVia[String, AnyRef]
+
+ assertSubType[List[String], List[Any]]
+ assertSubType[List[String], List[AnyRef]]
+ assertNoRelationship[List[String], List[AnyVal]]
+
+ assertSubType[List[Int], List[Any]]
+ assertSubType[List[Int], List[AnyVal]]
+ assertNoRelationship[List[Int], List[AnyRef]]
+
+ // Nothing
+ assertSubType[Nothing, Any]
+ assertSubType[Nothing, AnyVal]
+ assertSubType[Nothing, AnyRef]
+ assertSubType[Nothing, String]
+ assertSubType[Nothing, List[String]]
+ assertSubType[Nothing, Null]
+ assertSameType[Nothing, Nothing]
+
+ // Null
+ assertSubType[Null, Any]
+ assertNoRelationship[Null, AnyVal]
+ assertSubType[Null, AnyRef]
+ assertSubType[Null, String]
+ assertSubType[Null, List[String]]
+ assertSameType[Null, Null]
+ assertSuperType[Null, Nothing]
+
+ // Any
+ assertSameType[Any, Any]
+ assertSuperType[Any, AnyVal]
+ assertSuperType[Any, AnyRef]
+ assertSuperType[Any, String]
+ assertSuperType[Any, List[String]]
+ assertSuperType[Any, Null]
+ assertSuperType[Any, Nothing]
+
+ // Misc unrelated types
+ assertNoRelationship[Unit, AnyRef]
+ assertNoRelationship[Unit, Int]
+ assertNoRelationship[Int, Long]
+ assertNoRelationship[Boolean, String]
+ assertNoRelationship[List[Boolean], List[String]]
+ assertNoRelationship[Set[Boolean], Set[String]]
+ }
+
+ def main(args: Array[String]): Unit = runAllTests
+}
diff --git a/test/files/run/mapValues.scala b/test/files/run/mapValues.scala
new file mode 100644
index 0000000000..bd1794bc51
--- /dev/null
+++ b/test/files/run/mapValues.scala
@@ -0,0 +1,8 @@
+object Test {
+ val m = Map(1 -> 1, 2 -> 2)
+ val mv = (m mapValues identity) - 1
+
+ def main(args: Array[String]): Unit = {
+ assert(mv.size == 1)
+ }
+}
diff --git a/test/files/run/map_java_conversions.scala b/test/files/run/map_java_conversions.scala
new file mode 100644
index 0000000000..58ff471190
--- /dev/null
+++ b/test/files/run/map_java_conversions.scala
@@ -0,0 +1,60 @@
+
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ import collection.JavaConversions._
+
+ test(new java.util.HashMap[String, String])
+ test(new java.util.Properties)
+ testConcMap
+ }
+
+ def testConcMap {
+ import collection.JavaConversions._
+
+ val concMap = new java.util.concurrent.ConcurrentHashMap[String, String]
+
+ test(concMap)
+ val cmap = asConcurrentMap(concMap)
+ cmap.putIfAbsent("absentKey", "absentValue")
+ cmap.put("somekey", "somevalue")
+ assert(cmap.remove("somekey", "somevalue") == true)
+ assert(cmap.replace("absentKey", "newAbsentValue") == Some("absentValue"))
+ assert(cmap.replace("absentKey", "newAbsentValue", ".......") == true)
+ }
+
+ def test(m: collection.mutable.Map[String, String]) {
+ m.clear
+ assert(m.size == 0)
+
+ m.put("key", "value")
+ assert(m.size == 1)
+
+ assert(m.put("key", "anotherValue") == Some("value"))
+ assert(m.put("key2", "value2") == None)
+ assert(m.size == 2)
+
+ m += (("key3", "value3"))
+ assert(m.size == 3)
+
+ m -= "key2"
+ assert(m.size == 2)
+ assert(m.nonEmpty)
+ assert(m.remove("key") == Some("anotherValue"))
+
+ m.clear
+ for (i <- 0 until 10) m += (("key" + i, "value" + i))
+ for ((k, v) <- m) assert(k.startsWith("key"))
+ }
+
+}
+
+
+
+
+
+
diff --git a/test/files/run/matcharraytail.check b/test/files/run/matcharraytail.check
index 15381501a9..f2844d41a9 100644
--- a/test/files/run/matcharraytail.check
+++ b/test/files/run/matcharraytail.check
@@ -1,2 +1,2 @@
Array(foo, bar, baz)
-ArrayBuffer(bar, baz)
+Vector(bar, baz)
diff --git a/test/files/run/matchbytes.scala b/test/files/run/matchbytes.scala
index 73e55d47cb..e776f0bf84 100644
--- a/test/files/run/matchbytes.scala
+++ b/test/files/run/matchbytes.scala
@@ -4,4 +4,4 @@ object Test extends Application{
case 1 => println(1);
case _ => println("????");
}
-}
+}
diff --git a/test/files/run/matchintasany.scala b/test/files/run/matchintasany.scala
index 2c732b98b5..e21dce9562 100644
--- a/test/files/run/matchintasany.scala
+++ b/test/files/run/matchintasany.scala
@@ -5,4 +5,4 @@ object Test extends Application{
case 1L => println(1);
case _ => println("????");
}
-}
+}
diff --git a/test/files/run/matchnull.scala b/test/files/run/matchnull.scala
index 2cc8550d47..cfb3049239 100644
--- a/test/files/run/matchnull.scala
+++ b/test/files/run/matchnull.scala
@@ -3,7 +3,7 @@ object Test
def f1 = null match { case x: AnyRef => 1 case _ => -1 }
def f2(x: Any) = x match { case 52 => 1 ; case null => -1 ; case _ => 0 }
def f3(x: AnyRef) = x match { case x: String => 1 ; case List(_) => 0 ; case null => -1 ; case _ => -2 }
-
+
def main(args: Array[String]): Unit = {
println(f1)
println(f2(null))
diff --git a/test/files/run/misc.scala b/test/files/run/misc.scala
index 2ae76bd968..139d2d70f7 100644
--- a/test/files/run/misc.scala
+++ b/test/files/run/misc.scala
@@ -60,7 +60,7 @@ object Test {
Console.println;
val x = 13;
x;
- // !!! why are DefDef replaced by Block(Tree[0])? we should use Empty!
+ // !!! why are DefDef replaced by Block(Tree[0])? we should use Empty!
def f = 19;
f;
def f0() = 11;
diff --git a/test/files/run/missingparams.scala b/test/files/run/missingparams.scala
index 3672fdf76f..0b79fa15d0 100644
--- a/test/files/run/missingparams.scala
+++ b/test/files/run/missingparams.scala
@@ -6,8 +6,8 @@ final class Foo(val x: Int) {
// test that the closure elimination is not wrongly replacing
// 'that' by 'this'
- def intersect(that: Foo) =
- filter { dummy =>
+ def intersect(that: Foo) =
+ filter { dummy =>
// x // dummy
that.x > 0
}
diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check
index 937f95962b..d31e98be32 100644
--- a/test/files/run/names-defaults.check
+++ b/test/files/run/names-defaults.check
@@ -18,29 +18,20 @@ get: [
get: 4
get: @
4: @
+get: 10
+get: flu
+10: flu
get: 8
get: 9
get: %
get: 5
5: %, 17
12: ', 13, 16
+1: bird, swine, 10
+20
+30
+40
6: ~
-get: 7
-get: +
-7: +
-get: +
-get: 8
-8: +
-9: ?
-get: 39
-get: 38
-get: |
-10: |, 77
-get: 2.233
-get: <
-get: 13
-get: x
-13: x, 2.233, <
14: /
100: 100: nix, nix, 982, 982, 0
100: overridden, bla, 0, 0, 555
@@ -77,13 +68,12 @@ get: 20
get: 20
20
0
+1
dlkfj0dlkfj102
lskf2dkflj2
dlkd5nixda10nixdadklfj1dklfj
C(dlkf,234,struct)struct???
C(dflkj,234,Some(209))None!!
-20020100
-C(dlfkj,11,10)35dlkf
dflk10
1-1jupee
12.39
@@ -115,3 +105,14 @@ klfj1
blublu1
my text
List(1, 2)
+3
+3
+3
+3
+3
+3
+3
+3
+3
+3
+3
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 008939cc9d..ee0862558a 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -13,21 +13,27 @@ object Test extends Application {
// mixing named and positional
test1(get(4), b = get("@"))
+ test1(a = get(10), get("flu"))
test2(get(8), v = get(9))(get("%"), l = get(5))
test3(12, 13)("'", d = 16)
+ test3(a = 1, "swine")(c = "bird", d = 10L)
// anonymous functions
- val f1: (Int, String) => Unit = test1(_, _); f1(6, "~")
- val f2: Int => Unit = test1(a = _, b = get("+")); f2(get(7))
- val f3 = test1(b = _: String, a = get(8)); f3(get("+"))
- val f4: (Int, String) => Unit = test1(_, b = _); f4(9, "?")
+ {
+ def doMod(f: Int => Unit) { f(20) }
+ var var1 = 0
+ doMod(var1 = _)
+ println(var1)
- val f5: Int => (String, Int) => Unit = test2(v = get(38), u = _)_
- f5(get(39))(get("|"), 10)
+ synchronized(var1 = 30)
+ println(var1)
- val f6: (Double, String) => Unit = test3(get(13), _)(d = _, c = get("x"))
- f6(get(2.233), get("<"))
+ var var2 = 0
+ def delay(var2: => Int) = { var2 }
+ println(delay(var2 = 40))
+ }
+ val f1: (Int, String) => Unit = test1(_, _); f1(6, "~")
test4(14)
@@ -133,6 +139,10 @@ object Test extends Application {
println(bn4())
println(bn4(a = 0))
+ class t2929(x: => Int = 1) {
+ def foo = x
+ }
+ println((new t2929()).foo)
// constructors
val a1 = new A(b = "dlkfj")(d = 102)
@@ -145,10 +155,6 @@ object Test extends Application {
println(c1.print)
val c2 = C("dflkj", c = Some(209): Option[Int])(None, "!!")
println(c2.print)
- val a_f: String => A[String, Nothing] = new A[String, Nothing](b = _)(d = 100)
- println(a_f("20").print)
- val c_f: Int => C[Int] = C("dlfkj", c = 10, b = _)(35, e = "dlkf")
- println(c_f(11).print)
// "super" qualifier
@@ -250,6 +256,9 @@ object Test extends Application {
def test11[T[P]](x: T[T[List[T[X forSome { type X }]]]] = List(1,2)) = x
// (cannot call f using the default, List(1,2) doesn't match the param type)
+ def multinest = { def bar(x: Int = 1) = { def bar(x: Int = 2) = x; bar() + x }; bar() }
+ println(multinest)
+
// #2290
def spawn(a: Int, b: => Unit) = { () }
@@ -263,6 +272,102 @@ object Test extends Application {
// #2390
case class A2390[T](x: Int) { def copy(a: Int)(b: Int = 0) = 0 }
+ // #2489
+ class A2489 { def foo { def bar(a: Int = 1) = a; bar(); val u = 0 } }
+
+ // a bug reported on the mailing lists, related to #2489
+ class Test2489 {
+ def foo(): Int = {
+ val i = 10
+ case class Foo(j: Int)
+ i
+ }
+ }
+
+ // #2784
+ class Test2784 {
+ object t { def f(x: Int) = x }
+ val one = t f (x = 1)
+ }
+
+ // #2820
+ class Test2820 {
+ class A[T](f: String = "ski!")
+ class C extends A
+ }
+
+ object t3178 {
+ def foo(x: String) = x
+ def foo(x: Int) = x
+ def bar(foo: Int) = foo
+ bar(foo = 1)
+ }
+
+
+ // #3207
+ trait P3207[T] {
+ class Inner(val f: T => Unit = (x: T) => println(x))
+ }
+
+ object Test3207_1 {
+ val p = new P3207[Int] {}
+ val q = new p.Inner() {
+ def g = 0
+ }
+ }
+
+ object Test3207_2 {
+ val p = new P3207[Int] {
+ val inner = new Inner() {
+ def g = 0
+ }
+ }
+ }
+
+ // #3344
+ def m3344_1 = { case class C(x: Int); C(1).copy(2).x }
+ m3344_1
+ def m3344_2 = { class C(val x: Int = 1); new C().x }
+ m3344_2
+
+ // #3338
+ object t3338 {
+ class Container {
+ class GenericClass[T](arg: String = "")
+ }
+
+ object Container extends Container
+
+ class Test {
+ val a = new Container.GenericClass()
+ }
+ }
+ (new t3338.Test).a
+
+
+ // subclassing and defaults in both class constructors
+ class CBLAH(val x: Int = 1)
+ class DBLAH(val y: String = "2") extends CBLAH()
+ (new DBLAH())
+
+
+ // #3697
+ object t3697 {
+ def a(x: Int*)(s: Int = 3) = s
+ def b(a: Int, b: Int, c: Int*) = a + b
+ }
+ println(t3697.a(Seq(3): _*)())
+ println(t3697.a(3)())
+ println(t3697.a()())
+ println(t3697.a(2,3,1)())
+ println(t3697.b(a = 1, b = 2))
+ println(t3697.b(a = 1, b = 2, 3))
+ println(t3697.b(b = 1, a = 2, c = 3))
+ println(t3697.b(a = 1, b = 2, 3, 4))
+ println(t3697.b(a = 1, b = 2, Seq(3, 4): _*))
+ println(t3697.b(b = 1, a = 2, c = Seq(3, 4): _*))
+
+
// DEFINITIONS
def test1(a: Int, b: String) = println(a +": "+ b)
def test2(u: Int, v: Int)(k: String, l: Int) = println(l +": "+ k +", "+ (u + v))
diff --git a/test/files/run/nodebuffer-array.check b/test/files/run/nodebuffer-array.check
new file mode 100644
index 0000000000..49f8bfaf8d
--- /dev/null
+++ b/test/files/run/nodebuffer-array.check
@@ -0,0 +1,3 @@
+<entry>
+ <elem>a</elem><elem>b</elem><elem>c</elem>
+ </entry>
diff --git a/test/files/run/nodebuffer-array.scala b/test/files/run/nodebuffer-array.scala
new file mode 100644
index 0000000000..4e1ffe1e5e
--- /dev/null
+++ b/test/files/run/nodebuffer-array.scala
@@ -0,0 +1,15 @@
+object Test {
+
+ def f(s: String) = {
+ <entry>
+ {
+ for (item <- s split ',') yield
+ <elem>{ item }</elem>
+ }
+ </entry>
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f("a,b,c"))
+ }
+}
diff --git a/test/files/run/numbereq.scala b/test/files/run/numbereq.scala
new file mode 100644
index 0000000000..77a217df36
--- /dev/null
+++ b/test/files/run/numbereq.scala
@@ -0,0 +1,41 @@
+object Test {
+ def mkNumbers(x: Int): List[AnyRef] = {
+ val base = List(
+ BigDecimal(x),
+ BigInt(x),
+ new java.lang.Double(x.toDouble),
+ new java.lang.Float(x.toFloat),
+ new java.lang.Long(x.toLong),
+ new java.lang.Integer(x)
+ )
+ val extras = List(
+ if (x >= Short.MinValue && x <= Short.MaxValue) List(new java.lang.Short(x.toShort)) else Nil,
+ if (x >= Byte.MinValue && x <= Byte.MaxValue) List(new java.lang.Byte(x.toByte)) else Nil,
+ if (x >= Char.MinValue && x <= Char.MaxValue) List(new java.lang.Character(x.toChar)) else Nil
+ ).flatten
+
+ base ::: extras
+ }
+
+
+ def main(args: Array[String]): Unit = {
+ val ints = (0 to 15).toList map (Short.MinValue >> _)
+ val ints2 = ints map (x => -x)
+ val ints3 = ints map (_ + 1)
+ val ints4 = ints2 map (_ - 1)
+
+ val setneg1 = ints map mkNumbers
+ val setneg2 = ints3 map mkNumbers
+ val setpos1 = ints2 map mkNumbers
+ val setpos2 = ints4 map mkNumbers
+ val zero = mkNumbers(0)
+
+ val sets = setneg1 ++ setneg2 ++ List(zero) ++ setpos1 ++ setpos2
+
+ for (set <- sets ; x <- set ; y <- set) {
+ // println("'%s' == '%s' (%s == %s) (%s == %s)".format(x, y, x.hashCode, y.hashCode, x.##, y.##))
+ assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass))
+ assert(x.## == y.##, "%s != %s".format(x.getClass, y.getClass))
+ }
+ }
+}
\ No newline at end of file
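
The new numbereq test leans on Scala's cooperative equality: == and the ## hash agree across boxed numeric wrappers even though the raw Java hashCode values differ. A minimal sketch of that property (illustrative only, not part of the patch):

    object HashDemo {
      def main(args: Array[String]): Unit = {
        val d: Any = new java.lang.Double(1.0)
        val i: Any = new java.lang.Integer(1)
        println(d == i)                   // true: numeric equality across boxed types
        println(d.## == i.##)             // true: ## is consistent with ==
        println(d.hashCode == i.hashCode) // false: the raw Java hashCodes differ
      }
    }
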
diff --git a/test/files/run/packrat1.scala b/test/files/run/packrat1.scala
index 26fa8e76f2..71b593d421 100644
--- a/test/files/run/packrat1.scala
+++ b/test/files/run/packrat1.scala
@@ -21,15 +21,15 @@ object Test extends Application{
}
object grammars extends StandardTokenParsers with PackratParsers{
-
+
def extractResult(r : ParseResult[_]) = r match {
case Success(a,_) => a
case NoSuccess(a,_) => a
}
-
+
lexical.delimiters ++= List("+","-","*","/","(",")")
lexical.reserved ++= List("Hello","World")
-
+
/****
* term = term + fact | term - fact | fact
* fact = fact * num | fact / num | num
@@ -39,7 +39,7 @@ object grammars extends StandardTokenParsers with PackratParsers{
val term: PackratParser[Int] = (term~("+"~>fact) ^^ {case x~y => x+y}
|term~("-"~>fact) ^^ {case x~y => x-y}
|fact)
-
+
val fact: PackratParser[Int] = (fact~("*"~>numericLit) ^^ {case x~y => x*y.toInt}
|fact~("/"~>numericLit) ^^ {case x~y => x/y.toInt}
|"("~>term<~")"
diff --git a/test/files/run/packrat2.scala b/test/files/run/packrat2.scala
index e5c8db0f50..45558e86cc 100644
--- a/test/files/run/packrat2.scala
+++ b/test/files/run/packrat2.scala
@@ -22,15 +22,15 @@ object Test extends Application{
}
object grammars2 extends StandardTokenParsers with PackratParsers{
-
+
def extractResult(r : ParseResult[_]) = r match{
case Success(a,_) => a
case NoSuccess(a,_) => a
}
-
+
lexical.delimiters ++= List("+","-","*","/","(",")")
lexical.reserved ++= List("Hello","World")
-
+
/*
* exp = sum | prod | num
* sum = exp ~ "+" ~ num
@@ -40,18 +40,18 @@ object grammars2 extends StandardTokenParsers with PackratParsers{
val exp : PackratParser[Int] = sum | prod | numericLit ^^{_.toInt} | "("~>exp<~")"
val sum : PackratParser[Int] = exp~("+"~>exp) ^^ {case x~y => x+y}
val prod: PackratParser[Int] = exp~("*"~>(numericLit ^^{_.toInt} | exp)) ^^ {case x~y => x*y}
-
-
+
+
/* lexical.reserved ++= List("a","b", "c")
val a : PackratParser[Any] = numericLit^^{x => primeFactors(x.toInt)}
val b : PackratParser[Any] = memo("b")
val c : PackratParser[Any] = memo("c")
- val AnBnCn : PackratParser[Any] =
+ val AnBnCn : PackratParser[Any] =
parseButDontEat(repMany1(a,b))~not(b)~>rep1(a)~repMany1(b,c)// ^^{case x~y => x:::y}
//val c : PackratParser[Any] = parseButDontEat(a)~a~a
//println(c((new PackratReader(new lexical.Scanner("45 24")))))
val r = new PackratReader(new lexical.Scanner("45 b c"))
println(AnBnCn(r))
println(r.getCache.size)
-*/
+*/
}
diff --git a/test/files/run/packrat3.scala b/test/files/run/packrat3.scala
index 3820f05158..e58d16a1fb 100644
--- a/test/files/run/packrat3.scala
+++ b/test/files/run/packrat3.scala
@@ -24,28 +24,28 @@ object Test {
}
object grammars3 extends StandardTokenParsers with PackratParsers {
-
+
def extractResult(r: ParseResult[_]) = r match {
case Success(a,_) => a
case NoSuccess(a,_) => a
}
-
+
lexical.reserved ++= List("a","b", "c")
val a: PackratParser[Any] = memo("a")
val b: PackratParser[Any] = memo("b")
val c: PackratParser[Any] = memo("c")
- val AnBnCn: PackratParser[Any] =
+ val AnBnCn: PackratParser[Any] =
guard(repMany1(a,b) ~ not(b)) ~ rep1(a) ~ repMany1(b,c)// ^^{case x~y => x:::y}
- private def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
+ private def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
( p~repMany(p,q)~q ^^ {case x~xs~y => x::xs:::(y::Nil)}
| success(Nil)
)
- def repMany1[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
+ def repMany1[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
p~opt(repMany(p,q))~q ^^ {case x~Some(xs)~y => x::xs:::(y::Nil)}
-}
+}
diff --git a/test/files/run/patmat-seqs.check b/test/files/run/patmat-seqs.check
new file mode 100644
index 0000000000..bb2a5ee44a
--- /dev/null
+++ b/test/files/run/patmat-seqs.check
@@ -0,0 +1,13 @@
+s3
+s2
+s1
+s0
+ss6
+d
+s3
+s3
+d
+s1
+s3
+d
+d
diff --git a/test/files/run/patmat-seqs.scala b/test/files/run/patmat-seqs.scala
new file mode 100644
index 0000000000..e23711697c
--- /dev/null
+++ b/test/files/run/patmat-seqs.scala
@@ -0,0 +1,42 @@
+object Test {
+ def f1(x: Any) = x match {
+ case Seq(1, 2, 3) => "s3"
+ case Seq(4, 5) => "s2"
+ case Seq(7) => "s1"
+ case Nil => "s0"
+ case Seq(_, _, _, _, _, x: String) => "ss6"
+ case _ => "d"
+ }
+
+ def f2(x: Any) = x match {
+ case Seq("a", "b", _*) => "s2"
+ case Seq(1, _*) => "s1"
+ case Seq(5, 6, 7, _*) => "s3"
+ case _ => "d"
+ }
+
+ def main(args: Array[String]): Unit = {
+ val xs1 = List(
+ List(1,2,3),
+ List(4,5),
+ Vector(7),
+ Seq(),
+ Seq(1, 2, 3, 4, 5, "abcd"),
+ "abc"
+ ) map f1
+
+ xs1 foreach println
+
+ val xs2 = List(
+ Seq(5, 6, 7),
+ Seq(5, 6, 7, 8, 9),
+ Seq("a"),
+ Seq(1, 6, 7),
+ List(5, 6, 7),
+ Nil,
+ 5
+ ) map f2
+
+ xs2 foreach println
+ }
+}
diff --git a/test/files/run/patmatnew.scala b/test/files/run/patmatnew.scala
index 025d6bf2ef..e2c95e3c81 100644
--- a/test/files/run/patmatnew.scala
+++ b/test/files/run/patmatnew.scala
@@ -72,7 +72,7 @@ object Test extends TestConsoleMain {
object SeqUnapply extends TestCase("seqUnapply") {
case class SFB(i: Int, xs: List[Int])
- override def runTest() {
+ override def runTest() {
List(1,2) match {
case List(1) => assert(false, "wrong case")
case List(1,2,xs @ _*) => assert(xs.isEmpty, "not empty")
@@ -104,11 +104,11 @@ object Test extends TestConsoleMain {
case 0 => 0
})
assertEquals("s2", 1, 1 match {
- case 1 => 1
+ case 1 => 1
case _ => 0
})
assertEquals("s2boxed", 1, (1:Any) match {
- case 1 => 1
+ case 1 => 1
case _ => 0
})
assertEquals("s3", 1, ("hello") match {
@@ -235,7 +235,7 @@ object Test extends TestConsoleMain {
case class Foo() extends Con
case class Bar(xs:Con*) extends Con
-
+
override def runTest() {
val res = (Bar(Foo()):Con) match {
case Bar(xs@_*) => xs // this should be optimized away to a pattern Bar(xs)
@@ -246,9 +246,9 @@ object Test extends TestConsoleMain {
}
object TestSequence06 extends TestCase("sei (not regular) fancy guards / bug#644 ") {
-
+
case class A(i: Any)
-
+
def doMatch(x: Any, bla: Int) = x match {
case x:A if (bla==1) => 0
case A(1) => 1
@@ -273,7 +273,7 @@ object Test extends TestConsoleMain {
//def doMatch3(xs:List[char]) = xs match {
// case List(_*, z, w) => w::Nil
//}
- //
+ //
// Since the second case should have been unreachable all along,
// let's just comment this one out.
//
@@ -299,8 +299,8 @@ object Test extends TestConsoleMain {
assertEquals(List('c','d'), doMatch6(List('a','b','c','d')))
}
}
-
- object TestSequence08 extends TestCase("backquoted identifiers in pattern") {
+
+ object TestSequence08 extends TestCase("backquoted identifiers in pattern") {
override def runTest() {
val xs = List(2, 3)
val ys = List(1, 2, 3) match {
@@ -318,17 +318,17 @@ object Test extends TestConsoleMain {
case Stream.Empty => 0
case Stream.cons(hd, tl) => hd + sum(tl)
}
-
+
val str: Stream[Int] = List(1,2,3).iterator.toStream
def runTest() = assertEquals(sum(str), 6)
}
-
+
class Test1163_Order extends TestCase("bug#1163 order of temps must be preserved") {
abstract class Function
case class Var(n: String) extends Function
case class Const(v: Double) extends Function
-
+
def f(): (Function, Function) = {
(Var("x"): Function, Var("y"): Function) match {
case (Const(v), Const(w)) => throw new Error
@@ -336,13 +336,13 @@ object Test extends TestConsoleMain {
case (leftTwo, rightTwo) => (leftTwo, rightTwo) // was giving "y","x"
}
}
-
- def flips(l: List[Int]): Int = (l: @unchecked) match {
+
+ def flips(l: List[Int]): Int = (l: @unchecked) match {
case 1 :: ls => 0
case n :: ls => flips((l take n reverse) ::: (l drop n)) + 1
}
- def runTest() = assertEquals("both", (Var("x"),Var("y")), f)
+ def runTest() = assertEquals("both", (Var("x"),Var("y")), f)
}
class TestUnbox extends TestCase("unbox") {
@@ -366,8 +366,8 @@ object Test extends TestConsoleMain {
case dq : DoubleQuoteImpl => dq
}
}
-
- trait IfElseMatcher {
+
+ trait IfElseMatcher {
type Node <: NodeImpl
trait NodeImpl
trait IfImpl
@@ -375,7 +375,7 @@ object Test extends TestConsoleMain {
case node : IfImpl => node // var node is of type Node with IfImpl!
case _ => null
}
- }
+ }
}
@@ -385,7 +385,7 @@ object Test extends TestConsoleMain {
}
object PersonFather {
- def unapply(p : Person) : Option[Person] =
+ def unapply(p : Person) : Option[Person] =
if (p.father == null)
None
else
@@ -425,11 +425,11 @@ object Test extends TestConsoleMain {
object Foo1 {
class Bar1(val x : String)
def p(b : Bar1) = Console.println(b.x)
-
+
def unapply(s : String) : Option[Bar1] =
Some(new Bar1(s))
}
-
+
object bug881 extends TestCase("881") {
override def runTest = {
"baz" match {
@@ -448,19 +448,19 @@ object Test extends TestConsoleMain {
case Pair(Some(x),Some(y)) => 3
case _ => 4
}
-
+
def g1[A](z:Option[List[A]]) = z match {
- case Some(Nil) => true
- case Some(x::Nil) => true
+ case Some(Nil) => true
+ case Some(x::Nil) => true
case _ => true
}
-
+
def g2[A](z:Option[List[A]]) = z match {
- case Some(x::Nil) => true
+ case Some(x::Nil) => true
case Some(_) => false
case _ => true
}
-
+
def h[A](x: (Option[A],Option[A])) = x match {
case Pair(None,_:Some[_]) => 1
case Pair(_:Some[_],None ) => 2
@@ -485,20 +485,20 @@ object Test extends TestConsoleMain {
}
object Bug1270 { // unapply13
-
+
class Sync {
def apply(x: Int): Int = 42
def unapply(scrut: Any): Option[Int] = None
}
-
+
class Buffer {
object Get extends Sync
-
+
var ps: PartialFunction[Any, Any] = {
- case Get(y) if y > 4 => // y gets a wildcard type for some reason?! hack
+ case Get(y) if y > 4 => // y gets a wildcard type for some reason?! hack
}
}
-
+
println((new Buffer).ps.isDefinedAt(42))
}
@@ -509,7 +509,7 @@ object Test extends TestConsoleMain {
trait Row extends Elem
object Row {
def unapply(r: Row) = true
-
+
def f(elem: Elem) {
elem match {
case Bar() => ;
@@ -520,9 +520,9 @@ object Test extends TestConsoleMain {
/*
object Feature1196 {
def f(l: List[Int]) { }
-
+
val l: Seq[Int] = List(1, 2, 3)
-
+
l match {
case x @ List(1, _) => f(x) // x needs to get better type List[int] here
}
@@ -546,9 +546,9 @@ object Test extends TestConsoleMain {
trait Core { def next : Position = null }
trait Dir
val NEXT = new Dir{}
-
+
trait Position extends Core
-
+
(null:Core, null:Dir) match {
case (_, NEXT) if true => false // no matter whether NEXT test succeed, cannot throw column because of guard
case (at2:Position,dir) => true
@@ -573,15 +573,15 @@ object Test extends TestConsoleMain {
else None
}
}
-
+
class Buffer {
val Get = new Sync
-
+
val jp: PartialFunction[Any, Any] = {
case Get(xs) => println(xs) // the argDummy <unapply-selector> should have proper arg.tpe (Int in this case)
}
}
-
+
println((new Buffer).jp.isDefinedAt(40))
println((new Buffer).jp.isDefinedAt(42))
}
@@ -595,7 +595,7 @@ object Test extends TestConsoleMain {
val s:PartialFunction[Any,Any] = {
case List(4::xs) => 1
case List(5::xs) => 1
- case _ if false =>
+ case _ if false =>
case List(3::xs) if List(3:Any).forall { g => g.hashCode() > 0 } => 1
}
z.isDefinedAt(42)
@@ -644,10 +644,10 @@ object Test extends TestConsoleMain {
// bug#508
- object Bug508 extends TestCase("aladdin #508") {
+ object Bug508 extends TestCase("aladdin #508") {
case class Operator(x: Int);
val EQ = new Operator(2);
-
+
def analyze(x: Pair[Operator, Int]) = x match {
case Pair(EQ, 0) => "0"
case Pair(EQ, 1) => "1"
@@ -660,35 +660,35 @@ object Test extends TestConsoleMain {
assertEquals("1", analyze(y)); // should print "1"
val z = Pair(EQ, 2);
assertEquals("2", analyze(z)); // should print "2"
- }
+ }
}
// bug#789
-
+
object Bug789 extends TestCase("aladdin #789") { // don't do this at home
trait Impl
-
+
trait SizeImpl extends Impl { def size = 42 }
-
+
trait ColorImpl extends Impl { def color = "red" }
-
+
type Both = SizeImpl with ColorImpl
-
+
def info(x:Impl) = x match {
case x:Both => "size "+x.size+" color "+x.color // you wish
case x:SizeImpl => "!size "+x.size
case x:ColorImpl => "color "+x.color
case _ => "n.a."
}
-
+
def info2(x:Impl) = x match {
case x:SizeImpl with ColorImpl => "size "+x.size+" color "+x.color // you wish
case x:SizeImpl => "!size "+x.size
case x:ColorImpl => "color "+x.color
case _ => "n.a."
}
-
+
override def runTest {
// make up some class that has a size
class MyNode extends SizeImpl
@@ -705,7 +705,7 @@ object Test extends TestConsoleMain {
case a: AnyRef if runtime.ScalaRunTime.isArray(a) => "Array"
case _ => v.toString
}
- override def runTest { assertEquals("Array", foo(Array(0))) }
+ override def runTest { assertEquals("Array", foo(Array(0))) }
}
// bug#1093 (contribution #460)
@@ -739,7 +739,7 @@ object Test extends TestConsoleMain {
}
}
}
-
+
object Ticket2 extends TestCase("#2") { override def runTest {
val o1 = new Outer_2; val o2 = new Outer_2; val x: Any = o1.Foo(1, 2); val y: Any = o2.Foo(1, 2)
assertFalse("equals test returns true (but should not)", x equals y)
@@ -758,9 +758,9 @@ object Test extends TestConsoleMain {
// will cause the test to succeed.
trait SpecialException extends MyException1
// trait SpecialException
-
+
class MyException2 extends MyException1 with SpecialException
-
+
object Ticket11 extends TestCase("#11") {
override def runTest {
Array[Throwable](new Exception("abc"),
@@ -781,7 +781,7 @@ object Test extends TestConsoleMain {
}
// #37
-
+
object Ticket37 extends TestCase("#37") {
def foo() {}
val (a,b) = { foo(); (2,3) }
@@ -799,7 +799,7 @@ object Test extends TestConsoleMain {
object Y extends _X {
val foo = _Foo()
foo match {
- case _Bar() =>
+ case _Bar() =>
case _ => assert(false)
}
}
@@ -819,7 +819,7 @@ object Test extends TestConsoleMain {
sealed abstract class Tree
case class Node(l: Tree, v: Int, r: Tree) extends Tree
case object EmptyTree extends Tree
-
+
object Ticket335 extends TestCase("#335") { // compile-only
override def runTest {
(EmptyTree: Tree @unchecked) match {
@@ -849,7 +849,7 @@ class L(val content: List[Int]) {
}
object N extends L(Nil) {
-
+
override def equals(that: Any): Boolean = {
val result = (that.isInstanceOf[L] && that.asInstanceOf[L].isEmpty)
//println("N.equals("+that+") returning "+result)
@@ -887,13 +887,13 @@ override def runTest() {
} // end Ticket346
object Ticket495bis { // compile-only
- def signum(x: Int): Int =
- x match {
+ def signum(x: Int): Int =
+ x match {
case 0 => 0
case _ if x < 0 => -1
case _ if x > 0 => 1
}
- def pair_m(x: Int, y: Int) =
+ def pair_m(x: Int, y: Int) =
(x,y) match {
case (_, 0) => 0
case (-1, _) => -1
@@ -903,14 +903,14 @@ override def runTest() {
object Ticket522 { // compile-only
class Term[X]
- object App {
+ object App {
// i'm hidden
case class InternalApply[Y,Z](fun:Y=>Z, arg:Y) extends Term[Z]
- def apply[Y,Z](fun:Y=>Z, arg:Y): Term[Z] =
+ def apply[Y,Z](fun:Y=>Z, arg:Y): Term[Z] =
new InternalApply[Y,Z](fun,arg)
- def unapply[X](arg: Term[X]): Option[(Y=>Z,Y)] forSome {type Y; type Z} =
+ def unapply[X](arg: Term[X]): Option[(Y=>Z,Y)] forSome {type Y; type Z} =
arg match {
case i:InternalApply[y,z] => Some(i.fun, i.arg)
case _ => None
@@ -918,7 +918,7 @@ override def runTest() {
}
App({x: Int => x}, 5) match {
- case App(arg, a) =>
+ case App(arg, a) =>
}
} // end Ticket522
diff --git a/test/files/run/priorityQueue.scala b/test/files/run/priorityQueue.scala
index 9f453788fc..a3460cd8d6 100644
--- a/test/files/run/priorityQueue.scala
+++ b/test/files/run/priorityQueue.scala
@@ -1,24 +1,346 @@
+
+
+import scala.collection.mutable.PriorityQueue
+
+
+
+
+
+
// populate a priority queue a few different ways and make sure they all seem equal
-object Test extends Application {
- import scala.collection.mutable.PriorityQueue
- import scala.util.Random.nextInt
- val pq1 = new PriorityQueue[String]
- val pq2 = new PriorityQueue[String]
- val pq3 = new PriorityQueue[String]
- val pq4 = new PriorityQueue[String]
+object Test {
- val strings = (1 to 20).toList map (i => List.fill((Math.abs(nextInt % 20)) + 1)("x").mkString)
+ def main(args: Array[String]) {
+ testInsertionsAndEqualities
+ testIntensiveEnqueueDequeue
+ testIndexing
+ testTails
+ testInits
+ testFilters
+ testDrops
+ testUpdates
+ testEquality
+ testMisc
+ testReverse
+ }
+
+ def testInsertionsAndEqualities {
+ import scala.util.Random.nextInt
+ val pq1 = new PriorityQueue[String]
+ val pq2 = new PriorityQueue[String]
+ val pq3 = new PriorityQueue[String]
+ val pq4 = new PriorityQueue[String]
+
+ val strings = (1 to 20).toList map (i => List.fill((Math.abs(nextInt % 20)) + 1)("x").mkString)
+
+ pq1 ++= strings
+ pq2 ++= strings.reverse
+ for (s <- strings) pq3 += s
+ for (s <- strings.reverse) pq4 += s
+
+ val pqs = List(pq1, pq2, pq3, pq4, pq1.clone, pq2.clone)
+
+ for (queue1 <- pqs ; queue2 <- pqs) {
+ assert(queue1 == queue2)
+ assert(queue1.max == queue2.max)
+ }
+
+ assertPriority(pq1)
+ }
+
+ def testIndexing {
+ val pq = new PriorityQueue[Char]
+ "The quick brown fox jumps over the lazy dog".foreach(pq += _)
- pq1 ++= strings
- pq2 ++= strings.reverse
- for (s <- strings) pq3 += s
- for (s <- strings.reverse) pq4 += s
+ // val iter = pq.iterator
+ // while (iter.hasNext) println("`" + iter.next + "`")
+ assert(pq(0) == 'z')
+ assert(pq(1) == 'y')
+ assert(pq(2) == 'x')
+ assert(pq(3) == 'w')
+ assert(pq(4) == 'v')
+ assert(pq(5) == 'u')
+ assert(pq(7) == 't')
+ assert(pq(8) == 's')
+ assert(pq(9) == 'r')
+ assert(pq(10) == 'r')
- val pqs = List(pq1, pq2, pq3, pq4, pq1.clone, pq2.clone)
+ pq.clear
+ "abcdefghijklmnopqrstuvwxyz".foreach(pq += _)
+ for (i <- 0 until 26) assert(pq(i) == ('z' - i))
- for (queue1 <- pqs ; queue2 <- pqs) {
- assert(queue1 == queue2)
- assert(queue1.max == queue2.max)
+ val intpq = new PriorityQueue[Int]
+ val intlst = new collection.mutable.ArrayBuffer ++ (0 until 100)
+ val random = new util.Random(101)
+ while (intlst.nonEmpty) {
+ val idx = random.nextInt(intlst.size)
+ intpq += intlst(idx)
+ intlst.remove(idx)
+ }
+ for (i <- 0 until 100) assert(intpq(i) == (99 - i))
}
+
+ def testTails {
+ val pq = new PriorityQueue[Int]
+ for (i <- 0 until 10) pq += i * 4321 % 200
+
+ assert(pq.size == 10)
+ assert(pq.nonEmpty)
+
+ val tailpq = pq.tail
+ // pq.printstate
+ // tailpq.printstate
+ assert(tailpq.size == 9)
+ assert(tailpq.nonEmpty)
+ assertPriorityDestructive(tailpq)
+ }
+
+ def assertPriorityDestructive[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]) {
+ import ord._
+ var prev: A = null.asInstanceOf[A]
+ while (pq.nonEmpty) {
+ val curr = pq.dequeue
+ if (prev != null) assert(curr <= prev)
+ prev = curr
+ }
+ }
+
+ def assertPriority[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]) {
+ import ord._
+ var prev: A = null.asInstanceOf[A]
+ val iter = pq.iterator
+ while (iter.hasNext) {
+ val curr = iter.next
+ if (prev != null) assert(curr <= prev)
+ prev = curr
+ }
+ }
+
+ def testInits {
+ val pq = new PriorityQueue[Long]
+ for (i <- 0 until 20) pq += (i + 313) * 111 % 300
+
+ assert(pq.size == 20)
+
+ val initpq = pq.init
+ assert(initpq.size == 19)
+ assertPriorityDestructive(initpq)
+ }
+
+ def testFilters {
+ val pq = new PriorityQueue[String]
+ for (i <- 0 until 100) pq += "Some " + (i * 312 % 200)
+
+ val filpq = pq.filter(_.indexOf('0') != -1)
+ assertPriorityDestructive(filpq)
+ }
+
+ def testIntensiveEnqueueDequeue {
+ val pq = new PriorityQueue[Int]
+
+ testIntensive(1000, pq)
+ pq.clear
+ testIntensive(200, pq)
+ }
+
+ def testIntensive(sz: Int, pq: PriorityQueue[Int]) {
+ val lst = new collection.mutable.ArrayBuffer[Int] ++ (0 until sz)
+ val rand = new util.Random(7)
+ while (lst.nonEmpty) {
+ val idx = rand.nextInt(lst.size)
+ pq.enqueue(lst(idx))
+ lst.remove(idx)
+ if (rand.nextDouble < 0.25 && pq.nonEmpty) pq.dequeue
+ assertPriority(pq)
+ }
+ }
+
+ def testDrops {
+ val pq = new PriorityQueue[Int]
+ pq ++= (0 until 100)
+ val droppq = pq.drop(50)
+ assertPriority(droppq)
+
+ pq.clear
+ pq ++= droppq
+ assertPriorityDestructive(droppq)
+ assertPriority(pq)
+ assertPriorityDestructive(pq)
+ }
+
+ def testUpdates {
+ val pq = new PriorityQueue[Int]
+ pq ++= (0 until 36)
+ assertPriority(pq)
+
+ pq(0) = 100
+ assert(pq(0) == 100)
+ assert(pq.dequeue == 100)
+ assertPriority(pq)
+
+ pq.clear
+
+ pq ++= (1 to 100)
+ pq(5) = 200
+ assert(pq(0) == 200)
+ assert(pq(1) == 100)
+ assert(pq(2) == 99)
+ assert(pq(3) == 98)
+ assert(pq(4) == 97)
+ assert(pq(5) == 96)
+ assert(pq(6) == 94)
+ assert(pq(7) == 93)
+ assert(pq(98) == 2)
+ assert(pq(99) == 1)
+ assertPriority(pq)
+
+ pq(99) = 450
+ assert(pq(0) == 450)
+ assert(pq(1) == 200)
+ assert(pq(99) == 2)
+ assertPriority(pq)
+
+ pq(1) = 0
+ assert(pq(1) == 100)
+ assert(pq(99) == 0)
+ assertPriority(pq)
+ assertPriorityDestructive(pq)
+ }
+
+ def testEquality {
+ val pq1 = new PriorityQueue[Int]
+ val pq2 = new PriorityQueue[Int]
+
+ pq1 ++= (0 until 50)
+ var i = 49
+ while (i >= 0) {
+ pq2 += i
+ i -= 1
+ }
+ assert(pq1 == pq2)
+ assertPriority(pq2)
+
+ pq1 += 100
+ assert(pq1 != pq2)
+ pq2 += 100
+ assert(pq1 == pq2)
+ pq2 += 200
+ assert(pq1 != pq2)
+ pq1 += 200
+ assert(pq1 == pq2)
+ assertPriorityDestructive(pq1)
+ assertPriorityDestructive(pq2)
+ }
+
+ def testMisc {
+ val pq = new PriorityQueue[Int]
+ pq ++= (0 until 100)
+ assert(pq.size == 100)
+
+ val (p1, p2) = pq.partition(_ < 50)
+ assertPriorityDestructive(p1)
+ assertPriorityDestructive(p2)
+
+ val spq = pq.slice(25, 75)
+ assertPriorityDestructive(spq)
+
+ pq.clear
+ pq ++= (0 until 10)
+ pq += 5
+ assert(pq.size == 11)
+
+ val ind = pq.lastIndexWhere(_ == 5)
+ assert(ind == 5)
+ assertPriorityDestructive(pq)
+
+ pq.clear
+ pq ++= (0 until 10)
+ assert(pq.lastIndexWhere(_ == 9) == 0)
+ assert(pq.lastIndexOf(8) == 1)
+ assert(pq.lastIndexOf(7) == 2)
+
+ pq += 5
+ pq += 9
+ assert(pq.lastIndexOf(9) == 1)
+ assert(pq.lastIndexWhere(_ % 2 == 1) == 10)
+ assert(pq.lastIndexOf(5) == 6)
+
+ val lst = pq.reverseIterator.toList
+ for (i <- 0 until 5) assert(lst(i) == i)
+ assert(lst(5) == 5)
+ assert(lst(6) == 5)
+ assert(lst(7) == 6)
+ assert(lst(8) == 7)
+ assert(lst(9) == 8)
+ assert(lst(10) == 9)
+ assert(lst(11) == 9)
+
+ pq.clear
+ assert(pq.reverseIterator.toList.isEmpty)
+
+ pq ++= (50 to 75)
+ assert(pq.lastIndexOf(70) == 5)
+
+ pq += 55
+ pq += 70
+ assert(pq.lastIndexOf(70) == 6)
+ assert(pq.lastIndexOf(55) == 22)
+ assert(pq.lastIndexOf(55, 21) == 21)
+ assert(pq.lastIndexWhere(_ > 54) == 22)
+ assert(pq.lastIndexWhere(_ > 54, 21) == 21)
+ assert(pq.lastIndexWhere(_ > 69, 5) == 5)
+ }
+
+ def testReverse {
+ val pq = new PriorityQueue[(Int, Int)]
+ pq ++= (for (i <- 0 until 10) yield (i, i * i % 10))
+
+ assert(pq.reverse.size == pq.reverseIterator.toList.size)
+ assert((pq.reverse zip pq.reverseIterator.toList).forall(p => p._1 == p._2))
+ assert(pq.reverse.sameElements(pq.reverseIterator.toSeq))
+ assert(pq.reverse(0)._1 == pq(9)._1)
+ assert(pq.reverse(1)._1 == pq(8)._1)
+ assert(pq.reverse(4)._1 == pq(5)._1)
+ assert(pq.reverse(9)._1 == pq(0)._1)
+
+ pq += ((7, 7))
+ pq += ((7, 9))
+ pq += ((7, 8))
+ assert(pq.reverse.reverse == pq)
+ assert(pq.reverse.lastIndexWhere(_._2 == 6) == 6)
+ assertPriorityDestructive(pq.reverse.reverse)
+
+ val iq = new PriorityQueue[Int]
+ iq ++= (0 until 50)
+ assert(iq.reverse == iq.reverseIterator.toSeq)
+ assert(iq.reverse.reverse == iq)
+
+ iq += 25
+ iq += 40
+ iq += 10
+ assert(iq.reverse == iq.reverseIterator.toList)
+ assert(iq.reverse.reverse == iq)
+ assert(iq.reverse.lastIndexWhere(_ == 10) == 11)
+ assertPriorityDestructive(iq.reverse.reverse)
+ }
+
}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check
new file mode 100644
index 0000000000..e6c83a6f48
--- /dev/null
+++ b/test/files/run/programmatic-main.check
@@ -0,0 +1,25 @@
+parser
+namer
+packageobjects
+typer
+superaccessors
+pickler
+refchecks
+liftcode
+uncurry
+tailcalls
+specialize
+explicitouter
+erasure
+lazyvals
+lambdalift
+constructors
+flatten
+mixin
+cleanup
+icode
+inliner
+closelim
+dce
+jvm
+terminal
diff --git a/test/files/run/programmatic-main.scala b/test/files/run/programmatic-main.scala
new file mode 100644
index 0000000000..b44b1a9a55
--- /dev/null
+++ b/test/files/run/programmatic-main.scala
@@ -0,0 +1,12 @@
+import scala.tools.nsc._
+import io.Path
+
+object Test {
+ val cwd = Option(System.getProperty("partest.cwd")) getOrElse "."
+ val basedir = Path(cwd).parent / "lib" path
+ val baseargs = Array("-usejavacp", "-bootclasspath", basedir + "/scala-library.jar", "-cp", basedir + "/scala-compiler.jar")
+
+ def main(args: Array[String]): Unit = {
+ Main process (baseargs ++ Array("-Xshow-phases"))
+ }
+}
diff --git a/test/files/run/proxy.scala b/test/files/run/proxy.scala
index 48192a7a79..869cbf87df 100644
--- a/test/files/run/proxy.scala
+++ b/test/files/run/proxy.scala
@@ -1,6 +1,6 @@
object Test extends Application {
val p = new Proxy {
- def self = 2
+ def self = 2
}
println(p equals 1)
println(p equals 2)
diff --git a/test/files/run/randomAccessSeq-apply.scala b/test/files/run/randomAccessSeq-apply.scala
index 863a4d42a2..f398ea1536 100644
--- a/test/files/run/randomAccessSeq-apply.scala
+++ b/test/files/run/randomAccessSeq-apply.scala
@@ -1,14 +1,14 @@
object Test extends Application {
val empty = RandomAccessSeq()
assert(empty.isEmpty)
-
+
val single = RandomAccessSeq(1)
assert(List(1) == single.toList)
-
+
val two = RandomAccessSeq("a", "b")
- assert("a" == two.first)
+ assert("a" == two.head)
assert("b" == two.apply(1))
-
+
println("OK")
}
diff --git a/test/files/run/range.scala b/test/files/run/range.scala
index 65d33a6134..387b2d0d5b 100644
--- a/test/files/run/range.scala
+++ b/test/files/run/range.scala
@@ -6,30 +6,30 @@ object Test {
range.foreach(buffer += _);
assert(buffer.toList == range.iterator.toList, buffer.toList+"/"+range.iterator.toList)
}
-
+
case class GR[T](val x: T)(implicit val num: Integral[T]) {
import num._
-
+
def negated = GR[T](-x)
-
+
def gr1 = NumericRange(x, x, x)
def gr2 = NumericRange.inclusive(x, x, x)
def gr3 = NumericRange(x, x * fromInt(10), x)
def gr4 = NumericRange.inclusive(x, x * fromInt(10), x)
-
+
def check = assert(
gr1.isEmpty && !gr2.isEmpty &&
- gr3.size == 9 && gr4.size == 10 &&
- (gr3.toList ::: negated.gr3.toList).sum == num.zero &&
+ gr3.size == 9 && gr4.size == 10 &&
+ (gr3.toList ::: negated.gr3.toList).sum == num.zero &&
!(gr3 contains (x * fromInt(10))) &&
(gr4 contains (x * fromInt(10)))
)
- }
-
+ }
+
def main(args: Array[String]): Unit = {
implicit val imp1 = Numeric.BigDecimalAsIfIntegral
implicit val imp2 = Numeric.DoubleAsIfIntegral
-
+
val _grs = List[GR[_]](
GR(BigDecimal(5.0)),
GR(BigInt(5)),
@@ -39,16 +39,19 @@ object Test {
)
val grs = _grs ::: (_grs map (_.negated))
grs foreach (_.check)
-
+
assert(NumericRange(1, 10, 1) sameElements (1 until 10))
assert(NumericRange.inclusive(1, 10, 1) sameElements (1 to 10))
assert(NumericRange.inclusive(1, 100, 3) sameElements (1 to 100 by 3))
-
+
+ // #2518
+ assert((3L to 7 by 2) sameElements List(3L, 5L, 7L))
+
rangeForeach(1 to 10);
rangeForeach(1 until 10);
rangeForeach(10 to 1 by -1);
rangeForeach(10 until 1 by -1);
rangeForeach(10 to 1 by -3);
- rangeForeach(10 until 1 by -3);
+ rangeForeach(10 until 1 by -3);
}
}
diff --git a/test/files/run/regularpatmat.check b/test/files/run/regularpatmat.check
deleted file mode 100644
index 3417d9a98a..0000000000
--- a/test/files/run/regularpatmat.check
+++ /dev/null
@@ -1,126 +0,0 @@
-pretest
-passed ok
-testWR_1
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_2
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_3
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_4
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_5
-passed ok
-passed ok
-passed ok
-testWR_6
-passed ok
-passed ok
-testWR_7
-passed ok
-testWR_8
-passed ok
-testWS
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWT
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWV
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testBK
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testBM
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-BN preTest: true
-testBN
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testBO
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testMZ - bugs #132 #133b #180 #195 #196 #398 #406 #441
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
diff --git a/test/files/run/runtime-richChar.scala b/test/files/run/runtime-richChar.scala
index b2c488983d..f5ddc95f04 100644
--- a/test/files/run/runtime-richChar.scala
+++ b/test/files/run/runtime-richChar.scala
@@ -5,19 +5,19 @@ object Test extends Application {
else
println(name + " failed: " + expected + " differs from " + got)
}
-
+
testSeq("'a' to 'c'", List('a', 'b', 'c'), 'a' to 'c')
testSeq("'a' until 'c'", List('a', 'b'), 'a' until 'c')
-
+
testSeq("'a' to 'b'", List('a', 'b'), 'a' to 'b')
testSeq("'a' until 'b'", List('a'), 'a' until 'b')
-
+
testSeq("'a' to 'a'", List('a'), 'a' to 'a')
testSeq("'a' until 'a'", List(), 'a' until 'a')
-
+
testSeq("'b' to 'a'", List(), 'b' to 'a')
testSeq("'b' until 'a'", List(), 'b' until 'a')
-
+
testSeq("'c' to 'a'", List(), 'c' to 'a')
testSeq("'c' until 'a'", List(), 'c' until 'a')
}
diff --git a/test/files/run/sequenceComparisons.scala b/test/files/run/sequenceComparisons.scala
index 30192096db..c8e14e452c 100644
--- a/test/files/run/sequenceComparisons.scala
+++ b/test/files/run/sequenceComparisons.scala
@@ -2,12 +2,12 @@ import scala.collection.{ mutable, immutable }
import collection.{ Seq, Traversable }
object Test {
- // TODO:
+ // TODO:
//
// SeqProxy
// SeqForwarder
// the commented out ones in seqMakers
-
+
val seqMakers = List[List[Int] => Seq[Int]](
// scala.Array(_: _*),
mutable.ArrayBuffer(_: _*),
@@ -22,14 +22,14 @@ object Test {
// mutable.Queue(_: _*),
immutable.Seq(_: _*),
mutable.Seq(_: _*),
- // immutable.Stack(_: _*),
- // mutable.Stack(_: _*),
+ immutable.Stack(_: _*),
+ // mutable.Stack(_: _*),
immutable.IndexedSeq(_: _*), // was Vector
//mutable.Vector(_: _*),
immutable.List(_: _*),
immutable.Stream(_: _*)
)
-
+
abstract class Data[T] {
val seq: Seq[T]
private def seqList = seq.toList
@@ -45,50 +45,50 @@ object Test {
}
lazy val eqeq = Method(_ == _, (List(seqList), List(Nil, seqList drop 1, seqList ::: seqList)), "%s == %s")
-
+
val startsWithInputs: Inputs
lazy val startsWith = Method(_ startsWith _, startsWithInputs, "%s startsWith %s")
-
+
val endsWithInputs: Inputs
lazy val endsWith = Method(_ endsWith _, endsWithInputs, "%s endsWith %s")
- val indexOfSeqInputs: Inputs
- private def subseqTest(s1: Seq[T], s2: Seq[T]) = (s1 indexOfSeq s2) != -1
- lazy val indexOfSeq = Method(subseqTest _, indexOfSeqInputs, "(%s indexOfSeq %s) != -1")
-
+ val indexOfSliceInputs: Inputs
+ private def subseqTest(s1: Seq[T], s2: Seq[T]) = (s1 indexOfSlice s2) != -1
+ lazy val indexOfSlice = Method(subseqTest _, indexOfSliceInputs, "(%s indexOfSlice %s) != -1")
+
val sameElementsInputs: Inputs
lazy val sameElements = Method(_ sameElements _, sameElementsInputs, "%s sameElements %s")
-
- def methodList = List(eqeq, startsWith, endsWith, indexOfSeq, sameElements)
+
+ def methodList = List(eqeq, startsWith, endsWith, indexOfSlice, sameElements)
}
-
+
object test1 extends Data[Int] {
val seq = List(1,2,3,4,5)
-
+
val startsWithInputs = (
List(Nil, List(1), List(1,2), seq),
List(List(1,2,3,4,6), seq ::: List(5), List(0))
)
-
+
val endsWithInputs = (
List(Nil, List(5), List(4,5), seq),
List(0 :: seq, List(5,2,3,4,5), List(3,4), List(5,6))
)
-
- val indexOfSeqInputs = (
+
+ val indexOfSliceInputs = (
List(Nil, List(1), List(3), List(5), List(1,2), List(2,3,4), List(4,5), seq),
List(List(1,2,3,5), List(6), List(5,4,3,2,1), List(2,1))
)
-
+
val sameElementsInputs = (
List(List(1,2,3,4,5)),
List(Nil, List(1), List(1,2), List(2,3,4), List(2,3,4,5), List(2,3,4,5,1), List(1,2,3,5,4), seq reverse)
)
}
-
+
val failures = new mutable.ListBuffer[String]
var testCount = 0
-
+
def assertOne(op1: Any, op2: Any, res: Boolean, str: String) {
testCount += 1
val resStr = str.format(op1, op2)
@@ -97,25 +97,25 @@ object Test {
failures += ("FAIL: " + resStr)
// assert(res, resStr)
}
-
+
def runSeqs() = {
for (s1f <- seqMakers ; s2f <- seqMakers ; testData <- List(test1)) {
import testData._
val scrut = s1f(seq)
-
+
for (Method(f, (trueList, falseList), descr) <- methodList) {
for (s <- trueList; val rhs = s2f(s))
assertOne(scrut, rhs, f(scrut, rhs), descr)
-
+
for (s <- falseList; val rhs = s2f(s))
assertOne(scrut, rhs, !f(scrut, rhs), "!(" + descr + ")")
}
}
}
-
+
def main(args: Array[String]) {
runSeqs()
-
+
assert(failures.isEmpty, failures mkString "\n")
}
}
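
The sequenceComparisons change tracks the collections rename of indexOfSeq to indexOfSlice; the behaviour itself is unchanged. A quick sketch of the renamed method, with the same expected results the t1323 test lists below (illustrative, not part of the patch):

    object SliceDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3, 4)
        println(xs.indexOfSlice(List(2, 3))) // 1: first index at which the slice occurs
        println(xs.indexOfSlice(List(4, 3))) // -1: not a contiguous subsequence
        println(xs.indexOfSlice(Nil))        // 0: the empty slice matches at the start
      }
    }
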
diff --git a/test/files/run/slice-strings.scala b/test/files/run/slice-strings.scala
new file mode 100644
index 0000000000..23085866f5
--- /dev/null
+++ b/test/files/run/slice-strings.scala
@@ -0,0 +1,19 @@
+object Test {
+ def cmp(x1: String) = {
+ val x2 = x1.toList
+
+ -10 to 10 foreach { i =>
+ assert(x1.take(i) == x2.take(i).mkString)
+ assert(x1.drop(i) == x2.drop(i).mkString)
+ assert(x1.takeRight(i) == x2.takeRight(i).mkString)
+ assert(x1.dropRight(i) == x2.dropRight(i).mkString)
+ }
+ for (idx1 <- -3 to 3 ; idx2 <- -3 to 3) {
+ assert(x1.slice(idx1, idx2) == x2.slice(idx1, idx2).mkString)
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ cmp("abcde")
+ }
+}
diff --git a/test/files/run/spec-absfun.scala b/test/files/run/spec-absfun.scala
new file mode 100644
index 0000000000..2b780548f5
--- /dev/null
+++ b/test/files/run/spec-absfun.scala
@@ -0,0 +1,43 @@
+
+/** Test inheritance. See #3085.
+ * Anonymous functions extend AbstractFunction1[SpecializedPair[Int], Unit]. The
+ * specialized type SpecializedPair$mcI$sp should not leak into the superclass because
+ * the definition of apply would vary covariantly, and erasure won't consider it an
+ * override of the abstract apply, leading to an AbstractMethodError at runtime.
+ */
+
+object Test {
+
+ private val Max = 1000
+
+ def main(args: Array[String]) {
+ notSpecialized()
+ specialized()
+ }
+
+ def notSpecialized() {
+ val pairs = for { i <- 1 to Max; j <- 1 to i } yield new Pair(i, j)
+ val time0 = System.nanoTime
+ pairs foreach { p => p.first * p.second }
+ val time1 = System.nanoTime
+// println(time1 - time0)
+ }
+
+ def specialized() {
+ val pairs = for { i <- 1 to Max; j <- 1 to i } yield new SpecializedPair(i, j)
+ val time0 = System.nanoTime
+ pairs foreach { p => p.first * p.second }
+ val time1 = System.nanoTime
+// println(time1 - time0)
+ }
+}
+
+class Pair[A](_first: A, _second: A) {
+ def first = _first
+ def second = _second
+}
+
+class SpecializedPair[@specialized(Int) A](_first: A, _second: A) {
+ def first = _first
+ def second = _second
+}
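
The header comment in spec-absfun.scala states the constraint: the anonymous functions must still erase to AbstractFunction1 over the unspecialized pair type, otherwise the abstract apply is never overridden and an AbstractMethodError results. A small standalone sketch of the pattern being exercised (names mirror the test; the printed class name is an expectation when specialization is enabled, not something the patch asserts):

    class SpecializedPair[@specialized(Int) A](val first: A, val second: A)

    object SpecAbsfunDemo {
      def main(args: Array[String]): Unit = {
        val p = new SpecializedPair(3, 4)
        // Expected to print the specialized subclass, e.g. SpecializedPair$mcI$sp.
        println(p.getClass.getName)
        val f: SpecializedPair[Int] => Unit = q => println(q.first * q.second)
        f(p) // must complete without AbstractMethodError
      }
    }
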
diff --git a/test/files/run/spec-ame.check b/test/files/run/spec-ame.check
new file mode 100644
index 0000000000..afa12db4df
--- /dev/null
+++ b/test/files/run/spec-ame.check
@@ -0,0 +1,2 @@
+abc
+10
diff --git a/test/files/run/spec-ame.scala b/test/files/run/spec-ame.scala
new file mode 100644
index 0000000000..5898170ff2
--- /dev/null
+++ b/test/files/run/spec-ame.scala
@@ -0,0 +1,17 @@
+// ticket #3432
+object Test {
+ trait B[@specialized(Int) T] {
+ def value: T
+ }
+
+ class A[@specialized(Int) T](x: T) {
+ def foo: B[T] = new B[T] {
+ def value = x
+ }
+ }
+
+ def main(args: Array[String]) {
+ println((new A("abc")).foo.value)
+ println((new A(10)).foo.value)
+ }
+}
diff --git a/test/files/run/spec-constr.check b/test/files/run/spec-constr.check
new file mode 100644
index 0000000000..29d70d9de8
--- /dev/null
+++ b/test/files/run/spec-constr.check
@@ -0,0 +1,2 @@
+hello?
+goodbye
diff --git a/test/files/run/spec-constr.scala b/test/files/run/spec-constr.scala
new file mode 100644
index 0000000000..4c80d0954d
--- /dev/null
+++ b/test/files/run/spec-constr.scala
@@ -0,0 +1,14 @@
+object Test {
+ class E[@specialized(Int) A](var f: A => Boolean) {
+ def this() = this(null)
+
+ println("hello?")
+ if (f == null) f = { _ => false }
+ }
+
+ def main(args: Array[String]) {
+ new E[Int]
+ println("goodbye")
+ }
+}
+
diff --git a/test/files/run/spec-early.check b/test/files/run/spec-early.check
new file mode 100644
index 0000000000..414aacc419
--- /dev/null
+++ b/test/files/run/spec-early.check
@@ -0,0 +1,4 @@
+a
+abc
+42
+abc
diff --git a/test/files/run/spec-early.scala b/test/files/run/spec-early.scala
new file mode 100644
index 0000000000..84a8983f8c
--- /dev/null
+++ b/test/files/run/spec-early.scala
@@ -0,0 +1,15 @@
+trait Tr
+
+class Foo[@specialized(Int) T](_x: T) extends {
+ val bar = "abc"
+ val baz = "bbc"
+} with Tr {
+ val x = _x
+ println(x)
+ println(bar)
+}
+
+object Test extends Application {
+ new Foo("a")
+ new Foo(42)
+}
diff --git a/test/files/run/spec-init.check b/test/files/run/spec-init.check
new file mode 100644
index 0000000000..8a659f868c
--- /dev/null
+++ b/test/files/run/spec-init.check
@@ -0,0 +1,9 @@
+abc
+abc
+null
+shouldn't see two initialized values and one uninitialized
+42
+42
+0
+ok
+ok
diff --git a/test/files/run/spec-init.scala b/test/files/run/spec-init.scala
new file mode 100644
index 0000000000..bd3428f4ea
--- /dev/null
+++ b/test/files/run/spec-init.scala
@@ -0,0 +1,41 @@
+class Foo[@specialized(Int) T](_x: T) {
+ val x = _x
+ def bar {}
+
+ val y = x
+ println(x)
+ println(y)
+ println(z)
+
+ def baz {}
+ val z = y
+
+}
+
+class Bar[@specialized(Int) T] {
+ def foo(x: T) = print(x)
+}
+
+object Global {
+ var msg = "ok"
+}
+
+class TouchGlobal[@specialized(Int) T](_x: T) {
+ println(Global.msg)
+ val x = {
+ Global.msg = "not ok"
+ _x
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ (new Foo("abc"))
+ println("shouldn't see two initialized values and one uninitialized")
+ (new Foo(42))
+
+ (new TouchGlobal(new Object))
+ Global.msg = "ok" // reset the value
+ (new TouchGlobal(42))
+ }
+}
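
The expected output in spec-init.check above (abc, abc, null and 42, 42, 0) follows from strict, declaration-order field initialization: reading z before its declaration observes the uninitialized default (null for references, 0 for Int), and the specialized variants must behave the same way. A minimal, non-specialized sketch of that rule (illustrative only):

    class InitOrder {
      val a = "abc"
      println(a) // "abc": already initialized
      println(b) // null: b is declared later, so its field still holds the default
      val b = "def"
    }

    object InitOrderDemo {
      def main(args: Array[String]): Unit = { new InitOrder }
    }
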
diff --git a/test/files/run/spec-matrix.check b/test/files/run/spec-matrix.check
new file mode 100644
index 0000000000..72e8ffc0db
--- /dev/null
+++ b/test/files/run/spec-matrix.check
@@ -0,0 +1 @@
+*
diff --git a/test/files/run/spec-matrix.scala b/test/files/run/spec-matrix.scala
new file mode 100644
index 0000000000..2acf6204ff
--- /dev/null
+++ b/test/files/run/spec-matrix.scala
@@ -0,0 +1,70 @@
+/** Test matrix multiplication with specialization.
+ */
+
+class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
+ private val arr: Array[Array[A]] = new Array[Array[A]](rows, cols)
+
+ def apply(i: Int, j: Int): A = {
+ if (i < 0 || i >= rows || j < 0 || j >= cols)
+ throw new NoSuchElementException("Indexes out of bounds: " + (i, j))
+
+ arr(i)(j)
+ }
+
+ def update(i: Int, j: Int, e: A) {
+ arr(i)(j) = e
+ }
+
+ def rowsIterator: Iterator[Array[A]] = new Iterator[Array[A]] {
+ var idx = 0;
+ def hasNext = idx < rows
+ def next = {
+ idx += 1
+ arr(idx - 1)
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val m = randomMatrix(200, 100)
+ val n = randomMatrix(100, 200)
+
+ mult(m, n)
+ println("*")
+ }
+
+ def randomMatrix(n: Int, m: Int) = {
+ val r = new util.Random(10)
+ val x = new Matrix[Int](n, m)
+ for (i <- 0 until n; j <- 0 until m)
+ x(i, j) = r.nextInt
+ x
+ }
+
+
+ def multManifest[@specialized(Int) T](m: Matrix[T], n: Matrix[T])(implicit cm: ClassManifest[T], num: Numeric[T]) {
+ val p = new Matrix[T](m.rows, n.cols)
+ import num._
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = num.zero
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ }
+
+ def mult(m: Matrix[Int], n: Matrix[Int]) {
+ val p = new Matrix[Int](m.rows, n.cols)
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = 0
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ }
+}
diff --git a/test/files/run/spec-overrides.check b/test/files/run/spec-overrides.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/spec-overrides.check
diff --git a/test/files/run/spec-overrides.scala b/test/files/run/spec-overrides.scala
new file mode 100644
index 0000000000..f5d2e2f9c7
--- /dev/null
+++ b/test/files/run/spec-overrides.scala
@@ -0,0 +1,20 @@
+ trait Base[@specialized(Double) B] {
+ def default: B;
+ }
+
+ trait D1 extends Base[Double] {
+ override def default = 0.0;
+ }
+
+ class D2 extends D1 {
+ override def default: Double = 1.0;
+ }
+
+
+object Test extends Application {
+ val d2 = new D2
+
+ assert(d2.default == 1.0, d2.default)
+ assert((d2: Base[_]).default == 1.0, (d2: Base[_]).default)
+ assert((d2: D1).default == 1.0, (d2: D1).default)
+}
diff --git a/test/files/run/spec-patmatch.check b/test/files/run/spec-patmatch.check
new file mode 100644
index 0000000000..a6679fa1c7
--- /dev/null
+++ b/test/files/run/spec-patmatch.check
@@ -0,0 +1,19 @@
+bool
+byte
+short
+char
+int
+long
+double
+float
+default
+object instantiations:
+bool
+byte
+short
+char
+int
+long
+double
+float
+default
diff --git a/test/files/run/spec-patmatch.scala b/test/files/run/spec-patmatch.scala
new file mode 100644
index 0000000000..92938836d8
--- /dev/null
+++ b/test/files/run/spec-patmatch.scala
@@ -0,0 +1,52 @@
+class Foo[@specialized A] {
+ def test(x: A) = println(x match {
+ case _: Boolean => "bool"
+ case _: Byte => "byte"
+ case _: Short => "short"
+ case _: Char => "char"
+ case i: Int => "int"
+ case l: Long => "long"
+ case d: Double => "double"
+ case e: Float => "float"
+ case _ => "default"
+ })
+}
+
+object Test {
+ def test[@specialized A] (x: A) = println(x match {
+ case _: Boolean => "bool"
+ case _: Byte => "byte"
+ case _: Short => "short"
+ case _: Char => "char"
+ case i: Int => "int"
+ case l: Long => "long"
+ case d: Double => "double"
+ case e: Float => "float"
+ case _ => "default"
+ })
+
+ def main(args: Array[String]) {
+ test(true)
+ test(42.toByte)
+ test(42.toShort)
+ test('b')
+ test(42)
+ test(42l)
+ test(42.0)
+ test(42.0f)
+ test(new Object)
+
+ println("object instantiations:")
+ (new Foo).test(true)
+ (new Foo).test(42.toByte)
+ (new Foo).test(42.toShort)
+ (new Foo).test('b')
+ (new Foo).test(42)
+ (new Foo).test(42l)
+ (new Foo).test(42.0)
+ (new Foo).test(42.0f)
+ (new Foo).test(new Object)
+
+ }
+
+}
diff --git a/test/files/run/streamWithFilter.check b/test/files/run/streamWithFilter.check
new file mode 100644
index 0000000000..6b0e91a147
--- /dev/null
+++ b/test/files/run/streamWithFilter.check
@@ -0,0 +1,5 @@
+15
+30
+45
+60
+75
diff --git a/test/files/run/streamWithFilter.scala b/test/files/run/streamWithFilter.scala
new file mode 100644
index 0000000000..7f8f9a09d2
--- /dev/null
+++ b/test/files/run/streamWithFilter.scala
@@ -0,0 +1,11 @@
+object Test {
+ val nums = Stream.from(1)
+ def isFizz(x: Int) = x % 3 == 0
+ def isBuzz(x: Int) = x % 5 == 0
+ // next line will run forever if withFilter isn't doing its thing.
+ val fizzbuzzes = for (n <- nums ; if isFizz(n) ; if isBuzz(n)) yield n
+
+ def main(args: Array[String]): Unit = {
+ fizzbuzzes take 5 foreach println
+ }
+}
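
The comment in streamWithFilter.scala is about laziness through the comprehension's guards: with only if-filters and a yield, the for-expression desugars (roughly as below, assumed rather than compiler output) into withFilter calls, and Stream's withFilter must stay non-strict for the infinite stream to be usable.

    object WithFilterDesugar {
      def isFizz(x: Int) = x % 3 == 0
      def isBuzz(x: Int) = x % 5 == 0

      def main(args: Array[String]): Unit = {
        val nums = Stream.from(1)
        // Approximate desugaring of: for (n <- nums; if isFizz(n); if isBuzz(n)) yield n
        val fizzbuzzes = nums.withFilter(isFizz).withFilter(isBuzz).map(n => n)
        fizzbuzzes take 5 foreach println // 15, 30, 45, 60, 75
      }
    }
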
diff --git a/test/files/run/stream_length.check b/test/files/run/stream_length.check
new file mode 100644
index 0000000000..9906de773c
--- /dev/null
+++ b/test/files/run/stream_length.check
@@ -0,0 +1 @@
+Length: 970299
diff --git a/test/files/run/stream_length.scala b/test/files/run/stream_length.scala
new file mode 100644
index 0000000000..68e9cad5ac
--- /dev/null
+++ b/test/files/run/stream_length.scala
@@ -0,0 +1,15 @@
+
+
+object Test {
+ def walk(depth: Int, bias: String): Stream[String] = {
+ if (depth == 0)
+ Stream(bias)
+ else {
+ Stream.concat(Stream.range(1, 100).map((x: Int) => walk(depth-1, bias + x)))
+ }
+ }
+
+ def main(args: Array[String]) {
+ println("Length: " + walk(3, "---").length)
+ }
+}
diff --git a/test/files/run/streams.scala b/test/files/run/streams.scala
index 83b7772886..e3c871c693 100644
--- a/test/files/run/streams.scala
+++ b/test/files/run/streams.scala
@@ -29,7 +29,7 @@ object Test extends Application {
def powers(x: Int) = if ((x&(x-1)) == 0) Some(x) else None
println(s3.flatMap(powers).reverse.head)
- // large enough to generate StackOverflows (on most systems)
+ // large enough to generate StackOverflows (on most systems)
// unless the following methods are tail call optimized.
val size = 100000
diff --git a/test/files/run/stringbuilder.scala b/test/files/run/stringbuilder.scala
index c669f1c3db..8ee8435e37 100644
--- a/test/files/run/stringbuilder.scala
+++ b/test/files/run/stringbuilder.scala
@@ -1,19 +1,21 @@
object Test extends Application {
val str = "ABCDEFGHIJKLMABCDEFGHIJKLM"
- type SB = {
+ val surrogateStr = "an old Turkic letter: \uD803\uDC22"
+
+ type SB = {
def indexOf(str: String): Int
def indexOf(str: String, fromIndex: Int): Int
def lastIndexOf(str: String): Int
def lastIndexOf(str: String, fromIndex: Int): Int
}
-
+
import scala.collection.mutable.{ StringBuilder => ScalaStringBuilder }
import java.lang.{ StringBuilder => JavaStringBuilder }
-
+
val sbScala = new ScalaStringBuilder() append str
val sbJava = new JavaStringBuilder() append str
val sbs: List[SB] = List[SB](sbScala, sbJava)
-
+
def sameAnswers(f: (SB) => Int) = assert(f(sbScala) == f(sbJava))
sameAnswers(_.indexOf(""))
@@ -29,4 +31,10 @@ object Test extends Application {
sameAnswers(_.lastIndexOf("QZV"))
sameAnswers(_.lastIndexOf("GHI", 22))
sameAnswers(_.lastIndexOf("KLM", 22))
+
+ // testing that the "reverse" implementation avoids reversing surrogate pairs
+ val jsb = new JavaStringBuilder(surrogateStr) reverse
+ val ssb = new ScalaStringBuilder(surrogateStr) reverseContents ;
+
+ assert(jsb.toString == ssb.toString)
}
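
The new reverseContents assertion above relies on the documented java.lang.StringBuilder behaviour that a surrogate pair is kept in order when reversing; reversing char by char would split the pair and leave malformed UTF-16. A small sketch of the difference, using only the JDK builder (illustrative, not part of the patch):

    object SurrogateReverseDemo {
      def main(args: Array[String]): Unit = {
        val s = "\uD803\uDC22" // one code point (an Old Turkic letter), two chars
        val naive = s.toCharArray.reverse.mkString            // low surrogate now comes first
        val kept  = new java.lang.StringBuilder(s).reverse.toString
        println(naive == s) // false: the pair was split
        println(kept == s)  // true: the pair is treated as a single unit
      }
    }
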
diff --git a/test/files/run/structural.scala b/test/files/run/structural.scala
index 5fc2292b42..c9a13b8469 100644
--- a/test/files/run/structural.scala
+++ b/test/files/run/structural.scala
@@ -1,18 +1,18 @@
object test1 {
-
+
val o1 = new Object { override def toString = "ohone" }
val o2 = new Object { override def toString = "ohtwo" }
-
+
val t1 = new Tata("tieone")
val t2 = new Tata("tietwo")
-
+
class Tata(name: String) {
override def toString = name
def tatMe = "oy"
}
-
+
class Titi extends Tata("titi")
-
+
object Rec {
val a = 1
val b = 2
@@ -41,7 +41,7 @@ object test1 {
val y: Tata = null
def z(t: Tata) = ()
}
-
+
type rt = Object {
val a: Int;
val c: String;
@@ -65,7 +65,7 @@ object test1 {
var v: Int
val y: Tata
}
-
+
def l (r: rt) {
println(" 1. " + r.c)
println(" 2. " + r.a + 1)
@@ -94,33 +94,33 @@ object test1 {
println("25. " + r.y)
println("26. " + r.e(null))
}
-
+
/*def ma[T](r: Object{def e(x: T): T; val x: T}) {
println("30. " + r.e(r.x)) // static error
}*/
-
+
def mb(r: Object { def e[T](x: T): T }) {
println("31. " + r.e[Int](4)) // while this is ok
}
-
+
def m1(r: Object { def z(x: Tata): Unit }) {
println("32. " + r.z(new Titi)) // while this is ok
}
-
+
def m2[T](r: Object { def e(x: Tata): T; val x: Tata }) {
println("33. " + r.e(r.x)) // and this too
}
-
+
class Rec3[T] {
def e(x: T): T = x
}
-
+
def m3[T](r: Rec3[T], x: T) {
println("33. " + r.e(x)) // and this too
}
-
+
Rec.g(11)
-
+
this.l(Rec)
this.mb(new Object{def e[T](x: T): T = x})
this.m1(Rec)
@@ -132,7 +132,7 @@ object test2 {
class C extends { def f() { println("1") } }
val x1 = new C
x1.f()
-
+
abstract class D extends { def f() }
val x2 = new D { def f() { println("2") } }
x2.f()
@@ -153,11 +153,11 @@ object test2 {
object test3 {
case class Exc extends Exception
-
+
object Rec {
def f = throw Exc()
}
-
+
def m(r: { def f: Nothing }) =
try {
r.f
@@ -166,31 +166,31 @@ object test3 {
case e: Exc => println("caught")
case e => println(e)
}
-
+
m(Rec)
-
+
}
object test4 {
class A
-
+
val aar = Array(new A, new A, new A)
val nar = Array(1, 2)
-
+
def f(p: {def size: Int}) = println(p.size)
//def g[T <: {def size: Int}](p: T) = println(p.size) // open issue
//def h[T <% {def size: Int}](p: T) = println(p.size) // open issue
-
+
f(aar)
f(nar)
-
+
//g(aar)
//g(nar)
-
+
//h(aar)
//h(nar)
-
+
}
object Test extends Application {
diff --git a/test/files/run/t0017.check b/test/files/run/t0017.check
index 86c5fe56a8..3a72142467 100644
--- a/test/files/run/t0017.check
+++ b/test/files/run/t0017.check
@@ -1 +1 @@
-Array(GenericArray(1, 3), GenericArray(2, 4))
+Array(ArraySeq(1, 3), ArraySeq(2, 4))
diff --git a/test/files/run/t0017.scala b/test/files/run/t0017.scala
index f43481f39b..5f02606b74 100644
--- a/test/files/run/t0017.scala
+++ b/test/files/run/t0017.scala
@@ -7,7 +7,7 @@ def transpose[A](arr: Array[Array[A]]) = {
var my_arr = Array(Array(1,2),Array(3,4))
-for (i <- Array.range(0, my_arr(0).length)) yield
+for (i <- Array.range(0, my_arr(0).length)) yield
for (row <- my_arr) yield row(i)
val transposed = transpose(my_arr)
diff --git a/test/files/run/t0421.scala b/test/files/run/t0421.scala
index b69373f341..121fc4f2f0 100644
--- a/test/files/run/t0421.scala
+++ b/test/files/run/t0421.scala
@@ -7,17 +7,17 @@ object Test extends Application {
}
def scalprod(xs: Array[Double], ys: Array[Double]) = {
- var acc = 0.0
- for ((x, y) <- xs zip ys) acc = acc + x * y
+ var acc = 0.0
+ for ((x, y) <- xs zip ys) acc = acc + x * y
acc
}
def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
- val ysst = transpose(yss)
+ val ysst = transpose(yss)
val ysst1: Array[Array[Double]] = yss.transpose
assert(ysst.deep == ysst1.deep)
for (xs <- xss) yield
- for (yst <- ysst) yield
+ for (yst <- ysst) yield
scalprod(xs, yst)
}
@@ -25,6 +25,6 @@ object Test extends Application {
println(transpose(a1).deepMkString("[", ",", "]"))
println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).deepMkString("[", ",", "]"))
-
+
println(matmul(Array(Array(4)), Array(Array(6, 8))).deepMkString("[", ",", "]"))
}
diff --git a/test/files/run/t0432.scala b/test/files/run/t0432.scala
new file mode 100644
index 0000000000..8ba9015d81
--- /dev/null
+++ b/test/files/run/t0432.scala
@@ -0,0 +1,15 @@
+object Test {
+ type valueType = { def value: this.type }
+
+ class StringValue(x: String) {
+ def value: this.type = this
+ }
+
+ def m(x: valueType) = x.value
+
+ val s = new StringValue("hei")
+
+ def main(args: Array[String]) {
+ m(s)
+ }
+}
diff --git a/test/files/run/t0508.scala b/test/files/run/t0508.scala
index 7ef6f8197f..8dc34dd417 100644
--- a/test/files/run/t0508.scala
+++ b/test/files/run/t0508.scala
@@ -9,5 +9,5 @@ object Test extends Application {
}
}
- foo(Foo.unapply, Foo("this might be fun", 10))
+ foo(Foo.unapply, Foo("this might be fun", 10))
}
diff --git a/test/files/run/t0528.scala b/test/files/run/t0528.scala
index 5a4755c947..df6d13a17a 100644
--- a/test/files/run/t0528.scala
+++ b/test/files/run/t0528.scala
@@ -3,7 +3,7 @@ trait Sequ[A] {
}
class RichStr extends Sequ[Char] {
- // override to a primitve array
+ // override to a primitive array
def toArray: Array[Char] = Array('0', '1', '2', '3', '4', '5', '6', '7', '8', '9')
}
diff --git a/test/files/run/t0631.scala b/test/files/run/t0631.scala
index e64301e514..48bb3c986c 100644
--- a/test/files/run/t0631.scala
+++ b/test/files/run/t0631.scala
@@ -1,5 +1,5 @@
object Test extends Application {
- class Foo {
+ class Foo {
override def equals(that: Any) = {
println("Foo.equals called")
super.equals(that)
diff --git a/test/files/run/t0677.scala b/test/files/run/t0677.scala
index eb01a85fcb..94f30815b2 100644
--- a/test/files/run/t0677.scala
+++ b/test/files/run/t0677.scala
@@ -1,7 +1,7 @@
object Test extends Application {
- class X[T: ClassManifest] {
+ class X[T: ClassManifest] {
val a = new Array[Array[T]](3,4)
- val b = Array.ofDim[T](3, 4)
+ val b = Array.ofDim[T](3, 4)
}
val x = new X[String]
x.a(1)(2) = "hello"
diff --git a/test/files/run/t0807.scala b/test/files/run/t0807.scala
index e69aa1c71c..b4aa0b14a4 100644
--- a/test/files/run/t0807.scala
+++ b/test/files/run/t0807.scala
@@ -1,5 +1,5 @@
trait A
-trait B extends A { val x = println("early") }
+trait B extends A { val x = println("early") }
object Test extends Application {
new B {}
}
diff --git a/test/files/run/t0883.scala b/test/files/run/t0883.scala
index b9d71702d8..c55992642d 100644
--- a/test/files/run/t0883.scala
+++ b/test/files/run/t0883.scala
@@ -1,14 +1,14 @@
object Foo { def apply(x: String) = new Foo(x) }
class Foo(name: String)
case object Bar extends Foo("Bar")
-case class Baz extends Foo("Baz")
+case class Baz extends Foo("Baz")
object Test extends Application {
- Foo("Bar") match {
- case Bar => println("What?")
+ Foo("Bar") match {
+ case Bar => println("What?")
case _ => println("OK")
}
- Foo("Baz") match {
- case Baz() => println("What?")
+ Foo("Baz") match {
+ case Baz() => println("What?")
case _ => println("OK")
- }
+ }
}
diff --git a/test/files/run/t1167.check b/test/files/run/t1167.check
new file mode 100644
index 0000000000..885d4c9e26
--- /dev/null
+++ b/test/files/run/t1167.check
@@ -0,0 +1,3 @@
+$anon$1
+$anon$2
+$anonfun$testFunc$1
diff --git a/test/files/run/t1167.scala b/test/files/run/t1167.scala
new file mode 100644
index 0000000000..ac9626227b
--- /dev/null
+++ b/test/files/run/t1167.scala
@@ -0,0 +1,25 @@
+/** Tests for compatible InnerClasses attribute between trait and
+ * impl classes, as well as anonymous classes.
+ */
+
+trait Test1 {
+ def testFunc(i:Int): Unit = {
+ (i:Int) => i + 5
+ }
+}
+
+abstract class Foo {
+ override def toString = getClass.getSimpleName
+
+ abstract class Bar {
+ override def toString = getClass.getSimpleName
+ }
+}
+
+object Test extends Application {
+ val foo = new Foo {}
+ val bar = new foo.Bar {}
+ println(foo)
+ println(bar)
+ println(Class.forName("Test1$$anonfun$testFunc$1").getSimpleName)
+}
diff --git a/test/files/run/t1323.scala b/test/files/run/t1323.scala
index d84239a12c..831f8c72f0 100644
--- a/test/files/run/t1323.scala
+++ b/test/files/run/t1323.scala
@@ -1,25 +1,25 @@
object Test extends Application {
- println(" 1:" + List(1,2,3,4).indexOfSeq(List(0,1))) // -1
- println(" 2:" + List(1,2,3,4).indexOfSeq(List(1,2))) // 0
- println(" 3:" + List(1,2,3,4).indexOfSeq(List(2,3))) // 1
- println(" 4:" + List(1,2,3,4).indexOfSeq(List(3,4))) // 2
- println(" 5:" + List(1,2,3,4).indexOfSeq(List(4,5))) // -1
- println(" 6:" + List(1,2,3,4).indexOfSeq(List(2,4))) // -1
- println(" 7:" + List(1,2,3,4).indexOfSeq(List(4,3))) // -1
- println(" 8:" + List(1,2,3,4).indexOfSeq(List(1,3))) // -1
- println(" 9:" + List(1,2,3,4).indexOfSeq(List(1,3))) // -1
- println("10:" + List(1,2,3,4).indexOfSeq(List(1,2,3,4))) // 0
- println("11:" + List(1,2,3,4).indexOfSeq(List(4,3,2,1))) // -1
- println("12:" + List(1,2,3,4).indexOfSeq(List(1,2,3,4,5))) // -1
- println("13:" + List(1,2,3,4).indexOfSeq(List(5,4,3,2,1))) // -1
- println("14:" + List(1,2,3,4).indexOfSeq(List())) // 0
- println("15:" + List().indexOfSeq(List())) // 0
- println("16:" + List().indexOfSeq(List(1,2,3,4))) // -1
+ println(" 1:" + List(1,2,3,4).indexOfSlice(List(0,1))) // -1
+ println(" 2:" + List(1,2,3,4).indexOfSlice(List(1,2))) // 0
+ println(" 3:" + List(1,2,3,4).indexOfSlice(List(2,3))) // 1
+ println(" 4:" + List(1,2,3,4).indexOfSlice(List(3,4))) // 2
+ println(" 5:" + List(1,2,3,4).indexOfSlice(List(4,5))) // -1
+ println(" 6:" + List(1,2,3,4).indexOfSlice(List(2,4))) // -1
+ println(" 7:" + List(1,2,3,4).indexOfSlice(List(4,3))) // -1
+ println(" 8:" + List(1,2,3,4).indexOfSlice(List(1,3))) // -1
+ println(" 9:" + List(1,2,3,4).indexOfSlice(List(1,3))) // -1
+ println("10:" + List(1,2,3,4).indexOfSlice(List(1,2,3,4))) // 0
+ println("11:" + List(1,2,3,4).indexOfSlice(List(4,3,2,1))) // -1
+ println("12:" + List(1,2,3,4).indexOfSlice(List(1,2,3,4,5))) // -1
+ println("13:" + List(1,2,3,4).indexOfSlice(List(5,4,3,2,1))) // -1
+ println("14:" + List(1,2,3,4).indexOfSlice(List())) // 0
+ println("15:" + List().indexOfSlice(List())) // 0
+ println("16:" + List().indexOfSlice(List(1,2,3,4))) // -1
// Do some testing with infinite sequences
def from(n: Int): Stream[Int] = Stream.cons(n, from(n + 1))
- println("17:" + List(1,2,3,4).indexOfSeq(from(1))) // -1
- println("18:" + from(1).indexOfSeq(List(4,5,6))) // 3
+ println("17:" + List(1,2,3,4).indexOfSlice(from(1))) // -1
+ println("18:" + from(1).indexOfSlice(List(4,5,6))) // 3
}
diff --git a/test/files/run/t1423.scala b/test/files/run/t1423.scala
index 83c35a46bd..4f2c911530 100644
--- a/test/files/run/t1423.scala
+++ b/test/files/run/t1423.scala
@@ -5,4 +5,4 @@ object Test extends Application{
case 1L => println(1);
case _ => println("????");
}
-}
+}
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
index de79b84e75..c312a9a883 100644
--- a/test/files/run/t1500.scala
+++ b/test/files/run/t1500.scala
@@ -1,27 +1,27 @@
-import scala.tools.nsc._
+import scala.tools.nsc._
object Test {
-
+
/**
* Type inference overlooks constraints posed by type parameters in annotations on types.
*/
-
+
val testCode = <code>
-
+
class posingAs[A] extends TypeConstraint
-
+
def resolve[A,B](x: A @posingAs[B]): B = x.asInstanceOf[B]
-
+
val x = resolve(7: @posingAs[Any])
-
+
</code>.text
-
+
def main(args: Array[String]) = {
-
- val tool = new Interpreter(new Settings())
+
+ val settings = new Settings()
+ settings.classpath.value = System.getProperty("java.class.path")
+ val tool = new Interpreter(settings)
val global = tool.compiler
- // when running that compiler, give it a scala-library to the classpath
- global.settings.classpath.value = System.getProperty("java.class.path")
import global._
import definitions._
@@ -35,11 +35,11 @@ object Test {
}
}
-
+
global.addAnnotationChecker(checker)
-
+
tool.interpret(testCode)
-
+
}
}
diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala
index 851daae0bb..05e4da8c7a 100644
--- a/test/files/run/t1501.scala
+++ b/test/files/run/t1501.scala
@@ -1,15 +1,15 @@
-import scala.tools.nsc._
+import scala.tools.nsc._
object Test {
-
+
/**
* ...
*/
-
+
val testCode = <code>
-
+
class xyz[A] extends TypeConstraint
-
+
def loopWhile[T](cond: =>Boolean)(body: =>(Unit @xyz[T])): Unit @ xyz[T] = {{
if (cond) {{
body
@@ -24,16 +24,14 @@ object Test {
(): @xyz[Int]
}}
}}
-
+
</code>.text
-
+
def main(args: Array[String]) = {
-
- val tool = new Interpreter(new Settings())
+ val settings = new Settings()
+ settings.classpath.value = System.getProperty("java.class.path")
+ val tool = new Interpreter(settings)
val global = tool.compiler
- // when running that compiler, give it a scala-library to the classpath
- global.settings.classpath.value = System.getProperty("java.class.path")
-
import global._
import definitions._
@@ -47,11 +45,11 @@ object Test {
}
}
-
+
global.addAnnotationChecker(checker)
-
+
tool.interpret(testCode)
-
+
}
}
diff --git a/test/files/run/t1524.scala b/test/files/run/t1524.scala
index ecd90adec7..4f6c65d052 100644
--- a/test/files/run/t1524.scala
+++ b/test/files/run/t1524.scala
@@ -3,5 +3,5 @@ object Test extends Application {
val buf = new scala.collection.mutable.ArrayBuffer[String] { override val initialSize = 0 }
buf += "initial"
buf += "second"
- println(buf.first)
+ println(buf.head)
}
diff --git a/test/files/run/t153.check b/test/files/run/t153.check
index 504fd7fc7f..648a6de7c3 100644
--- a/test/files/run/t153.check
+++ b/test/files/run/t153.check
@@ -1 +1 @@
-Stream(524288, 262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1)
+Stream(262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1)
diff --git a/test/files/run/t153.scala b/test/files/run/t153.scala
index c7b3c1c762..359e40407b 100644
--- a/test/files/run/t153.scala
+++ b/test/files/run/t153.scala
@@ -1,5 +1,5 @@
object Test extends Application {
def powers(x: Int) = if ((x&(x-1))==0) Some(x) else None
- val res = (Stream.range(1, 1000000) flatMap powers).reverse
+ val res = (Stream.range(1, 500000) flatMap powers).reverse
println(res take 42 force)
} \ No newline at end of file
diff --git a/test/files/run/t1591.check b/test/files/run/t1591.check
new file mode 100644
index 0000000000..48082f72f0
--- /dev/null
+++ b/test/files/run/t1591.check
@@ -0,0 +1 @@
+12
diff --git a/test/files/run/t1591.scala b/test/files/run/t1591.scala
new file mode 100644
index 0000000000..28e77775ff
--- /dev/null
+++ b/test/files/run/t1591.scala
@@ -0,0 +1,14 @@
+abstract class A {
+
+ lazy val lazyBar = bar
+
+ object bar {
+ val foo = 12
+ }
+
+}
+
+object Test extends Application {
+ val a = new A{}
+ println(a.lazyBar.foo)
+}
diff --git a/test/files/run/t1718.scala b/test/files/run/t1718.scala
index 61a17b2906..431e237327 100644
--- a/test/files/run/t1718.scala
+++ b/test/files/run/t1718.scala
@@ -1,10 +1,10 @@
object Test extends Application{
- def matchesNull[T](mightBeNull: Array[T]): Boolean = mightBeNull match {
+ def matchesNull[T](mightBeNull: Array[T]): Boolean = mightBeNull match {
case null => true
case x => false
}
val nullArray: Array[String] = null
- println(matchesNull(nullArray))
+ println(matchesNull(nullArray))
}
diff --git a/test/files/run/t1773.scala b/test/files/run/t1773.scala
index 81c6005f86..04729df7e7 100644
--- a/test/files/run/t1773.scala
+++ b/test/files/run/t1773.scala
@@ -7,6 +7,6 @@ object Test extends Application
<a>{""}</a>,
<a>{ if (true) "" else "I like turtles" }</a>
)
-
- for (x1 <- xs; x2 <- xs) assert (x1 == x2)
+
+ for (x1 <- xs; x2 <- xs) assert (x1 xml_== x2)
}
diff --git a/test/files/run/t1829.scala b/test/files/run/t1829.scala
index 8240527424..7c39d33ae9 100644
--- a/test/files/run/t1829.scala
+++ b/test/files/run/t1829.scala
@@ -1,6 +1,6 @@
object Test{
def main(args : Array[String]){
- import scala.collection.immutable._
+ import scala.collection.immutable._
assert(IntMap.empty == HashMap.empty);
assert(HashMap.empty == IntMap.empty);
assert(LongMap.empty == HashMap.empty);
diff --git a/test/files/run/t2074.scala b/test/files/run/t2074.scala
deleted file mode 100644
index 60646be733..0000000000
--- a/test/files/run/t2074.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- List.range(1,11).view.patch(5, List(100,101), 2)
-}
diff --git a/test/files/run/t2074_2.check b/test/files/run/t2074_2.check
index eb1f072de3..e4fc89b1ce 100644
--- a/test/files/run/t2074_2.check
+++ b/test/files/run/t2074_2.check
@@ -1,3 +1,3 @@
-IndexedSeqView(1, 2, 3)
-IndexedSeqView(1, 2, 3)
-IndexedSeqViewZ((1,1), (2,2), (3,3))
+SeqView(1, 2, 3)
+SeqView(1, 2, 3)
+SeqViewZ(...)
diff --git a/test/files/run/t2074_2.scala b/test/files/run/t2074_2.scala
index 7d1d8181d8..1f59e0b8a1 100644
--- a/test/files/run/t2074_2.scala
+++ b/test/files/run/t2074_2.scala
@@ -1,15 +1,18 @@
// replaced all occurrences of 'Vector' with 'IndexedSeq'
import scala.collection.immutable.IndexedSeq
-import scala.collection.IndexedSeqView
+import scala.collection.SeqView
object Test {
- val v = new IndexedSeqView[Int, IndexedSeq[Int]] {
+ val funWithCCE = List.range(1,11).view.patch(5, List(100,101), 2)
+
+ val v = new SeqView[Int, IndexedSeq[Int]] {
def underlying = IndexedSeq(1,2,3)
def apply(idx: Int) = underlying(idx)
def length = underlying.length
+ def iterator = underlying.iterator
}
val w = IndexedSeq(1, 2, 3).view
-
+
def main(args: Array[String]): Unit = {
println(v)
println(w)
diff --git a/test/files/run/t2212.scala b/test/files/run/t2212.scala
new file mode 100644
index 0000000000..b9c0cd776b
--- /dev/null
+++ b/test/files/run/t2212.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]) {
+ import collection.mutable._
+ val x4 = LinkedList[Int](1)
+ println(x4)
+ val y4 = LinkedList[Int](1)
+ println(y4)
+ println(x4 equals y4) // or (y4 equals x4)
+ }
+}
diff --git a/test/files/run/t2417.check b/test/files/run/t2417.check
new file mode 100644
index 0000000000..36c954be24
--- /dev/null
+++ b/test/files/run/t2417.check
@@ -0,0 +1,12 @@
+testing small Map that doesn't promote to HashMap...
+
+testing single-threaded HashMap use...
+
+testing HashMap.size from multiple threads...
+
+testing small Set that doesn't promote to HashSet...
+
+testing single-threaded HashSet use...
+
+testing HashSet.size from multiple threads...
+
diff --git a/test/files/run/t2417.scala b/test/files/run/t2417.scala
new file mode 100644
index 0000000000..aeb61a7d1d
--- /dev/null
+++ b/test/files/run/t2417.scala
@@ -0,0 +1,77 @@
+// #2417
+object Test {
+
+ def parallel(numThreads: Int)(block: => Unit) {
+ var failure: Throwable = null
+ val threads = Array.fromFunction(i => new Thread {
+ override def run {
+ try {
+ block
+ } catch {
+ case x => failure = x
+ }
+ }
+ })(numThreads)
+ for (t <- threads) t.start
+ for (t <- threads) t.join
+ if (failure != null) println("FAILURE: " + failure)
+ }
+
+ def testSet(initialSize: Int, numThreads: Int, passes: Int) {
+ val orig = Set.empty ++ (1 to initialSize)
+ parallel(numThreads) {
+ for (pass <- 0 until passes) {
+ var s = orig
+ for (e <- (initialSize to 1 by -1)) {
+ s -= e
+ val obs = s.size
+ if (obs != e - 1) {
+ throw new Exception("removed e=" + e + ", size was " + obs + ", s=" + s)
+ }
+ }
+ }
+ }
+ }
+
+ def testMap(initialSize: Int, numThreads: Int, passes: Int) {
+ val orig = Map.empty ++ ((1 to initialSize) map ((_,"v")))
+ parallel(numThreads) {
+ for (pass <- 0 until passes) {
+ var m = orig
+ for (e <- (initialSize to 1 by -1)) {
+ m -= e
+ val obs = m.size
+ if (obs != e - 1) {
+ throw new Exception("removed e=" + e + ", size was " + obs + ", m=" + m)
+ }
+ }
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ println("testing small Map that doesn't promote to HashMap...")
+ testMap(4, 2, 1000000)
+ println()
+
+ println("testing single-threaded HashMap use...")
+ testMap(5, 1, 1000000)
+ println()
+
+ println("testing HashMap.size from multiple threads...")
+ testMap(5, 2, 1000000)
+ println()
+
+ println("testing small Set that doesn't promote to HashSet...")
+ testSet(4, 2, 1000000)
+ println()
+
+ println("testing single-threaded HashSet use...")
+ testSet(5, 1, 1000000)
+ println()
+
+ println("testing HashSet.size from multiple threads...")
+ testSet(5, 2, 1000000)
+ println()
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t2526.scala b/test/files/run/t2526.scala
new file mode 100644
index 0000000000..d37185535f
--- /dev/null
+++ b/test/files/run/t2526.scala
@@ -0,0 +1,53 @@
+/**
+ * Checks that various foreach methods overridden in mutable.HashMap as part of ticket #2526
+ * still work correctly.
+ */
+object Test {
+ import collection._
+
+ def main(args: Array[String]) {
+ val m = new mutable.HashMap[String, String]
+
+ /* Use non hash-based structure for verification */
+ val keys = List("a", "b", "c", "d", "e")
+ val valueSuffix = "value"
+ val values = keys.map(_ + valueSuffix)
+ val entries = keys.zip(values)
+
+ for (k <- keys) m(k) = k + valueSuffix
+
+ assertForeach(keys, m.keySet.iterator)
+ assertForeach(keys, m.keysIterator)
+ assertForeach(keys, m.keySet)
+
+ assertForeach(values, m.values.iterator)
+ assertForeach(values, m.valuesIterator)
+
+ assertForeach(entries, m)
+ }
+
+ /* Checks foreach of `actual` goes over all the elements in `expected` */
+ private def assertForeach[E](expected: Traversable[E], actual: Iterator[E]): Unit = {
+ val notYetFound = new mutable.ArrayBuffer[E]() ++= expected
+ actual.foreach { e =>
+ assert(notYetFound.contains(e))
+ notYetFound -= e
+ }
+ assert(notYetFound.size == 0, "mutable.HashMap.foreach should have iterated over: " + notYetFound)
+ }
+
+ /*
+ * Checks foreach of `actual` goes over all the elements in `expected`
+ * We duplicate the method above because there is no common interface between Traversable and
+ * Iterator and we want to avoid converting between collections to ensure that we test what
+ * we mean to test.
+ */
+ private def assertForeach[E](expected: Traversable[E], actual: Traversable[E]): Unit = {
+ val notYetFound = new mutable.ArrayBuffer[E]() ++= expected
+ actual.foreach { e =>
+ assert(notYetFound.contains(e))
+ notYetFound -= e
+ }
+ assert(notYetFound.size == 0, "mutable.HashMap.foreach should have iterated over: " + notYetFound)
+ }
+}
diff --git a/test/files/run/t2594_tcpoly.check b/test/files/run/t2594_tcpoly.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t2594_tcpoly.check
diff --git a/test/files/run/t2594_tcpoly.scala b/test/files/run/t2594_tcpoly.scala
new file mode 100644
index 0000000000..e759ca8b0f
--- /dev/null
+++ b/test/files/run/t2594_tcpoly.scala
@@ -0,0 +1,18 @@
+trait Monad[M[_]] {
+ def foo[A](a: M[A]): M[A]
+}
+
+class Bar[A, B]
+class Bar1[A] { type And[B] = Bar[A, B] }
+
+object Test {
+ // the combination of partial applications and anonymous class is essential to reproduce the bug
+ // problem: missing bridge method
+ // --> abstractmethoderror `Main$$anon$1.foo(Ljava/lang/Object;)Ljava/lang/Object;`
+ // the anonymous class only gets `public Bar foo(Bar a)`
+ def BarMonad[X] = new Monad[Bar1[X]#And] {
+ def foo[A](a: Bar[X, A]) = a
+ }
+
+ def main(as: Array[String]) { BarMonad[Int] foo (new Bar[Int, Int]) }
+} \ No newline at end of file
diff --git a/test/files/run/t2754.scala b/test/files/run/t2754.scala
new file mode 100644
index 0000000000..aeb5259dab
--- /dev/null
+++ b/test/files/run/t2754.scala
@@ -0,0 +1,39 @@
+object Test {
+ def main(args: Array[String]) {
+ val v: FooBarPlus[Int] = new FooBarPlusImpl()
+ v.foo += 10
+ }
+}
+
+trait Foo[P] {
+ def foo: P
+}
+
+trait FooBar[P] extends Foo[P] {
+ def bar: P
+}
+
+trait FooBarPlus[P] extends FooBar[P] {
+ override def foo: P
+ override def bar: P
+
+ def foo_=(x: P)
+ def bar_=(x: P)
+}
+
+class FooImpl extends Foo[Int] {
+ def foo = 1
+}
+
+class FooBarImpl extends FooImpl with FooBar[Int] {
+ protected var f = 0
+ protected var b = 0
+
+ override def foo = f
+ def bar = b
+}
+
+class FooBarPlusImpl extends FooBarImpl with FooBarPlus[Int] {
+ def foo_=(x: Int) { f = x }
+ def bar_=(x: Int) { b = x }
+}
diff --git a/test/files/run/t2849.scala b/test/files/run/t2849.scala
new file mode 100644
index 0000000000..0588e5ed92
--- /dev/null
+++ b/test/files/run/t2849.scala
@@ -0,0 +1,46 @@
+
+
+
+import scala.collection.immutable.SortedSet
+import scala.collection.immutable.TreeSet
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ ticketExample
+ similarExample
+ }
+
+ def ticketExample {
+ var big = 100000
+
+ var aSortedSet: SortedSet[Int] = TreeSet(big)
+
+ for (i <- 1 until 10000) {
+ aSortedSet = (aSortedSet - big) ++ (TreeSet(i, big - 1))
+ big = big - 1
+ if (i % 1000 == 0) {
+ aSortedSet.until(i)
+ }
+ }
+ }
+
+ def similarExample {
+ var big = 100
+
+ var aSortedSet: SortedSet[Int] = TreeSet(big)
+
+ for (i <- 1 until 10000) {
+ aSortedSet = (aSortedSet - big) ++ (TreeSet(i, big - 1)) + big
+ big = big - 1
+ if (i % 1000 == 0) {
+ aSortedSet.until(i)
+ }
+ }
+ }
+
+}
+
+
diff --git a/test/files/run/t2857.check b/test/files/run/t2857.check
new file mode 100644
index 0000000000..c508d5366f
--- /dev/null
+++ b/test/files/run/t2857.check
@@ -0,0 +1 @@
+false
diff --git a/test/files/run/t2857.scala b/test/files/run/t2857.scala
new file mode 100644
index 0000000000..bd0d6fde16
--- /dev/null
+++ b/test/files/run/t2857.scala
@@ -0,0 +1,9 @@
+object Test extends Application {
+ import collection.mutable._
+ val m = new HashMap[Int, Set[String]] with MultiMap[Int, String]
+ m.addBinding(6, "Foo")
+ m.removeBinding(6, "Foo")
+ println(m.contains(6))
+}
+
+
diff --git a/test/files/run/t2867.scala b/test/files/run/t2867.scala
new file mode 100644
index 0000000000..0d30f95f8d
--- /dev/null
+++ b/test/files/run/t2867.scala
@@ -0,0 +1,15 @@
+object Test {
+ case class A(l: List[_]*)
+
+ def main(args: Array[String]): Unit = {
+ /** Kind of sneaking a slightly different test in here as well as
+ * testing 2867. How subversive.
+ */
+ val xs1 = List(1, 2, 3)
+ val xs2 = List(1.0, 2.0, 3.0)
+ val xs3 = List[Any](1.0f, 2.0f, 3.0f)
+ val xs4 = List[Byte](1, 2, 3)
+
+ assert(A(List(xs1, xs2)) == A(List(xs3, xs4)))
+ }
+}
diff --git a/test/files/run/t2886.check b/test/files/run/t2886.check
new file mode 100644
index 0000000000..39ee46a3df
--- /dev/null
+++ b/test/files/run/t2886.check
@@ -0,0 +1 @@
+Function(List(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String)))))),Block(List(ValDef(LocalValue(NoSymbol,x$1,NoType),Ident(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String))))))), ValDef(LocalValue(NoSymbol,x$2,NoType),Ident(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String)))))))),Apply(Select(This(Class(Test)),Method(Test.test,MethodType(List(LocalValue(NoSymbol,name,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String))))), LocalValue(NoSymbol,address,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String)))))),PrefixedType(ThisType(Class(scala)),Class(scala.Null))))),List(Ident(LocalValue(NoSymbol,x$2,NoType)), Ident(LocalValue(NoSymbol,x$1,NoType)))))) \ No newline at end of file
diff --git a/test/files/run/t2886.scala b/test/files/run/t2886.scala
new file mode 100644
index 0000000000..eb392f0c58
--- /dev/null
+++ b/test/files/run/t2886.scala
@@ -0,0 +1,7 @@
+object Test {
+ def test(name: String, address: String) = null
+ def main(args: Array[String]) = {
+ val tree = scala.reflect.Code.lift((x:String) => test(address=x,name=x)).tree
+ println(tree)
+ }
+}
diff --git a/test/files/run/t3026.check b/test/files/run/t3026.check
new file mode 100644
index 0000000000..8c29b615fa
--- /dev/null
+++ b/test/files/run/t3026.check
@@ -0,0 +1,2 @@
+RED
+YELLOW
diff --git a/test/files/run/t3026.scala b/test/files/run/t3026.scala
new file mode 100755
index 0000000000..0231c7bcd1
--- /dev/null
+++ b/test/files/run/t3026.scala
@@ -0,0 +1,8 @@
+object Test {
+ abstract class Colour
+ case object RED extends Colour
+ case object YELLOW extends Colour
+ val items = Array(RED, YELLOW)
+
+ def main(args: Array[String]): Unit = items foreach println
+}
diff --git a/test/files/run/t3112.check b/test/files/run/t3112.check
new file mode 100644
index 0000000000..a95644c82c
--- /dev/null
+++ b/test/files/run/t3112.check
@@ -0,0 +1,4 @@
+Vector()
+Vector()
+Vector()
+Vector() \ No newline at end of file
diff --git a/test/files/run/t3112.scala b/test/files/run/t3112.scala
new file mode 100644
index 0000000000..88677fa09e
--- /dev/null
+++ b/test/files/run/t3112.scala
@@ -0,0 +1,11 @@
+// #3112
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ println((Vector() ++ (0 until 32)) take 0) // works
+ println((Vector() ++ (0 until 33)) take 0) // error
+ println((Vector() ++ (0 until 32)) takeRight 0) // works
+ println((Vector() ++ (0 until 33)) takeRight 0) // error
+ }
+
+} \ No newline at end of file
diff --git a/test/files/run/t3158.check b/test/files/run/t3158.check
new file mode 100644
index 0000000000..ab1cb284d5
--- /dev/null
+++ b/test/files/run/t3158.check
@@ -0,0 +1 @@
+Array(<function1>)
diff --git a/test/files/run/t3158.scala b/test/files/run/t3158.scala
new file mode 100644
index 0000000000..2261b5cd81
--- /dev/null
+++ b/test/files/run/t3158.scala
@@ -0,0 +1,9 @@
+object Test {
+ def main(args: Array[String]) {
+ println(args.map(_ => foo _).deep)
+ }
+
+ def foo(xs: String*) {
+ println(xs)
+ }
+}
diff --git a/test/files/run/t3186.check b/test/files/run/t3186.check
new file mode 100644
index 0000000000..c508d5366f
--- /dev/null
+++ b/test/files/run/t3186.check
@@ -0,0 +1 @@
+false
diff --git a/test/files/run/t3186.scala b/test/files/run/t3186.scala
new file mode 100644
index 0000000000..2534d4a164
--- /dev/null
+++ b/test/files/run/t3186.scala
@@ -0,0 +1,7 @@
+object Dist1 extends Enumeration { val Mile, Foot, Inch = Value }
+
+object Dist2 extends Enumeration { val Kilometer, Millimeter, Parsec = Value }
+
+object Test extends Application {
+ println(Dist1.Mile == Dist2.Kilometer)
+}
diff --git a/test/files/run/t3241.check b/test/files/run/t3241.check
new file mode 100644
index 0000000000..348ebd9491
--- /dev/null
+++ b/test/files/run/t3241.check
@@ -0,0 +1 @@
+done \ No newline at end of file
diff --git a/test/files/run/t3241.scala b/test/files/run/t3241.scala
new file mode 100644
index 0000000000..40097a046f
--- /dev/null
+++ b/test/files/run/t3241.scala
@@ -0,0 +1,23 @@
+object Test {
+
+ def main(args : Array[String]) : Unit = {
+ recurse(Map(1->1, 2->2, 3->3, 4->4, 5->5, 6->6, 7->7))
+ recurse(Set(1,2,3,4,5,6,7))
+ println("done")
+ }
+
+ def recurse(map: collection.immutable.Map[Int, Int]): Unit = {
+ if (!map.isEmpty) {
+ val x = map.keys.head
+ recurse(map - x)
+ }
+ }
+
+ def recurse(set: collection.immutable.Set[Int]): Unit = {
+ if (!set.isEmpty) {
+ val x = set.toStream.head
+ recurse(set - x)
+ }
+ }
+
+}
diff --git a/test/files/run/t3242.check b/test/files/run/t3242.check
new file mode 100644
index 0000000000..a145f6df8f
--- /dev/null
+++ b/test/files/run/t3242.check
@@ -0,0 +1,18 @@
+ append [num: 200] vec
+ remove [num: 200] vec
+ append [num: 400] vec
+ remove [num: 400] vec
+ append [num: 600] vec
+ remove [num: 600] vec
+ append [num: 800] vec
+ remove [num: 800] vec
+>> comparison done, num: 200
+ append [num: 2000] vec
+ remove [num: 2000] vec
+ append [num: 4000] vec
+ remove [num: 4000] vec
+ append [num: 6000] vec
+ remove [num: 6000] vec
+ append [num: 8000] vec
+ remove [num: 8000] vec
+>> comparison done, num: 2000
diff --git a/test/files/run/t3242.scala b/test/files/run/t3242.scala
new file mode 100644
index 0000000000..f8defaa5cd
--- /dev/null
+++ b/test/files/run/t3242.scala
@@ -0,0 +1,49 @@
+object Test {
+
+ def benchmarkA(num: Int) {
+
+ type A = Int
+
+ def updateM[M[_]](ms: M[A], update: (M[A], A)=>M[A]): M[A] = {
+ var is = ms
+ for (i <- 0 until num) is = update(is, i)
+ is
+ }
+
+ //
+ def vectorAppend: Vector[A] = updateM[Vector](Vector(), (as, a)=>{
+ val v = (as :+ a)
+ //println("==>append: i: "+i1+", v: "+v)
+ v
+ })
+ // this will crash, Vector bug!
+ def vectorRemove(vec: Vector[A]): Vector[A] = updateM[Vector](vec, (as, a)=>{
+ val v = (as filterNot{ _ == a})
+ //val v = (is filter{ _ != i})
+ //println("==>remove: i: "+a)
+ v
+ })
+
+ val ct = vectorAppend
+ println(" append [num: "+num+"] vec")
+ vectorRemove(ct)
+ println(" remove [num: "+num+"] vec")
+ } // BenchmarkA
+
+ def comparison(num: Int): Unit = {
+ for (i <- 1 until 5) benchmarkA(num*i)
+ println(">> comparison done, num: "+num);
+ }
+
+ def main(args: Array[String]): Unit = {
+ try {
+ //createBenchmarkA(23).testRun
+
+ comparison(200) // OK
+ comparison(2000) // this will crash
+
+ } catch {
+ case e: Exception => e.printStackTrace()
+ }
+ }
+}
diff --git a/test/files/run/t3242b.scala b/test/files/run/t3242b.scala
new file mode 100644
index 0000000000..7a296aac15
--- /dev/null
+++ b/test/files/run/t3242b.scala
@@ -0,0 +1,17 @@
+import scala.collection.immutable._
+
+object Test {
+
+ def test(n: Int) = {
+ var vb = new VectorBuilder[Int]
+ for (i <- 0 until n)
+ vb += i
+ val v = vb.result
+ assert(v == (0 until n), "not same as (0 until " + n + "): " + v)
+ }
+
+ def main(args: Array[String]): Unit = {
+ for (i <- 0 until 2000)
+ test(i)
+ }
+}
diff --git a/test/files/run/t3361.scala b/test/files/run/t3361.scala
new file mode 100644
index 0000000000..3e6fc30c8f
--- /dev/null
+++ b/test/files/run/t3361.scala
@@ -0,0 +1,100 @@
+object Test extends Application {
+ import scala.collection.mutable.DoubleLinkedList
+
+ empty
+ builder_1
+ builder_2
+ chaining_1
+ chaining_2
+ insert_1
+ insert_2
+ append_1
+ append_2
+
+ def empty {
+ val none = DoubleLinkedList()
+ require(none.size == 0)
+ none.foreach( _ => require(false))
+ }
+
+ def builder_1 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ require(10 == ten.size)
+ }
+
+ def builder_2 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ require((ten.size*(ten.size+1))/2 == ten.reduceLeft(_ + _))
+ }
+
+ def chaining_1 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ require(ten.reverse == DoubleLinkedList((1 to 10).reverse: _*))
+ }
+
+ def chaining_2 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ require(ten == ten.reverse.reverse)
+ }
+
+ def insert_1 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ ten.insert(DoubleLinkedList(11)) match {
+ case _: Unit => require(true)
+ case _ => require(false)
+ }
+ // Post-insert size test
+ require(11 == ten.size)
+ // Post-insert data test
+ require((ten.size*(ten.size+1))/2 == ten.reduceLeft(_ + _))
+ // Post-insert chaining test
+ require(ten == ten.reverse.reverse)
+ // Post-insert position test
+ require(ten.last == 11)
+ }
+
+ def insert_2 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ try {
+ DoubleLinkedList().insert(ten)
+ } catch {
+ case _: IllegalArgumentException => require(true)
+ case _ => require(false)
+ }
+ val zero = DoubleLinkedList(0)
+ zero.insert(ten)
+ require(zero.size == 11)
+ require(zero.head == 0)
+ require(zero.last == 10)
+ }
+
+ def append_1 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ val eleven = ten.append(DoubleLinkedList(11))
+ // Post-append equality test
+ require(ten == eleven)
+ // Post-append size test
+ require(11 == ten.size)
+ // Post-append data test
+ require((ten.size*(ten.size+1))/2 == ten.reduceLeft(_ + _))
+ // Post-append chaining test
+ require(ten == ten.reverse.reverse)
+ // Post-append position test
+ require(ten.last == 11)
+ }
+
+ def append_2 {
+ val ten = DoubleLinkedList(1 to 10: _*)
+ try {
+ DoubleLinkedList().append(ten)
+ } catch {
+ case _: IllegalArgumentException => require(true)
+ case _ => require(false)
+ }
+ val zero = DoubleLinkedList(0)
+ zero.append(ten)
+ require(zero.size == 11)
+ require(zero.head == 0)
+ require(zero.last == 10)
+ }
+}
diff --git a/test/files/run/t3493.scala b/test/files/run/t3493.scala
new file mode 100644
index 0000000000..b0b7589cfd
--- /dev/null
+++ b/test/files/run/t3493.scala
@@ -0,0 +1,15 @@
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ import scala.collection.immutable._
+ val x = TreeSet("a", "b", "c", "d")
+ val x2 = x + "e"
+ assert(x2.toString == "TreeSet(a, b, c, d, e)")
+ assert(x2.toString == runtime.ScalaRunTime.stringOf(x2).trim)
+ }
+
+}
diff --git a/test/files/run/t3496.scala b/test/files/run/t3496.scala
new file mode 100644
index 0000000000..80a4e6bd86
--- /dev/null
+++ b/test/files/run/t3496.scala
@@ -0,0 +1,15 @@
+
+
+
+
+// ticket #3496
+object Test {
+
+ def main(args: Array[String]) {
+ val s = Stream.from(1)
+ s.take(5)
+ s.drop(5)
+ s.splitAt(5)
+ }
+
+}
diff --git a/test/files/run/t3502.scala b/test/files/run/t3502.scala
new file mode 100644
index 0000000000..9492b2d4d4
--- /dev/null
+++ b/test/files/run/t3502.scala
@@ -0,0 +1,24 @@
+
+
+
+
+
+// ticket #3502
+object Test {
+
+ object GeneratePrimeFactorsLazy extends (Int => List[Int]) {
+ override def apply(n:Int) = {
+ val s = Stream.range(2, n / 2).filter(n % _ == 0)
+ //val s = for (i <- Stream.range(2, n / 2); if n % i == 0) yield i
+ s.headOption.map(x => x :: apply(n / x)).getOrElse(List(n))
+ }
+ }
+
+ def main(args:Array[String]) {
+ // a prime number
+ //val num = 623456789
+ val num = 2796203
+ assert(GeneratePrimeFactorsLazy(num) == List(num))
+ }
+
+}
diff --git a/test/files/run/t3508.scala b/test/files/run/t3508.scala
new file mode 100644
index 0000000000..01d976ba0d
--- /dev/null
+++ b/test/files/run/t3508.scala
@@ -0,0 +1,11 @@
+
+
+import collection.immutable._
+
+
+// ticket #3508
+object Test {
+ def main(args: Array[String]) {
+ assert(Stream.tabulate(123)(_ + 1).toList == List.tabulate(123)(_ + 1))
+ }
+}
diff --git a/test/files/run/t3580.scala b/test/files/run/t3580.scala
new file mode 100644
index 0000000000..ac9f81ab9e
--- /dev/null
+++ b/test/files/run/t3580.scala
@@ -0,0 +1,17 @@
+
+
+
+
+
+object Test {
+
+ class Empty extends Traversable[Nothing] {
+ def foreach[U](f: Nothing => U) {}
+ }
+
+ def main(args: Array[String]) {
+ val t = new Empty
+ t.toStream
+ }
+
+}
diff --git a/test/files/run/t3603.scala b/test/files/run/t3603.scala
new file mode 100644
index 0000000000..a0821a2a45
--- /dev/null
+++ b/test/files/run/t3603.scala
@@ -0,0 +1,18 @@
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ import collection.immutable._
+
+ val intmap = IntMap(1 -> 1, 2 -> 2)
+ val intres = intmap.map { case (a, b) => (a, b.toString) }
+ assert(intres.isInstanceOf[IntMap[_]])
+
+ val longmap = LongMap(1L -> 1, 2L -> 2)
+ val longres = longmap.map { case (a, b) => (a, b.toString) }
+ assert(longres.isInstanceOf[LongMap[_]])
+ }
+
+}
diff --git a/test/files/run/t3645.scala b/test/files/run/t3645.scala
new file mode 100644
index 0000000000..af2543377b
--- /dev/null
+++ b/test/files/run/t3645.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]) {
+ val s = Stream.tabulate(5)(x => x+2)
+ assert( s.toList == List(2,3,4,5,6) )
+ }
+}
diff --git a/test/files/run/t3667.check b/test/files/run/t3667.check
new file mode 100644
index 0000000000..01e79c32a8
--- /dev/null
+++ b/test/files/run/t3667.check
@@ -0,0 +1,3 @@
+1
+2
+3
diff --git a/test/files/run/t3667.scala b/test/files/run/t3667.scala
new file mode 100644
index 0000000000..7bd0b4ec5e
--- /dev/null
+++ b/test/files/run/t3667.scala
@@ -0,0 +1,53 @@
+object Test {
+ def main(args: Array[String]) {
+ val o1 = new Outer1
+ val o2 = new Outer2
+ val o3 = new Outer3
+
+ println(1)
+ ser(new o1.Inner(1))
+ o1.Inner // make sure the Inner$module field of the Outer1 instance is initialized!
+ ser(new o1.Inner(1))
+
+ println(2)
+ ser(new o2.Inner(1))
+ o2.Inner
+ ser(new o2.Inner(1))
+
+ println(3)
+ ser(new o3.Inner(1))
+ o3.Inner
+ ser(new o3.Inner(1))
+
+ foo
+ }
+
+ def foo {
+ case class C(x: Int)
+ ser(new C(1))
+ ser(C)
+ }
+
+ def ser(o: AnyRef) {
+ val oos = new java.io.ObjectOutputStream(new java.io.ByteArrayOutputStream())
+ oos.writeObject(o)
+ oos.close()
+ }
+
+}
+
+@serializable
+class Outer1 {
+ @serializable
+ class Inner(x: Int = 1)
+}
+
+@serializable
+class Outer2 {
+ case class Inner(x: Int = 1)
+}
+
+@serializable
+class Outer3 {
+ case class Inner(x: Int)
+}
diff --git a/test/files/run/t3687.check b/test/files/run/t3687.check
new file mode 100644
index 0000000000..0f35862645
--- /dev/null
+++ b/test/files/run/t3687.check
@@ -0,0 +1,2 @@
+t.ValueSet(a, b)
+t.ValueSet(a, b) \ No newline at end of file
diff --git a/test/files/run/t3687.scala b/test/files/run/t3687.scala
new file mode 100644
index 0000000000..25141f8a32
--- /dev/null
+++ b/test/files/run/t3687.scala
@@ -0,0 +1,6 @@
+object t extends Enumeration { val a, b = Value }
+
+object Test extends Application {
+ println(t.values)
+ println(t.values)
+}
diff --git a/test/files/run/t3719.check b/test/files/run/t3719.check
new file mode 100644
index 0000000000..111fc7fd63
--- /dev/null
+++ b/test/files/run/t3719.check
@@ -0,0 +1,4 @@
+List(Mon, Tue, Wed, Thu, Fri, Sat, Sun)
+Mon
+Tue
+Mon \ No newline at end of file
diff --git a/test/files/run/t3719.scala b/test/files/run/t3719.scala
new file mode 100644
index 0000000000..0dd3fc2af9
--- /dev/null
+++ b/test/files/run/t3719.scala
@@ -0,0 +1,35 @@
+object Days extends Enumeration {
+ type Day = DayValue
+ val Mon, Tue, Wed, Thu, Fri, Sat, Sun = new DayValue // DayValue
+
+ protected class DayValue extends Val {
+ def isWeekday: Boolean =
+ this match {
+ case Sun => false
+ case Sat => false
+ case _ => true
+ }
+ }
+}
+
+object Test extends Application {
+ def dayElementsShouldBeNamed(): List[String] =
+ Days.values.toList.sorted.map(x => x.toString)
+
+ def nameOfMon(): String = {
+ import Days._
+ val d: Day = Mon
+ d.toString
+ }
+
+ def nameOfTue(): String = {
+ import Days._
+ val d: Day = Tue
+ d.toString
+ }
+
+ println(dayElementsShouldBeNamed())
+ println(nameOfMon())
+ println(nameOfTue())
+ println(nameOfMon())
+}
diff --git a/test/files/run/t3726.check b/test/files/run/t3726.check
new file mode 100644
index 0000000000..7a5775bf34
--- /dev/null
+++ b/test/files/run/t3726.check
@@ -0,0 +1,2 @@
+hi there
+5 \ No newline at end of file
diff --git a/test/files/run/t3726.scala b/test/files/run/t3726.scala
new file mode 100644
index 0000000000..5ceed5416e
--- /dev/null
+++ b/test/files/run/t3726.scala
@@ -0,0 +1,8 @@
+object Test extends Application {
+ def test(f: () => Int) = {
+ val x = f()
+ 5
+ }
+
+ println(test(() => { println("hi there"); 0 }))
+}
diff --git a/test/files/run/t3763.scala b/test/files/run/t3763.scala
new file mode 100644
index 0000000000..c8462b7437
--- /dev/null
+++ b/test/files/run/t3763.scala
@@ -0,0 +1,3 @@
+object Test extends Application {
+ val x = Array(Array(1), List(1))
+}
diff --git a/test/files/run/t3950.check b/test/files/run/t3950.check
new file mode 100644
index 0000000000..10f81c51ad
--- /dev/null
+++ b/test/files/run/t3950.check
@@ -0,0 +1,3 @@
+minus
+zero
+plus \ No newline at end of file
diff --git a/test/files/run/t3950.scala b/test/files/run/t3950.scala
new file mode 100644
index 0000000000..fe99a7cc6f
--- /dev/null
+++ b/test/files/run/t3950.scala
@@ -0,0 +1,17 @@
+
+object NegativeId extends Enumeration {
+ val Negative = Value(-1, "minus")
+ val Zero = Value(0, "zero")
+ val Positive = Value(1, "plus")
+
+ def fromInt(id: Int) = values find (_.id == id) match {
+ case Some(v) => v
+ case None => null
+ }
+}
+
+object Test extends Application {
+ println(NegativeId.fromInt(-1))
+ println(NegativeId.fromInt(0))
+ println(NegativeId.fromInt(1))
+}
diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala
index 7f40277d4d..33382405e1 100644
--- a/test/files/run/tailcalls.scala
+++ b/test/files/run/tailcalls.scala
@@ -194,10 +194,10 @@ object FancyTailCalls {
}
object PolyObject extends Application {
- def tramp[A](x: Int): Int =
+ def tramp[A](x: Int): Int =
if (x > 0)
tramp[A](x - 1)
- else
+ else
0
}
@@ -233,7 +233,7 @@ class NonTailCall {
if (n == 0) 0
else f2(n - 1)
}
-
+
}
//############################################################################
@@ -273,7 +273,7 @@ object Test {
}
println
}
-
+
def check_overflow(name: String, closure: => Int) {
print("test " + name)
try {
@@ -367,7 +367,7 @@ object Test {
check_success("TailCall.g3", TailCall.g3(max, max, Nil), 0)
check_success("TailCall.h1", TailCall.h1(max, max ), 0)
println
-
+
val NonTailCall = new NonTailCall
check_success("NonTailCall.f1", NonTailCall.f1(2), 0)
check_overflow("NonTailCall.f2", NonTailCall.f2(max))
@@ -381,6 +381,18 @@ object Test {
check_success("PolyObject.tramp", PolyObject.tramp[Int](max), 0)
}
+ // testing explicit tailcalls.
+
+ import scala.util.control.TailCalls._
+
+ def isEven(xs: List[Int]): TailRec[Boolean] =
+ if (xs.isEmpty) done(true) else tailcall(isOdd(xs.tail))
+
+ def isOdd(xs: List[Int]): TailRec[Boolean] =
+ if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
+
+ assert(isEven((1 to 100000).toList).result)
+
}
//############################################################################
diff --git a/test/files/run/takeAndDrop.scala b/test/files/run/takeAndDrop.scala
index 8d2dff0bfa..6e87838e11 100644
--- a/test/files/run/takeAndDrop.scala
+++ b/test/files/run/takeAndDrop.scala
@@ -1,9 +1,9 @@
-object Test {
+object Test {
def main(args: Array[String]): Unit = {
val range = 1 to 10
val target = (3 to 8).toList
val confirm = (xs: Seq[Int]) => assert(xs.toList == target, xs)
-
+
confirm(range drop 2 dropRight 2)
confirm(range drop 1 dropRight 1 drop 1 dropRight 1)
confirm(range take 8 drop 2)
diff --git a/test/files/run/tcpoly_monads.scala b/test/files/run/tcpoly_monads.scala
index bcfe6d2363..a549caa51a 100644
--- a/test/files/run/tcpoly_monads.scala
+++ b/test/files/run/tcpoly_monads.scala
@@ -4,15 +4,15 @@ trait Monads {
* (>>=) :: m a -> (a -> m b) -> m b
* return :: a -> m a
*
- * MonadTC encodes the above Haskell type class,
+ * MonadTC encodes the above Haskell type class,
* an instance of MonadTC corresponds to a method dictionary.
* (see http://lampwww.epfl.ch/~odersky/talks/wg2.8-boston06.pdf)
*
* Note that the identity (`this') of the method dictionary does not really correspond
- * to the instance of m[x] (`self') that is `wrapped': e.g., unit does not use `self' (which
+ * to the instance of m[x] (`self') that is `wrapped': e.g., unit does not use `self' (which
* corresponds to the argument of the implicit conversion that encodes an instance of this type class)
*/
- trait MonadTC[m[x], a] {
+ trait MonadTC[m[x], a] {
def unit[a](orig: a): m[a]
// >>='s first argument comes from the implicit definition constructing this "method dictionary"
@@ -27,7 +27,7 @@ trait Monads {
*/
trait OptionMonad extends Monads {
// this implicit method encodes the Monad type class instance for Option
- implicit def OptionInstOfMonad[a](self: Option[a]): MonadTC[Option, a]
+ implicit def OptionInstOfMonad[a](self: Option[a]): MonadTC[Option, a]
= new MonadTC[Option, a] {
def unit[a](orig: a) = Some(orig)
def >>=[b](fun: a => Option[b]): Option[b] = self match {
diff --git a/test/files/run/tcpoly_parseridioms.scala b/test/files/run/tcpoly_parseridioms.scala
index edc5d5cad8..2747e1bc8d 100644
--- a/test/files/run/tcpoly_parseridioms.scala
+++ b/test/files/run/tcpoly_parseridioms.scala
@@ -1,10 +1,10 @@
trait Parsers {
type Input = List[Char]
-
+
sealed class ParseResult[+t](val next: Input)
- case class Success[+t](override val next: Input, result: t) extends ParseResult[t](next)
+ case class Success[+t](override val next: Input, result: t) extends ParseResult[t](next)
case class Failure(override val next: Input, msg: String) extends ParseResult[Nothing](next)
-
+
abstract class Parser[+t] {
def apply(in: Input): ParseResult[t]
}
@@ -16,7 +16,7 @@ trait Parsers {
case Success(next2, y) => Success(next2, Pair(x,y))
case Failure(_, msg) => Failure(in, msg)
}
- case Failure(_, msg) => Failure(in, msg)
+ case Failure(_, msg) => Failure(in, msg)
}
}
@@ -38,20 +38,20 @@ trait Parsers {
case Failure(n, msg) => Failure(n, msg)
}
}
-
+
def accept[T](c: Char, r: T): Parser[T] = new Parser[T] {
def apply(in: Input) = in match {
case c2 :: n if c2 == c => Success(n, r)
case n => Failure(n, "expected "+c+" at the head of "+n)
}
}
-
- def apply_++[s, tt](fun: Parser[s => tt], arg: Parser[s]): Parser[tt] = lift[Pair[s=>tt, s], tt]({case Pair(f, a) => f(a)})(sq(fun, arg))
-
+
+ def apply_++[s, tt](fun: Parser[s => tt], arg: Parser[s]): Parser[tt] = lift[Pair[s=>tt, s], tt]({case Pair(f, a) => f(a)})(sq(fun, arg))
+
def success[u](v: u): Parser[u] = new Parser[u] {
def apply(in: Input) = Success(in, v)
}
-
+
}
trait Idioms {
@@ -61,21 +61,21 @@ trait Idioms {
def pureMethod[a](name: String, x: a): idi[a] = pure(x) // hack for Mirrors: allow passing of method names
}
- class IdiomaticTarget[idi[x], idiom <: Idiom[idi], s](i: idiom, tgt: s) {
+ class IdiomaticTarget[idi[x], idiom <: Idiom[idi], s](i: idiom, tgt: s) {
def dot [t](fun: s => t, name: String) = new IdiomaticApp2[idi, idiom, t](i, i.liftedApply(i.pureMethod(name, fun))(i.pure(tgt)))
} // TODO: `.` --> java.lang.ClassFormatError: Illegal method name "." in class Idioms$Id$
- class IdiomaticFunction[idi[x], idiom <: Idiom[idi], s, t](i: idiom, fun: s => t) {
+ class IdiomaticFunction[idi[x], idiom <: Idiom[idi], s, t](i: idiom, fun: s => t) {
def <| (a: idi[s]) = new IdiomaticApp[idi, idiom, t](i, i.liftedApply(i.pure(fun))(a))
}
class IdiomaticApp[idi[x], idiom <: Idiom[idi], x](i: idiom, a: idi[x]) {
// where x <: s=>t -- TODO can this be expressed without generalised constraints?
def <> [s, t](b: idi[s]) = new IdiomaticApp[idi, idiom, t](i, i.liftedApply(a.asInstanceOf[idi[s=>t]])(b))
-
+
def |> : idi[x] = a
}
-
+
class IdiomaticApp2[idi[x], idiom <: Idiom[idi], x](i: idiom, a: idi[x]) extends IdiomaticApp[idi, idiom, x](i, a) {
def <| [s, t](b: idi[s]) = <>[s,t](b)
}
@@ -86,22 +86,22 @@ trait ParserIdioms extends Parsers with Idioms {
def liftedApply[s, t](fun: Parser[s => t])(arg: Parser[s]): Parser[t] = apply_++(fun, arg)
def pure[a](x: a): Parser[a] = success(x)
}
-
- implicit def parserIdiomFun[s, t](fun: s=>t): IdiomaticFunction[Parser, ParserIdiom.type, s, t] =
+
+ implicit def parserIdiomFun[s, t](fun: s=>t): IdiomaticFunction[Parser, ParserIdiom.type, s, t] =
new IdiomaticFunction[Parser, ParserIdiom.type, s, t](ParserIdiom, fun)
- implicit def parserIdiomTgt[s](tgt: s): IdiomaticTarget[Parser, ParserIdiom.type, s] =
+ implicit def parserIdiomTgt[s](tgt: s): IdiomaticTarget[Parser, ParserIdiom.type, s] =
new IdiomaticTarget[Parser, ParserIdiom.type, s](ParserIdiom, tgt)
-
+
trait Expr
case class Plus(a: Int, b: Int) extends Expr
-
+
def num = or(accept('0', 0), or(accept('1', 1),accept('2', 2)))
-
- // TODO: how can parserIdiom(curry2(_)) be omitted?
+
+ // TODO: how can parserIdiom(curry2(_)) be omitted?
def expr: Parser[Expr] = parserIdiomFun(curry2(Plus)) <| num <> num |>
-
+
implicit def curry2[s,t,u](fun: (s, t)=>u)(a: s)(b: t) = fun(a, b)
- implicit def curry3[r,s,t,u](fun: (r,s, t)=>u)(a: r)(b: s)(c: t) = fun(a, b, c)
+ implicit def curry3[r,s,t,u](fun: (r,s, t)=>u)(a: r)(b: s)(c: t) = fun(a, b, c)
}
object Test extends ParserIdioms with Application {
diff --git a/test/files/run/treePrint.check b/test/files/run/treePrint.check
new file mode 100644
index 0000000000..3360815ac1
--- /dev/null
+++ b/test/files/run/treePrint.check
@@ -0,0 +1,5 @@
+def foo = {
+ var q: Boolean = false;
+ val x = 5;
+ ((x == 5) || (!q)) || (true)
+}
diff --git a/test/files/run/treePrint.scala b/test/files/run/treePrint.scala
new file mode 100644
index 0000000000..075ceb5234
--- /dev/null
+++ b/test/files/run/treePrint.scala
@@ -0,0 +1,40 @@
+/** Testing compact tree printers.
+ */
+object Test {
+ import scala.tools.nsc._
+ import java.io.{ OutputStream, BufferedReader, StringReader, PrintWriter, Writer, OutputStreamWriter}
+
+ val code = """
+ def foo = {
+ var q: Boolean = false
+ val x = if (true) {
+ if (true) {
+ if (true) {
+ 5
+ }
+ else if (true) {
+ 5
+ } else {
+ 10
+ }
+ }
+ else 20
+ }
+ else 30
+
+ (x == 5) || !q || true
+ }
+ """
+
+ class NullOutputStream extends OutputStream { def write(b: Int) { } }
+
+ def main(args: Array[String]) {
+ val settings = new Settings
+ settings.classpath.value = System.getProperty("java.class.path")
+ settings.Ycompacttrees.value = true
+
+ val repl = new Interpreter(settings, new PrintWriter(new NullOutputStream))
+ repl.interpret("""def initialize = "Have to interpret something or we get errors." """)
+ println(repl.power mkTree code)
+ }
+}
diff --git a/test/files/run/try-2.scala b/test/files/run/try-2.scala
index 909a68bbd2..677f0b48eb 100644
--- a/test/files/run/try-2.scala
+++ b/test/files/run/try-2.scala
@@ -7,7 +7,7 @@
object Test {
- def tryAllUnit: Unit =
+ def tryAllUnit: Unit =
try {
throw new Error();
}
@@ -15,28 +15,28 @@ object Test {
case _ => Console.println("exception happened\n");
}
- def tryUnitAll: Unit =
+ def tryUnitAll: Unit =
try {
Console.println("Nothin");
} catch {
case _ => error("Bad, bad, lama!");
}
- def tryAllAll: Unit =
+ def tryAllAll: Unit =
try {
throw new Error();
} catch {
case _ => error("Bad, bad, lama!");
}
- def tryUnitUnit: Unit =
+ def tryUnitUnit: Unit =
try {
Console.println("Nothin");
} catch {
case _ => Console.println("Nothin");
}
- def tryIntUnit: Unit =
+ def tryIntUnit: Unit =
try {
10;
} catch {
@@ -55,7 +55,7 @@ object Test {
execute(tryAllUnit);
execute(tryUnitAll);
execute(tryAllAll);
- execute(tryUnitUnit);
+ execute(tryUnitUnit);
execute(tryIntUnit);
}
}
diff --git a/test/files/run/try.scala b/test/files/run/try.scala
index 594c630cc8..7e615ec1a4 100644
--- a/test/files/run/try.scala
+++ b/test/files/run/try.scala
@@ -17,8 +17,8 @@ object Test extends AnyRef with Application {
Console.println(
(try { x } catch {
case _: Error => 1;
- })
- +
+ })
+ +
(try { x } catch {
case _: Error => 1;
})
@@ -116,7 +116,7 @@ object Test extends AnyRef with Application {
}
*/
-
+
try1;
try2;
try3;
diff --git a/test/files/run/typealias_overriding.scala b/test/files/run/typealias_overriding.scala
index 60e783791d..a102b66b79 100644
--- a/test/files/run/typealias_overriding.scala
+++ b/test/files/run/typealias_overriding.scala
@@ -1,21 +1,21 @@
// this bug (http://scala-webapps.epfl.ch/bugtracking/bugs/displayItem.do?id=1065)
-// was caused by Uncurry not normalizing all the types
+// was caused by Uncurry not normalizing all the types
// (more specifically the argument/return types of an anonymous Function)
object Test extends Application {
trait AddRemove {
type TNode <: NodeImpl;
trait NodeImpl;
-
+
object removing {
type TNode = AddRemove.this.TNode;
def printNode(node: TNode, f: TNode => String) = Console.println(f(node))
}
}
-
+
class Linked extends AddRemove {
type TNode = Node // can also directly write `class Node extends super.NodeImpl' -- doesn't change the bug
class Node extends super.NodeImpl { override def toString = "LinkedNode" }
-
+
removing.printNode(new Node, (x: removing.TNode) => x.toString) // make inference explicit, doesn't affect the bug
}
diff --git a/test/files/run/unapply.scala b/test/files/run/unapply.scala
index 72a4b0ac64..7b746af997 100644
--- a/test/files/run/unapply.scala
+++ b/test/files/run/unapply.scala
@@ -23,7 +23,7 @@ object Faa {
def unapply(x: Any): Option[String] = if(x.isInstanceOf[Bar]) Some(x.asInstanceOf[Bar].name) else None
}
object FaaPrecise {
- def unapply(x: Bar): Option[String] = Some(x.name)
+ def unapply(x: Bar): Option[String] = Some(x.name)
}
object FaaPreciseSome {
def unapply(x: Bar) = Some(x.name) // return type Some[String]
@@ -94,7 +94,7 @@ object LisSeqArr extends TestCase("LisSeqArr") with Assert {
//assertEquals((Array(1,2,3): Any) match { case Array(x,y,_*) => {x,y}}, {1,2})
// just compile, feature request #1196
-// (List(1,2,3): Any) match {
+// (List(1,2,3): Any) match {
// case a @ List(x,y,_*) => foo(a)
// }
@@ -111,7 +111,7 @@ object StreamFoo extends TestCase("unapply for Streams") with Assert {
case Stream.cons(hd, tl) => hd + sum(tl)
}
override def runTest {
- val str: Stream[int] = Stream.fromIterator(List(1,2,3).iterator)
+ val str: Stream[Int] = Stream.fromIterator(List(1,2,3).iterator)
assertEquals(sum(str), 6)
}
}
@@ -120,14 +120,14 @@ object Test1256 extends TestCase("1256") {
class Sync {
def unapply(scrut: Any): Boolean = false
}
-
+
class Buffer {
val Get = new Sync
-
+
val jp: PartialFunction[Any, Any] = {
case Get() =>
}
}
-
+
override def runTest { assertFalse((new Buffer).jp.isDefinedAt(42)) }
}
diff --git a/test/files/run/unapplyArray.scala b/test/files/run/unapplyArray.scala
index bf6582dadf..bf7c9e2300 100644
--- a/test/files/run/unapplyArray.scala
+++ b/test/files/run/unapplyArray.scala
@@ -1,7 +1,7 @@
object Test {
def main(args:Array[String]): Unit = {
val z = Array(1,2,3,4)
- val zs: Seq[int] = z
+ val zs: Seq[Int] = z
val za: Any = z
/*
diff --git a/test/files/run/unittest_collection.scala b/test/files/run/unittest_collection.scala
index 5d7ab97425..dd95540359 100644
--- a/test/files/run/unittest_collection.scala
+++ b/test/files/run/unittest_collection.scala
@@ -6,7 +6,7 @@ object Test {
trait BufferTest extends Assert {
def doTest(x:Buffer[String]) = {
- // testing method +=
+ // testing method +=
x += "one"
assertEquals("retrieving 'one'", x(0), "one")
assertEquals("length A ", x.length, 1)
@@ -19,12 +19,12 @@ object Test {
assertEquals("length C ", x.length, 1)
- try { x(1); fail("no exception for removed element") }
+ try { x(1); fail("no exception for removed element") }
catch { case i:IndexOutOfBoundsException => }
- try { x.remove(1); fail("no exception for removed element") }
+ try { x.remove(1); fail("no exception for removed element") }
catch { case i:IndexOutOfBoundsException => }
-
+
x += "two2"
assertEquals("length D ", x.length, 2)
@@ -38,7 +38,7 @@ object Test {
// clear
x.clear
assertEquals("length F ", x.length, 0)
-
+
// copyToBuffer
x += "a"
x += "b"
@@ -54,7 +54,7 @@ object Test {
var x: ArrayBuffer[String] = _
override def runTest = { setUp; doTest(x); tearDown }
-
+
override def setUp = { x = new scala.collection.mutable.ArrayBuffer }
override def tearDown = { x.clear; x = null }
@@ -90,13 +90,13 @@ object Test {
def main(args:Array[String]) = {
val ts = new TestSuite(
- //new TArrayBuffer,
- new TListBuffer//,
+ //new TArrayBuffer,
+ new TListBuffer//,
//new TBufferProxy
)
val tr = new TestResult()
ts.run(tr)
- for(val failure <- tr.failures) {
+ for (failure <- tr.failures) {
Console.println(failure)
}
}
diff --git a/test/files/run/unittest_io.scala b/test/files/run/unittest_io.scala
index 974dcff5b3..c2d95a3a7e 100644
--- a/test/files/run/unittest_io.scala
+++ b/test/files/run/unittest_io.scala
@@ -5,7 +5,7 @@ object Test extends TestConsoleMain {
class UTF8Tests extends TestCase("UTF8Codec") {
import io.UTF8Codec.encode
-
+
def runTest {
assertEquals(new String( encode(0x004D), "utf8"), new String(Array(0x004D.asInstanceOf[Char])))
assertEquals(new String( encode(0x0430), "utf8"), new String(Array(0x0430.asInstanceOf[Char])))
@@ -31,7 +31,7 @@ object Test extends TestConsoleMain {
f.copyToBuffer(b)
assertEquals(s, new String(b.toArray))
- /* todo: same factories for BufferedSource and Source
+ /* todo: same factories for BufferedSource and Source
val g = io.BufferedSource.fromBytes(s.getBytes("utf-8"))
val c = new collection.mutable.ArrayBuffer[Char]()
g.copyToBuffer(c)
diff --git a/test/files/run/unittest_iterator.scala b/test/files/run/unittest_iterator.scala
index 93aaa4a834..89ccdb9d58 100644
--- a/test/files/run/unittest_iterator.scala
+++ b/test/files/run/unittest_iterator.scala
@@ -1,6 +1,5 @@
// Some iterator grouped/sliding unit tests
-object Test
-{
+object Test {
def it = (1 to 10).iterator
def assertThat[T](expectedLength: Int, expectedLast: Seq[T])(it: Iterator[Seq[T]]) {
val xs = it.toList
@@ -8,15 +7,15 @@ object Test
assert(xs.size == expectedLength, fail("expected length " + expectedLength))
assert(xs.last == expectedLast, fail("expected last " + expectedLast))
}
-
+
def main(args: Array[String]): Unit = {
val itSum = it.toStream.sum
for (i <- it) {
// sum of the groups == sum of the original
val thisSum = ((it grouped i) map (_.sum)).toStream.sum
- assert(thisSum == itSum, thisSum + " != " + itSum)
+ assert(thisSum == itSum, thisSum + " != " + itSum)
}
-
+
// grouped
assertThat(4, List(10)) { it grouped 3 }
assertThat(3, List(7, 8, 9)) { it grouped 3 withPartial false }
@@ -33,5 +32,22 @@ object Test
assertThat(1, (1 to 8).toList) { it.sliding(8, 8) withPartial false }
assertThat(2, List(9, 10, -1, -1, -1)) { it.sliding(5, 8) withPadding -1 }
assertThat(1, (1 to 5).toList) { it.sliding(5, 8) withPartial false }
+
+ // larger step than window
+ assertThat(5, List(9)) { it.sliding(1, 2) }
+ assertThat(3, List(9, 10)) { it.sliding(2, 4) }
+
+ // make sure it throws past the end
+ val thrown = try {
+ val it = List(1,2,3).sliding(2)
+ it.next
+ it.next
+ it.next
+ false
+ }
+ catch {
+ case _: NoSuchElementException => true
+ }
+ assert(thrown)
}
}
diff --git a/test/files/run/vector1.scala b/test/files/run/vector1.scala
index 320bef220c..b37cfe82e8 100644
--- a/test/files/run/vector1.scala
+++ b/test/files/run/vector1.scala
@@ -9,7 +9,7 @@ import scala.collection.mutable.Builder
object Test {
-
+
def vector(label: String, n: Int): Vector[String] = {
val a = new VectorBuilder[String]
for (i <- 0 until n)
@@ -22,7 +22,7 @@ object Test {
def vectorForward(label: String, n: Int): Vector[String] = {
var a: Vector[String] = Vector.empty
for (i <- 0 until n)
- a = a.appendBack(label + i)
+ a = a :+ (label + i)
assertVector(a, label, 0, n)
}
@@ -30,7 +30,7 @@ object Test {
def vectorBackward(label: String, n: Int): Vector[String] = {
var a: Vector[String] = Vector.empty
for (i <- 0 until n)
- a = a.appendFront(label + (n-1-i))
+ a = (label + (n-1-i)) +: a
assertVector(a, label, 0, n)
}
@@ -67,7 +67,7 @@ object Test {
def test1() = {
println("===== test1 =====")
-
+
val N = 150000
val a = vector("a", N)
val b = vectorForward("b", N)
@@ -81,22 +81,22 @@ object Test {
println("===== test2 =====")
var a: Vector[String] = Vector.empty
-
+
val rand = new java.util.Random
-
+
val N = 150000
var min = N/2//rand.nextInt(N)
var max = min
-
+
val chunkLimit = 11
-
+
def nextChunkSize = 3 //rand.nextInt(chunkLimit)
-
- def seqBack() = for (i <- 0 until Math.min(nextChunkSize, N-max)) { a = a.appendBack("a"+max); max += 1 }
- def seqFront() = for (i <- 0 until Math.min(nextChunkSize, min)) { min -= 1; a = a.appendFront("a"+min) }
-
+
+ def seqBack() = for (i <- 0 until Math.min(nextChunkSize, N-max)) { a = a :+ ("a"+max); max += 1 }
+ def seqFront() = for (i <- 0 until Math.min(nextChunkSize, min)) { min -= 1; a = ("a"+min) +: a }
+
try {
-
+
while (min > 0 || max < N) {
seqFront()
seqBack()
@@ -104,10 +104,10 @@ object Test {
} catch {
case ex =>
//println("----------------")
- a.debug
+ //a.debug
throw ex
}
-
+
assertVector(a, "a", 0, N)
}
@@ -122,14 +122,14 @@ object Test {
val pos = scala.util.Random.shuffle(scala.collection.mutable.WrappedArray.make[Int](Array.tabulate[Int](N)(i => i)))
var b = a
-
+
{
var i = 0
while (i < N) {
b = b.updated(pos(i), "b"+(pos(i)))
i += 1
}
-
+
assertVector(b, "b", 0, N)
}
diff --git a/test/files/run/viewtest.check b/test/files/run/viewtest.check
index ded3ac0e92..6e0fe81a67 100644
--- a/test/files/run/viewtest.check
+++ b/test/files/run/viewtest.check
@@ -1,17 +1,11 @@
-SeqViewZ((x,0))
+SeqViewZ(...)
ys defined
mapping 1
2
-mapping 1
-mapping 2
-mapping 3
-SeqViewMS(3, 4)
+SeqViewMS(...)
mapping 3
4
-mapping 1
-mapping 2
-mapping 3
-SeqViewM(2, 3, 4)
+SeqViewM(...)
mapping 1
mapping 2
mapping 3
diff --git a/test/files/run/viewtest.scala b/test/files/run/viewtest.scala
index 280ded57cf..c5dffa5dbd 100755
--- a/test/files/run/viewtest.scala
+++ b/test/files/run/viewtest.scala
@@ -13,7 +13,7 @@ object Test extends Application {
println(ys.force)
val zs = Array(1, 2, 3).view
- val as: IndexedSeqView[Int, Array[Int]] = zs map (_ + 1)
+ val as: SeqView[Int, Array[Int]] = zs map (_ + 1)
val bs: Array[Int] = as.force
val cs = zs.reverse
cs(0) += 1
@@ -26,8 +26,8 @@ object Test extends Application {
2.8 regression: CCE when zipping list projection with stream
Reported by: szeiger Owned by: odersky
Priority: normal Component: Standard Library
-Keywords: collections, zip Cc:
-Fixed in version:
+Keywords: collections, zip Cc:
+Fixed in version:
Description
Welcome to Scala version 2.8.0.r18784-b20090925021043 (Java HotSpot(TM) Client VM, Java 1.6.0_11).
diff --git a/test/files/run/weakconform.scala b/test/files/run/weakconform.scala
new file mode 100755
index 0000000000..1ea81c9f64
--- /dev/null
+++ b/test/files/run/weakconform.scala
@@ -0,0 +1,4 @@
+object Test extends Application {
+ val x: Float = 10/3
+ assert(x == 3.0)
+}
diff --git a/test/files/run/withIndex.scala b/test/files/run/withIndex.scala
index 3b9c9e84e5..910b1f1f9e 100644
--- a/test/files/run/withIndex.scala
+++ b/test/files/run/withIndex.scala
@@ -3,7 +3,7 @@ object Test {
val ary: Array[String] = Array("a", "b", "c")
val lst: List[String] = List("a", "b", "c")
val itr: Iterator[String] = lst.iterator
- val str: Stream[String] = Stream.fromIterator(lst.iterator)
+ val str: Stream[String] = lst.iterator.toStream
Console.println(ary.zipWithIndex.toList)
Console.println(lst.zipWithIndex.toList)
diff --git a/test/files/run/xml-loop-bug.scala b/test/files/run/xml-loop-bug.scala
new file mode 100644
index 0000000000..378ae9bc2b
--- /dev/null
+++ b/test/files/run/xml-loop-bug.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ scala.tools.nsc.io.NullPrintStream.setOutAndErr()
+ scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "), true).document.docElem
+ }
+}
diff --git a/test/files/scalacheck/.gitignore b/test/files/scalacheck/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/scalacheck/.gitignore
diff --git a/test/files/scalacheck/array.scala b/test/files/scalacheck/array.scala
deleted file mode 100644
index 2febca4447..0000000000
--- a/test/files/scalacheck/array.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-import org.scalacheck._
-import Prop._
-import Gen._
-import Arbitrary._
-import util._
-import Buildable._
-
-object Test extends Properties("Array") {
- val myGens: Seq[Gen[Array[_]]] = List(
- arbArray[Int],
- arbArray[Array[Int]],
- arbArray[List[String]],
- arbArray[String],
- arbArray[Boolean],
- arbArray[AnyVal](arbAnyVal)
- ) map (_.arbitrary)
-
- // inspired by #1857 and #2352
- property("eq/ne") =
- forAll(oneOf(myGens: _*)) { c1 =>
- forAll(oneOf(myGens: _*)) { c2 =>
- (c1 eq c2) || (c1 ne c2)
- }
- }
-
- def smallInt = choose(1, 10)
- // inspired by #2299
- property("ofDim") = forAll(smallInt) { i1 =>
- forAll(smallInt) { i2 =>
- forAll(smallInt) { i3 =>
- val arr = Array.ofDim[String](i1, i2, i3)
- val flattened = arr flatMap (x => x) flatMap (x => x)
-
- flattened.length == i1 * i2 * i3
- }
- }
- }
-}
-
diff --git a/test/files/scalap/caseClass/A.scala b/test/files/scalap/caseClass/A.scala
index be86714898..95f9984519 100644
--- a/test/files/scalap/caseClass/A.scala
+++ b/test/files/scalap/caseClass/A.scala
@@ -1,3 +1,3 @@
case class CaseClass[A <: Seq[Int]](i: A, s: String) {
- def foo = 239
+ def foo = 239
}
diff --git a/test/files/scalap/caseClass/result.test b/test/files/scalap/caseClass/result.test
index 9ea19f7e9d..eb1ad74295 100644
--- a/test/files/scalap/caseClass/result.test
+++ b/test/files/scalap/caseClass/result.test
@@ -1,10 +1,9 @@
-case class CaseClass[A >: scala.Nothing <: scala.Seq[scala.Int]] extends java.lang.Object with scala.ScalaObject with scala.Product {
+@scala.serializable
+case class CaseClass[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) extends java.lang.Object with scala.ScalaObject with scala.Product {
val i : A = { /* compiled code */ }
val s : scala.Predef.String = { /* compiled code */ }
def foo : scala.Int = { /* compiled code */ }
- def copy[A >: scala.Nothing <: scala.Seq[scala.Int]]() : CaseClass[A] = { /* compiled code */ }
- def copy$default$1[A >: scala.Nothing <: scala.Seq[scala.Int]] : A = { /* compiled code */ }
- def copy$default$2[A >: scala.Nothing <: scala.Seq[scala.Int]] : scala.Predef.String = { /* compiled code */ }
+ def copy[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
override def hashCode() : scala.Int = { /* compiled code */ }
override def toString() : scala.Predef.String = { /* compiled code */ }
override def equals(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
@@ -12,4 +11,4 @@ case class CaseClass[A >: scala.Nothing <: scala.Seq[scala.Int]] extends java.la
override def productArity : scala.Int = { /* compiled code */ }
override def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
override def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
-}
+}
\ No newline at end of file
diff --git a/test/files/scalap/caseObject/A.scala b/test/files/scalap/caseObject/A.scala
index 809341ade9..6a3ff10d75 100644
--- a/test/files/scalap/caseObject/A.scala
+++ b/test/files/scalap/caseObject/A.scala
@@ -1,3 +1,3 @@
case object CaseObject {
- def bar = 239
+ def bar = 239
}
diff --git a/test/files/scalap/cbnParam/A.scala b/test/files/scalap/cbnParam/A.scala
index 978a718032..2f366df64a 100644
--- a/test/files/scalap/cbnParam/A.scala
+++ b/test/files/scalap/cbnParam/A.scala
@@ -1 +1 @@
-class CbnParam(s: => String)
+class CbnParam(s: => String)
diff --git a/test/files/scalap/classPrivate/A.scala b/test/files/scalap/classPrivate/A.scala
new file mode 100644
index 0000000000..9f1bd34a6a
--- /dev/null
+++ b/test/files/scalap/classPrivate/A.scala
@@ -0,0 +1,9 @@
+class ClassPrivate {
+ private def foo = 1
+ private[ClassPrivate] def bar = 2
+ def baz = 3
+ class Outer {
+ private[ClassPrivate] def qux = 4
+ }
+ protected def quux = 5
+}
\ No newline at end of file
diff --git a/test/files/scalap/classPrivate/result.test b/test/files/scalap/classPrivate/result.test
new file mode 100644
index 0000000000..0d12b779c3
--- /dev/null
+++ b/test/files/scalap/classPrivate/result.test
@@ -0,0 +1,10 @@
+class ClassPrivate extends java.lang.Object with scala.ScalaObject {
+ def this() = { /* compiled code */ }
+ def baz : scala.Int = { /* compiled code */ }
+ class Outer extends java.lang.Object with scala.ScalaObject {
+ def this() = { /* compiled code */ }
+ private[ClassPrivate] def qux : scala.Int = { /* compiled code */ }
+ }
+ protected def quux : scala.Int = { /* compiled code */ }
+ private[ClassPrivate] def bar : scala.Int = { /* compiled code */ }
+}
\ No newline at end of file
diff --git a/test/files/scalap/classWithExistential/result.test b/test/files/scalap/classWithExistential/result.test
index 243f51e2ad..91afddaf0e 100644
--- a/test/files/scalap/classWithExistential/result.test
+++ b/test/files/scalap/classWithExistential/result.test
@@ -1,4 +1,4 @@
class ClassWithExistential extends java.lang.Object with scala.ScalaObject {
def this() = { /* compiled code */ }
- def foo[A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] : scala.Function1[A, B forSome {type A >: scala.Nothing <: scala.Seq[scala.Int]; type B >: scala.Predef.String <: scala.Any}] = { /* compiled code */ }
-}
+ def foo[A, B] : scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { /* compiled code */ }
+}
\ No newline at end of file
diff --git a/test/files/scalap/covariantParam/result.test b/test/files/scalap/covariantParam/result.test
index ce480ee0cd..8acd9b497a 100644
--- a/test/files/scalap/covariantParam/result.test
+++ b/test/files/scalap/covariantParam/result.test
@@ -1,4 +1,4 @@
-class CovariantParam[+A >: scala.Nothing <: scala.Any] extends java.lang.Object with scala.ScalaObject {
+class CovariantParam[+A] extends java.lang.Object with scala.ScalaObject {
def this() = { /* compiled code */ }
- def foo[A >: scala.Nothing <: scala.Any](a : A) : scala.Int = { /* compiled code */ }
+ def foo[A](a : A) : scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/defaultParameter/A.scala b/test/files/scalap/defaultParameter/A.scala
new file mode 100644
index 0000000000..d3514952f4
--- /dev/null
+++ b/test/files/scalap/defaultParameter/A.scala
@@ -0,0 +1,3 @@
+trait DefaultParameter {
+ def foo(s: String = "hello"): Unit
+}
\ No newline at end of file
diff --git a/test/files/scalap/defaultParameter/result.test b/test/files/scalap/defaultParameter/result.test
new file mode 100644
index 0000000000..38bf6ac4e3
--- /dev/null
+++ b/test/files/scalap/defaultParameter/result.test
@@ -0,0 +1,3 @@
+trait DefaultParameter extends java.lang.Object {
+ def foo(s : scala.Predef.String) : scala.Unit
+}
\ No newline at end of file
diff --git a/test/files/scalap/typeAnnotations/A.scala b/test/files/scalap/typeAnnotations/A.scala
new file mode 100644
index 0000000000..ff2445edc9
--- /dev/null
+++ b/test/files/scalap/typeAnnotations/A.scala
@@ -0,0 +1,9 @@
+abstract class TypeAnnotations[@specialized R] {
+ @specialized val x = 10
+ @specialized type T
+
+ def compose[@specialized A](x: A, y: R): A = {
+ val y: A = x
+ x
+ }
+}
\ No newline at end of file
diff --git a/test/files/scalap/typeAnnotations/result.test b/test/files/scalap/typeAnnotations/result.test
new file mode 100644
index 0000000000..b565d6185b
--- /dev/null
+++ b/test/files/scalap/typeAnnotations/result.test
@@ -0,0 +1,8 @@
+abstract class TypeAnnotations[@scala.specialized R] extends java.lang.Object with scala.ScalaObject {
+ def this() = { /* compiled code */ }
+ @scala.specialized
+ val x : scala.Int = { /* compiled code */ }
+ @scala.specialized
+ type T
+ def compose[@scala.specialized A](x : A, y : R) : A = { /* compiled code */ }
+}
\ No newline at end of file
diff --git a/test/files/script/fact.scala b/test/files/script/fact.scala
index d48dac6f0f..d48dac6f0f 100644..100755
--- a/test/files/script/fact.scala
+++ b/test/files/script/fact.scala
diff --git a/test/files/script/t1017.scala b/test/files/script/t1017.scala
index d1b43ea923..2600f4f553 100755
--- a/test/files/script/t1017.scala
+++ b/test/files/script/t1017.scala
@@ -23,7 +23,7 @@ exec scala -nocompdaemon "$SOURCE" "$@"
def foo = {
bar
}
-
+
var x = 1
-
+
def bar = 1
diff --git a/test/partest b/test/partest
index e1d6356580..1e7da8bd4a 100755
--- a/test/partest
+++ b/test/partest
@@ -3,7 +3,7 @@
##############################################################################
# Scala test runner 2.8.0
##############################################################################
-# (c) 2002-2009 LAMP/EPFL
+# (c) 2002-2010 LAMP/EPFL
#
# This is free software; see the distribution for copying conditions.
# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
@@ -78,4 +78,9 @@ fi
[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xmx512M -Xms16M"
[ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
-${JAVACMD:=java} $JAVA_OPTS -cp "$EXT_CLASSPATH" -Dpartest.debug="${PARTEST_DEBUG}" -Dscala.home="${SCALA_HOME}" -Dscalatest.javacmd="${JAVACMD}" -Dscalatest.java_opts="${JAVA_OPTS}" -Dscalatest.scalac_opts="${SCALAC_OPTS}" -Dscalatest.javac_cmd="${JAVA_HOME}/bin/javac" scala.tools.partest.nest.NestRunner "$@"
+partestDebugStr=""
+if [ ! -z "${PARTEST_DEBUG}" ] ; then
+ partestDebugStr="-Dpartest.debug=${PARTEST_DEBUG}"
+fi
+
+${JAVACMD:=java} $JAVA_OPTS -cp "$EXT_CLASSPATH" ${partestDebugStr} -Dscala.home="${SCALA_HOME}" -Dpartest.javacmd="${JAVACMD}" -Dpartest.java_opts="${JAVA_OPTS}" -Dpartest.scalac_opts="${SCALAC_OPTS}" -Dpartest.javac_cmd="${JAVA_HOME}/bin/javac" scala.tools.partest.nest.NestRunner "$@"
diff --git a/test/partest-tests/jvm/actor-receivewithin.check b/test/partest-tests/jvm/actor-receivewithin.check
new file mode 100644
index 0000000000..a6a3e88c61
--- /dev/null
+++ b/test/partest-tests/jvm/actor-receivewithin.check
@@ -0,0 +1,16 @@
+'msg
+'msg
+'msg
+'msg
+'msg
+TIMEOUT
+TIMEOUT
+TIMEOUT
+TIMEOUT
+TIMEOUT
+'msg2
+'msg2
+'msg2
+'msg2
+'msg2
+TIMEOUT
diff --git a/test/partest-tests/jvm/actor-receivewithin.scala b/test/partest-tests/jvm/actor-receivewithin.scala
new file mode 100644
index 0000000000..a5c87c2722
--- /dev/null
+++ b/test/partest-tests/jvm/actor-receivewithin.scala
@@ -0,0 +1,69 @@
+import scala.actors.{Actor, TIMEOUT}
+
+object A extends Actor {
+ def act() {
+ receive {
+ case 'done =>
+ var cnt = 0
+ while (cnt < 500) {
+ cnt += 1
+ receiveWithin (0) {
+ case 'msg =>
+ if (cnt % 100 == 0)
+ println("'msg")
+ case TIMEOUT =>
+ // should not happen
+ println("FAIL1")
+ }
+ }
+ cnt = 0
+ while (cnt < 500) {
+ cnt += 1
+ receiveWithin (0) {
+ case 'msg =>
+ // should not happen
+ println("FAIL2")
+ case TIMEOUT =>
+ if (cnt % 100 == 0)
+ println("TIMEOUT")
+ }
+ }
+ B ! 'next
+ receive { case 'done => }
+ cnt = 0
+ while (cnt < 501) {
+ cnt += 1
+ receiveWithin (500) {
+ case 'msg2 =>
+ if (cnt % 100 == 0)
+ println("'msg2")
+ case TIMEOUT =>
+ println("TIMEOUT")
+ }
+ }
+ }
+ }
+}
+
+object B extends Actor {
+ def act() {
+ A.start()
+ for (_ <- 1 to 500) {
+ A ! 'msg
+ }
+ A ! 'done
+ receive {
+ case 'next =>
+ for (_ <- 1 to 500) {
+ A ! 'msg2
+ }
+ A ! 'done
+ }
+ }
+}
+
+object Test {
+ def main(args:Array[String]) {
+ B.start()
+ }
+}
diff --git a/test/partest-tests/run/crash.scala b/test/partest-tests/run/crash.scala
new file mode 100644
index 0000000000..1735cc444e
--- /dev/null
+++ b/test/partest-tests/run/crash.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val t = new Throwable("DOOM")
+ throw t
+ }
+}
diff --git a/test/partest-tests/run/streamWithFilter.check b/test/partest-tests/run/streamWithFilter.check
new file mode 100644
index 0000000000..6b0e91a147
--- /dev/null
+++ b/test/partest-tests/run/streamWithFilter.check
@@ -0,0 +1,5 @@
+15
+30
+45
+60
+75
diff --git a/test/partest-tests/run/streamWithFilter.scala b/test/partest-tests/run/streamWithFilter.scala
new file mode 100644
index 0000000000..7f8f9a09d2
--- /dev/null
+++ b/test/partest-tests/run/streamWithFilter.scala
@@ -0,0 +1,11 @@
+object Test {
+ val nums = Stream.from(1)
+ def isFizz(x: Int) = x % 3 == 0
+ def isBuzz(x: Int) = x % 5 == 0
+ // next line will run forever if withFilter isn't doing its thing.
+ val fizzbuzzes = for (n <- nums ; if isFizz(n) ; if isBuzz(n)) yield n
+
+ def main(args: Array[String]): Unit = {
+ fizzbuzzes take 5 foreach println
+ }
+}
diff --git a/test/partest-tests/run/timeout.scala b/test/partest-tests/run/timeout.scala
new file mode 100644
index 0000000000..91417b39ab
--- /dev/null
+++ b/test/partest-tests/run/timeout.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ Thread.sleep(10000000)
+ }
+}
diff --git a/test/partest.bat b/test/partest.bat
index aea95fca3e..39fe830082 100755
--- a/test/partest.bat
+++ b/test/partest.bat
@@ -3,7 +3,7 @@
rem ##########################################################################
rem # Scala code runner 2.7.0-final
rem ##########################################################################
-rem # (c) 2002-2009 LAMP/EPFL
+rem # (c) 2002-2010 LAMP/EPFL
rem #
rem # This is free software; see the distribution for copying conditions.
rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
@@ -53,7 +53,7 @@ if "%_EXTENSION_CLASSPATH%"=="" (
)
)
-set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dscalatest.javacmd="%_JAVACMD%" -Dscalatest.java_options="%_JAVA_OPTS%" -Dscalatest.scalac_options="%_SCALAC_OPTS%" -Dscalatest.javac_cmd="%JAVA_HOME%\bin\javac"
+set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dpartest.javacmd="%_JAVACMD%" -Dpartest.java_options="%_JAVA_OPTS%" -Dpartest.scalac_options="%_SCALAC_OPTS%" -Dpartest.javac_cmd="%JAVA_HOME%\bin\javac"
rem echo %_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
%_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
diff --git a/test/pending/buildmanager/t2443/BitSet.scala b/test/pending/buildmanager/t2443/BitSet.scala
new file mode 100644
index 0000000000..8d7c8dcd23
--- /dev/null
+++ b/test/pending/buildmanager/t2443/BitSet.scala
@@ -0,0 +1,2 @@
+import scala.collection.BitSet
+//class BitSet
diff --git a/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala b/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala
new file mode 100644
index 0000000000..27a5d4de9f
--- /dev/null
+++ b/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala
@@ -0,0 +1 @@
+import scala.collection.BitSet
diff --git a/test/pending/buildmanager/t2443/t2443.check b/test/pending/buildmanager/t2443/t2443.check
new file mode 100644
index 0000000000..dd88e1ceb9
--- /dev/null
+++ b/test/pending/buildmanager/t2443/t2443.check
@@ -0,0 +1,6 @@
+builder > BitSet.scala
+compiling Set(BitSet.scala)
+builder > BitSet.scala
+Changes: Map(class BitSet -> List(Removed(Class(BitSet))))
+
+
diff --git a/test/pending/buildmanager/t2443/t2443.test b/test/pending/buildmanager/t2443/t2443.test
new file mode 100644
index 0000000000..a1d61ff5a3
--- /dev/null
+++ b/test/pending/buildmanager/t2443/t2443.test
@@ -0,0 +1,3 @@
+>>compile BitSet.scala
+>>update BitSet.scala=>BitSet2.scala
+>>compile BitSet.scala
diff --git a/test/pending/continuations-run/example0.scala b/test/pending/continuations-run/example0.scala
new file mode 100644
index 0000000000..de5ea54e9d
--- /dev/null
+++ b/test/pending/continuations-run/example0.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test0.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example1.scala b/test/pending/continuations-run/example1.scala
new file mode 100644
index 0000000000..e31d6af88c
--- /dev/null
+++ b/test/pending/continuations-run/example1.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test1.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example16.scala b/test/pending/continuations-run/example16.scala
new file mode 100644
index 0000000000..561f0ab0eb
--- /dev/null
+++ b/test/pending/continuations-run/example16.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test16Printf.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example2.scala b/test/pending/continuations-run/example2.scala
new file mode 100644
index 0000000000..730f7cc63e
--- /dev/null
+++ b/test/pending/continuations-run/example2.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test2.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example3.scala b/test/pending/continuations-run/example3.scala
new file mode 100644
index 0000000000..41cf1cce0c
--- /dev/null
+++ b/test/pending/continuations-run/example3.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test3.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example4.scala b/test/pending/continuations-run/example4.scala
new file mode 100644
index 0000000000..adcc7aa90e
--- /dev/null
+++ b/test/pending/continuations-run/example4.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test4.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example5.scala b/test/pending/continuations-run/example5.scala
new file mode 100644
index 0000000000..241e8cd069
--- /dev/null
+++ b/test/pending/continuations-run/example5.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test5.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example6.scala b/test/pending/continuations-run/example6.scala
new file mode 100644
index 0000000000..00f84fcd6c
--- /dev/null
+++ b/test/pending/continuations-run/example6.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test6.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example7.scala b/test/pending/continuations-run/example7.scala
new file mode 100644
index 0000000000..64abc6d9a6
--- /dev/null
+++ b/test/pending/continuations-run/example7.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test7.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example8.scala b/test/pending/continuations-run/example8.scala
new file mode 100644
index 0000000000..a5f953d3fc
--- /dev/null
+++ b/test/pending/continuations-run/example8.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test8.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example9.scala b/test/pending/continuations-run/example9.scala
new file mode 100644
index 0000000000..09d792c427
--- /dev/null
+++ b/test/pending/continuations-run/example9.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test9Monads.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/foreach.check b/test/pending/continuations-run/foreach.check
new file mode 100644
index 0000000000..9bab7a2eed
--- /dev/null
+++ b/test/pending/continuations-run/foreach.check
@@ -0,0 +1,4 @@
+1
+2
+3
+enough is enough
\ No newline at end of file
diff --git a/test/pending/continuations-run/foreach.scala b/test/pending/continuations-run/foreach.scala
new file mode 100644
index 0000000000..76823e7604
--- /dev/null
+++ b/test/pending/continuations-run/foreach.scala
@@ -0,0 +1,33 @@
+// $Id$
+
+import scala.util.continuations._
+
+import scala.util.continuations.Loops._
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+
+ reset {
+
+ val list = List(1,2,3,4,5)
+
+ for (x <- list.suspendable) {
+
+ shift { k: (Unit => Unit) =>
+ println(x)
+ if (x < 3)
+ k()
+ else
+ println("enough is enough")
+ }
+
+ }
+
+ }
+
+
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/jvm/actor-executor4.check b/test/pending/jvm/actor-executor4.check
new file mode 100644
index 0000000000..da78f45836
--- /dev/null
+++ b/test/pending/jvm/actor-executor4.check
@@ -0,0 +1,21 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+One exited
diff --git a/test/pending/jvm/actor-executor4.scala b/test/pending/jvm/actor-executor4.scala
new file mode 100644
index 0000000000..a912d76094
--- /dev/null
+++ b/test/pending/jvm/actor-executor4.scala
@@ -0,0 +1,64 @@
+import scala.actors.{Actor, Exit}
+import scala.actors.scheduler.ExecutorScheduler
+import java.util.concurrent.Executors
+
+object One extends AdaptedActor {
+ def act() {
+ Two.start()
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("One: OK")
+ }
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ }
+}
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object Test {
+ val NUM_MSG = 100000
+
+ val scheduler =
+ ExecutorScheduler(
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()),
+ false)
+
+ def main(args: Array[String]) {
+ (new AdaptedActor {
+ def act() {
+ trapExit = true
+ link(One)
+ One.start()
+
+ receive {
+ case Exit(from, reason) =>
+ println("One exited")
+ Test.scheduler.shutdown()
+ }
+ }
+ }).start()
+ }
+}
diff --git a/test/pending/jvm/actorgc_leak.scala b/test/pending/jvm/actorgc_leak.scala
index 5e2b9d51e1..de3e04f1e8 100644
--- a/test/pending/jvm/actorgc_leak.scala
+++ b/test/pending/jvm/actorgc_leak.scala
@@ -14,7 +14,7 @@ object Test {
}
}
}
-
+
class FatActor extends Actor {
def act() {
fat = new Array[Int](fatness)
diff --git a/test/disabled/jvm/natives.scala b/test/pending/jvm/natives.scala
index 14ee4e1c1b..ba868dc85e 100644
--- a/test/disabled/jvm/natives.scala
+++ b/test/pending/jvm/natives.scala
@@ -1,16 +1,16 @@
object Test {
//println("java.library.path=" + System.getProperty("java.library.path"))
-
+
val sysWordSize = System.getProperty("sun.arch.data.model", "32")
val sysType = System.getProperty("os.name")
-
+
val libName =
if (sysType == "Mac OS X")
"natives"
else
"natives-" + sysWordSize
-
+
System.loadLibrary(libName)
@native
diff --git a/test/files/jvm/t1801.check b/test/pending/jvm/t1801.check
index bf78a99db9..bf78a99db9 100644
--- a/test/files/jvm/t1801.check
+++ b/test/pending/jvm/t1801.check
diff --git a/test/files/jvm/t1801.scala b/test/pending/jvm/t1801.scala
index 6ed7c56336..6ed7c56336 100644
--- a/test/files/jvm/t1801.scala
+++ b/test/pending/jvm/t1801.scala
diff --git a/test/files/jvm/t2515.check b/test/pending/jvm/t2515.check
index 8cb8bde11e..8cb8bde11e 100644
--- a/test/files/jvm/t2515.check
+++ b/test/pending/jvm/t2515.check
diff --git a/test/pending/jvm/t2515.scala b/test/pending/jvm/t2515.scala
new file mode 100644
index 0000000000..ee655967f3
--- /dev/null
+++ b/test/pending/jvm/t2515.scala
@@ -0,0 +1,43 @@
+import scala.actors.{Futures, TIMEOUT}
+import scala.actors.Actor._
+
+object Test {
+
+ def compute(): Option[Boolean] = {
+ val fts = for (j <- 0 until 5) yield Futures.future {
+ receiveWithin (100) {
+ case TIMEOUT => true
+ case other => false
+ }
+ }
+ val done = Futures.awaitAll(2000, fts.toArray: _*) // list to array, as varargs
+ if (done.contains(None))
+ None
+ else
+ Some(true)
+ }
+
+ def main(args:Array[String]) : Unit = {
+ val ft = Futures.future {
+ val format = new java.text.DecimalFormat("000.00'ms'")
+ var iter = 1
+ val done = 11
+ while (iter < done) {
+ val start = System.nanoTime()
+ val result = compute()
+ val time = System.nanoTime() - start
+ result match {
+ case Some(result) =>
+ //printf("Iteration %2d succeeded after %s %n", iter, format.format(time / 1e6))
+ printf("Iteration %2d succeeded%n", iter)
+ iter += 1
+ case None =>
+ printf(">>>> Iteration %2d failed after %s <<<<< %n", iter, format.format(time / 1e6))
+ iter = done
+ }
+ }
+ }
+ ft()
+ }
+
+}
diff --git a/test/pending/jvm/t2705/GenericInterface.java b/test/pending/jvm/t2705/GenericInterface.java
new file mode 100644
index 0000000000..ff4ecd403d
--- /dev/null
+++ b/test/pending/jvm/t2705/GenericInterface.java
@@ -0,0 +1 @@
+public interface GenericInterface<T> { }
diff --git a/test/pending/jvm/t2705/Methods.java b/test/pending/jvm/t2705/Methods.java
new file mode 100644
index 0000000000..00eed6c595
--- /dev/null
+++ b/test/pending/jvm/t2705/Methods.java
@@ -0,0 +1,4 @@
+public class Methods {
+ public static <T> GenericInterface<T> getGenericInterface() { return null; }
+ public static <T> void acceptGenericInterface(GenericInterface<? super T> gi) { }
+}
\ No newline at end of file
diff --git a/test/pending/jvm/t2705/t2705.scala b/test/pending/jvm/t2705/t2705.scala
new file mode 100644
index 0000000000..cc3cfd9faf
--- /dev/null
+++ b/test/pending/jvm/t2705/t2705.scala
@@ -0,0 +1,5 @@
+class GenericsCompilerCrashTest {
+ def test() {
+ Methods.acceptGenericInterface(Methods.getGenericInterface())
+ }
+}
\ No newline at end of file
diff --git a/test/pending/jvm/timeout.scala b/test/pending/jvm/timeout.scala
index 3005beab2c..22b3647dce 100644
--- a/test/pending/jvm/timeout.scala
+++ b/test/pending/jvm/timeout.scala
@@ -16,7 +16,7 @@ object Test extends Application {
case 'doTiming =>
val s = sender
reactWithin(500) {
- case TIMEOUT =>
+ case TIMEOUT =>
s ! Timing(System.currentTimeMillis)
}
}
diff --git a/test/pending/neg/bug112506A.scala b/test/pending/neg/bug112506A.scala
index 5dffb5ebe6..e1ecab2a15 100644
--- a/test/pending/neg/bug112506A.scala
+++ b/test/pending/neg/bug112506A.scala
@@ -7,7 +7,7 @@ trait TypeManagerXXX {
}
trait ScalaTyperXXX extends TypeManagerXXX {
private var typed : Node = null;
- private val dependMap = new HashMap[String,ListSet[TypedNode]];
+ private val dependMap = new HashMap[String,ListSet[TypedNode]];
override def lookupEntry(name: String): String = {
val set = dependMap.get(name) match {
case Some(set) => set;
diff --git a/test/files/neg/bug1210.check b/test/pending/neg/bug1210.check
index 4db920556f..4db920556f 100644
--- a/test/files/neg/bug1210.check
+++ b/test/pending/neg/bug1210.check
diff --git a/test/pending/neg/bug1210.scala b/test/pending/neg/bug1210.scala
index fc2c954ff2..58ef50b497 100644
--- a/test/pending/neg/bug1210.scala
+++ b/test/pending/neg/bug1210.scala
@@ -1,6 +1,6 @@
object Test {
def id[T](f: T => T): T = error("bla")
-
+
abstract class M { self =>
type Settings
type selfType = M {type Settings = self.Settings}
diff --git a/test/pending/neg/bug3189.check b/test/pending/neg/bug3189.check
new file mode 100644
index 0000000000..520644fd43
--- /dev/null
+++ b/test/pending/neg/bug3189.check
@@ -0,0 +1,7 @@
+bug3189.scala:2: error: illegal start of simple pattern
+ val Array(a,b*) = ("": Any)
+ ^
+bug3189.scala:3: error: ')' expected but '}' found.
+}
+^
+two errors found
diff --git a/test/pending/neg/bug3189.scala b/test/pending/neg/bug3189.scala
new file mode 100644
index 0000000000..4ea4bb7581
--- /dev/null
+++ b/test/pending/neg/bug3189.scala
@@ -0,0 +1,3 @@
+object A {
+ val Array(a,b*) = ("": Any)
+}
\ No newline at end of file
diff --git a/test/files/neg/plugin-after-terminal.check b/test/pending/neg/plugin-after-terminal.check
index 096efe09cd..096efe09cd 100644
--- a/test/files/neg/plugin-after-terminal.check
+++ b/test/pending/neg/plugin-after-terminal.check
diff --git a/test/files/neg/plugin-after-terminal.flags b/test/pending/neg/plugin-after-terminal.flags
index 6a44376213..6a44376213 100644
--- a/test/files/neg/plugin-after-terminal.flags
+++ b/test/pending/neg/plugin-after-terminal.flags
diff --git a/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
new file mode 100644
index 0000000000..3e382f3a12
--- /dev/null
+++ b/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
@@ -0,0 +1 @@
+f174c50c4363c492362a05c72dd45b0da18fdcd8 ?plugins.jar
diff --git a/test/files/neg/plugin-after-terminal/misc/build.sh b/test/pending/neg/plugin-after-terminal/misc/build.sh
index 8899009d7f..8899009d7f 100755
--- a/test/files/neg/plugin-after-terminal/misc/build.sh
+++ b/test/pending/neg/plugin-after-terminal/misc/build.sh
diff --git a/test/files/neg/plugin-after-terminal/misc/scalac-plugin.xml b/test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml
index 90ff27dc2a..90ff27dc2a 100644
--- a/test/files/neg/plugin-after-terminal/misc/scalac-plugin.xml
+++ b/test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml
diff --git a/test/files/neg/plugin-after-terminal/src/ThePlugin.scala b/test/pending/neg/plugin-after-terminal/src/ThePlugin.scala
index f3c913086e..2a4607392f 100644
--- a/test/files/neg/plugin-after-terminal/src/ThePlugin.scala
+++ b/test/pending/neg/plugin-after-terminal/src/ThePlugin.scala
@@ -12,7 +12,7 @@ class ThePlugin(val global: Global) extends Plugin {
val name = "afterterminal"
val description = "Declares one plugin that wants to be after the terminal phase"
val components = List[PluginComponent](thePhase)
-
+
private object thePhase extends PluginComponent {
val global = ThePlugin.this.global
@@ -20,9 +20,9 @@ class ThePlugin(val global: Global) extends Plugin {
val phaseName = ThePlugin.this.name
- def newPhase(prev: Phase) = new ThePhase(prev)
+ def newPhase(prev: Phase) = new ThePhase(prev)
}
-
+
private class ThePhase(prev: Phase) extends Phase(prev) {
def name = ThePlugin.this.name
def run {}
diff --git a/test/files/neg/plugin-after-terminal/testsource.scala b/test/pending/neg/plugin-after-terminal/testsource.scala
index 519d162fdf..519d162fdf 100644
--- a/test/files/neg/plugin-after-terminal/testsource.scala
+++ b/test/pending/neg/plugin-after-terminal/testsource.scala
diff --git a/test/files/neg/plugin-before-parser.check b/test/pending/neg/plugin-before-parser.check
index 9a407923b1..9a407923b1 100644
--- a/test/files/neg/plugin-before-parser.check
+++ b/test/pending/neg/plugin-before-parser.check
diff --git a/test/files/neg/plugin-before-parser.flags b/test/pending/neg/plugin-before-parser.flags
index 632530922c..632530922c 100644
--- a/test/files/neg/plugin-before-parser.flags
+++ b/test/pending/neg/plugin-before-parser.flags
diff --git a/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
new file mode 100644
index 0000000000..e82eed76ce
--- /dev/null
+++ b/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
@@ -0,0 +1 @@
+d7b100ad483484b598b7cd643424bd2e33898a0d ?plugins.jar
diff --git a/test/files/neg/plugin-before-parser/misc/build.sh b/test/pending/neg/plugin-before-parser/misc/build.sh
index 8899009d7f..8899009d7f 100755
--- a/test/files/neg/plugin-before-parser/misc/build.sh
+++ b/test/pending/neg/plugin-before-parser/misc/build.sh
diff --git a/test/files/neg/plugin-before-parser/misc/scalac-plugin.xml b/test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml
index 90ff27dc2a..90ff27dc2a 100644
--- a/test/files/neg/plugin-before-parser/misc/scalac-plugin.xml
+++ b/test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml
diff --git a/test/files/neg/plugin-before-parser/src/ThePlugin.scala b/test/pending/neg/plugin-before-parser/src/ThePlugin.scala
index 8714a55dc4..7ca896650d 100644
--- a/test/files/neg/plugin-before-parser/src/ThePlugin.scala
+++ b/test/pending/neg/plugin-before-parser/src/ThePlugin.scala
@@ -12,7 +12,7 @@ class ThePlugin(val global: Global) extends Plugin {
val name = "beforeparser"
val description = "Declares one plugin that wants to be before the parser phase"
val components = List[PluginComponent](thePhase)
-
+
private object thePhase extends PluginComponent {
val global = ThePlugin.this.global
@@ -21,9 +21,9 @@ class ThePlugin(val global: Global) extends Plugin {
val phaseName = ThePlugin.this.name
- def newPhase(prev: Phase) = new ThePhase(prev)
+ def newPhase(prev: Phase) = new ThePhase(prev)
}
-
+
private class ThePhase(prev: Phase) extends Phase(prev) {
def name = ThePlugin.this.name
def run {}
diff --git a/test/files/neg/plugin-before-parser/testsource.scala b/test/pending/neg/plugin-before-parser/testsource.scala
index 9928aaa83c..9928aaa83c 100644
--- a/test/files/neg/plugin-before-parser/testsource.scala
+++ b/test/pending/neg/plugin-before-parser/testsource.scala
diff --git a/test/files/neg/plugin-cyclic-dependency.check b/test/pending/neg/plugin-cyclic-dependency.check
index a29bc3f5be..a29bc3f5be 100644
--- a/test/files/neg/plugin-cyclic-dependency.check
+++ b/test/pending/neg/plugin-cyclic-dependency.check
diff --git a/test/files/neg/plugin-cyclic-dependency.flags b/test/pending/neg/plugin-cyclic-dependency.flags
index 8716aaa65f..8716aaa65f 100644
--- a/test/files/neg/plugin-cyclic-dependency.flags
+++ b/test/pending/neg/plugin-cyclic-dependency.flags
diff --git a/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
new file mode 100644
index 0000000000..7e565e9e61
--- /dev/null
+++ b/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
@@ -0,0 +1 @@
+7e6be9e33a87194e7061f94f6be115619f91ada2 ?plugins.jar
diff --git a/test/files/neg/plugin-cyclic-dependency/misc/build.sh b/test/pending/neg/plugin-cyclic-dependency/misc/build.sh
index 8899009d7f..8899009d7f 100755
--- a/test/files/neg/plugin-cyclic-dependency/misc/build.sh
+++ b/test/pending/neg/plugin-cyclic-dependency/misc/build.sh
diff --git a/test/files/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml b/test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml
index 90ff27dc2a..90ff27dc2a 100644
--- a/test/files/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml
+++ b/test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml
diff --git a/test/files/neg/plugin-cyclic-dependency/src/ThePlugin.scala b/test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala
index 1dfc15cb28..bd94ce60d7 100644
--- a/test/files/neg/plugin-cyclic-dependency/src/ThePlugin.scala
+++ b/test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala
@@ -12,7 +12,7 @@ class ThePlugin(val global: Global) extends Plugin {
val name = "cyclicdependency"
val description = "Declares two phases that have a cyclic dependency"
val components = List[PluginComponent](thePhase1,thePhase2)
-
+
private object thePhase1 extends PluginComponent {
val global = ThePlugin.this.global
@@ -20,9 +20,9 @@ class ThePlugin(val global: Global) extends Plugin {
val phaseName = ThePlugin.this.name + "1"
- def newPhase(prev: Phase) = new ThePhase(prev)
+ def newPhase(prev: Phase) = new ThePhase(prev)
}
-
+
private object thePhase2 extends PluginComponent {
val global = ThePlugin.this.global
@@ -30,9 +30,9 @@ class ThePlugin(val global: Global) extends Plugin {
val phaseName = ThePlugin.this.name + "2"
- def newPhase(prev: Phase) = new ThePhase(prev)
+ def newPhase(prev: Phase) = new ThePhase(prev)
}
-
+
private class ThePhase(prev: Phase) extends Phase(prev) {
def name = ThePlugin.this.name
def run {}
diff --git a/test/files/neg/plugin-cyclic-dependency/testsource.scala b/test/pending/neg/plugin-cyclic-dependency/testsource.scala
index f1513ec9a0..f1513ec9a0 100644
--- a/test/files/neg/plugin-cyclic-dependency/testsource.scala
+++ b/test/pending/neg/plugin-cyclic-dependency/testsource.scala
diff --git a/test/files/neg/plugin-multiple-rafter.check b/test/pending/neg/plugin-multiple-rafter.check
index c54f884feb..c54f884feb 100644
--- a/test/files/neg/plugin-multiple-rafter.check
+++ b/test/pending/neg/plugin-multiple-rafter.check
diff --git a/test/files/neg/plugin-multiple-rafter.flags b/test/pending/neg/plugin-multiple-rafter.flags
index dcae7f2f96..dcae7f2f96 100644
--- a/test/files/neg/plugin-multiple-rafter.flags
+++ b/test/pending/neg/plugin-multiple-rafter.flags
diff --git a/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
new file mode 100644
index 0000000000..f4905fcbd4
--- /dev/null
+++ b/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
@@ -0,0 +1 @@
+2bda582b574287429ad5ee2e1d9a3effc88b0a5f ?plugins.jar
diff --git a/test/files/neg/plugin-multiple-rafter/misc/build.sh b/test/pending/neg/plugin-multiple-rafter/misc/build.sh
index 8899009d7f..8899009d7f 100755
--- a/test/files/neg/plugin-multiple-rafter/misc/build.sh
+++ b/test/pending/neg/plugin-multiple-rafter/misc/build.sh
diff --git a/test/files/neg/plugin-multiple-rafter/misc/scalac-plugin.xml b/test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml
index 90ff27dc2a..90ff27dc2a 100644
--- a/test/files/neg/plugin-multiple-rafter/misc/scalac-plugin.xml
+++ b/test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml
diff --git a/test/files/neg/plugin-multiple-rafter/src/ThePlugin.scala b/test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala
index 4c761517c1..819176fa88 100644
--- a/test/files/neg/plugin-multiple-rafter/src/ThePlugin.scala
+++ b/test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala
@@ -12,7 +12,7 @@ class ThePlugin(val global: Global) extends Plugin {
val name = "multi-rafter"
val description = ""
val components = List[PluginComponent](thePhase)
-
+
private object thePhase extends PluginComponent {
val global = ThePlugin.this.global
@@ -20,9 +20,9 @@ class ThePlugin(val global: Global) extends Plugin {
override val runsRightAfter = Some("explicitouter")
val phaseName = ThePlugin.this.name
- def newPhase(prev: Phase) = new ThePhase(prev)
+ def newPhase(prev: Phase) = new ThePhase(prev)
}
-
+
private class ThePhase(prev: Phase) extends Phase(prev) {
def name = ThePlugin.this.name
def run {}
diff --git a/test/files/neg/plugin-multiple-rafter/testsource.scala b/test/pending/neg/plugin-multiple-rafter/testsource.scala
index f73db1eb60..f73db1eb60 100644
--- a/test/files/neg/plugin-multiple-rafter/testsource.scala
+++ b/test/pending/neg/plugin-multiple-rafter/testsource.scala
diff --git a/test/files/neg/plugin-rafter-before-1.check b/test/pending/neg/plugin-rafter-before-1.check
index 19ed4d2fba..19ed4d2fba 100644
--- a/test/files/neg/plugin-rafter-before-1.check
+++ b/test/pending/neg/plugin-rafter-before-1.check
diff --git a/test/files/neg/plugin-rafter-before-1.flags b/test/pending/neg/plugin-rafter-before-1.flags
index 8bf03145b9..8bf03145b9 100644
--- a/test/files/neg/plugin-rafter-before-1.flags
+++ b/test/pending/neg/plugin-rafter-before-1.flags
diff --git a/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
new file mode 100644
index 0000000000..8ad591b6ea
--- /dev/null
+++ b/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
@@ -0,0 +1 @@
+af91fd67ccef349e7f8ea662615e17796a339485 ?plugins.jar
diff --git a/test/files/neg/plugin-rafter-before-1/misc/build.sh b/test/pending/neg/plugin-rafter-before-1/misc/build.sh
index 8899009d7f..8899009d7f 100755
--- a/test/files/neg/plugin-rafter-before-1/misc/build.sh
+++ b/test/pending/neg/plugin-rafter-before-1/misc/build.sh
diff --git a/test/files/neg/plugin-rafter-before-1/misc/scalac-plugin.xml b/test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml
index 90ff27dc2a..90ff27dc2a 100644
--- a/test/files/neg/plugin-rafter-before-1/misc/scalac-plugin.xml
+++ b/test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml
diff --git a/test/files/neg/plugin-rafter-before-1/src/ThePlugin.scala b/test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala
index c42a914066..81ba85ae80 100644
--- a/test/files/neg/plugin-rafter-before-1/src/ThePlugin.scala
+++ b/test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala
@@ -12,7 +12,7 @@ class ThePlugin(val global: Global) extends Plugin {
val name = "rafter-before-1"
val description = ""
val components = List[PluginComponent](thePhase1)
-
+
private object thePhase1 extends PluginComponent {
val global = ThePlugin.this.global
@@ -20,9 +20,9 @@ class ThePlugin(val global: Global) extends Plugin {
override val runsBefore = List[String]("erasure")
val phaseName = ThePlugin.this.name
- def newPhase(prev: Phase) = new ThePhase(prev)
+ def newPhase(prev: Phase) = new ThePhase(prev)
}
-
+
private class ThePhase(prev: Phase) extends Phase(prev) {
def name = ThePlugin.this.name
def run {}
diff --git a/test/files/neg/plugin-rafter-before-1/testsource.scala b/test/pending/neg/plugin-rafter-before-1/testsource.scala
index 836459db22..836459db22 100644
--- a/test/files/neg/plugin-rafter-before-1/testsource.scala
+++ b/test/pending/neg/plugin-rafter-before-1/testsource.scala
diff --git a/test/files/neg/plugin-rightafter-terminal.check b/test/pending/neg/plugin-rightafter-terminal.check
index 6fe4f63c82..6fe4f63c82 100644
--- a/test/files/neg/plugin-rightafter-terminal.check
+++ b/test/pending/neg/plugin-rightafter-terminal.check
diff --git a/test/files/neg/plugin-rightafter-terminal.flags b/test/pending/neg/plugin-rightafter-terminal.flags
index 948a318668..948a318668 100644
--- a/test/files/neg/plugin-rightafter-terminal.flags
+++ b/test/pending/neg/plugin-rightafter-terminal.flags
diff --git a/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
new file mode 100644
index 0000000000..c2e2b9cd43
--- /dev/null
+++ b/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
@@ -0,0 +1 @@
+8cccde4914da2058dca893783c231cda23855603 ?plugins.jar
diff --git a/test/files/neg/plugin-rightafter-terminal/misc/build.sh b/test/pending/neg/plugin-rightafter-terminal/misc/build.sh
index 8899009d7f..8899009d7f 100755
--- a/test/files/neg/plugin-rightafter-terminal/misc/build.sh
+++ b/test/pending/neg/plugin-rightafter-terminal/misc/build.sh
diff --git a/test/files/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml b/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml
index 90ff27dc2a..90ff27dc2a 100644
--- a/test/files/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml
+++ b/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml
diff --git a/test/files/neg/plugin-rightafter-terminal/src/ThePlugin.scala b/test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala
index 47dd06ec8a..9d6d30b327 100644
--- a/test/files/neg/plugin-rightafter-terminal/src/ThePlugin.scala
+++ b/test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala
@@ -12,18 +12,18 @@ class ThePlugin(val global: Global) extends Plugin {
val name = "rightafterterminal"
val description = "Declares one plugin that wants to be right after the terminal phase"
val components = List[PluginComponent](thePhase)
-
+
private object thePhase extends PluginComponent {
val global = ThePlugin.this.global
val runsAfter = List[String]()
override val runsRightAfter = Some("terminal")
-
+
val phaseName = ThePlugin.this.name
- def newPhase(prev: Phase) = new ThePhase(prev)
+ def newPhase(prev: Phase) = new ThePhase(prev)
}
-
+
private class ThePhase(prev: Phase) extends Phase(prev) {
def name = ThePlugin.this.name
def run {}
diff --git a/test/files/neg/plugin-rightafter-terminal/testsource.scala b/test/pending/neg/plugin-rightafter-terminal/testsource.scala
index 7af767b638..7af767b638 100644
--- a/test/files/neg/plugin-rightafter-terminal/testsource.scala
+++ b/test/pending/neg/plugin-rightafter-terminal/testsource.scala
diff --git a/test/pending/neg/t1477.scala b/test/pending/neg/t1477.scala
index 0cc0cd5f7a..3ecd275a55 100644
--- a/test/pending/neg/t1477.scala
+++ b/test/pending/neg/t1477.scala
@@ -8,7 +8,7 @@ object Test extends Application {
type V <: D
val y: V#T = new B { }
}
-
+
trait Middle extends C {
type V <: (D with U)
}
diff --git a/test/pending/neg/t2079.scala b/test/pending/neg/t2079.scala
index a86674c7e0..d60910321b 100644
--- a/test/pending/neg/t2079.scala
+++ b/test/pending/neg/t2079.scala
@@ -4,8 +4,8 @@ trait A {
}
object B {
- def f(x : { val y : A }) { x.y.v = x.y.v }
-
+ def f(x : { val y : A }) { x.y.v = x.y.v }
+
var a : A = _
var b : Boolean = false
def y : A = {
diff --git a/test/pending/neg/t2080.scala b/test/pending/neg/t2080.scala
index 0880a40faa..3f4306c091 100644
--- a/test/pending/neg/t2080.scala
+++ b/test/pending/neg/t2080.scala
@@ -14,4 +14,4 @@ object C extends B {
}
override def f(x : T) : T = { x.g; x }
}
-//It compiles without errors, but T in B and T in C are completely unrelated types.
+//It compiles without errors, but T in B and T in C are completely unrelated types.
diff --git a/test/pending/neg/tcpoly_typealias_eta.scala b/test/pending/neg/tcpoly_typealias_eta.scala
index 0fb2c2d33e..033c911f7c 100644
--- a/test/pending/neg/tcpoly_typealias_eta.scala
+++ b/test/pending/neg/tcpoly_typealias_eta.scala
@@ -12,7 +12,7 @@ trait A3 {
trait FooCov[+x]
trait FooCon[-x]
-trait FooBound[+x <: String]
+trait FooBound[+x <: String]
trait BOk1 extends A {
type m/*[+x]*/ = FooCov/*[x]*/
@@ -30,8 +30,8 @@ trait BOk4 extends A3 {
type m/*[+x]*/ = FooCov/*[x]*/ // weaker variance
}
-// there are two aspects to check:
- // does type alias signature (not considering RHS) correspond to abstract type member in super class
+// there are two aspects to check:
+ // does type alias signature (not considering RHS) correspond to abstract type member in super class
// does RHS correspond to the type alias sig
trait BInv extends A{
type m/*[x]*/ = FooCov/*[x]*/ // error: invariant x in alias def
diff --git a/test/pending/neg/tcpoly_variance_enforce_getter_setter.scala b/test/pending/neg/tcpoly_variance_enforce_getter_setter.scala
index 321d392cc4..deafba8d8a 100644
--- a/test/pending/neg/tcpoly_variance_enforce_getter_setter.scala
+++ b/test/pending/neg/tcpoly_variance_enforce_getter_setter.scala
@@ -1,12 +1,12 @@
trait coll[+m[+x]]
-class FooInvar[x]
+class FooInvar[x]
class FooContra[-x]
class FooCov[+x]
object test {
var ok: coll[FooCov] = _
-
+
var x: coll[FooInvar] = _ // TODO: error should be reported only once instead of separately for getter and setter
var y: coll[FooContra] = _
}
diff --git a/test/pending/pos/bug0305.scala b/test/pending/pos/bug0305.scala
index 433f0e72c3..1fc66788b1 100644
--- a/test/pending/pos/bug0305.scala
+++ b/test/pending/pos/bug0305.scala
@@ -1,5 +1,5 @@
object Test extends Application {
-
+
def foo(is:int*) = 1;
def foo(i:int) = 2;
diff --git a/test/pending/pos/bug112606B.scala b/test/pending/pos/bug112606B.scala
index 6dfaf4bf01..3e50949ef2 100644
--- a/test/pending/pos/bug112606B.scala
+++ b/test/pending/pos/bug112606B.scala
@@ -5,7 +5,7 @@ trait Test2 {
def decode(tok : KeywordToken) = tok match {
// constructor cannot be instantiated to expected type;
// found : Test2.this.Token
- // required: Test2.this.KeywordToken
- case Token("final") => true;
+ // required: Test2.this.KeywordToken
+ case Token("final") => true;
}
}
diff --git a/test/files/pos/bug1357.scala b/test/pending/pos/bug1357.scala
index fcdecb3ad3..7bc6d45034 100644
--- a/test/files/pos/bug1357.scala
+++ b/test/pending/pos/bug1357.scala
@@ -6,7 +6,7 @@ object NonEmptyCons {
object Main {
type BT[+H, +T <: Tuple2[Tuple2[H, T], Tuple2[H, T]]] = Tuple2[H, T]
-
+
// type T = Tuple2[String,String]
type BinaryTree[+E] = BT[E, T forSome { type T <: Tuple2[BT[E, T], BT[E, T]] }]
diff --git a/test/pending/pos/bug3420.flags b/test/pending/pos/bug3420.flags
new file mode 100644
index 0000000000..ea03113c66
--- /dev/null
+++ b/test/pending/pos/bug3420.flags
@@ -0,0 +1 @@
+-optimise -Xfatal-warnings \ No newline at end of file
diff --git a/test/pending/pos/bug3420.scala b/test/pending/pos/bug3420.scala
new file mode 100644
index 0000000000..0fc56ed67b
--- /dev/null
+++ b/test/pending/pos/bug3420.scala
@@ -0,0 +1,5 @@
+class C {
+ val cv = Map[Int, Int](1 -> 2)
+ lazy val cl = Map[Int, Int](1 -> 2)
+ def cd = Map[Int, Int](1 -> 2)
+}
diff --git a/test/pending/pos/bug563.scala b/test/pending/pos/bug563.scala
index d559226bdb..927773d1e5 100644
--- a/test/pending/pos/bug563.scala
+++ b/test/pending/pos/bug563.scala
@@ -1,6 +1,6 @@
object Test {
def map[A,R](a : List[A], f : A => R) : List[R] = a.map(f);
-
+
def split(sn : Iterable[List[Cell[int]]]) : unit =
for (val n <- sn)
map(n,ptr => new Cell(ptr.elem));
diff --git a/test/pending/pos/bug572.scala b/test/pending/pos/bug572.scala
index 3a69cde4a6..31eac9370c 100644
--- a/test/pending/pos/bug572.scala
+++ b/test/pending/pos/bug572.scala
@@ -14,15 +14,15 @@ object DirX {
import DirX._;
abstract class Linked {
type Node <: Node0;
-
+
abstract class Node0 {
var next : Node = _;
var prev : Node = _;
-
+
def get(dir : Dir) = if (dir == BEFORE) prev; else next;
- private def set(dir : Dir, node : Node) =
+ private def set(dir : Dir, node : Node) =
if (dir == BEFORE) prev = node; else next = node;
-
+
def link(dir : Dir, node : Node) = {
assert(get(dir) == null);
assert(node.get(dir.reverse) == null);
diff --git a/test/pending/pos/bug573.scala b/test/pending/pos/bug573.scala
index b1b4f75098..c3d68107ff 100644
--- a/test/pending/pos/bug573.scala
+++ b/test/pending/pos/bug573.scala
@@ -16,15 +16,15 @@ import DirX._;
abstract class Linked {
type Node <: Node0;
-
+
abstract class Node0 {
var next : Node = _;
var prev : Node = _;
-
+
def self : Node;
-
+
def get(dir : Dir) = if (dir == BEFORE) prev; else next;
- private def set(dir : Dir, node : Node) =
+ private def set(dir : Dir, node : Node) =
if (dir == BEFORE) prev = node; else next = node;
def link(dir : Dir, node : Node) = {
@@ -34,7 +34,7 @@ abstract class Linked {
node.set(dir.reverse, self);
}
-
+
def end(dir : Dir) : Node = {
if (get(dir) == null) this;
else get(dir).end(dir);
diff --git a/test/pending/pos/bug579.scala b/test/pending/pos/bug579.scala
index a0806919e0..c42ac172fa 100644
--- a/test/pending/pos/bug579.scala
+++ b/test/pending/pos/bug579.scala
@@ -8,7 +8,7 @@ class MyBean {
}
object Test extends Application {
-
+
val x = new MyBean;
x.frombulizer = "hello"
diff --git a/test/pending/pos/bug586.scala b/test/pending/pos/bug586.scala
index b736e6ab43..f07d0cd2a7 100644
--- a/test/pending/pos/bug586.scala
+++ b/test/pending/pos/bug586.scala
@@ -2,11 +2,11 @@ import scala.collection.immutable.{Map, TreeMap, ListMap, ListSet, Set}
import scala.collection.{immutable=>imm, mutable=>mut}
case class HashTreeSet[A](map: imm.Map[A, Unit])
-extends Object
+extends Object
with imm.Set[A]
{
def this() = this(null)
-
+
def size = map.size
def +(elem: A) = new HashTreeSet(map + elem -> ())
def -(elem: A) = new HashTreeSet(map - elem)
@@ -20,13 +20,13 @@ abstract class Goal2 {
type Question
val question: Question
- type Answer
+ type Answer
def initialAnswer: Answer
}
-abstract class AbstractRespondersGoal
+abstract class AbstractRespondersGoal
extends Goal2 // TYPEFIX -- comment out the extends Goal2
{
}
@@ -68,7 +68,7 @@ class RespondersGoalSet
// case StaticMethodSelector(method: MethodRef) =>
case true =>
new SingleResponderGoal(null, null)
-
+
// case DynamicMethodSelector(signature: MethodSignature) => {
case false => {
new RespondersGoal(null, null,null,null)
diff --git a/test/pending/pos/misc/B.scala b/test/pending/pos/misc/B.scala
index 3a080e4712..afc30944f5 100644
--- a/test/pending/pos/misc/B.scala
+++ b/test/pending/pos/misc/B.scala
@@ -1,7 +1,7 @@
package test
class B {
-
+
def myA = new A()
}
diff --git a/test/pending/pos/moors.scala b/test/pending/pos/moors.scala
index 4f7346f57f..40f0e54f63 100644
--- a/test/pending/pos/moors.scala
+++ b/test/pending/pos/moors.scala
@@ -7,6 +7,6 @@ object Test {
def testCoercionThis = baz // --> error: not found: value baz
def testCoercionThis = (this: Foo).baz // --> error: value baz is not a member of Foo
}
-
- class Bar { def baz = System.out.println("baz")}
+
+ class Bar { def baz = System.out.println("baz")}
}
diff --git a/test/pending/pos/sig/sigs.scala b/test/pending/pos/sig/sigs.scala
index 72a293d0e6..bdb72a09bb 100644
--- a/test/pending/pos/sig/sigs.scala
+++ b/test/pending/pos/sig/sigs.scala
@@ -1,5 +1,5 @@
package test
-class T {
+class T {
def foo[T <: String](x: T): T = x
def bar[T](x: T): T = x
class Inner {
diff --git a/test/pending/pos/t0621.scala b/test/pending/pos/t0621.scala
index d178bed0fb..1d2531c4bd 100644
--- a/test/pending/pos/t0621.scala
+++ b/test/pending/pos/t0621.scala
@@ -1,7 +1,7 @@
object Test {
val x1 : List[T] forSome { type T } = List(42)
val w1 = x1 match { case y : List[u] => ((z : u) => z)(y.head) }
-
+
val x2 : T forSome { type T } = 42
val w2 = x2 match { case y : u => ((z : u) => z)(y) }
}
diff --git a/test/pending/pos/t0756.scala b/test/pending/pos/t0756.scala
index a778bd63d0..8011a9c526 100644
--- a/test/pending/pos/t0756.scala
+++ b/test/pending/pos/t0756.scala
@@ -2,7 +2,7 @@ object Test {
for {
n <- Some(42)
- _
+ _
m <- Some(24)
} yield n
}
diff --git a/test/pending/pos/t0805.scala b/test/pending/pos/t0805.scala
index 565a2a6527..c6348f45e3 100644
--- a/test/pending/pos/t0805.scala
+++ b/test/pending/pos/t0805.scala
@@ -3,7 +3,7 @@ object Test {
def make(t: Test) : Test = TestList(t.args.toList)
}
case class TestList[T](elements: List[T])(implicit f: T => Test)
-
+
class Test {
val args: Array[Test]
}
diff --git a/test/pending/pos/t1004.scala b/test/pending/pos/t1004.scala
index e86631acea..e121d13618 100644
--- a/test/pending/pos/t1004.scala
+++ b/test/pending/pos/t1004.scala
@@ -1,6 +1,6 @@
object A {
def main(args: Array[String]) = {
val x = new { def copy(a : this.type) = a };
- x.copy(x)
+ x.copy(x)
}
}
diff --git a/test/files/pos/t1380/gnujaxp.jar.desired.sha1 b/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
index c155c2aaa2..c155c2aaa2 100644
--- a/test/files/pos/t1380/gnujaxp.jar.desired.sha1
+++ b/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
diff --git a/test/pending/pos/t1380/hallo.scala b/test/pending/pos/t1380/hallo.scala
new file mode 100644
index 0000000000..bb8fff2333
--- /dev/null
+++ b/test/pending/pos/t1380/hallo.scala
@@ -0,0 +1,3 @@
+object hallo {
+ def main(args:Array[String]) = println("hallo")
+}
diff --git a/test/pending/pos/t1659.scala b/test/pending/pos/t1659.scala
deleted file mode 100644
index 10470d66f8..0000000000
--- a/test/pending/pos/t1659.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait Y { type X }
-trait W { def u[A](v : Y { type X = A }) : Unit }
-class Z extends W { def u[A](v : Y { type X = A }) = null }
-
diff --git a/test/pending/pos/t1786.scala b/test/pending/pos/t1786.scala
index d0cf8c7bac..dca2edaab4 100644
--- a/test/pending/pos/t1786.scala
+++ b/test/pending/pos/t1786.scala
@@ -1,10 +1,10 @@
/** This is a consequence of the current type checking algorithm, where bounds
* are checked only after variables are instantiated. I believe this will change once we go to constraint-based type inference. Assigning low priority until then.
- *
+ *
*
*/
class SomeClass(val intValue:Int)
-class MyClass[T <: SomeClass](val myValue:T)
+class MyClass[T <: SomeClass](val myValue:T)
object Test extends Application {
def myMethod(i:MyClass[_]) {
diff --git a/test/pending/pos/t2060.scala b/test/pending/pos/t2060.scala
deleted file mode 100644
index 3f47259849..0000000000
--- a/test/pending/pos/t2060.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-object Test {
- class Op[I];
- class IntOp extends Op[Int];
-
- class Rich(x : Double) {
- def + (op : IntOp) = op;
- def + [I](op : Op[I]) = op;
- def plus [I](op : Op[I]) = op;
- }
-
- implicit def iToRich(x : Double) =
- new Rich(x);
-
- // fails to compile
- val failure = 1.0 + new Op[Int];
-
- // works as expected --
- // problem isn't in adding new "+"
- val a = 1.0 + new IntOp;
-
- // works as expected --
- // problem isn't in binding type variable I
- val b = 1.0 plus new Op[Int];
-
- // works as expected --
- // problem isn't in using Rich.+[I](op : Op[I])
- val c = iToRich(1.0) + new Op[Int];
-}
diff --git a/test/pending/pos/t2625.scala b/test/pending/pos/t2625.scala
new file mode 100644
index 0000000000..94240cb6c6
--- /dev/null
+++ b/test/pending/pos/t2625.scala
@@ -0,0 +1,9 @@
+package t
+
+object T {
+ case class A(x: Int)(x: Int)
+
+ def A(x: Boolean): Int = 34
+
+ A(23)
+} \ No newline at end of file
diff --git a/test/pending/pos/t2635.scala b/test/pending/pos/t2635.scala
new file mode 100644
index 0000000000..378631b23d
--- /dev/null
+++ b/test/pending/pos/t2635.scala
@@ -0,0 +1,16 @@
+abstract class Base
+
+object Test
+{
+ def run(c: Class[_ <: Base]): Unit = {
+ }
+
+ def main(args: Array[String]): Unit =
+ {
+ val sc: Option[Class[_ <: Base]] = Some(classOf[Base])
+ sc match {
+ case Some((c: Class[_ <: Base])) => run(c)
+ case None =>
+ }
+ }
+} \ No newline at end of file
diff --git a/test/pending/pos/t2641.scala b/test/pending/pos/t2641.scala
new file mode 100644
index 0000000000..fec825c4f9
--- /dev/null
+++ b/test/pending/pos/t2641.scala
@@ -0,0 +1,16 @@
+import scala.collection._
+import scala.collection.generic._
+
+abstract class ManagedSeqStrict[+A]
+ extends Traversable[A]
+ with GenericTraversableTemplate[A, ManagedSeqStrict]
+
+trait ManagedSeq[+A, +Coll]
+ extends ManagedSeqStrict[A]
+ with TraversableView[A, ManagedSeqStrict[A]]
+ with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A/*ERROR: too few type args*/]]
+{ self =>
+ trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
+
+ trait Sliced extends Transformed[A] with super.Sliced
+} \ No newline at end of file
diff --git a/test/pending/pos/unappgadteval.scala b/test/pending/pos/unappgadteval.scala
index 0c22c71dee..007d57d7ea 100644
--- a/test/pending/pos/unappgadteval.scala
+++ b/test/pending/pos/unappgadteval.scala
@@ -11,21 +11,21 @@ class App[b, c] (val f : Term[b => c], val e : Term[b]) extends Term[c]
object Suc{ def unapply(a:Suc) = true }
class Suc () extends Term[int => int]
// Environments :
-abstract class Env {
+abstract class Env {
def apply[a](v : Var[a]): a
- def extend[a](v : Var[a], x : a) = new Env {
- def apply[b](w: Var[b]): b = w match {
+ def extend[a](v : Var[a], x : a) = new Env {
+ def apply[b](w: Var[b]): b = w match {
case _ : v.type => x // v eq w, hence a = b
case _ => Env.this.apply(w)
}}
}
-object empty extends Env {
- def apply[a](x : Var[a]): a = throw new Error("not found : "+x.name)
+object empty extends Env {
+ def apply[a](x : Var[a]): a = throw new Error("not found : "+x.name)
}
object Test {
// Evaluation :
-def eval[a](t : Term[a], env : Env): a = t match {
+def eval[a](t : Term[a], env : Env): a = t match {
case v : Var[b] => env(v) // a = b
case n @ Num(value) => value // a = int
case i @ Suc() => { y: int => y + 1 } // a = int=>int
diff --git a/test/pending/pos/virt.scala b/test/pending/pos/virt.scala
index 6fe21246b0..99dcd747b2 100644
--- a/test/pending/pos/virt.scala
+++ b/test/pending/pos/virt.scala
@@ -1,9 +1,9 @@
object Virt extends Application {
- class Foo {
+ class Foo {
trait Inner <: { val x : Int = 3 }
}
- class Bar extends Foo {
+ class Bar extends Foo {
trait Inner <: { val y : Int = x }
}
}
diff --git a/test/pending/res/bug837/DeadCode.scala b/test/pending/res/bug837/DeadCode.scala
index 2978e24d42..c63af909af 100644
--- a/test/pending/res/bug837/DeadCode.scala
+++ b/test/pending/res/bug837/DeadCode.scala
@@ -1,6 +1,6 @@
package test;
trait DeadcodeAnalysis {
- object liveness extends Liveness;
+ object liveness extends Liveness;
val a = new liveness.LivenessAnalysis();
var live = a.out("hello");
}
diff --git a/test/pending/run/array_casts.scala b/test/pending/run/array_casts.scala
index 9d298bbc2b..4940f9419b 100644
--- a/test/pending/run/array_casts.scala
+++ b/test/pending/run/array_casts.scala
@@ -24,7 +24,7 @@ object Test {
try {
Console.println(a.asInstanceOf[RandomAccessSeq.Mutable[_]])
} catch { case ex : ClassCastException => Console.println("Bad, arrays should be mutable random access seqs") }
- try {
+ try {
Console.println("not expected: " + a.asInstanceOf[List[_]])
} catch { case ex : ClassCastException => Console.println("Good, arrays are not lists") }
try {
@@ -38,5 +38,5 @@ object Test {
Console.println(s.asInstanceOf[Array[Int]].getClass)
} catch { case ex : ClassCastException => Console.println("Bad, arrays as seqs should still be arrays of int") }
()
- }
+ }
}
diff --git a/test/pending/run/arrays-2.scala b/test/pending/run/arrays-2.scala
index bee22db577..5f8096deec 100644
--- a/test/pending/run/arrays-2.scala
+++ b/test/pending/run/arrays-2.scala
@@ -20,6 +20,6 @@ object Test extends Application {
println("a2=" + _toString(a2))
util.Sorting.stableSort(a2)
println("a2=" + _toString(a2))
-
+
println(a1 deepEquals a3)
}
diff --git a/test/files/run/bug1697.scala b/test/pending/run/bug1697.scala
index 01590dd405..01590dd405 100644
--- a/test/files/run/bug1697.scala
+++ b/test/pending/run/bug1697.scala
diff --git a/test/pending/run/bug2087.scala b/test/pending/run/bug2087.scala
index b3f96fa415..787e411115 100644
--- a/test/pending/run/bug2087.scala
+++ b/test/pending/run/bug2087.scala
@@ -2,7 +2,7 @@ object Test {
def main(args: Array[String]): Unit = {
val s: Short = 0xFA99.toShort
val c: Char = 0xFA99.toChar
-
+
assert((s == c) == (c == s))
}
} \ No newline at end of file
diff --git a/test/pending/run/bug2364.check b/test/pending/run/bug2364.check
new file mode 100644
index 0000000000..219305e43a
--- /dev/null
+++ b/test/pending/run/bug2364.check
@@ -0,0 +1 @@
+<test></test>
diff --git a/test/pending/run/bug2364.scala b/test/pending/run/bug2364.scala
new file mode 100644
index 0000000000..d5805a13b8
--- /dev/null
+++ b/test/pending/run/bug2364.scala
@@ -0,0 +1,60 @@
+import java.io.ByteArrayInputStream
+import java.io.ByteArrayOutputStream
+import com.sun.xml.internal.fastinfoset._
+import com.sun.xml.internal.fastinfoset.sax._
+import scala.xml.parsing.NoBindingFactoryAdapter
+import scala.xml._
+
+// Note - this is in pending because com.sun.xml.etc is not standard,
+// and I don't have time to extract a smaller test.
+
+object Test {
+ def main(args: Array[String]) {
+ val node = <test/>
+ val bytes = new ByteArrayOutputStream
+ val serializer = new SAXDocumentSerializer()
+
+ serializer.setOutputStream(bytes)
+ serializer.startDocument()
+ serialize(node, serializer)
+ serializer.endDocument()
+ println(parse(new ByteArrayInputStream(bytes.toByteArray)))
+ }
+ def serialize(node: Node, serializer: SAXDocumentSerializer) {
+ node match {
+ case _ : ProcInstr | _ : Comment | _ : EntityRef =>
+ case x : Atom[_] =>
+ val chars = x.text.toCharArray
+ serializer.characters(chars, 0, chars.length)
+ case _ : Elem =>
+ serializer.startElement("", node.label.toLowerCase, node.label.toLowerCase, attributes(node.attributes))
+ for (m <- node.child) serialize(m, serializer)
+ serializer.endElement("", node.label.toLowerCase, node.label.toLowerCase)
+ }
+ }
+ def parse(str: ByteArrayInputStream) = {
+ val parser = new SAXDocumentParser
+ val fac = new NoBindingFactoryAdapter
+
+ parser.setContentHandler(fac)
+ try {
+ parser.parse(str)
+ } catch {
+ case x: Exception =>
+ x.printStackTrace
+ }
+ fac.rootElem
+ }
+ def attributes(d: MetaData) = {
+ val attrs = new AttributesHolder
+
+ if (d != null) {
+ for (attr <- d) {
+ val sb = new StringBuilder()
+ Utility.sequenceToXML(attr.value, TopScope, sb, true)
+ attrs.addAttribute(new QualifiedName("", "", attr.key.toLowerCase), sb.toString)
+ }
+ }
+ attrs
+ }
+}
diff --git a/test/pending/run/bug2365/Test.scala b/test/pending/run/bug2365/Test.scala
new file mode 100644
index 0000000000..75f0c39b2f
--- /dev/null
+++ b/test/pending/run/bug2365/Test.scala
@@ -0,0 +1,35 @@
+import scala.tools.nsc.io._
+import java.net.URL
+
+object A { def apply(d: { def apply(): Int}) = d.apply() }
+object A2 { def apply(d: { def apply(): Int}) = d.apply() }
+object A3 { def apply(d: { def apply(): Int}) = d.apply() }
+object A4 { def apply(d: { def apply(): Int}) = d.apply() }
+
+class B extends Function0[Int] {
+ def apply() = 3
+}
+
+object Test
+{
+ type StructF0 = { def apply(): Int }
+ def main(args: Array[String]) {
+ for(i <- 0 until 150)
+ println(i + " " + test(A.apply) + " " + test(A2.apply) + " " + test(A3.apply) + " " + test(A3.apply))
+ }
+
+ def test(withF0: StructF0 => Int): Int = {
+ // Some large jar
+ val ivyJar = File("/local/lib/java/ivy.jar").toURL
+ // load a class in a separate loader that will be passed to A
+ val loader = new java.net.URLClassLoader(Array(File(".").toURL, ivyJar))
+ // load a real class to fill perm gen space
+ Class.forName("org.apache.ivy.Ivy", true, loader).newInstance
+ // create a class from another class loader with an apply: Int method
+ val b = Class.forName("B", true, loader).newInstance
+
+ // pass instance to a, which will call apply using structural type reflection.
+ // This should hold on to the class for B, which means bLoader will not get collected
+ withF0(b.asInstanceOf[StructF0])
+ }
+}
diff --git a/test/pending/run/bug2365/bug2365.javaopts b/test/pending/run/bug2365/bug2365.javaopts
new file mode 100644
index 0000000000..357e033c1c
--- /dev/null
+++ b/test/pending/run/bug2365/bug2365.javaopts
@@ -0,0 +1 @@
+-XX:MaxPermSize=25M
diff --git a/test/pending/run/bug2365/run b/test/pending/run/bug2365/run
new file mode 100755
index 0000000000..f3c44ad086
--- /dev/null
+++ b/test/pending/run/bug2365/run
@@ -0,0 +1,13 @@
+#!/bin/sh
+#
+# This script should fail with any build of scala where #2365
+# is not fixed, and otherwise succeed. Failure means running out
+# of PermGen space.
+
+CP=.:/local/lib/java/ivy.jar
+# SCALAC=/scala/inst/28/bin/scalac
+SCALAC=scalac
+RUN_OPTS="-XX:MaxPermSize=25M -verbose:gc"
+
+$SCALAC -cp $CP *.scala
+JAVA_OPTS="${RUN_OPTS}" scala -cp $CP Test
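The comments in Test.scala above spell out the mechanism this test exercises: invoking apply through the structural type StructF0 compiles to a reflective method lookup on the runtime class of the argument, which is what can keep that class and its loader reachable and eventually exhaust PermGen. A minimal sketch of such a call site, assuming only the standard library; the names StructuralCallSketch and call are illustrative only, and on Scala 2.10 or later the same code additionally wants import scala.language.reflectiveCalls:

    object StructuralCallSketch {
      type StructF0 = { def apply(): Int }

      class B { def apply() = 3 }

      // The refinement type has no runtime class of its own, so this call is
      // dispatched via java.lang.reflect.Method rather than through an interface.
      def call(f: StructF0): Int = f.apply()

      def main(args: Array[String]): Unit = println(call(new B))   // prints 3
    }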
diff --git a/test/pending/run/bug3050.scala b/test/pending/run/bug3050.scala
new file mode 100644
index 0000000000..2b85149d7c
--- /dev/null
+++ b/test/pending/run/bug3050.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val x =
+ try { ("": Any) match { case List(_*) => true } }
+ catch { case _ => false }
+
+ assert(x == false)
+ }
+}
+ \ No newline at end of file
diff --git a/test/pending/run/bug3150.scala b/test/pending/run/bug3150.scala
new file mode 100644
index 0000000000..8acdb50354
--- /dev/null
+++ b/test/pending/run/bug3150.scala
@@ -0,0 +1,10 @@
+object Test {
+ case object Bob { override def equals(other: Any) = true }
+ def f(x: Any) = x match { case Bob => Bob }
+
+ def main(args: Array[String]): Unit = {
+ assert(f(Bob) eq Bob)
+ assert(f(0) eq Bob)
+ assert(f(Nil) eq Bob)
+ }
+}
diff --git a/test/pending/run/bug874.scala b/test/pending/run/bug874.scala
index 29dfabe0e9..f8d14eec09 100644
--- a/test/pending/run/bug874.scala
+++ b/test/pending/run/bug874.scala
@@ -5,7 +5,7 @@ object Test {
}
U("xyz")(2)
}
- class Mix extends Base {
+ class Mix extends Base {
case class U[A](x1: A)(x2: int) {
Console.println("U created with "+x1+" and "+x2)
}
diff --git a/test/pending/run/bugs425-and-816.scala b/test/pending/run/bugs425-and-816.scala
deleted file mode 100644
index d9267d06af..0000000000
--- a/test/pending/run/bugs425-and-816.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-object Test {
- object bug425 {
- case class A(x: Int)
- case class B(override val x: Int, y: Double) extends A(x)
-
- val b: A = B(5, 3.3)
- b match {
- case B(x, y) => Console.println(y)
- case A(x) => Console.println(x)
- }
- }
-
- object bug816 {
- abstract class Atest(val data: String)
-
- case class Btest(override val data: String, val b: boolean) extends Atest(data)
-
- case class Ctest(override val data: String) extends Btest(data, true)
-
- class testCaseClass {
- def test(x: Atest) = x match {
- case Ctest(data) => Console.println("C")
- case Btest(data, b) => Console.println("B")
- }
- }
- }
-}
diff --git a/test/pending/run/castsingleton.scala b/test/pending/run/castsingleton.scala
index 171b380dc9..59c912e092 100644
--- a/test/pending/run/castsingleton.scala
+++ b/test/pending/run/castsingleton.scala
@@ -7,4 +7,4 @@ object Test extends Application {
}
empty(L())
-}
+}
diff --git a/test/pending/run/collections.scala b/test/pending/run/collections.scala
index 6717b524bb..ea8149e16a 100644
--- a/test/pending/run/collections.scala
+++ b/test/pending/run/collections.scala
@@ -60,7 +60,7 @@ object Test extends Application {
}
time {
var x = 0
- for (val i <- (0 to 10000))
+ for (val i <- (0 to 10000))
s get i match {
case Some(i) => x = x + i
case None =>
@@ -81,7 +81,7 @@ object Test extends Application {
}
time {
var x = 0
- for (val i <- (0 to 10000))
+ for (val i <- (0 to 10000))
s get i match {
case Some(i) => x = x + i
case None =>
diff --git a/test/pending/run/deprecated.scala b/test/pending/run/deprecated.scala
index 122e339d2f..0b3919aa76 100644
--- a/test/pending/run/deprecated.scala
+++ b/test/pending/run/deprecated.scala
@@ -2,16 +2,16 @@ object Test {
abstract class AbstractStuff {
def dostuff: Unit
}
-
+
[postabstract]
class BlueStuff extends AbstractStuff {
[deprecated] def dostuff = Console.println("blue")
def five = "five"
}
-
+
class LightBlueStuff extends BlueStuff {
[deprecated] override def dostuff = {Console.println("light blue")}
-
+
// warning: deprecated method overrides a concrete method
[deprecated] override def five = "light five"
}
@@ -21,15 +21,15 @@ object Test {
[deprecated] def dostuff = Console.println("red")
}
-
+
def main(args: Array[String]) {
// warning: BlueStuff will be abstract after deprecated methods are removed
- val blue = new BlueStuff
+ val blue = new BlueStuff
// warning: RedStuff will be abstract after deprecated methods are removed
- val red = new RedStuff
-
+ val red = new RedStuff
+
// warning: dostuff is deprecated
- blue.dostuff
- }
+ blue.dostuff
+ }
}
diff --git a/test/files/run/hashCodeDistribution.flags b/test/pending/run/hashCodeDistribution.flags
index 7806652d4d..7806652d4d 100644
--- a/test/files/run/hashCodeDistribution.flags
+++ b/test/pending/run/hashCodeDistribution.flags
diff --git a/test/files/run/hashCodeDistribution.scala b/test/pending/run/hashCodeDistribution.scala
index dbb6e833bd..5be9d1db6d 100644
--- a/test/files/run/hashCodeDistribution.scala
+++ b/test/pending/run/hashCodeDistribution.scala
@@ -8,7 +8,7 @@ object Test {
val hashCodes =
for (x <- 0 until COUNT; y <- 0 until COUNT) yield C(x,y).hashCode
- val uniques = hashCodes.removeDuplicates
+ val uniques = hashCodes.distinct
val collisionRate = (totalCodes - uniques.size) * 1000 / totalCodes
assert(collisionRate < 5, "Collision rate too high: %d / 1000".format(collisionRate))
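The hunk above replaces the deprecated removeDuplicates with distinct, the 2.8 collections method that keeps the first occurrence of each element. A minimal sketch of the replacement call, assuming nothing beyond the standard library (the object name DistinctSketch is only for illustration):

    object DistinctSketch {
      def main(args: Array[String]): Unit = {
        val hashCodes = List(1, 2, 2, 3, 3, 3)
        val uniques = hashCodes.distinct   // one entry per distinct element, first occurrences kept
        assert(uniques == List(1, 2, 3))
        println(uniques.size)              // 3
      }
    }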
diff --git a/test/pending/run/instanceOfAndTypeMatching.scala b/test/pending/run/instanceOfAndTypeMatching.scala
new file mode 100644
index 0000000000..5273060c25
--- /dev/null
+++ b/test/pending/run/instanceOfAndTypeMatching.scala
@@ -0,0 +1,193 @@
+// Summary of incorrect or questionable behavior.
+// Full code and successful parts follow.
+
+object Summary {
+ class Outer {
+ class Inner { }
+ def f() = { class MethodInner ; new MethodInner }
+ }
+
+ // 1 static issue:
+ //
+ // Given method in MethodInner: def g(other: MethodInner) = ()
+ // method1.g(method1) fails to compile with type error.
+ //
+ // Note that this cannot be worked around by widening the return type
+ // of f() because MethodInner is declared inside of f. So there is no way
+ // I see for a class declared inside a method to receive members of its
+ // own declared type -- not only the narrow type of those from this
+ // instance, but ANY members, because there is no Foo#Bar syntax which will
+ // traverse a method.
+ //
+ // 4 runtime issues:
+ //
+ // From the outside: inner1.isInstanceOf[outer2.Inner] is true, should (maybe) be false
+ // From inside inner1: inner2.isInstanceOf[Outer.this.Inner] is true, should (maybe) be false
+ // From the outside: inner1 match { case _: outer2.Inner => true ... } is true, should definitely be false
+ // From inside method1: method2 match { case _: MethodInner => true ... } is true, should definitely be false
+ //
+ // Note that the fact that every test returns true on instances of MethodInner means
+ // that it is impossible to draw any type distinction between instances. As far as one
+ // can tell, they are all of the same type regardless not only of whether they were
+ // created on the same method invocation but whether they are contained in the same
+ // instance of Outer.
+ //
+ // WRT "same method invocation", see Iterator.duplicate for an example of this.
+}
+
+// Tests
+
+class Outer {
+ class Inner {
+ def passOuter(other: Outer) = () // pass any Outer
+ def passThisType(other: Outer.this.type) = () // pass only this Outer instance
+ def passInner(other: Inner) = () // pass only Inners from this Outer instance
+ def passInner2(other: Outer.this.Inner) = () // same as above
+ def passInnerSharp(other: Outer#Inner) = () // pass any Inner
+
+ def compareSimpleWithTypeMatch(other: Any) = other match {
+ case _: Inner => true
+ case _ => false
+ }
+ def compareSimpleWithInstanceOf(other: Any) = other.isInstanceOf[Inner]
+
+ def compareSharpWithTypeMatch(other: Any) = {
+ other match {
+ case _: Outer#Inner => true
+ case _ => false
+ }
+ }
+ def compareSharpWithInstanceOf(other: Any) = other.isInstanceOf[Outer#Inner]
+
+ def comparePathWithTypeMatch(other: Any) = other match {
+ case _: Outer.this.Inner => true
+ case _ => false
+ }
+ def comparePathWithInstanceOf(other: Any) = other.isInstanceOf[Outer.this.Inner]
+ }
+
+ def f() = {
+ class MethodInner {
+ def passOuter(other: Outer) = () // pass any Outer
+ def passThisType(other: Outer.this.type) = () // pass only this Outer instance
+ def passInner(other: Inner) = () // pass only Inners from this Outer instance
+ def passInner2(other: Outer.this.Inner) = () // same as above
+ def passInnerSharp(other: Outer#Inner) = () // pass any Inner
+ def passMethodInner(other: MethodInner) = () // pass only MethodInners from this Outer instance
+ // is there any way to refer to Outer#MethodInner? Not that there should be.
+
+ def compareWithInstanceOf(other: Any) = other.isInstanceOf[MethodInner]
+ def compareWithTypeMatch(other: Any) = other match {
+ case _: MethodInner => true
+ case _ => false
+ }
+ }
+
+ new MethodInner
+ }
+}
+
+object Test
+{
+ val outer1 = new Outer
+ val outer2 = new Outer
+ val inner1 = new outer1.Inner
+ val inner2 = new outer2.Inner
+ val method1 = outer1.f()
+ val method2 = outer2.f()
+
+ def testInnerStatic = {
+ // these should all work
+ inner1.passOuter(outer1)
+ inner1.passOuter(outer2)
+ inner1.passThisType(outer1)
+ inner1.passInner(inner1)
+ inner1.passInner2(inner1)
+ inner1.passInnerSharp(inner1)
+ inner1.passInnerSharp(inner2)
+
+ // these should all fail to compile, and do
+ //
+ // inner1.passThisType(outer2)
+ // inner1.passInner(inner2)
+ // inner1.passInner2(inner2)
+ }
+ def testInnerRuntime = {
+ println("testInnerRuntime\n")
+
+ List("These should be true under any scenario: ",
+ inner1.isInstanceOf[outer1.Inner] ,
+ inner1.isInstanceOf[Outer#Inner] ,
+ inner1 match { case _: Outer#Inner => true ; case _ => false } ,
+ inner1 match { case _: outer1.Inner => true ; case _ => false } ,
+ inner1.compareSharpWithTypeMatch(inner2) ,
+ inner1.compareSharpWithInstanceOf(inner2)
+ ) foreach println
+
+ List("These should be true under current proposal: ",
+ inner1.compareSimpleWithInstanceOf(inner2)
+ ) foreach println
+
+ List("These should be false under current proposal: ",
+ inner1.compareSimpleWithTypeMatch(inner2) ,
+ inner1.comparePathWithTypeMatch(inner2)
+ ) foreach println
+
+ List("These return true but I think should return false: ",
+ inner1.isInstanceOf[outer2.Inner] , // true
+ inner1.comparePathWithInstanceOf(inner2) // true
+ ) foreach println
+
+ List("These are doing the wrong thing under current proposal",
+ inner1 match { case _: outer2.Inner => true ; case _ => false } // should be false
+ ) foreach println
+ }
+
+ def testMethodInnerStatic = {
+ // these should all work
+ method1.passOuter(outer1)
+ method1.passOuter(outer2)
+ method1.passThisType(outer1)
+ method1.passInner(inner1)
+ method1.passInner2(inner1)
+ method1.passInnerSharp(inner1)
+ method1.passInnerSharp(inner2)
+ // This fails with:
+ //
+ // a.scala:114: error: type mismatch;
+ // found : Test.method1.type (with underlying type MethodInner forSome { type MethodInner <: java.lang.Object with ScalaObject{def passOuter(other: Outer): Unit; def passThisType(other: Test.outer1.type): Unit; def passInner(other: Test.outer1.Inner): Unit; def passInner2(other: Test.outer1.Inner): Unit; def passInnerSharp(other: Outer#Inner): Unit; def passMethodInner(other: MethodInner): Unit} })
+ // required: MethodInner where type MethodInner <: java.lang.Object with ScalaObject{def passOuter(other: Outer): Unit; def passThisType(other: Test.outer1.type): Unit; def passInner(other: Test.outer1.Inner): Unit; def passInner2(other: Test.outer1.Inner): Unit; def passInnerSharp(other: Outer#Inner): Unit; def passMethodInner(other: MethodInner): Unit}
+ // method1.passMethodInner(method1)
+ // ^
+ // method1.passMethodInner(method1)
+
+ // these should all fail to compile, and do
+ //
+ // method1.passThisType(outer2)
+ // method1.passInner(inner2)
+ // method1.passInner2(inner2)
+ // method1.passMethodInner(method2)
+ }
+
+ def testMethodInnerRuntime = {
+ println("\ntestMethodInnerRuntime\n")
+
+ List("These should be true under any scenario: ",
+ method1.compareWithInstanceOf(method1) ,
+ method1.compareWithTypeMatch(method1)
+ ) foreach println
+
+ List("These should be true under current proposal: ",
+ method1.compareWithInstanceOf(method2)
+ ) foreach println
+
+ List("These are doing the wrong thing under current proposal",
+ method1.compareWithTypeMatch(method2) // should be false
+ ) foreach println
+ }
+
+ def main(args: Array[String]): Unit = {
+ testInnerRuntime
+ testMethodInnerRuntime
+ }
+}
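The summary at the top of this file states the behavior being questioned: the static type system keeps the path-dependent Inner types of two Outer instances apart, while the erased runtime checks do not. A minimal sketch of that gap, assuming only the points already listed in the summary (the object name PathDependentSketch is illustrative):

    object PathDependentSketch {
      class Outer { class Inner }

      def main(args: Array[String]): Unit = {
        val o1 = new Outer
        val o2 = new Outer
        val i1: o1.Inner = new o1.Inner
        // val i2: o2.Inner = i1            // rejected statically: o1.Inner is not o2.Inner
        println(i1.isInstanceOf[o2.Inner])  // prints true: erasure only sees Outer#Inner
      }
    }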
diff --git a/test/pending/run/records.scala b/test/pending/run/records.scala
index 87b15265f2..edab6b46fb 100644
--- a/test/pending/run/records.scala
+++ b/test/pending/run/records.scala
@@ -16,10 +16,10 @@ object Test {
val y = new C {
def f = 2
def g = " world"
- }
-
+ }
+
val z: T = y
-
+
Console.println(x.f+z.f+", expected = 3")
Console.println(x.g+z.g+", expected = hello world")
}
diff --git a/test/pending/run/string-reverse.scala b/test/pending/run/string-reverse.scala
new file mode 100644
index 0000000000..976a970dec
--- /dev/null
+++ b/test/pending/run/string-reverse.scala
@@ -0,0 +1,22 @@
+/** In case we ever feel like taking on unicode string reversal.
+ * See ticket #2565.
+ */
+object Test {
+ val xs = "Les Mise\u0301rables" // this is the tricky one to reverse
+ val ys = "Les Misérables"
+ val xs2 = new StringBuilder(xs)
+ val ys2 = new StringBuilder(ys)
+
+ def main(args: Array[String]): Unit = {
+ val out = new java.io.PrintStream(System.out, true, "UTF-8")
+
+ out.println("Strings")
+ List(xs, xs.reverse, ys, ys.reverse) foreach (out println _)
+
+ out.println("StringBuilder")
+ out.println(xs2.toString)
+ out.println(xs2.reverseContents().toString)
+ out.println(ys2.toString)
+ out.println(ys2.reverseContents().toString)
+ }
+} \ No newline at end of file
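The tricky input in string-reverse.scala above is the decomposed accent: "Mise\u0301rables" writes the é as a base letter plus COMBINING ACUTE ACCENT (U+0301), and a char-by-char reverse detaches the accent from its base. A small sketch of the difference, assuming only StringOps from the standard library (ReverseSketch is an illustrative name):

    object ReverseSketch {
      def main(args: Array[String]): Unit = {
        val decomposed  = "e\u0301"    // 'e' followed by U+0301
        val precomposed = "\u00e9"     // single precomposed character
        println(decomposed.length)     // 2
        println(precomposed.length)    // 1
        // reverse works on chars, so the accent ends up before the letter it modified
        println(decomposed.reverse == "\u0301e")   // true
      }
    }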
diff --git a/test/pending/run/t0508x.scala b/test/pending/run/t0508x.scala
index 0c1ffde3ed..12d3d09711 100644
--- a/test/pending/run/t0508x.scala
+++ b/test/pending/run/t0508x.scala
@@ -4,12 +4,12 @@
};
def foo[A >: Nothing <: Any, B >: Nothing <: Any, C >: Nothing <: Any]
- (unapply1: (A) => Option[(B, C)], v: A): Unit =
+ (unapply1: (A) => Option[(B, C)], v: A): Unit =
unapply1.apply(v) match {
- case Some((fst @ _, snd @ _)) =>
+ case Some((fst @ _, snd @ _)) =>
scala.Predef.println(scala.Tuple2.apply[java.lang.String, java.lang.String]("first: ".+(fst), " second: ".+(snd)))
case _ => scala.Predef.println(":(")
- }
+ }
Test.this.foo[Test.Foo, String, Int]({
((eta$0$1: Test.Foo) => Test.this.Foo.unapply(eta$0$1))
}, Test.this.Foo.apply("this might be fun", 10));
diff --git a/test/pending/run/t0807.scala b/test/pending/run/t0807.scala
index e69aa1c71c..b4aa0b14a4 100644
--- a/test/pending/run/t0807.scala
+++ b/test/pending/run/t0807.scala
@@ -1,5 +1,5 @@
trait A
-trait B extends A { val x = println("early") }
+trait B extends A { val x = println("early") }
object Test extends Application {
new B {}
}
diff --git a/test/pending/run/t0947.scala b/test/pending/run/t0947.scala
index f5daca3d30..c93ee40fda 100644
--- a/test/pending/run/t0947.scala
+++ b/test/pending/run/t0947.scala
@@ -1,6 +1,6 @@
import scala.tools.nsc._
-object Test extends Application {
+object Test extends Application {
class Foo { override def toString = "Foo" };
val int = new Interpreter(new Settings());
diff --git a/test/pending/run/t1980.scala b/test/pending/run/t1980.scala
index 67cb3ada9e..9ecf5a4100 100644
--- a/test/pending/run/t1980.scala
+++ b/test/pending/run/t1980.scala
@@ -2,7 +2,7 @@ by-name argument incorrectly evaluated on :-ending operator
Reported by: extempore Owned by: odersky
Priority: normal Component: Compiler
Keywords: Cc: paulp@…
-Fixed in version:
+Fixed in version:
Description
scala> def foo() = { println("foo") ; 5 }
diff --git a/test/pending/run/t3609.scala b/test/pending/run/t3609.scala
new file mode 100755
index 0000000000..262948137d
--- /dev/null
+++ b/test/pending/run/t3609.scala
@@ -0,0 +1,11 @@
+object Test extends Application {
+ class A
+ class B extends A
+ def foo(x: A, y: B) = print(1)
+ val foo = new {
+ //def apply(x: B, y: A) = print(3)
+ def apply = (x: B, z: B) => print(4)
+ }
+
+ foo(new B, new B)
+}
diff --git a/test/pending/scalacheck/CheckEither.scala b/test/pending/scalacheck/CheckEither.scala
index a6c728451b..767ae7739e 100644
--- a/test/pending/scalacheck/CheckEither.scala
+++ b/test/pending/scalacheck/CheckEither.scala
@@ -16,7 +16,7 @@ import org.scalacheck.ConsoleReporter.testStatsEx
import Function.tupled
object CheckEither {
- implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
+ implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_))))
val prop_either1 = property((n: Int) => Left(n).either(x => x, b => error("fail")) == n)
@@ -27,7 +27,7 @@ object CheckEither {
case Left(a) => e.swap.right.value == a
case Right(b) => e.swap.left.value == b
})
-
+
val prop_isLeftRight = property((e: Either[Int, Int]) => e.isLeft != e.isRight)
object CheckLeftProjection {
@@ -50,7 +50,7 @@ object CheckEither {
val prop_exists = property((e: Either[Int, Int]) =>
e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.value % 2 == 0))
-
+
val prop_flatMapLeftIdentity = property((e: Either[Int, Int], n: Int, s: String) => {
def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s)
Left(n).left.flatMap(f(_)) == f(n)})
@@ -159,7 +159,7 @@ object CheckEither {
}
val prop_Either_left = property((n: Int) => Either.left(n).left.value == n)
-
+
val prop_Either_right = property((n: Int) => Either.right(n).right.value == n)
val prop_Either_joinLeft = property((e: Either[Either[Int, Int], Int]) => e match {
@@ -172,33 +172,33 @@ object CheckEither {
case Right(ee) => Either.joinRight(e) == ee
})
- val prop_Either_lefts = property((es: List[Either[Int, Int]]) =>
+ val prop_Either_lefts = property((es: List[Either[Int, Int]]) =>
Either.lefts(es) == es.filter(_.isLeft).map(_.left.value))
-
- val prop_Either_rights = property((es: List[Either[Int, Int]]) =>
+
+ val prop_Either_rights = property((es: List[Either[Int, Int]]) =>
Either.rights(es) == es.filter(_.isRight).map(_.right.value))
-
- val prop_Either_leftRights = property((es: List[Either[Int, Int]]) =>
+
+ val prop_Either_leftRights = property((es: List[Either[Int, Int]]) =>
Either.rights(es) == es.filter(_.isRight).map(_.right.value))
-
- val prop_Either_throws = property((n: Int) =>
+
+ val prop_Either_throws = property((n: Int) =>
Either.throws(n) == Right(n) && Either.throws(error("error")).isLeft)
-
- val prop_Either_throwIt = property((e: Either[Throwable, Int]) =>
+
+ val prop_Either_throwIt = property((e: Either[Throwable, Int]) =>
try {
Either.throwIt(e) == e.right.value
} catch {
case (t) => e.isLeft && e.left.value == t
})
- val prop_Either_reduce = property((e: Either[Int, Int]) =>
+ val prop_Either_reduce = property((e: Either[Int, Int]) =>
Either.reduce(e) == (e match {
case Left(a) => a
case Right(a) => a
}))
-
+
val prop_Either_iif = property((c: Boolean, a: Int, b: Int) =>
- Either.iif(c, a, b) == (if(c) Right(b) else Left(a)))
+ Either.iif(c, a, b) == (if(c) Right(b) else Left(a)))
val tests = List(
("prop_either1", prop_either1),
@@ -234,20 +234,20 @@ object CheckEither {
("Right.prop_seq", CheckRightProjection.prop_seq),
("Right.prop_option", CheckRightProjection.prop_option),
("prop_Either_left", prop_Either_left),
- ("prop_Either_right", prop_Either_right),
+ ("prop_Either_right", prop_Either_right),
("prop_Either_joinLeft", prop_Either_joinLeft),
- ("prop_Either_joinRight", prop_Either_joinRight),
+ ("prop_Either_joinRight", prop_Either_joinRight),
("prop_Either_lefts", prop_Either_lefts),
- ("prop_Either_rights", prop_Either_rights),
- ("prop_Either_leftRights", prop_Either_leftRights),
- ("prop_Either_throws", prop_Either_throws),
- ("prop_Either_throwIt", prop_Either_throwIt),
- ("prop_Either_reduce", prop_Either_reduce),
+ ("prop_Either_rights", prop_Either_rights),
+ ("prop_Either_leftRights", prop_Either_leftRights),
+ ("prop_Either_throws", prop_Either_throws),
+ ("prop_Either_throwIt", prop_Either_throwIt),
+ ("prop_Either_reduce", prop_Either_reduce),
("prop_Either_iif", prop_Either_iif)
)
-
- def main(args: Array[String]) =
- tests foreach (tupled((name, prop) =>
+
+ def main(args: Array[String]) =
+ tests foreach (tupled((name, prop) =>
testStatsEx(name, testReport(check(Params(500, 0, 0, 500, StdRand), prop, propReport)))))
}
diff --git a/test/pending/scalacheck/array.scala b/test/pending/scalacheck/array.scala
new file mode 100644
index 0000000000..f262bc6320
--- /dev/null
+++ b/test/pending/scalacheck/array.scala
@@ -0,0 +1,37 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+import Buildable._
+import scala.collection.mutable.ArraySeq
+
+object Test extends Properties("Array") {
+ /** At this moment the authentic scalacheck Array Builder/Arb bits are commented out.
+ */
+ implicit def arbArray[T](implicit a: Arbitrary[T], m: Manifest[T]): Arbitrary[Array[T]] =
+ Arbitrary(containerOf[List,T](arbitrary[T]) map (_.toArray))
+
+ val arrGen: Gen[Array[_]] = oneOf(
+ arbitrary[Array[Int]],
+ arbitrary[Array[Array[Int]]],
+ arbitrary[Array[List[String]]],
+ arbitrary[Array[String]],
+ arbitrary[Array[Boolean]],
+ arbitrary[Array[AnyVal]]
+ )
+
+ // inspired by #1857 and #2352
+ property("eq/ne") = forAll(arrGen, arrGen) { (c1, c2) =>
+ (c1 eq c2) || (c1 ne c2)
+ }
+
+ // inspired by #2299
+ def smallInt = choose(1, 10)
+ property("ofDim") = forAll(smallInt, smallInt, smallInt) { (i1, i2, i3) =>
+ val arr = Array.ofDim[String](i1, i2, i3)
+ val flattened = arr flatMap (x => x) flatMap (x => x)
+ flattened.length == i1 * i2 * i3
+ }
+}
+
diff --git a/test/pending/scalacheck/eqeq.scala b/test/pending/scalacheck/eqeq.scala
new file mode 100644
index 0000000000..a783805e46
--- /dev/null
+++ b/test/pending/scalacheck/eqeq.scala
@@ -0,0 +1,37 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+object Test extends Properties("==") {
+ def equalObjectsEqualHashcodes(x: Any, y: Any) = (x != y) || (x == y && x.## == y.##)
+
+ // ticket #2087
+ property("short/char") = forAll { (x: Short) => {
+ val ch: Char = x.toChar
+ (x == ch) == (ch == x)
+ }
+ }
+
+ property("symmetry") = forAll { (x: AnyVal, y: AnyVal) => (x == y) == (y == x) }
+ property("transitivity") = forAll { (x: AnyVal, y: AnyVal, z: AnyVal) => x != y || y != z || x == z }
+
+ property("##") = forAll {
+ (x: Short) => {
+ val anyvals = List(x.toByte, x.toChar, x, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x))
+ val shortAndLarger = anyvals drop 2
+
+ val result = (
+ ((anyvals, anyvals).zipped forall equalObjectsEqualHashcodes) &&
+ ((shortAndLarger, shortAndLarger).zipped forall (_ == _)) &&
+ ((shortAndLarger, shortAndLarger).zipped forall ((x, y) => (x: Any) == (y: Any)))
+ )
+ result
+ }
+ }
+ property("## 2") = forAll {
+ (dv: Double) => {
+ val fv = dv.toFloat
+ (fv != dv) || (fv.## == dv.##)
+ }
+ }
+}
diff --git a/test/files/scalacheck/list.scala b/test/pending/scalacheck/list.scala
index 87ecd70a48..dfa84468d1 100644
--- a/test/files/scalacheck/list.scala
+++ b/test/pending/scalacheck/list.scala
@@ -7,14 +7,14 @@ object Test extends Properties("List") {
property("concat size") = forAll { (l1: List[Int], l2: List[Int]) => (l1.size + l2.size) == (l1 ::: l2).size }
property("reverse") = forAll { (l1: List[Int]) => l1.reverse.reverse == l1 }
- property("toSet") = forAll { (l1: List[Int]) => sorted(l1.toSet.toList) sameElements sorted(l1).removeDuplicates }
+ property("toSet") = forAll { (l1: List[Int]) => sorted(l1.toSet.toList) sameElements sorted(l1).distinct }
property("flatten") = forAll { (xxs: List[List[Int]]) => xxs.flatten.length == (xxs map (_.length) sum) }
property("startsWith/take") = forAll { (xs: List[Int], count: Int) => xs startsWith (xs take count) }
property("endsWith/takeRight") = forAll { (xs: List[Int], count: Int) => xs endsWith (xs takeRight count) }
property("fill") = forAll(choose(1, 100)) { count =>
- forAll { (x: Int) =>
+ forAll { (x: Int) =>
val xs = List.fill(count)(x)
- (xs.length == count) && (xs.removeDuplicates == List(x))
+ (xs.length == count) && (xs.distinct == List(x))
}
}
}
diff --git a/test/pending/scalacheck/range.scala b/test/pending/scalacheck/range.scala
new file mode 100644
index 0000000000..364c6947e9
--- /dev/null
+++ b/test/pending/scalacheck/range.scala
@@ -0,0 +1,205 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+class Counter(r: Range) {
+ var cnt = 0L
+ var last: Option[Int] = None
+ val str = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+ def apply(x: Int) = {
+ cnt += 1L
+ if (cnt % 500000000L == 0L) {
+ println("Working: %s %d %d" format (str, cnt, x))
+ }
+ if (cnt > (Int.MaxValue.toLong + 1) * 2)
+ error("Count exceeds maximum possible for an Int Range")
+ if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x)))
+ error("Range wrapped: %d %s" format (x, last.toString))
+ last = Some(x)
+ }
+}
+
+abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
+ def myGen: Gen[Range]
+
+ val genRange = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ step <- Gen.choose(1, (start - end).abs + 1)
+ } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
+
+ val genReasonableSizeRange = for {
+ start <- choose(-Int.MinValue, Int.MaxValue)
+ end <- choose(-Int.MinValue, Int.MaxValue)
+ step <- choose(-Int.MaxValue, Int.MaxValue)
+ } yield Range(start, end, if (step == 0) 100 else step)
+
+ val genSmallRange = for {
+ start <- choose(-100, 100)
+ end <- choose(-100, 100)
+ step <- choose(1, 1)
+ } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
+
+ val genRangeByOne = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ if (end.toLong - start.toLong).abs <= 10000000L
+ } yield if (start < end) Range(start, end) else Range(end, start)
+
+ def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+
+ def expectedSize(r: Range): Long = if (r.isInclusive) {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong + 1L
+ case _ => if (r.start == r.end) 1L else 0L
+ }
+ } else {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (
+ (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong
+ + (if ((r.end.toLong - r.start.toLong).abs % r.step.abs.toLong > 0L) 1L else 0L)
+ )
+ case _ => 0L
+ }
+ }
+
+ def within(r: Range, x: Int) = if (r.step > 0)
+ r.start <= x && (if (r.isInclusive) x <= r.end else x < r.end)
+ else
+ r.start >= x && (if (r.isInclusive) x >= r.end else x > r.end)
+
+ def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0
+
+ property("foreach.step") = forAll(myGen) { r =>
+ var allValid = true
+ val cnt = new Counter(r)
+// println("--------------------")
+// println(r)
+ r foreach { x => cnt(x)
+// println(x + ", " + (x - r.start) + ", " + (x.toLong - r.start) + ", " + ((x.toLong - r.start) % r.step))
+ allValid &&= multiple(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.inside.range") = forAll(myGen) { r =>
+ var allValid = true
+ var last: Option[Int] = None
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ allValid &&= within(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.visited.size") = forAll(myGen) { r =>
+ var visited = 0L
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ visited += 1L
+ }
+// println("----------")
+// println(str(r))
+// println("size: " + r.size)
+// println("expected: " + expectedSize(r))
+// println("visited: " + visited)
+ (visited == expectedSize(r)) :| str(r)
+ }
+
+ property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+ (r.length == expectedSize(r)) :| str(r)
+ }
+
+ property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+ (r.isEmpty == (expectedSize(r) == 0L)) :| str(r)
+ }
+
+ property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) =>
+// println("----------------")
+// println(str(r))
+// println(x)
+// println("within: " + within(r, x))
+// println("multiple: " + multiple(r, x))
+// println("contains: " + r.contains(x))
+ ((within(r, x) && multiple(r, x)) == r.contains(x)) :| str(r)+": "+x
+ }
+
+ property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+ val t = r take x
+ (t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
+ }
+
+ property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+ val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
+ if (r.size == 0) {
+ (t.size == 0) :| str(r)+" / "+str(t)+": "+x
+ } else {
+ val t2 = (if (r.step > 0) Range(r.start, x min r.last, r.step).inclusive else Range(r.start, x max r.last, r.step).inclusive)
+ (t.start == r.start && t.size == t2.size && t.step == r.step) :| str(r)+" / "+str(t)+" / "+str(t2)+": "+x
+ }
+ }
+
+ property("reverse.toSet.equal") = forAll(myGen) { r =>
+ val reversed = r.reverse
+ val aresame = r.toSet == reversed.toSet
+ if (!aresame) {
+ println(str(r))
+ println(r)
+ println(reversed)
+ println(r.toSet)
+ println(reversed.toSet)
+ }
+ aresame
+ }
+}
+
+object NormalRangeTest extends RangeTest("normal") {
+ override def myGen = genReasonableSizeRange
+ def genOne = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ if (start.toLong - end.toLong).abs < Int.MaxValue.toLong
+ } yield Range(start, end, if (start < end) 1 else - 1)
+ property("by 1.size + 1 == inclusive.size") = forAll(genOne) { r =>
+ (r.size + 1 == r.inclusive.size) :| str(r)
+ }
+}
+
+object InclusiveRangeTest extends RangeTest("inclusive") {
+ override def myGen = for (r <- genReasonableSizeRange) yield r.inclusive
+}
+
+object ByOneRangeTest extends RangeTest("byOne") {
+ override def myGen = genSmallRange
+}
+
+object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") {
+ override def myGen = for (r <- genSmallRange) yield r.inclusive
+}
+
+object SmallValuesRange extends RangeTest("smallValues") {
+ override def myGen = genSmallRange
+}
+
+object Test extends Properties("Range") {
+ include(NormalRangeTest)
+ include(InclusiveRangeTest)
+ include(ByOneRangeTest)
+ include(InclusiveByOneRangeTest)
+}
+
+/* Mini-benchmark
+def testRange(i: Int, j: Int, k: Int) = {
+ var count = 0
+ for {
+ vi <- 0 to i
+ vj <- 0 to j
+ vk <- 0 to k
+ } { count += 1 }
+}
+
+testRange(10, 1000, 10000)
+testRange(10000, 1000, 10)
+*/
+
diff --git a/test/pending/scalacheck/scan.scala b/test/pending/scalacheck/scan.scala
new file mode 100644
index 0000000000..e9b25ce3df
--- /dev/null
+++ b/test/pending/scalacheck/scan.scala
@@ -0,0 +1,17 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+
+object Test extends Properties("TraversableLike.scanLeft") {
+ property("scanLeft") = forAll { (xs: List[Int], z: Int) => {
+ val sums = xs.scanLeft(z)(_ + _)
+ (xs.size == 0) || sums.zip(sums.tail).map(x => x._2 - x._1) == xs
+ }}
+}
+
+
+
+
+
+
diff --git a/test/pending/script/bug2365.javaopts b/test/pending/script/bug2365.javaopts
new file mode 100644
index 0000000000..357e033c1c
--- /dev/null
+++ b/test/pending/script/bug2365.javaopts
@@ -0,0 +1 @@
+-XX:MaxPermSize=25M
diff --git a/test/pending/script/bug2365/Test.scala b/test/pending/script/bug2365/Test.scala
new file mode 100644
index 0000000000..110dea2ab6
--- /dev/null
+++ b/test/pending/script/bug2365/Test.scala
@@ -0,0 +1,35 @@
+import scala.tools.nsc.io._
+import java.net.URL
+
+object A { def apply(d: { def apply(): Int}) = d.apply() }
+object A2 { def apply(d: { def apply(): Int}) = d.apply() }
+object A3 { def apply(d: { def apply(): Int}) = d.apply() }
+object A4 { def apply(d: { def apply(): Int}) = d.apply() }
+
+class B extends Function0[Int] {
+ def apply() = 3
+}
+
+object Test
+{
+ type StructF0 = { def apply(): Int }
+ def main(args: Array[String]) {
+ for(i <- 0 until 150)
+ println(i + " " + test(A.apply) + " " + test(A2.apply) + " " + test(A3.apply) + " " + test(A3.apply))
+ }
+
+ def test(withF0: StructF0 => Int): Int = {
+ // Some large jar
+ val jar = File("../../../../lib/scalacheck.jar").toURL
+ // load a class in a separate loader that will be passed to A
+ val loader = new java.net.URLClassLoader(Array(File(".").toURL, jar))
+ // load a real class to fill perm gen space
+ Class.forName("org.scalacheck.Properties", true, loader).newInstance
+ // create a class from another class loader with an apply: Int method
+ val b = Class.forName("B", true, loader).newInstance
+
+ // pass instance to a, which will call apply using structural type reflection.
+ // This should hold on to the class for B, which means bLoader will not get collected
+ withF0(b.asInstanceOf[StructF0])
+ }
+}
diff --git a/test/pending/script/bug2365/bug2365.scala b/test/pending/script/bug2365/bug2365.scala
new file mode 100755
index 0000000000..b5e05325cf
--- /dev/null
+++ b/test/pending/script/bug2365/bug2365.scala
@@ -0,0 +1,9 @@
+#!/bin/sh
+#
+# This script should fail with any build of scala where #2365
+# is not fixed, and otherwise succeed. Failure means running out
+# of PermGen space.
+#
+
+scalac -cp .:/local/lib/java/ivy.jar Test.scala
+JAVA_OPTS="-XX:MaxPermSize=25M -verbose:gc" scalac -cp $CP Test
diff --git a/test/pending/shootout/fasta.check b/test/pending/shootout/fasta.check
new file mode 100644
index 0000000000..f1caba0d62
--- /dev/null
+++ b/test/pending/shootout/fasta.check
@@ -0,0 +1,171 @@
+>ONE Homo sapiens alu
+GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGA
+TCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACT
+AAAAATACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAG
+GCTGAGGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCG
+CCACTGCACTCCAGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGT
+GGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCA
+GGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAATACAAAAA
+TTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAG
+AATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCCA
+GCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGTGGCTCACGCCTGT
+AATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGACC
+AGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAATACAAAAATTAGCCGGGCGTG
+GTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACC
+CGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCCAGCCTGGGCGACAG
+AGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTT
+TGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACA
+TGGTGAAACCCCGTCTCTACTAAAAATACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCT
+GTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGG
+TTGCAGTGAGCCGAGATCGCGCCACTGCACTCCAGCCTGGGCGACAGAGCGAGACTCCGT
+CTCAAAAAGGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGG
+CGGGCGGATCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCG
+TCTCTACTAAAAATACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTA
+CTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCG
+AGATCGCGCCACTGCACTCCAGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCG
+GGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACC
+TGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAA
+TACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGA
+GGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACT
+GCACTCCAGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGTGGCTC
+ACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGT
+TCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAATACAAAAATTAGC
+CGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCG
+CTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCCAGCCTG
+GGCGACAGAGCGAGACTCCG
+>TWO IUB ambiguity codes
+cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg
+tactDtDagcctatttSVHtHttKtgtHMaSattgWaHKHttttagacatWatgtRgaaa
+NtactMcSMtYtcMgRtacttctWBacgaaatatagScDtttgaagacacatagtVgYgt
+cattHWtMMWcStgttaggKtSgaYaaccWStcgBttgcgaMttBYatcWtgacaYcaga
+gtaBDtRacttttcWatMttDBcatWtatcttactaBgaYtcttgttttttttYaaScYa
+HgtgttNtSatcMtcVaaaStccRcctDaataataStcYtRDSaMtDttgttSagtRRca
+tttHatSttMtWgtcgtatSSagactYaaattcaMtWatttaSgYttaRgKaRtccactt
+tattRggaMcDaWaWagttttgacatgttctacaaaRaatataataaMttcgDacgaSSt
+acaStYRctVaNMtMgtaggcKatcttttattaaaaagVWaHKYagtttttatttaacct
+tacgtVtcVaattVMBcttaMtttaStgacttagattWWacVtgWYagWVRctDattBYt
+gtttaagaagattattgacVatMaacattVctgtBSgaVtgWWggaKHaatKWcBScSWa
+accRVacacaaactaccScattRatatKVtactatatttHttaagtttSKtRtacaaagt
+RDttcaaaaWgcacatWaDgtDKacgaacaattacaRNWaatHtttStgttattaaMtgt
+tgDcgtMgcatBtgcttcgcgaDWgagctgcgaggggVtaaScNatttacttaatgacag
+cccccacatYScaMgtaggtYaNgttctgaMaacNaMRaacaaacaKctacatagYWctg
+ttWaaataaaataRattagHacacaagcgKatacBttRttaagtatttccgatctHSaat
+actcNttMaagtattMtgRtgaMgcataatHcMtaBSaRattagttgatHtMttaaKagg
+YtaaBataSaVatactWtataVWgKgttaaaacagtgcgRatatacatVtHRtVYataSa
+KtWaStVcNKHKttactatccctcatgWHatWaRcttactaggatctataDtDHBttata
+aaaHgtacVtagaYttYaKcctattcttcttaataNDaaggaaaDYgcggctaaWSctBa
+aNtgctggMBaKctaMVKagBaactaWaDaMaccYVtNtaHtVWtKgRtcaaNtYaNacg
+gtttNattgVtttctgtBaWgtaattcaagtcaVWtactNggattctttaYtaaagccgc
+tcttagHVggaYtgtNcDaVagctctctKgacgtatagYcctRYHDtgBattDaaDgccK
+tcHaaStttMcctagtattgcRgWBaVatHaaaataYtgtttagMDMRtaataaggatMt
+ttctWgtNtgtgaaaaMaatatRtttMtDgHHtgtcattttcWattRSHcVagaagtacg
+ggtaKVattKYagactNaatgtttgKMMgYNtcccgSKttctaStatatNVataYHgtNa
+BKRgNacaactgatttcctttaNcgatttctctataScaHtataRagtcRVttacDSDtt
+aRtSatacHgtSKacYagttMHtWataggatgactNtatSaNctataVtttRNKtgRacc
+tttYtatgttactttttcctttaaacatacaHactMacacggtWataMtBVacRaSaatc
+cgtaBVttccagccBcttaRKtgtgcctttttRtgtcagcRttKtaaacKtaaatctcac
+aattgcaNtSBaaccgggttattaaBcKatDagttactcttcattVtttHaaggctKKga
+tacatcBggScagtVcacattttgaHaDSgHatRMaHWggtatatRgccDttcgtatcga
+aacaHtaagttaRatgaVacttagattVKtaaYttaaatcaNatccRttRRaMScNaaaD
+gttVHWgtcHaaHgacVaWtgttScactaagSgttatcttagggDtaccagWattWtRtg
+ttHWHacgattBtgVcaYatcggttgagKcWtKKcaVtgaYgWctgYggVctgtHgaNcV
+taBtWaaYatcDRaaRtSctgaHaYRttagatMatgcatttNattaDttaattgttctaa
+ccctcccctagaWBtttHtBccttagaVaatMcBHagaVcWcagBVttcBtaYMccagat
+gaaaaHctctaacgttagNWRtcggattNatcRaNHttcagtKttttgWatWttcSaNgg
+gaWtactKKMaacatKatacNattgctWtatctaVgagctatgtRaHtYcWcttagccaa
+tYttWttaWSSttaHcaaaaagVacVgtaVaRMgattaVcDactttcHHggHRtgNcctt
+tYatcatKgctcctctatVcaaaaKaaaagtatatctgMtWtaaaacaStttMtcgactt
+taSatcgDataaactaaacaagtaaVctaggaSccaatMVtaaSKNVattttgHccatca
+cBVctgcaVatVttRtactgtVcaattHgtaaattaaattttYtatattaaRSgYtgBag
+aHSBDgtagcacRHtYcBgtcacttacactaYcgctWtattgSHtSatcataaatataHt
+cgtYaaMNgBaatttaRgaMaatatttBtttaaaHHKaatctgatWatYaacttMctctt
+ttVctagctDaaagtaVaKaKRtaacBgtatccaaccactHHaagaagaaggaNaaatBW
+attccgStaMSaMatBttgcatgRSacgttVVtaaDMtcSgVatWcaSatcttttVatag
+ttactttacgatcaccNtaDVgSRcgVcgtgaacgaNtaNatatagtHtMgtHcMtagaa
+attBgtataRaaaacaYKgtRccYtatgaagtaataKgtaaMttgaaRVatgcagaKStc
+tHNaaatctBBtcttaYaBWHgtVtgacagcaRcataWctcaBcYacYgatDgtDHccta
+>THREE Homo sapiens frequency
+aacacttcaccaggtatcgtgaaggctcaagattacccagagaacctttgcaatataaga
+atatgtatgcagcattaccctaagtaattatattctttttctgactcaaagtgacaagcc
+ctagtgtatattaaatcggtatatttgggaaattcctcaaactatcctaatcaggtagcc
+atgaaagtgatcaaaaaagttcgtacttataccatacatgaattctggccaagtaaaaaa
+tagattgcgcaaaattcgtaccttaagtctctcgccaagatattaggatcctattactca
+tatcgtgtttttctttattgccgccatccccggagtatctcacccatccttctcttaaag
+gcctaatattacctatgcaaataaacatatattgttgaaaattgagaacctgatcgtgat
+tcttatgtgtaccatatgtatagtaatcacgcgactatatagtgctttagtatcgcccgt
+gggtgagtgaatattctgggctagcgtgagatagtttcttgtcctaatatttttcagatc
+gaatagcttctatttttgtgtttattgacatatgtcgaaactccttactcagtgaaagtc
+atgaccagatccacgaacaatcttcggaatcagtctcgttttacggcggaatcttgagtc
+taacttatatcccgtcgcttactttctaacaccccttatgtatttttaaaattacgttta
+ttcgaacgtacttggcggaagcgttattttttgaagtaagttacattgggcagactcttg
+acattttcgatacgactttctttcatccatcacaggactcgttcgtattgatatcagaag
+ctcgtgatgattagttgtcttctttaccaatactttgaggcctattctgcgaaatttttg
+ttgccctgcgaacttcacataccaaggaacacctcgcaacatgccttcatatccatcgtt
+cattgtaattcttacacaatgaatcctaagtaattacatccctgcgtaaaagatggtagg
+ggcactgaggatatattaccaagcatttagttatgagtaatcagcaatgtttcttgtatt
+aagttctctaaaatagttacatcgtaatgttatctcgggttccgcgaataaacgagatag
+attcattatatatggccctaagcaaaaacctcctcgtattctgttggtaattagaatcac
+acaatacgggttgagatattaattatttgtagtacgaagagatataaaaagatgaacaat
+tactcaagtcaagatgtatacgggatttataataaaaatcgggtagagatctgctttgca
+attcagacgtgccactaaatcgtaatatgtcgcgttacatcagaaagggtaactattatt
+aattaataaagggcttaatcactacatattagatcttatccgatagtcttatctattcgt
+tgtatttttaagcggttctaattcagtcattatatcagtgctccgagttctttattattg
+ttttaaggatgacaaaatgcctcttgttataacgctgggagaagcagactaagagtcgga
+gcagttggtagaatgaggctgcaaaagacggtctcgacgaatggacagactttactaaac
+caatgaaagacagaagtagagcaaagtctgaagtggtatcagcttaattatgacaaccct
+taatacttccctttcgccgaatactggcgtggaaaggttttaaaagtcgaagtagttaga
+ggcatctctcgctcataaataggtagactactcgcaatccaatgtgactatgtaatactg
+ggaacatcagtccgcgatgcagcgtgtttatcaaccgtccccactcgcctggggagacat
+gagaccacccccgtggggattattagtccgcagtaatcgactcttgacaatccttttcga
+ttatgtcatagcaatttacgacagttcagcgaagtgactactcggcgaaatggtattact
+aaagcattcgaacccacatgaatgtgattcttggcaatttctaatccactaaagcttttc
+cgttgaatctggttgtagatatttatataagttcactaattaagatcacggtagtatatt
+gatagtgatgtctttgcaagaggttggccgaggaatttacggattctctattgatacaat
+ttgtctggcttataactcttaaggctgaaccaggcgtttttagacgacttgatcagctgt
+tagaatggtttggactccctctttcatgtcagtaacatttcagccgttattgttacgata
+tgcttgaacaatattgatctaccacacacccatagtatattttataggtcatgctgttac
+ctacgagcatggtattccacttcccattcaatgagtattcaacatcactagcctcagaga
+tgatgacccacctctaataacgtcacgttgcggccatgtgaaacctgaacttgagtagac
+gatatcaagcgctttaaattgcatataacatttgagggtaaagctaagcggatgctttat
+ataatcaatactcaataataagatttgattgcattttagagttatgacacgacatagttc
+actaacgagttactattcccagatctagactgaagtactgatcgagacgatccttacgtc
+gatgatcgttagttatcgacttaggtcgggtctctagcggtattggtacttaaccggaca
+ctatactaataacccatgatcaaagcataacagaatacagacgataatttcgccaacata
+tatgtacagaccccaagcatgagaagctcattgaaagctatcattgaagtcccgctcaca
+atgtgtcttttccagacggtttaactggttcccgggagtcctggagtttcgacttacata
+aatggaaacaatgtattttgctaatttatctatagcgtcatttggaccaatacagaatat
+tatgttgcctagtaatccactataacccgcaagtgctgatagaaaatttttagacgattt
+ataaatgccccaagtatccctcccgtgaatcctccgttatactaattagtattcgttcat
+acgtataccgcgcatatatgaacatttggcgataaggcgcgtgaattgttacgtgacaga
+gatagcagtttcttgtgatatggttaacagacgtacatgaagggaaactttatatctata
+gtgatgcttccgtagaaataccgccactggtctgccaatgatgaagtatgtagctttagg
+tttgtactatgaggctttcgtttgtttgcagagtataacagttgcgagtgaaaaaccgac
+gaatttatactaatacgctttcactattggctacaaaatagggaagagtttcaatcatga
+gagggagtatatggatgctttgtagctaaaggtagaacgtatgtatatgctgccgttcat
+tcttgaaagatacataagcgataagttacgacaattataagcaacatccctaccttcgta
+acgatttcactgttactgcgcttgaaatacactatggggctattggcggagagaagcaga
+tcgcgccgagcatatacgagacctataatgttgatgatagagaaggcgtctgaattgata
+catcgaagtacactttctttcgtagtatctctcgtcctctttctatctccggacacaaga
+attaagttatatatatagagtcttaccaatcatgttgaatcctgattctcagagttcttt
+ggcgggccttgtgatgactgagaaacaatgcaatattgctccaaatttcctaagcaaatt
+ctcggttatgttatgttatcagcaaagcgttacgttatgttatttaaatctggaatgacg
+gagcgaagttcttatgtcggtgtgggaataattcttttgaagacagcactccttaaataa
+tatcgctccgtgtttgtatttatcgaatgggtctgtaaccttgcacaagcaaatcggtgg
+tgtatatatcggataacaattaatacgatgttcatagtgacagtatactgatcgagtcct
+ctaaagtcaattacctcacttaacaatctcattgatgttgtgtcattcccggtatcgccc
+gtagtatgtgctctgattgaccgagtgtgaaccaaggaacatctactaatgcctttgtta
+ggtaagatctctctgaattccttcgtgccaacttaaaacattatcaaaatttcttctact
+tggattaactacttttacgagcatggcaaattcccctgtggaagacggttcattattatc
+ggaaaccttatagaaattgcgtgttgactgaaattagatttttattgtaagagttgcatc
+tttgcgattcctctggtctagcttccaatgaacagtcctcccttctattcgacatcgggt
+ccttcgtacatgtctttgcgatgtaataattaggttcggagtgtggccttaatgggtgca
+actaggaatacaacgcaaatttgctgacatgatagcaaatcggtatgccggcaccaaaac
+gtgctccttgcttagcttgtgaatgagactcagtagttaaataaatccatatctgcaatc
+gattccacaggtattgtccactatctttgaactactctaagagatacaagcttagctgag
+accgaggtgtatatgactacgctgatatctgtaaggtaccaatgcaggcaaagtatgcga
+gaagctaataccggctgtttccagctttataagattaaaatttggctgtcctggcggcct
+cagaattgttctatcgtaatcagttggttcattaattagctaagtacgaggtacaactta
+tctgtcccagaacagctccacaagtttttttacagccgaaacccctgtgtgaatcttaat
+atccaagcgcgttatctgattagagtttacaactcagtattttatcagtacgttttgttt
+ccaacattacccggtatgacaaaatgacgccacgtgtcgaataatggtctgaccaatgta
+ggaagtgaaaagataaatat
diff --git a/test/pending/shootout/fasta.scala b/test/pending/shootout/fasta.scala
new file mode 100644
index 0000000000..16b6f42201
--- /dev/null
+++ b/test/pending/shootout/fasta.scala
@@ -0,0 +1,162 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+import java.io._
+
+object fasta {
+ def main(args: Array[String]) = {
+
+ val ALU =
+ "GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG" +
+ "GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA" +
+ "CCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAAT" +
+ "ACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCA" +
+ "GCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGG" +
+ "AGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCC" +
+ "AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA"
+
+ val _IUB = Array(
+ Pair('a', 0.27),
+ Pair('c', 0.12),
+ Pair('g', 0.12),
+ Pair('t', 0.27),
+
+ Pair('B', 0.02),
+ Pair('D', 0.02),
+ Pair('H', 0.02),
+ Pair('K', 0.02),
+ Pair('M', 0.02),
+ Pair('N', 0.02),
+ Pair('R', 0.02),
+ Pair('S', 0.02),
+ Pair('V', 0.02),
+ Pair('W', 0.02),
+ Pair('Y', 0.02)
+ )
+
+ val IUB = makeCumulative(_IUB)
+
+ val _HomoSapiens = Array(
+ Pair('a', 0.3029549426680),
+ Pair('c', 0.1979883004921),
+ Pair('g', 0.1975473066391),
+ Pair('t', 0.3015094502008)
+ )
+
+ val HomoSapiens = makeCumulative(_HomoSapiens)
+
+
+ val n = Integer parseInt(args(0))
+ val s = new FastaOutputStream(System.out)
+
+ s.writeDescription("ONE Homo sapiens alu")
+ s.writeRepeatingSequence(ALU,n*2)
+
+ s.writeDescription("TWO IUB ambiguity codes")
+ s.writeRandomSequence(IUB,n*3)
+
+ s.writeDescription("THREE Homo sapiens frequency")
+ s.writeRandomSequence(HomoSapiens,n*5)
+
+ s.close
+ }
+
+ def makeCumulative(a: Array[Pair[Char,double]]) = {
+ var cp = 0.0
+ a map (frequency =>
+ frequency match {
+ case Pair(code,percent) =>
+ cp = cp + percent; new Frequency(code.toByte,cp)
+ }
+ )
+ }
+
+}
+
+
+// We could use instances of Pair or Tuple2, but specific labels
+// make the code more readable than tuple index numbers
+
+class Frequency(_code: byte, _percent: double){
+ var code = _code; var percent = _percent;
+}
+
+
+// extend the Java BufferedOutputStream class
+
+class FastaOutputStream(out: OutputStream) extends BufferedOutputStream(out) {
+
+ private val LineLength = 60
+ private val nl = '\n'.toByte
+
+ def writeDescription(desc: String) = { write( (">" + desc + "\n").getBytes ) }
+
+ def writeRepeatingSequence(_alu: String, length: int) = {
+ val alu = _alu.getBytes
+ var n = length; var k = 0; val kn = alu.length;
+
+ while (n > 0) {
+ val m = if (n < LineLength) n else LineLength
+
+ var i = 0
+ while (i < m){
+ if (k == kn) k = 0
+ val b = alu(k)
+ if (count < buf.length){ buf(count) = b; count = count + 1 }
+ else { write(b) } // flush buffer
+ k = k+1
+ i = i+1
+ }
+
+ write(nl)
+ n = n - LineLength
+ }
+
+ }
+
+ def writeRandomSequence(distribution: Array[Frequency], length: int) = {
+ var n = length
+ while (n > 0) {
+ val m = if (n < LineLength) n else LineLength
+
+ var i = 0
+ while (i < m){
+ val b = selectRandom(distribution)
+ if (count < buf.length){ buf(count) = b; count = count + 1 }
+ else { write(b) } // flush buffer
+ i = i+1
+ }
+
+ if (count < buf.length){ buf(count) = nl; count = count + 1 }
+ else { write(nl) } // flush buffer
+ n = n - LineLength
+ }
+ }
+
+ private def selectRandom(distribution: Array[Frequency]): Byte = {
+ val n = distribution.length
+ val r = RandomNumber scaledTo(1.0)
+
+ var i = 0
+ while (i < n) {
+ if (r < distribution(i).percent) return distribution(i).code
+ i = i+1
+ }
+ return distribution(n-1).code
+ }
+}
+
+
+object RandomNumber {
+ private val IM = 139968
+ private val IA = 3877
+ private val IC = 29573
+ private var seed = 42
+
+ def scaledTo(max: double) = {
+ seed = (seed * IA + IC) % IM
+ max * seed / IM
+ }
+}
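
A minimal sketch (in current Scala syntax, not part of the benchmark sources above) of the cumulative-frequency sampling that makeCumulative, selectRandom and RandomNumber implement: per-symbol probabilities become cumulative thresholds, and the same linear congruential generator picks the first symbol whose threshold exceeds the drawn value. Names here are illustrative only.

  object cumulativeSamplingSketch {
    final case class Frequency(code: Byte, cumulative: Double)

    // Turn per-symbol probabilities into cumulative thresholds.
    def makeCumulative(freqs: Seq[(Char, Double)]): Array[Frequency] = {
      var cp = 0.0
      freqs.map { case (c, p) => cp += p; Frequency(c.toByte, cp) }.toArray
    }

    // Same linear congruential generator as RandomNumber above.
    private var seed = 42
    def scaledTo(max: Double): Double = {
      seed = (seed * 3877 + 29573) % 139968
      max * seed / 139968
    }

    // Pick the first symbol whose cumulative threshold exceeds r.
    def selectRandom(dist: Array[Frequency]): Byte = {
      val r = scaledTo(1.0)
      dist.find(r < _.cumulative).map(_.code).getOrElse(dist.last.code)
    }

    def main(args: Array[String]): Unit = {
      val hs = makeCumulative(Seq('a' -> 0.3, 'c' -> 0.2, 'g' -> 0.2, 't' -> 0.3))
      println((1 to 10).map(_ => selectRandom(hs).toChar).mkString)
    }
  }
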
diff --git a/test/pending/shootout/fasta.scala.runner b/test/pending/shootout/fasta.scala.runner
new file mode 100644
index 0000000000..89b43d7b6a
--- /dev/null
+++ b/test/pending/shootout/fasta.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(25000,250000,2500000)) fasta.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/harmonic.scala-2.scala b/test/pending/shootout/harmonic.scala-2.scala
new file mode 100644
index 0000000000..46c36a23c8
--- /dev/null
+++ b/test/pending/shootout/harmonic.scala-2.scala
@@ -0,0 +1,14 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+
+object harmonic {
+ def main(args: Array[String]) = {
+ val n = Integer.parseInt(args(0));
+ var partialSum = 0.0;
+
+ for (val i <- Iterator.range(1,n+1)) partialSum = partialSum + 1.0/i;
+ Console.printf("{0,number,#.000000000}\n")(partialSum);
+ }
+}
diff --git a/test/pending/shootout/harmonic.scala-2.scala.runner b/test/pending/shootout/harmonic.scala-2.scala.runner
new file mode 100644
index 0000000000..4b4055909d
--- /dev/null
+++ b/test/pending/shootout/harmonic.scala-2.scala.runner
@@ -0,0 +1,16 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+object Test extends Application {
+ for(val n <- List(6000000,8000000,10000000)) harmonic.main(Array(n.toString))
+}
+object harmonic {
+ def main(args: Array[String]) = {
+ val n = Integer.parseInt(args(0));
+ var partialSum = 0.0;
+
+ for (val i <- Iterator.range(1,n+1)) partialSum = partialSum + 1.0/i;
+ Console.printf("{0,number,#.000000000}\n")(partialSum);
+ }
+}
diff --git a/test/pending/shootout/harmonic.scala-3.scala b/test/pending/shootout/harmonic.scala-3.scala
new file mode 100644
index 0000000000..28d031ae46
--- /dev/null
+++ b/test/pending/shootout/harmonic.scala-3.scala
@@ -0,0 +1,15 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+
+object harmonic {
+ def main(args: Array[String]) = {
+ val n = Integer.parseInt(args(0));
+ var partialSum = 0.0;
+ var i = 1;
+
+ while (i < n){ partialSum = partialSum + 1.0/i; i = i + 1; }
+ Console.printf("{0,number,#.000000000}\n")(partialSum);
+ }
+}
diff --git a/test/pending/shootout/harmonic.scala-3.scala.runner b/test/pending/shootout/harmonic.scala-3.scala.runner
new file mode 100644
index 0000000000..5cf77d077a
--- /dev/null
+++ b/test/pending/shootout/harmonic.scala-3.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(6000000,8000000,10000000)) harmonic.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/heapsort.scala b/test/pending/shootout/heapsort.scala
new file mode 100644
index 0000000000..0097d92956
--- /dev/null
+++ b/test/pending/shootout/heapsort.scala
@@ -0,0 +1,72 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+
+object heapsort {
+ def main(args: Array[String]) = {
+ val n = toPositiveInt(args);
+
+ val numbers = new Array[Double](n+1);
+ for (val i <- Iterator.range(1,n+1))
+ numbers(i) = generate(100.0);
+
+ heapsort(n, numbers);
+
+ Console.printf("{0,number,#.000000000}\n")(numbers(n));
+ }
+
+
+ def heapsort(n: Int, ra: Array[Double]): Unit = {
+ var l = 0; var j = 0; var ir = 0; var i = 0;
+ var rra = 0.0d;
+
+ if (n < 2) return;
+ l = (n >> 1) + 1;
+ ir = n;
+ while (true) {
+ if (l > 1) { l = l-1; rra = ra(l); }
+ else {
+ rra = ra(ir);
+ ra(ir) = ra(1);
+ ir = ir-1;
+ if (ir == 1) {
+ ra(1) = rra;
+ return;
+ }
+ }
+ i = l;
+ j = l << 1;
+ while (j <= ir) {
+ if (j < ir && ra(j) < ra(j+1)) { j = j+1; }
+ if (rra < ra(j)) {
+ ra(i) = ra(j);
+ i = j;
+ j = j + i;
+ }
+ else j = ir + 1;
+ }
+ ra(i) = rra;
+ }
+ }
+
+
+ private val IM = 139968;
+ private val IA = 3877;
+ private val IC = 29573;
+ private var seed = 42;
+
+ private def generate(max: Double) = {
+ seed = (seed * IA + IC) % IM;
+ max * seed / IM;
+ }
+
+
+ private def toPositiveInt(s: Array[String]) = {
+ val i =
+ try { Integer.parseInt(s(0)); }
+ catch { case _ => 1 }
+ if (i>0) i; else 1;
+ }
+
+}
diff --git a/test/pending/shootout/heapsort.scala.runner b/test/pending/shootout/heapsort.scala.runner
new file mode 100644
index 0000000000..7030aa3f8f
--- /dev/null
+++ b/test/pending/shootout/heapsort.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(20000,40000,60000,80000,100000)) heapsort.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/mandelbrot.scala-2.check b/test/pending/shootout/mandelbrot.scala-2.check
new file mode 100644
index 0000000000..2f7bbbc6b0
--- /dev/null
+++ b/test/pending/shootout/mandelbrot.scala-2.check
Binary files differ
diff --git a/test/pending/shootout/mandelbrot.scala-2.scala b/test/pending/shootout/mandelbrot.scala-2.scala
new file mode 100644
index 0000000000..dffdc354a0
--- /dev/null
+++ b/test/pending/shootout/mandelbrot.scala-2.scala
@@ -0,0 +1,79 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+// This test is in pending because it fails on Windows only,
+// but partest's output and the fact that this test outputs in
+// binary make it a challenge to debug remotely. However,
+// it's easy to guess that it has to do with the BufferedOutputStream
+// and some kind of Windows-specific damage that requires an extra
+// flush, or different line-ending characters, or any of the various
+// write-once-know-quirks-everywhere aspects of Java I/O.
+//
+// [partest] testing: [...]\files\shootout\mandelbrot.scala-2.scala [FAILED]
+// [partest] P4
+// [partest] 200 200
+// [partest]
+// ^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^B^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@
+// ^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@
+// [etc]
+
+import java.io.BufferedOutputStream
+
+object mandelbrot {
+ def main(args: Array[String]) = {
+ val side = Integer.parseInt(args(0))
+ val limitSquared = 4.0
+ val max = 50
+ var bits = 0
+ var bitnum = 0
+ val w = new BufferedOutputStream(System.out)
+
+ Console.println("P4\n" + side + " " + side)
+
+ var y = 0
+ while (y < side){
+
+ var x = 0
+ while (x < side){
+
+ val cr = 2.0 * x / side - 1.5
+ val ci = 2.0 * y / side - 1.0
+
+ var zr = 0.0; var zi = 0.0
+ var tr = 0.0; var ti = 0.0
+
+ var j = max
+ do {
+ zi = 2.0 * zr * zi + ci
+ zr = tr - ti + cr
+ ti = zi*zi
+ tr = zr*zr
+
+ j = j - 1
+ } while (!(tr + ti > limitSquared) && j > 0)
+
+
+ bits = bits << 1
+ if (!(tr + ti > limitSquared)) bits = bits + 1
+ bitnum = bitnum + 1
+
+ if (x == side - 1){
+ bits = bits << (8 - bitnum)
+ bitnum = 8
+ }
+
+ if (bitnum == 8){
+ w.write(bits.toByte)
+ bits = 0
+ bitnum = 0
+ }
+
+ x = x + 1
+ }
+ y = y + 1
+ }
+ w.close
+ }
+}
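
One way to probe the flush/line-ending suspicion in the comment above, as a minimal sketch only (not verified against the failing Windows run, and not the benchmark code): write the P4 header through the same BufferedOutputStream as the pixel bytes, with a fixed '\n', and flush explicitly before closing, instead of printing the header via Console.

  import java.io.BufferedOutputStream

  object pbmHeaderSketch {
    // Header bytes go through the same stream as the bit-packed rows.
    def writeHeader(w: BufferedOutputStream, side: Int): Unit =
      w.write(("P4\n" + side + " " + side + "\n").getBytes("US-ASCII"))

    def main(args: Array[String]): Unit = {
      val side = 8
      val w = new BufferedOutputStream(System.out)
      writeHeader(w, side)
      // ... bit-packed pixel rows would be written here, as in the code above ...
      w.flush()
      w.close()
    }
  }
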
diff --git a/test/pending/shootout/mandelbrot.scala-2.scala.runner b/test/pending/shootout/mandelbrot.scala-2.scala.runner
new file mode 100644
index 0000000000..9d2d31408b
--- /dev/null
+++ b/test/pending/shootout/mandelbrot.scala-2.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(200,400,600)) mandelbrot.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/message.check b/test/pending/shootout/message.check
new file mode 100644
index 0000000000..354b2529b2
--- /dev/null
+++ b/test/pending/shootout/message.check
@@ -0,0 +1 @@
+500000
diff --git a/test/pending/shootout/message.javaopts b/test/pending/shootout/message.javaopts
new file mode 100644
index 0000000000..1879c77427
--- /dev/null
+++ b/test/pending/shootout/message.javaopts
@@ -0,0 +1 @@
+-Xss128k
diff --git a/test/pending/shootout/message.scala b/test/pending/shootout/message.scala
new file mode 100644
index 0000000000..a7a1dacc9d
--- /dev/null
+++ b/test/pending/shootout/message.scala
@@ -0,0 +1,47 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+
+import scala.concurrent._
+
+object message {
+ def main(args: Array[String]) = {
+ val n = Integer.parseInt(args(0))
+ val nActors = 500
+ val finalSum = n * nActors
+
+ case class Message(value: Int)
+
+ class Incrementor(next: Pid) extends Actor {
+ var sum = 0
+
+ override def run() = {
+ while (true) {
+ receive {
+ case Message(value) =>
+ val j = value + 1
+ if (null != next){
+ next ! Message(j)
+ } else {
+ sum = sum + j
+ if (sum >= finalSum){
+ Console.println(sum);
+ System.exit(0) // exit without cleaning up
+ }
+ }
+ }
+ }
+ }
+
+ def pid() = { this.start; this.self }
+ }
+
+ def actorChain(i: Int, a: Pid): Pid =
+ if (i > 0) actorChain(i-1, new Incrementor(a).pid ) else a
+
+ val firstActor = actorChain(nActors, null)
+ var i = n; while (i > 0){ firstActor ! Message(0); i = i-1 }
+ }
+}
diff --git a/test/pending/shootout/message.scala.runner b/test/pending/shootout/message.scala.runner
new file mode 100644
index 0000000000..32738b030e
--- /dev/null
+++ b/test/pending/shootout/message.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(1000,2000,3000)) message.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/meteor.scala b/test/pending/shootout/meteor.scala
new file mode 100644
index 0000000000..1154ce95cc
--- /dev/null
+++ b/test/pending/shootout/meteor.scala
@@ -0,0 +1,496 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+// This is an un-optimised example implementation
+
+
+import scala.collection.mutable._
+
+object meteor {
+ def main(args: Array[String]) = {
+ val solver = new Solver( Integer.parseInt(args(0)) )
+ solver.findSolutions
+ solver.printSolutions
+ }
+}
+
+
+
+
+// Solver.scala
+// import scala.collection.mutable._
+
+final class Solver (n: Int) {
+ private var countdown = n
+ private var first: String = _
+ private var last: String = _
+
+ private val board = new Board()
+
+ val pieces = Array(
+ new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
+ new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
+
+ val unplaced = new BitSet(pieces.length)
+
+ { unplaced ++= Iterator.range(0,unplaced.capacity) }
+
+
+ def findSolutions(): Unit = {
+ if (countdown == 0) return
+
+ if (unplaced.size > 0){
+ val emptyCellIndex = board.firstEmptyCellIndex
+
+ for (val k <- Iterator.range(0,pieces.length)){
+ if (unplaced.contains(k)){
+ unplaced -= k
+
+ for (val i <- Iterator.range(0,Piece.orientations)){
+ val piece = pieces(k).nextOrientation
+
+ for (val j <- Iterator.range(0,Piece.size)){
+ if (board.add(j,emptyCellIndex,piece)) {
+
+ if (!shouldPrune) findSolutions
+
+ board.remove(piece)
+ }
+ }
+ }
+ unplaced += k
+ }
+ }
+ }
+ else {
+ puzzleSolved
+ }
+ }
+
+ private def puzzleSolved() = {
+ val b = board.asString
+ if (first == null){
+ first = b; last = b
+ } else {
+ if (b < first){ first = b } else { if (b > last){ last = b } }
+ }
+ countdown = countdown - 1
+ }
+
+ private def shouldPrune() = {
+ board.unmark
+ !board.cells.forall(c => c.contiguousEmptyCells % Piece.size == 0)
+ }
+
+
+ def printSolutions() = {
+
+ def printBoard(s: String) = {
+ var indent = false
+ var i = 0
+ while (i < s.length){
+ if (indent) Console.print(' ')
+ for (val j <- Iterator.range(0,Board.cols)){
+ Console.print(s.charAt(i)); Console.print(' ')
+ i = i + 1
+ }
+ Console.print('\n')
+ indent = !indent
+ }
+ Console.print('\n')
+ }
+
+ Console.print(n + " solutions found\n\n")
+ printBoard(first)
+ printBoard(last)
+ }
+
+/*
+ def printPieces() =
+ for (val i <- Iterator.range(0,Board.pieces)) pieces(i).print
+*/
+
+}
+
+
+
+
+// Board.scala
+// import scala.collection.mutable._
+
+object Board {
+ val cols = 5
+ val rows = 10
+ val size = rows * cols
+}
+
+final class Board {
+ val cells = boardCells()
+
+ val cellsPieceWillFill = new Array[BoardCell](Piece.size)
+ var cellCount = 0
+
+ def unmark() = for (val c <- cells) c.unmark
+
+ def asString() =
+ new String( cells map(
+ c => if (c.piece == null) '-'.toByte
+ else (c.piece.number + 48).toByte ))
+
+ def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
+
+ def add(pieceIndex: Int, boardIndex: Int, p: Piece) = {
+ cellCount = 0
+ p.unmark
+
+ find( p.cells(pieceIndex), cells(boardIndex))
+
+ val boardHasSpace = cellCount == Piece.size &&
+ cellsPieceWillFill.forall(c => c.isEmpty)
+
+ if (boardHasSpace) cellsPieceWillFill.foreach(c => c.piece = p)
+
+ boardHasSpace
+ }
+
+ def remove(piece: Piece) = for (val c <- cells; c.piece == piece) c.empty
+
+
+ private def find(p: PieceCell, b: BoardCell): Unit = {
+ if (p != null && !p.marked && b != null){
+ cellsPieceWillFill(cellCount) = b
+ cellCount = cellCount + 1
+ p.mark
+ for (val i <- Iterator.range(0,Cell.sides)) find(p.next(i), b.next(i))
+ }
+ }
+
+
+ private def boardCells() = {
+ val a = for (val i <- Array.range(0,Board.size)) yield new BoardCell(i)
+ val m = (Board.size / Board.cols) - 1
+
+ for (val i <- Iterator.range(0,a.length)){
+ val row = i / Board.cols
+ val isFirst = i % Board.cols == 0
+ val isLast = (i+1) % Board.cols == 0
+ val c = a(i)
+
+ if (row % 2 == 1) {
+ if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
+ c.next(Cell.NW) = a(i-Board.cols)
+ if (row != m) {
+ if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
+ c.next(Cell.SW) = a(i+Board.cols)
+ }
+ } else {
+ if (row != 0) {
+ if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
+ c.next(Cell.NE) = a(i-Board.cols)
+ }
+ if (row != m) {
+ if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
+ c.next(Cell.SE) = a(i+Board.cols)
+ }
+ }
+ if (!isFirst) c.next(Cell.W) = a(i-1)
+ if (!isLast) c.next(Cell.E) = a(i+1)
+ }
+ a
+ }
+
+
+/*
+// Printing all the board cells and their neighbours
+// helps check that they are connected properly
+
+ def printBoardCellsAndNeighbours() = {
+ Console.println("cell\tNW NE W E SW SE")
+ for (val i <- Iterator.range(0,Board.size)){
+ Console.print(i + "\t")
+ for (val j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ Console.printf("{0,number,00} ")(c.number)
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+
+}
+
+
+
+
+// Piece.scala
+
+object Piece {
+ val size = 5
+ val rotations = Cell.sides
+ val flips = 2
+ val orientations = rotations * flips
+}
+
+final class Piece(_number: Int) {
+ val number = _number
+ val cells = for (val i <- Array.range(0,Piece.size)) yield new PieceCell()
+
+ {
+ number match {
+ case 0 => make0
+ case 1 => make1
+ case 2 => make2
+ case 3 => make3
+ case 4 => make4
+ case 5 => make5
+ case 6 => make6
+ case 7 => make7
+ case 8 => make8
+ case 9 => make9
+ }
+ }
+
+ def flip() = for (val c <- cells) c.flip
+ def rotate() = for (val c <- cells) c.rotate
+ def unmark() = for (val c <- cells) c.unmark
+
+
+ private var orientation = 0
+
+ def nextOrientation() = {
+ if (orientation == Piece.orientations) orientation = 0
+ if (orientation % Piece.rotations == 0) flip else rotate
+ orientation = orientation + 1
+ this
+ }
+
+
+ private def make0() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.E) = cells(3)
+ cells(3).next(Cell.W) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make1() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.W) = cells(3)
+ cells(3).next(Cell.E) = cells(2)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make2() = {
+ cells(0).next(Cell.W) = cells(1)
+ cells(1).next(Cell.E) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make3() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(1).next(Cell.W) = cells(2)
+ cells(2).next(Cell.E) = cells(1)
+ cells(1).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make4() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(1).next(Cell.E) = cells(3)
+ cells(3).next(Cell.W) = cells(1)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make5() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(0).next(Cell.SE) = cells(2)
+ cells(2).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(1)
+ cells(2).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(2)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make6() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(2).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(2)
+ cells(1).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(1)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make7() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(0).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(0)
+ cells(2).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make8() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.NE) = cells(3)
+ cells(3).next(Cell.SW) = cells(2)
+ cells(3).next(Cell.E) = cells(4)
+ cells(4).next(Cell.W) = cells(3)
+ }
+
+ private def make9() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.NE) = cells(3)
+ cells(3).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.E) = cells(4)
+ cells(4).next(Cell.W) = cells(2)
+ cells(4).next(Cell.NW) = cells(3)
+ cells(3).next(Cell.SE) = cells(4)
+ }
+
+/*
+ def print() = {
+ Console.println("Piece # " + number)
+ Console.println("cell\tNW NE W E SW SE")
+ for (val i <- Iterator.range(0,Piece.size)){
+ Console.print(i + "\t")
+ for (val j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ for (val k <- Iterator.range(0,Piece.size)){
+ if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
+ }
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+
+}
+
+
+
+
+// Cell.scala
+
+object Cell {
+ val NW = 0; val NE = 1
+ val W = 2; val E = 3
+ val SW = 4; val SE = 5
+
+ val sides = 6
+}
+
+abstract class Cell {
+ type T
+ val next = new Array[T](Cell.sides)
+ var marked = false
+
+ def mark() = marked = true
+ def unmark() = marked = false
+}
+
+
+
+
+// BoardCell.scala
+
+final class BoardCell(_number: Int) extends Cell {
+ type T = BoardCell
+ val number = _number
+ var piece: Piece = _
+
+ def isEmpty() = piece == null
+ def empty() = piece = null
+
+ def contiguousEmptyCells(): Int = {
+ if (!marked && isEmpty){
+ mark
+ var count = 1
+
+ for (val neighbour <- next)
+ if (neighbour != null && neighbour.isEmpty)
+ count = count + neighbour.contiguousEmptyCells
+
+ count } else { 0 }
+ }
+}
+
+
+
+
+// PieceCell.scala
+
+final class PieceCell extends Cell {
+ type T = PieceCell
+
+ def flip = {
+ var swap = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = swap
+
+ swap = next(Cell.E)
+ next(Cell.E) = next(Cell.W)
+ next(Cell.W) = swap
+
+ swap = next(Cell.SE)
+ next(Cell.SE) = next(Cell.SW)
+ next(Cell.SW) = swap
+ }
+
+ def rotate = {
+ var swap = next(Cell.E)
+ next(Cell.E) = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = next(Cell.W)
+ next(Cell.W) = next(Cell.SW)
+ next(Cell.SW) = next(Cell.SE)
+ next(Cell.SE) = swap
+ }
+}
+
+
+
+
diff --git a/test/pending/shootout/meteor.scala-2.scala b/test/pending/shootout/meteor.scala-2.scala
new file mode 100644
index 0000000000..e8eb9a4e29
--- /dev/null
+++ b/test/pending/shootout/meteor.scala-2.scala
@@ -0,0 +1,496 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+// This is an un-optimised example implementation;
+// classes BoardCell and PieceCell hold their own Array of neighbours
+
+
+import scala.collection.mutable._
+
+object meteor {
+ def main(args: Array[String]) = {
+ val solver = new Solver( Integer.parseInt(args(0)) )
+ solver.findSolutions
+ solver.printSolutions
+ }
+}
+
+
+
+
+// Solver.scala
+// import scala.collection.mutable._
+
+final class Solver (n: Int) {
+ private var countdown = n
+ private var first: String = _
+ private var last: String = _
+
+ private val board = new Board()
+
+ val pieces = Array(
+ new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
+ new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
+
+ val unplaced = new BitSet(pieces.length)
+
+ { unplaced ++= Iterator.range(0,unplaced.capacity) }
+
+
+ def findSolutions(): Unit = {
+ if (countdown == 0) return
+
+ if (unplaced.size > 0){
+ val emptyCellIndex = board.firstEmptyCellIndex
+
+ for (val k <- Iterator.range(0,pieces.length)){
+ if (unplaced.contains(k)){
+ unplaced -= k
+
+ for (val i <- Iterator.range(0,Piece.orientations)){
+ val piece = pieces(k).nextOrientation
+
+ for (val j <- Iterator.range(0,Piece.size)){
+ if (board.add(j,emptyCellIndex,piece)) {
+
+ if (!shouldPrune) findSolutions
+
+ board.remove(piece)
+ }
+ }
+ }
+ unplaced += k
+ }
+ }
+ }
+ else {
+ puzzleSolved
+ }
+ }
+
+ private def puzzleSolved() = {
+ val b = board.asString
+ if (first == null){
+ first = b; last = b
+ } else {
+ if (b < first){ first = b } else { if (b > last){ last = b } }
+ }
+ countdown = countdown - 1
+ }
+
+ private def shouldPrune() = {
+ board.unmark
+ !board.cells.forall(c => c.contiguousEmptyCells % Piece.size == 0)
+ }
+
+
+ def printSolutions() = {
+
+ def printBoard(s: String) = {
+ var indent = false
+ var i = 0
+ while (i < s.length){
+ if (indent) Console.print(' ')
+ for (val j <- Iterator.range(0,Board.cols)){
+ Console.print(s.charAt(i)); Console.print(' ')
+ i = i + 1
+ }
+ Console.print('\n')
+ indent = !indent
+ }
+ Console.print('\n')
+ }
+
+ Console.print(n + " solutions found\n\n")
+ printBoard(first)
+ printBoard(last)
+ }
+
+/*
+ def printPieces() =
+ for (val i <- Iterator.range(0,Board.pieces)) pieces(i).print
+*/
+
+}
+
+
+
+
+// Board.scala
+// import scala.collection.mutable._
+
+object Board {
+ val cols = 5
+ val rows = 10
+ val size = rows * cols
+}
+
+final class Board {
+ val cells = boardCells()
+
+ val cellsPieceWillFill = new Array[BoardCell](Piece.size)
+ var cellCount = 0
+
+ def unmark() = for (val c <- cells) c.unmark
+
+ def asString() =
+ new String( cells map(
+ c => if (c.piece == null) '-'.toByte
+ else (c.piece.number + 48).toByte ))
+
+ def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
+
+
+ def add(pieceIndex: Int, boardIndex: Int, p: Piece) = {
+ cellCount = 0
+ p.unmark
+
+ find( p.cells(pieceIndex), cells(boardIndex))
+
+ val boardHasSpace = cellCount == Piece.size &&
+ cellsPieceWillFill.forall(c => c.isEmpty)
+
+ if (boardHasSpace) cellsPieceWillFill.foreach(c => c.piece = p)
+
+ boardHasSpace
+ }
+
+ def remove(piece: Piece) = for (val c <- cells; c.piece == piece) c.empty
+
+
+ private def find(p: PieceCell, b: BoardCell): Unit = {
+ if (p != null && !p.marked && b != null){
+ cellsPieceWillFill(cellCount) = b
+ cellCount = cellCount + 1
+ p.mark
+ for (val i <- Iterator.range(0,Cell.sides)) find(p.next(i), b.next(i))
+ }
+ }
+
+
+ private def boardCells() = {
+ val a = for (val i <- Array.range(0,Board.size)) yield new BoardCell(i)
+ val m = (Board.size / Board.cols) - 1
+
+ for (val i <- Iterator.range(0,a.length)){
+ val row = i / Board.cols
+ val isFirst = i % Board.cols == 0
+ val isLast = (i+1) % Board.cols == 0
+ val c = a(i)
+
+ if (row % 2 == 1) {
+ if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
+ c.next(Cell.NW) = a(i-Board.cols)
+ if (row != m) {
+ if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
+ c.next(Cell.SW) = a(i+Board.cols)
+ }
+ } else {
+ if (row != 0) {
+ if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
+ c.next(Cell.NE) = a(i-Board.cols)
+ }
+ if (row != m) {
+ if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
+ c.next(Cell.SE) = a(i+Board.cols)
+ }
+ }
+ if (!isFirst) c.next(Cell.W) = a(i-1)
+ if (!isLast) c.next(Cell.E) = a(i+1)
+ }
+ a
+ }
+
+
+/*
+// Printing all the board cells and their neighbours
+// helps check that they are connected properly
+
+ def printBoardCellsAndNeighbours() = {
+ Console.println("cell\tNW NE W E SW SE")
+ for (val i <- Iterator.range(0,Board.size)){
+ Console.print(i + "\t")
+ for (val j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ Console.printf("{0,number,00} ")(c.number)
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+
+}
+
+
+
+
+// Piece.scala
+
+object Piece {
+ val size = 5
+ val rotations = Cell.sides
+ val flips = 2
+ val orientations = rotations * flips
+}
+
+final class Piece(_number: Int) {
+ val number = _number
+ val cells = for (val i <- Array.range(0,Piece.size)) yield new PieceCell()
+
+ {
+ number match {
+ case 0 => make0
+ case 1 => make1
+ case 2 => make2
+ case 3 => make3
+ case 4 => make4
+ case 5 => make5
+ case 6 => make6
+ case 7 => make7
+ case 8 => make8
+ case 9 => make9
+ }
+ }
+
+ def flip() = for (val c <- cells) c.flip
+ def rotate() = for (val c <- cells) c.rotate
+ def unmark() = for (val c <- cells) c.unmark
+
+
+ private var orientation = 0
+
+ def nextOrientation() = {
+ if (orientation == Piece.orientations) orientation = 0
+ if (orientation % Piece.rotations == 0) flip else rotate
+ orientation = orientation + 1
+ this
+ }
+
+
+ private def make0() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.E) = cells(3)
+ cells(3).next(Cell.W) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make1() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.W) = cells(3)
+ cells(3).next(Cell.E) = cells(2)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make2() = {
+ cells(0).next(Cell.W) = cells(1)
+ cells(1).next(Cell.E) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make3() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(1).next(Cell.W) = cells(2)
+ cells(2).next(Cell.E) = cells(1)
+ cells(1).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make4() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(1).next(Cell.E) = cells(3)
+ cells(3).next(Cell.W) = cells(1)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make5() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(0).next(Cell.SE) = cells(2)
+ cells(2).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(1)
+ cells(2).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(2)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make6() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(2).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(2)
+ cells(1).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(1)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make7() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(0).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(0)
+ cells(2).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make8() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.NE) = cells(3)
+ cells(3).next(Cell.SW) = cells(2)
+ cells(3).next(Cell.E) = cells(4)
+ cells(4).next(Cell.W) = cells(3)
+ }
+
+ private def make9() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.NE) = cells(3)
+ cells(3).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.E) = cells(4)
+ cells(4).next(Cell.W) = cells(2)
+ cells(4).next(Cell.NW) = cells(3)
+ cells(3).next(Cell.SE) = cells(4)
+ }
+
+/*
+ def print() = {
+ Console.println("Piece # " + number)
+ Console.println("cell\tNW NE W E SW SE")
+ for (val i <- Iterator.range(0,Piece.size)){
+ Console.print(i + "\t")
+ for (val j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ for (val k <- Iterator.range(0,Piece.size)){
+ if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
+ }
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+
+}
+
+
+
+
+// Cell.scala
+
+object Cell {
+ val NW = 0; val NE = 1
+ val W = 2; val E = 3
+ val SW = 4; val SE = 5
+
+ val sides = 6
+}
+
+abstract class Cell {
+ var marked = false
+
+ def mark() = marked = true
+ def unmark() = marked = false
+}
+
+
+
+
+// BoardCell.scala
+
+final class BoardCell(_number: Int) extends Cell {
+ val next = new Array[BoardCell](Cell.sides)
+ val number = _number
+ var piece: Piece = _
+
+ def isEmpty() = piece == null
+ def empty() = piece = null
+
+ def contiguousEmptyCells(): Int = {
+ if (!marked && isEmpty){
+ mark
+ var count = 1
+
+ for (val neighbour <- next)
+ if (neighbour != null && neighbour.isEmpty)
+ count = count + neighbour.contiguousEmptyCells
+
+ count } else { 0 }
+ }
+}
+
+
+
+
+// PieceCell.scala
+
+final class PieceCell extends Cell {
+ val next = new Array[PieceCell](Cell.sides)
+
+ def flip = {
+ var swap = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = swap
+
+ swap = next(Cell.E)
+ next(Cell.E) = next(Cell.W)
+ next(Cell.W) = swap
+
+ swap = next(Cell.SE)
+ next(Cell.SE) = next(Cell.SW)
+ next(Cell.SW) = swap
+ }
+
+ def rotate = {
+ var swap = next(Cell.E)
+ next(Cell.E) = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = next(Cell.W)
+ next(Cell.W) = next(Cell.SW)
+ next(Cell.SW) = next(Cell.SE)
+ next(Cell.SE) = swap
+ }
+}
+
+
+
+
diff --git a/test/pending/shootout/meteor.scala-2.scala.runner b/test/pending/shootout/meteor.scala-2.scala.runner
new file mode 100644
index 0000000000..9f1b95e150
--- /dev/null
+++ b/test/pending/shootout/meteor.scala-2.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(0)) meteor.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/meteor.scala-3.scala b/test/pending/shootout/meteor.scala-3.scala
new file mode 100644
index 0000000000..7a4aca8fb8
--- /dev/null
+++ b/test/pending/shootout/meteor.scala-3.scala
@@ -0,0 +1,557 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+// Most for-comprehensions replaced by while loops
+
+
+
+import scala.collection.mutable._
+
+object meteor {
+ def main(args: Array[String]) = {
+ val solver = new Solver( Integer.parseInt(args(0)) )
+ solver.findSolutions
+ solver.printSolutions
+ }
+}
+
+
+
+
+// Solver.scala
+// import scala.collection.mutable._
+
+final class Solver (n: Int) {
+ private var countdown = n
+ private var first: String = _
+ private var last: String = _
+
+ private val board = new Board()
+
+ val pieces = Array(
+ new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
+ new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
+
+ val unplaced = new BitSet(pieces.length)
+
+ { unplaced ++= Iterator.range(0,unplaced.capacity) }
+
+
+ def findSolutions(): Unit = {
+ if (countdown == 0) return
+
+ if (unplaced.size > 0){
+ val emptyCellIndex = board.firstEmptyCellIndex
+
+ var k = 0
+ while (k < pieces.length){
+ if (unplaced.contains(k)){
+ unplaced -= k
+
+ var i = 0
+ while (i < Piece.orientations){
+ val piece = pieces(k).nextOrientation
+
+ var j = 0
+ while (j < Piece.size){
+ if (board.add(j,emptyCellIndex,piece)) {
+
+ if (!shouldPrune) findSolutions
+
+ board.remove(piece)
+ }
+ j = j + 1
+ }
+ i = i + 1
+ }
+ unplaced += k
+ }
+ k = k + 1
+ }
+ }
+ else {
+ puzzleSolved
+ }
+ }
+
+ private def puzzleSolved() = {
+ val b = board.asString
+ if (first == null){
+ first = b; last = b
+ } else {
+ if (b < first){ first = b } else { if (b > last){ last = b } }
+ }
+ countdown = countdown - 1
+ }
+
+ private def shouldPrune(): Boolean = {
+ board.unmark
+ var i = 0
+ while (i < board.cells.length){
+ if (board.cells(i).contiguousEmptyCells % Piece.size != 0) return true
+ i = i + 1
+ }
+ false
+ }
+
+
+ def printSolutions() = {
+
+ def printBoard(s: String) = {
+ var indent = false
+ var i = 0
+ while (i < s.length){
+ if (indent) Console.print(' ')
+ var j = 0
+ while (j < Board.cols){
+ Console.print(s.charAt(i)); Console.print(' ')
+ j = j + 1
+ i = i + 1
+ }
+ Console.print('\n')
+ indent = !indent
+ }
+ Console.print('\n')
+ }
+
+ Console.print(n + " solutions found\n\n")
+ printBoard(first)
+ printBoard(last)
+ }
+
+/*
+ def printPieces() =
+ for (val i <- Iterator.range(0,Board.pieces)) pieces(i).print
+*/
+
+}
+
+
+
+
+
+// Board.scala
+// import scala.collection.mutable._
+
+object Board {
+ val cols = 5
+ val rows = 10
+ val size = rows * cols
+}
+
+final class Board {
+ val cells = boardCells()
+
+ val cellsPieceWillFill = new Array[BoardCell](Piece.size)
+ var cellCount = 0
+
+ def unmark() = {
+ var i = 0
+ while (i < cells.length){
+ cells(i).unmark
+ i = i + 1
+ }
+ }
+
+ def asString() =
+ new String( cells map(
+ c => if (c.piece == null) '-'.toByte
+ else (c.piece.number + 48).toByte ))
+
+ def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
+
+
+ def add(pieceIndex: Int, boardIndex: Int, p: Piece): Boolean = {
+ cellCount = 0
+ p.unmark
+
+ find(p.cells(pieceIndex), cells(boardIndex))
+
+ if (cellCount != Piece.size) return false
+
+ var i = 0
+ while (i < cellCount){
+ if (!cellsPieceWillFill(i).isEmpty) return false
+ i = i + 1
+ }
+
+ i = 0
+ while (i < cellCount){
+ cellsPieceWillFill(i).piece = p
+ i = i + 1
+ }
+
+ true
+ }
+
+ def remove(piece: Piece) = {
+ var i = 0
+ while (i < cells.length){
+ if (cells(i).piece == piece) cells(i).empty
+ i = i + 1
+ }
+ }
+
+ private def find(p: PieceCell, b: BoardCell): Unit = {
+ if (p != null && !p.marked && b != null){
+ cellsPieceWillFill(cellCount) = b
+ cellCount = cellCount + 1
+ p.mark
+
+ var i = 0
+ while (i < Cell.sides){
+ find(p.next(i), b.next(i))
+ i = i + 1
+ }
+ }
+ }
+
+
+ private def boardCells() = {
+ val a = for (val i <- Array.range(0,Board.size)) yield new BoardCell(i)
+ val m = (Board.size / Board.cols) - 1
+
+ for (val i <- Iterator.range(0,a.length)){
+ val row = i / Board.cols
+ val isFirst = i % Board.cols == 0
+ val isLast = (i+1) % Board.cols == 0
+ val c = a(i)
+
+ if (row % 2 == 1) {
+ if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
+ c.next(Cell.NW) = a(i-Board.cols)
+ if (row != m) {
+ if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
+ c.next(Cell.SW) = a(i+Board.cols)
+ }
+ } else {
+ if (row != 0) {
+ if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
+ c.next(Cell.NE) = a(i-Board.cols)
+ }
+ if (row != m) {
+ if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
+ c.next(Cell.SE) = a(i+Board.cols)
+ }
+ }
+ if (!isFirst) c.next(Cell.W) = a(i-1)
+ if (!isLast) c.next(Cell.E) = a(i+1)
+ }
+ a
+ }
+
+/*
+// Printing all the board cells and their neighbours
+// helps check that they are connected properly
+
+ def printBoardCellsAndNeighbours() = {
+ Console.println("cell\tNW NE W E SW SE")
+ for (val i <- Iterator.range(0,Board.size)){
+ Console.print(i + "\t")
+ for (val j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ Console.printf("{0,number,00} ")(c.number)
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+
+}
+
+
+
+
+// Piece.scala
+
+object Piece {
+ val size = 5
+ val rotations = Cell.sides
+ val flips = 2
+ val orientations = rotations * flips
+}
+
+final class Piece(_number: Int) {
+ val number = _number
+ val cells = for (val i <- Array.range(0,Piece.size)) yield new PieceCell()
+
+ {
+ number match {
+ case 0 => make0
+ case 1 => make1
+ case 2 => make2
+ case 3 => make3
+ case 4 => make4
+ case 5 => make5
+ case 6 => make6
+ case 7 => make7
+ case 8 => make8
+ case 9 => make9
+ }
+ }
+
+ def flip() = {
+ var i = 0
+ while (i < cells.length){
+ cells(i).flip
+ i = i + 1
+ }
+ }
+
+ def rotate() = {
+ var i = 0
+ while (i < cells.length){
+ cells(i).rotate
+ i = i + 1
+ }
+ }
+
+ def unmark() = {
+ var i = 0
+ while (i < cells.length){
+ cells(i).unmark
+ i = i + 1
+ }
+ }
+
+
+ private var orientation = 0
+
+ def nextOrientation() = {
+ if (orientation == Piece.orientations) orientation = 0
+ if (orientation % Piece.rotations == 0) flip else rotate
+ orientation = orientation + 1
+ this
+ }
+
+
+ private def make0() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.E) = cells(3)
+ cells(3).next(Cell.W) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make1() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.W) = cells(3)
+ cells(3).next(Cell.E) = cells(2)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make2() = {
+ cells(0).next(Cell.W) = cells(1)
+ cells(1).next(Cell.E) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make3() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(1).next(Cell.W) = cells(2)
+ cells(2).next(Cell.E) = cells(1)
+ cells(1).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(1)
+ cells(2).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make4() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(1)
+ cells(1).next(Cell.E) = cells(3)
+ cells(3).next(Cell.W) = cells(1)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make5() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(0).next(Cell.SE) = cells(2)
+ cells(2).next(Cell.NW) = cells(0)
+ cells(1).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(1)
+ cells(2).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(2)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make6() = {
+ cells(0).next(Cell.SW) = cells(1)
+ cells(1).next(Cell.NE) = cells(0)
+ cells(2).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(2)
+ cells(1).next(Cell.SE) = cells(3)
+ cells(3).next(Cell.NW) = cells(1)
+ cells(3).next(Cell.SW) = cells(4)
+ cells(4).next(Cell.NE) = cells(3)
+ }
+
+ private def make7() = {
+ cells(0).next(Cell.SE) = cells(1)
+ cells(1).next(Cell.NW) = cells(0)
+ cells(0).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.NE) = cells(0)
+ cells(2).next(Cell.SW) = cells(3)
+ cells(3).next(Cell.NE) = cells(2)
+ cells(3).next(Cell.SE) = cells(4)
+ cells(4).next(Cell.NW) = cells(3)
+ }
+
+ private def make8() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.NE) = cells(3)
+ cells(3).next(Cell.SW) = cells(2)
+ cells(3).next(Cell.E) = cells(4)
+ cells(4).next(Cell.W) = cells(3)
+ }
+
+ private def make9() = {
+ cells(0).next(Cell.E) = cells(1)
+ cells(1).next(Cell.W) = cells(0)
+ cells(1).next(Cell.E) = cells(2)
+ cells(2).next(Cell.W) = cells(1)
+ cells(2).next(Cell.NE) = cells(3)
+ cells(3).next(Cell.SW) = cells(2)
+ cells(2).next(Cell.E) = cells(4)
+ cells(4).next(Cell.W) = cells(2)
+ cells(4).next(Cell.NW) = cells(3)
+ cells(3).next(Cell.SE) = cells(4)
+ }
+
+/*
+ def print() = {
+ Console.println("Piece # " + number)
+ Console.println("cell\tNW NE W E SW SE")
+ for (val i <- Iterator.range(0,Piece.size)){
+ Console.print(i + "\t")
+ for (val j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ for (val k <- Iterator.range(0,Piece.size)){
+ if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
+ }
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+
+}
+
+
+
+
+// Cell.scala
+
+object Cell {
+ val NW = 0; val NE = 1
+ val W = 2; val E = 3
+ val SW = 4; val SE = 5
+
+ val sides = 6
+}
+
+abstract class Cell {
+ var marked = false
+
+ def mark() = marked = true
+ def unmark() = marked = false
+}
+
+
+
+
+// BoardCell.scala
+
+final class BoardCell(_number: Int) extends Cell {
+ val next = new Array[BoardCell](Cell.sides)
+ val number = _number
+ var piece: Piece = _
+
+ def isEmpty() = piece == null
+ def empty() = piece = null
+
+ def contiguousEmptyCells(): Int = {
+ if (!marked && isEmpty){
+ mark
+ var count = 1
+
+ var i = 0
+ while (i < next.length){
+ if (next(i) != null && next(i).isEmpty)
+ count = count + next(i).contiguousEmptyCells
+ i = i + 1
+ }
+
+ count } else { 0 }
+ }
+}
+
+
+
+
+// PieceCell.scala
+
+final class PieceCell extends Cell {
+ val next = new Array[PieceCell](Cell.sides)
+
+ def flip = {
+ var swap = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = swap
+
+ swap = next(Cell.E)
+ next(Cell.E) = next(Cell.W)
+ next(Cell.W) = swap
+
+ swap = next(Cell.SE)
+ next(Cell.SE) = next(Cell.SW)
+ next(Cell.SW) = swap
+ }
+
+ def rotate = {
+ var swap = next(Cell.E)
+ next(Cell.E) = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = next(Cell.W)
+ next(Cell.W) = next(Cell.SW)
+ next(Cell.SW) = next(Cell.SE)
+ next(Cell.SE) = swap
+ }
+}
+
+
+
+
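
A minimal side-by-side sketch of the rewrite this variant applies throughout: an Iterator.range style for-comprehension replaced by an index-based while loop. Names and current Scala syntax are illustrative only.

  object loopRewriteSketch {
    // for-comprehension form, as in meteor.scala above
    def sumFor(xs: Array[Int]): Int = {
      var s = 0
      for (i <- xs.indices) s += xs(i)
      s
    }

    // equivalent index-based while loop, as in meteor.scala-3
    def sumWhile(xs: Array[Int]): Int = {
      var s = 0
      var i = 0
      while (i < xs.length) { s += xs(i); i += 1 }
      s
    }
  }
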
diff --git a/test/pending/shootout/meteor.scala-3.scala.runner b/test/pending/shootout/meteor.scala-3.scala.runner
new file mode 100644
index 0000000000..9f1b95e150
--- /dev/null
+++ b/test/pending/shootout/meteor.scala-3.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(0)) meteor.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/meteor.scala-4.scala b/test/pending/shootout/meteor.scala-4.scala
new file mode 100644
index 0000000000..eb7d04e11e
--- /dev/null
+++ b/test/pending/shootout/meteor.scala-4.scala
@@ -0,0 +1,587 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+// Most for-comprehensions replaced by while loops
+// BoardCells occupied by each Piece orientation are cached
+// Piece orientations are cached
+
+import scala.collection.mutable._
+
+object meteor {
+ def main(args: Array[String]) = {
+ val solver = new Solver( Integer.parseInt(args(0)) )
+ solver.findSolutions
+ solver.printSolutions
+ }
+}
+
+
+
+
+// Solver.scala
+// import scala.collection.mutable._
+
+final class Solver (n: Int) {
+ private var countdown = n
+ private var first: String = _
+ private var last: String = _
+
+ private val board = new Board()
+
+ val pieces = Array(
+ new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
+ new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
+
+ val unplaced = new BitSet(pieces.length)
+
+ { unplaced ++= Iterator.range(0,unplaced.capacity) }
+
+
+ def findSolutions(): Unit = {
+ if (countdown == 0) return
+
+ if (unplaced.size > 0){
+ val emptyCellIndex = board.firstEmptyCellIndex
+
+ var k = 0
+ while (k < pieces.length){
+ if (unplaced.contains(k)){
+ unplaced -= k
+
+ var i = 0
+ while (i < Piece.orientations){
+ val piece = pieces(k).nextOrientation
+
+ var j = 0
+ while (j < Piece.size){
+ if (board.add(j,emptyCellIndex,piece)) {
+
+ if (!shouldPrune) findSolutions
+
+ board.remove(piece)
+ }
+ j = j + 1
+ }
+ i = i + 1
+ }
+ unplaced += k
+ }
+ k = k + 1
+ }
+ }
+ else {
+ puzzleSolved
+ }
+ }
+
+ private def puzzleSolved() = {
+ val b = board.asString
+ if (first == null){
+ first = b; last = b
+ } else {
+ if (b < first){ first = b } else { if (b > last){ last = b } }
+ }
+ countdown = countdown - 1
+ }
+
+ private def shouldPrune(): Boolean = {
+ board.unmark
+ var i = 0
+ while (i < board.cells.length){
+ if (board.cells(i).contiguousEmptyCells % Piece.size != 0) return true
+ i = i + 1
+ }
+ false
+ }
+
+
+ def printSolutions() = {
+
+ def printBoard(s: String) = {
+ var indent = false
+ var i = 0
+ while (i < s.length){
+ if (indent) Console.print(' ')
+ var j = 0
+ while (j < Board.cols){
+ Console.print(s.charAt(i)); Console.print(' ')
+ j = j + 1
+ i = i + 1
+ }
+ Console.print('\n')
+ indent = !indent
+ }
+ Console.print('\n')
+ }
+
+ Console.print(n + " solutions found\n\n")
+ printBoard(first)
+ printBoard(last)
+ }
+
+/*
+ def printPieces() =
+ for (i <- Iterator.range(0,Board.pieces)) pieces(i).print
+*/
+
+}
+
+
+
+// Board.scala
+// import scala.collection.mutable._
+
+object Board {
+ val cols = 5
+ val rows = 10
+ val size = rows * cols
+ val pieces = 10
+ val noFit = new Array[BoardCell](0)
+}
+
+final class Board {
+ val cells = boardCells()
+
+ val cellsPieceWillFill = new Array[BoardCell](Piece.size)
+ var cellCount = 0
+
+ def unmark() = {
+ var i = 0
+ while (i < cells.length){
+ cells(i).unmark
+ i = i + 1
+ }
+ }
+
+ def asString() =
+ new String( cells map(
+ c => if (c.piece == null) '-'.toByte
+ else (c.piece.number + 48).toByte ))
+
+ def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
+
+
+ private val cache: Array[Array[Array[Array[ Array[BoardCell] ]]]] =
+ for (i <- Array.range(0,Board.pieces))
+ yield
+ for (j <- Array.range(0,Piece.orientations))
+ yield
+ for (k <- Array.range(0,Piece.size)) // piece cell index
+ yield
+ for (m <- Array.range(0,Board.size)) // board cell index
+ yield null
+
+
+ def add(pieceIndex: Int, boardIndex: Int, p: Piece): Boolean = {
+ var a = cache(p.number)(p.orientation)(pieceIndex)(boardIndex)
+
+ cellCount = 0
+ p.unmark
+
+ if (a == null){
+ find(p.cells(pieceIndex), cells(boardIndex))
+
+ if (cellCount != Piece.size){
+ cache(p.number)(p.orientation)(pieceIndex)(boardIndex) = Board.noFit
+ return false
+ }
+
+ a = cellsPieceWillFill .filter(c => true)
+ cache(p.number)(p.orientation)(pieceIndex)(boardIndex) = a
+ }
+ else {
+ if (a == Board.noFit) return false
+ }
+
+ var i = 0
+ while (i < a.length){
+ if (!a(i).isEmpty) return false
+ i = i + 1
+ }
+
+ i = 0
+ while (i < a.length){
+ a(i).piece = p
+ i = i + 1
+ }
+
+ true
+ }
+
+
+ def remove(piece: Piece) = {
+ var i = 0
+ while (i < cells.length){
+ if (cells(i).piece == piece) cells(i).empty
+ i = i + 1
+ }
+ }
+
+
+ private def find(p: PieceCell, b: BoardCell): Unit = {
+ if (p != null && !p.marked && b != null){
+ cellsPieceWillFill(cellCount) = b
+ cellCount = cellCount + 1
+ p.mark
+
+ var i = 0
+ while (i < Cell.sides){
+ find(p.next(i), b.next(i))
+ i = i + 1
+ }
+ }
+ }
+
+
+ private def boardCells() = {
+ val a = for (i <- Array.range(0,Board.size)) yield new BoardCell(i)
+ val m = (Board.size / Board.cols) - 1
+
+ for (i <- Iterator.range(0,a.length)){
+ val row = i / Board.cols
+ val isFirst = i % Board.cols == 0
+ val isLast = (i+1) % Board.cols == 0
+ val c = a(i)
+
+ if (row % 2 == 1) {
+ if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
+ c.next(Cell.NW) = a(i-Board.cols)
+ if (row != m) {
+ if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
+ c.next(Cell.SW) = a(i+Board.cols)
+ }
+ } else {
+ if (row != 0) {
+ if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
+ c.next(Cell.NE) = a(i-Board.cols)
+ }
+ if (row != m) {
+ if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
+ c.next(Cell.SE) = a(i+Board.cols)
+ }
+ }
+ if (!isFirst) c.next(Cell.W) = a(i-1)
+ if (!isLast) c.next(Cell.E) = a(i+1)
+ }
+ a
+ }
+
+
+/*
+// Printing all the board cells and their neighbours
+// helps check that they are connected properly
+
+ def printBoardCellsAndNeighbours() = {
+ Console.println("cell\tNW NE W E SW SE")
+ for (i <- Iterator.range(0,Board.size)){
+ Console.print(i + "\t")
+ for (j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ Console.printf("{0,number,00} ")(c.number)
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+
+}
+
+
+
+
+// Piece.scala
+
+object Piece {
+ val size = 5
+ val rotations = Cell.sides
+ val flips = 2
+ val orientations = rotations * flips
+}
+
+final class Piece(_number: Int) {
+ val number = _number
+
+ def unmark() = {
+ val c = cache(orientation)
+ var i = 0
+ while (i < c.length){
+ c(i).unmark
+ i = i + 1
+ }
+ }
+
+ def cells = cache(orientation)
+
+ private val cache =
+ for (i <- Array.range(0,Piece.orientations))
+ yield pieceOrientation(i)
+
+ var orientation = 0
+
+ def nextOrientation() = {
+ orientation = (orientation + 1) % Piece.orientations
+ this
+ }
+
+
+ private def pieceOrientation(k: Int) = {
+ val cells = for (i <- Array.range(0,Piece.size)) yield new PieceCell()
+ makePiece(number,cells)
+
+ var i = 0
+ while (i < k){
+ if (i % Piece.rotations == 0)
+ for (c <- cells) c.flip
+ else
+ for (c <- cells) c.rotate
+
+ i = i + 1
+ }
+ cells
+ }
+
+ private def makePiece(number: Int, cells: Array[PieceCell]) = {
+ number match {
+ case 0 => make0(cells)
+ case 1 => make1(cells)
+ case 2 => make2(cells)
+ case 3 => make3(cells)
+ case 4 => make4(cells)
+ case 5 => make5(cells)
+ case 6 => make6(cells)
+ case 7 => make7(cells)
+ case 8 => make8(cells)
+ case 9 => make9(cells)
+ }
+ }
+
+ private def make0(a: Array[PieceCell]) = {
+ a(0).next(Cell.E) = a(1)
+ a(1).next(Cell.W) = a(0)
+ a(1).next(Cell.E) = a(2)
+ a(2).next(Cell.W) = a(1)
+ a(2).next(Cell.E) = a(3)
+ a(3).next(Cell.W) = a(2)
+ a(3).next(Cell.SE) = a(4)
+ a(4).next(Cell.NW) = a(3)
+ }
+
+ private def make1(a: Array[PieceCell]) = {
+ a(0).next(Cell.SE) = a(1)
+ a(1).next(Cell.NW) = a(0)
+ a(1).next(Cell.SW) = a(2)
+ a(2).next(Cell.NE) = a(1)
+ a(2).next(Cell.W) = a(3)
+ a(3).next(Cell.E) = a(2)
+ a(3).next(Cell.SW) = a(4)
+ a(4).next(Cell.NE) = a(3)
+ }
+
+ private def make2(a: Array[PieceCell]) = {
+ a(0).next(Cell.W) = a(1)
+ a(1).next(Cell.E) = a(0)
+ a(1).next(Cell.SW) = a(2)
+ a(2).next(Cell.NE) = a(1)
+ a(2).next(Cell.SE) = a(3)
+ a(3).next(Cell.NW) = a(2)
+ a(3).next(Cell.SE) = a(4)
+ a(4).next(Cell.NW) = a(3)
+ }
+
+ private def make3(a: Array[PieceCell]) = {
+ a(0).next(Cell.SW) = a(1)
+ a(1).next(Cell.NE) = a(0)
+ a(1).next(Cell.W) = a(2)
+ a(2).next(Cell.E) = a(1)
+ a(1).next(Cell.SW) = a(3)
+ a(3).next(Cell.NE) = a(1)
+ a(2).next(Cell.SE) = a(3)
+ a(3).next(Cell.NW) = a(2)
+ a(3).next(Cell.SE) = a(4)
+ a(4).next(Cell.NW) = a(3)
+ }
+
+ private def make4(a: Array[PieceCell]) = {
+ a(0).next(Cell.SE) = a(1)
+ a(1).next(Cell.NW) = a(0)
+ a(1).next(Cell.SW) = a(2)
+ a(2).next(Cell.NE) = a(1)
+ a(1).next(Cell.E) = a(3)
+ a(3).next(Cell.W) = a(1)
+ a(3).next(Cell.SE) = a(4)
+ a(4).next(Cell.NW) = a(3)
+ }
+
+ private def make5(a: Array[PieceCell]) = {
+ a(0).next(Cell.SW) = a(1)
+ a(1).next(Cell.NE) = a(0)
+ a(0).next(Cell.SE) = a(2)
+ a(2).next(Cell.NW) = a(0)
+ a(1).next(Cell.SE) = a(3)
+ a(3).next(Cell.NW) = a(1)
+ a(2).next(Cell.SW) = a(3)
+ a(3).next(Cell.NE) = a(2)
+ a(3).next(Cell.SW) = a(4)
+ a(4).next(Cell.NE) = a(3)
+ }
+
+ private def make6(a: Array[PieceCell]) = {
+ a(0).next(Cell.SW) = a(1)
+ a(1).next(Cell.NE) = a(0)
+ a(2).next(Cell.SE) = a(1)
+ a(1).next(Cell.NW) = a(2)
+ a(1).next(Cell.SE) = a(3)
+ a(3).next(Cell.NW) = a(1)
+ a(3).next(Cell.SW) = a(4)
+ a(4).next(Cell.NE) = a(3)
+ }
+
+ private def make7(a: Array[PieceCell]) = {
+ a(0).next(Cell.SE) = a(1)
+ a(1).next(Cell.NW) = a(0)
+ a(0).next(Cell.SW) = a(2)
+ a(2).next(Cell.NE) = a(0)
+ a(2).next(Cell.SW) = a(3)
+ a(3).next(Cell.NE) = a(2)
+ a(3).next(Cell.SE) = a(4)
+ a(4).next(Cell.NW) = a(3)
+ }
+
+ private def make8(a: Array[PieceCell]) = {
+ a(0).next(Cell.E) = a(1)
+ a(1).next(Cell.W) = a(0)
+ a(1).next(Cell.E) = a(2)
+ a(2).next(Cell.W) = a(1)
+ a(2).next(Cell.NE) = a(3)
+ a(3).next(Cell.SW) = a(2)
+ a(3).next(Cell.E) = a(4)
+ a(4).next(Cell.W) = a(3)
+ }
+
+ private def make9(a: Array[PieceCell]) = {
+ a(0).next(Cell.E) = a(1)
+ a(1).next(Cell.W) = a(0)
+ a(1).next(Cell.E) = a(2)
+ a(2).next(Cell.W) = a(1)
+ a(2).next(Cell.NE) = a(3)
+ a(3).next(Cell.SW) = a(2)
+ a(2).next(Cell.E) = a(4)
+ a(4).next(Cell.W) = a(2)
+ a(4).next(Cell.NW) = a(3)
+ a(3).next(Cell.SE) = a(4)
+ }
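Each makeN above wires both directions of every edge by hand. With the direction constants defined in the Cell object further down (NW=0 .. SE=5), the opposite of direction d is simply Cell.sides - 1 - d, so a hypothetical helper (a sketch only, not part of the benchmark source) could collapse each pair of assignments into one call:

    // hypothetical helper: set both directions of one edge
    private def link(a: PieceCell, dir: Int, b: PieceCell): Unit = {
      a.next(dir) = b
      b.next(Cell.sides - 1 - dir) = a   // NW<->SE, NE<->SW, W<->E
    }
    // e.g. link(a(0), Cell.E, a(1)) would replace the first two lines of make0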
+
+/*
+ def print() = {
+ Console.println("Piece # " + number)
+ Console.println("cell\tNW NE W E SW SE")
+ for (i <- Iterator.range(0,Piece.size)){
+ Console.print(i + "\t")
+ for (j <- Iterator.range(0,Cell.sides)){
+ val c = cells(i).next(j)
+ if (c == null)
+ Console.print("-- ")
+ else
+ for (k <- Iterator.range(0,Piece.size)){
+ if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
+ }
+ }
+ Console.println("")
+ }
+ Console.println("")
+ }
+*/
+}
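The cache above holds Piece.orientations (6 rotations x 2 flips = 12) precomputed cell arrays, and nextOrientation just steps an index through them modulo 12. A minimal usage sketch, assuming nothing beyond the class as written:

    val p = new Piece(0)
    for (_ <- 0 until Piece.orientations) p.nextOrientation()
    assert(p.orientation == 0)   // the index wraps back to the first orientation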
+
+
+
+
+
+// Cell.scala
+
+object Cell {
+ val NW = 0; val NE = 1
+ val W = 2; val E = 3
+ val SW = 4; val SE = 5
+
+ val sides = 6
+}
+
+abstract class Cell {
+ var marked = false
+
+ def mark() = marked = true
+ def unmark() = marked = false
+}
+
+
+
+
+// BoardCell.scala
+
+final class BoardCell(_number: Int) extends Cell {
+ val next = new Array[BoardCell](Cell.sides)
+ val number = _number
+ var piece: Piece = _
+
+ def isEmpty() = piece == null
+ def empty() = piece = null
+
+ def contiguousEmptyCells(): Int = {
+ if (!marked && isEmpty){
+ mark
+ var count = 1
+
+ var i = 0
+ while (i < next.length){
+ if (next(i) != null && next(i).isEmpty)
+ count = count + next(i).contiguousEmptyCells
+ i = i + 1
+ }
+
+ count
+ } else 0
+ }
+}
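contiguousEmptyCells is a mark-and-count flood fill over the neighbour links; because it sets marked as it goes, the marks have to be cleared again before the next query. The same idea as a stand-alone sketch over a plain adjacency list (hypothetical names, not the benchmark's API):

    def countEmptyRegion(start: Int, neighbours: Array[List[Int]],
                         empty: Array[Boolean], marked: Array[Boolean]): Int =
      if (marked(start) || !empty(start)) 0
      else {
        marked(start) = true
        1 + neighbours(start).map(countEmptyRegion(_, neighbours, empty, marked)).sum
      }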
+
+
+
+
+// PieceCell.scala
+
+final class PieceCell extends Cell {
+ val next = new Array[PieceCell](Cell.sides)
+
+ def flip = {
+ var swap = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = swap
+
+ swap = next(Cell.E)
+ next(Cell.E) = next(Cell.W)
+ next(Cell.W) = swap
+
+ swap = next(Cell.SE)
+ next(Cell.SE) = next(Cell.SW)
+ next(Cell.SW) = swap
+ }
+
+ def rotate = {
+ var swap = next(Cell.E)
+ next(Cell.E) = next(Cell.NE)
+ next(Cell.NE) = next(Cell.NW)
+ next(Cell.NW) = next(Cell.W)
+ next(Cell.W) = next(Cell.SW)
+ next(Cell.SW) = next(Cell.SE)
+ next(Cell.SE) = swap
+ }
+}
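rotate moves each neighbour one slot around the hexagon (the value stored under NE ends up under E, NW under NE, and so on), and flip mirrors the cell by swapping NE/NW, E/W and SE/SW. So six rotations, or two flips, restore a cell; a small sanity sketch assuming only the classes as written:

    val c = new PieceCell
    val probe = new PieceCell
    c.next(Cell.NE) = probe
    for (_ <- 0 until 6) c.rotate        // one full turn
    assert(c.next(Cell.NE) eq probe)
    c.flip; c.flip                        // flipping twice is the identity
    assert(c.next(Cell.NE) eq probe)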
+
+
+
+
diff --git a/test/pending/shootout/meteor.scala-4.scala.runner b/test/pending/shootout/meteor.scala-4.scala.runner
new file mode 100644
index 0000000000..9f1b95e150
--- /dev/null
+++ b/test/pending/shootout/meteor.scala-4.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(0)) meteor.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/meteor.scala.runner b/test/pending/shootout/meteor.scala.runner
new file mode 100644
index 0000000000..9f1b95e150
--- /dev/null
+++ b/test/pending/shootout/meteor.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(0)) meteor.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/methcall.scala b/test/pending/shootout/methcall.scala
new file mode 100644
index 0000000000..2781da0be5
--- /dev/null
+++ b/test/pending/shootout/methcall.scala
@@ -0,0 +1,58 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+
+object methcall {
+ def main(args: Array[String]) = {
+ var n = toPositiveInt(args);
+ var v: Boolean = _;
+
+ val toggle = new Toggle(true);
+ for (val i <- Iterator.range(1,n)) v = toggle.activate.value;
+
+ Console println( toggle.activate.value );
+
+ val ntoggle = new NToggle(true,3);
+ for (val i <- Iterator.range(1,n)) v = ntoggle.activate.value;
+
+ Console println( ntoggle.activate.value );
+ }
+
+
+ private def toPositiveInt(s: Array[String]) = {
+ val i =
+ try { Integer.parseInt(s(0)); }
+ catch { case _ => 1 }
+ if (i>0) i; else 1;
+ }
+}
+
+
+private class Toggle(b: Boolean) {
+ var state = b;
+
+ def value = state;
+
+ def activate = {
+ state = !state;
+ this
+ }
+}
+
+
+private class NToggle(b: Boolean, trigger: Int)
+extends Toggle(b) {
+
+ val toggleTrigger = trigger;
+ var count = 0;
+
+ override def activate = {
+ count = count + 1;
+ if (count >= toggleTrigger) {
+ state = !state;
+ count = 0;
+ }
+ this
+ }
+}
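A usage note, following directly from the code above: Toggle flips its state on every activate, while an NToggle with trigger 3 flips only on every third activate, so a fresh NToggle(true, 3) behaves like this:

    val t = new NToggle(true, 3)
    // successive t.activate.value calls yield:
    //   true, true, false, false, false, true, ...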
diff --git a/test/pending/shootout/methcall.scala.runner b/test/pending/shootout/methcall.scala.runner
new file mode 100644
index 0000000000..1e50ac5492
--- /dev/null
+++ b/test/pending/shootout/methcall.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(100000,400000,700000,1000000)) methcall.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/nsieve.scala-4.check b/test/pending/shootout/nsieve.scala-4.check
new file mode 100644
index 0000000000..5ae0440a5a
--- /dev/null
+++ b/test/pending/shootout/nsieve.scala-4.check
@@ -0,0 +1,9 @@
+Primes up to 1280000 98610
+Primes up to 640000 52074
+Primes up to 320000 27608
+Primes up to 2560000 187134
+Primes up to 1280000 98610
+Primes up to 640000 52074
+Primes up to 5120000 356244
+Primes up to 2560000 187134
+Primes up to 1280000 98610
diff --git a/test/pending/shootout/nsieve.scala-4.scala b/test/pending/shootout/nsieve.scala-4.scala
new file mode 100644
index 0000000000..b5e5ccc66c
--- /dev/null
+++ b/test/pending/shootout/nsieve.scala-4.scala
@@ -0,0 +1,45 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+
+object nsieve {
+
+ def nsieve(m: int, isPrime: Array[boolean]) = {
+ for (val i <- List.range(2, m)) isPrime(i) = true
+ var count = 0
+
+ for (val i <- List.range(2, m)){
+ if (isPrime(i)){
+ var k = i+i
+ while (k < m){ isPrime(k) = false; k = k+i }
+ count = count + 1
+ }
+ }
+ count
+ }
+
+
+ def main(args: Array[String]) = {
+ val n = Integer.parseInt(args(0))
+ val m = (1<<n)*10000
+ val flags = new Array[boolean](m+1)
+
+ def printPrimes(m: int) = {
+
+ def pad(i: int, width: int) = {
+ val s = i.toString
+ List.range(0, width - s.length)
+ .map((i) => " ") .foldLeft("")((a,b) => a+b) + s
+ }
+
+ Console.println("Primes up to " + pad(m,8) + pad(nsieve(m,flags),9))
+ }
+
+
+ printPrimes(m)
+ printPrimes( (1<<(n-1))*10000 )
+ printPrimes( (1<<(n-2))*10000 )
+ }
+}
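The nested pad helper right-justifies a number in a fixed-width column. A stand-alone equivalent, shown only as a sketch of what the fold above computes:

    def pad(i: Int, width: Int): String = {
      val s = i.toString
      " " * (width - s.length) + s
    }
    // pad(8192, 8) == "    8192"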
diff --git a/test/pending/shootout/nsieve.scala-4.scala.runner b/test/pending/shootout/nsieve.scala-4.scala.runner
new file mode 100644
index 0000000000..8c9e80bca5
--- /dev/null
+++ b/test/pending/shootout/nsieve.scala-4.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(7,8,9)) nsieve.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/pidigits.check b/test/pending/shootout/pidigits.check
new file mode 100644
index 0000000000..ad4dc9962b
--- /dev/null
+++ b/test/pending/shootout/pidigits.check
@@ -0,0 +1,100 @@
+3141592653 :10
+5897932384 :20
+6264338327 :30
+9502884197 :40
+1693993751 :50
+0582097494 :60
+4592307816 :70
+4062862089 :80
+9862803482 :90
+5342117067 :100
+9821480865 :110
+1328230664 :120
+7093844609 :130
+5505822317 :140
+2535940812 :150
+8481117450 :160
+2841027019 :170
+3852110555 :180
+9644622948 :190
+9549303819 :200
+6442881097 :210
+5665933446 :220
+1284756482 :230
+3378678316 :240
+5271201909 :250
+1456485669 :260
+2346034861 :270
+0454326648 :280
+2133936072 :290
+6024914127 :300
+3724587006 :310
+6063155881 :320
+7488152092 :330
+0962829254 :340
+0917153643 :350
+6789259036 :360
+0011330530 :370
+5488204665 :380
+2138414695 :390
+1941511609 :400
+4330572703 :410
+6575959195 :420
+3092186117 :430
+3819326117 :440
+9310511854 :450
+8074462379 :460
+9627495673 :470
+5188575272 :480
+4891227938 :490
+1830119491 :500
+2983367336 :510
+2440656643 :520
+0860213949 :530
+4639522473 :540
+7190702179 :550
+8609437027 :560
+7053921717 :570
+6293176752 :580
+3846748184 :590
+6766940513 :600
+2000568127 :610
+1452635608 :620
+2778577134 :630
+2757789609 :640
+1736371787 :650
+2146844090 :660
+1224953430 :670
+1465495853 :680
+7105079227 :690
+9689258923 :700
+5420199561 :710
+1212902196 :720
+0864034418 :730
+1598136297 :740
+7477130996 :750
+0518707211 :760
+3499999983 :770
+7297804995 :780
+1059731732 :790
+8160963185 :800
+9502445945 :810
+5346908302 :820
+6425223082 :830
+5334468503 :840
+5261931188 :850
+1710100031 :860
+3783875288 :870
+6587533208 :880
+3814206171 :890
+7766914730 :900
+3598253490 :910
+4287554687 :920
+3115956286 :930
+3882353787 :940
+5937519577 :950
+8185778053 :960
+2171226806 :970
+6130019278 :980
+7661119590 :990
+9216420198 :1000
diff --git a/test/pending/shootout/pidigits.scala b/test/pending/shootout/pidigits.scala
new file mode 100644
index 0000000000..9da6ab9536
--- /dev/null
+++ b/test/pending/shootout/pidigits.scala
@@ -0,0 +1,69 @@
+/* ------------------------------------------------------------------ */
+/* The Computer Language Shootout */
+/* http://shootout.alioth.debian.org/ */
+/* */
+/* Contributed by Anthony Borla */
+/* ------------------------------------------------------------------ */
+
+object pidigits
+{
+ def main(args: Array[String]): unit =
+ {
+ val N: int = Integer.parseInt(args(0)); var i: int = 10
+
+ while (i <= N)
+ {
+ System.out.println(pi_digits(10) + "\t:" + i)
+ i = i + 10
+ }
+
+ i = i - 10
+
+ if (i < N)
+ {
+ System.out.println(pi_digits(N - i) + "\t:" + N)
+ }
+ }
+
+ def compose(a: Array[BigInt], b: Array[BigInt]): Array[BigInt] =
+ {
+ return Array(a(0) * b(0),
+ a(0) * b(1) + a(1) * b(3),
+ a(2) * b(0) + a(3) * b(2),
+ a(2) * b(1) + a(3) * b(3))
+ }
+
+ def extract(a: Array[BigInt], j: int): BigInt =
+ {
+ return (a(0) * j + a(1)) / (a(2) * j + a(3))
+ }
+
+ def pi_digits(c: int): String =
+ {
+ val r: StringBuffer = new StringBuffer(); var i: int = 0
+
+ while (i < c)
+ {
+ var y: BigInt = extract(Z, 3)
+
+ while (y != extract(Z, 4))
+ {
+ K = K + 1; Z = compose(Z, Array(K, 4 * K + 2, 0, 2 * K + 1))
+ y = extract(Z, 3)
+ }
+
+// Z = compose(Array(10, (-y) * 10, 0, 1), Z)
+
+ Z = compose(Array(10, y * (-10), 0, 1), Z)
+
+ r.append(y); i = i + 1;
+ }
+
+ return r.toString()
+ }
+
+ var K: int = 0
+
+ var Z: Array[BigInt] = Array(1, 0, 0, 1)
+}
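The state Z above is a 2x2 integer matrix stored row-major as Array(a, b, c, d): compose is matrix multiplication, and extract(m, j) evaluates the linear fractional transformation (a*j + b) / (c*j + d) with truncating BigInt division. A digit is emitted once the transformation agrees at 3 and at 4, after which Z is premultiplied by (10, -10*digit, 0, 1). A worked first step (arithmetic only, using the definitions above):

    // Z = (1,0,0,1), K steps to 1, so Z becomes compose(Z, (1, 6, 0, 3)) = (1, 6, 0, 3)
    // extract(Z, 3) = (1*3 + 6) / (0*3 + 3) = 3
    // extract(Z, 4) = (1*4 + 6) / (0*4 + 3) = 10/3 = 3   (they agree)
    // so '3' is appended and Z becomes compose((10, -30, 0, 1), Z) = (10, -30, 0, 3)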
+
diff --git a/test/pending/shootout/pidigits.scala.runner b/test/pending/shootout/pidigits.scala.runner
new file mode 100644
index 0000000000..ad83b17a73
--- /dev/null
+++ b/test/pending/shootout/pidigits.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(600,800,1000)) pidigits.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/prodcons.scala b/test/pending/shootout/prodcons.scala
new file mode 100644
index 0000000000..d48d3e94d8
--- /dev/null
+++ b/test/pending/shootout/prodcons.scala
@@ -0,0 +1,64 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+
+import concurrent.SyncVar;
+import concurrent.ops._;
+
+object prodcons {
+ def main(args: Array[String]) = {
+ val n = toPositiveInt(args);
+ val buffer = new SharedBuffer();
+ var p = 0;
+ var c = 0;
+ val cDone = new SyncVar[Boolean];
+
+ spawn {
+ while(p<n) { p=p+1; buffer put(p); }
+ }
+
+ spawn {
+ var v: Int = _;
+ while(c<n) { c=c+1; v = buffer.get; }
+ cDone set true;
+ }
+
+ cDone.get;
+ Console println(p + " " + c);
+ }
+
+
+ private def toPositiveInt(s: Array[String]) = {
+ val i =
+ try { Integer.parseInt(s(0)); }
+ catch { case _ => 1 }
+ if (i>0) i; else 1;
+ }
+}
+
+
+private class SharedBuffer() {
+ var contents: Int = _;
+ var available = false;
+
+ def get = synchronized {
+ while (available == false) wait();
+ available = false;
+ // Console println("\t" + "get " + contents);
+ notifyAll();
+ contents
+ }
+
+ def put(value: Int) = synchronized {
+ while (available == true) wait();
+ contents = value;
+ available = true;
+ // Console println("put " + value);
+ notifyAll();
+ }
+}
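SharedBuffer is a hand-rolled one-slot monitor: put blocks while a value is still pending, get blocks until one arrives, and both sides signal with notifyAll under synchronized. A sketch of the same handoff with a standard JDK class (an alternative, not what the benchmark uses):

    val buffer = new java.util.concurrent.ArrayBlockingQueue[Int](1)
    // producer thread: buffer.put(i)         (blocks while the slot is full)
    // consumer thread: val v = buffer.take() (blocks while the slot is empty)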
+
+
+
+
diff --git a/test/pending/shootout/prodcons.scala.runner b/test/pending/shootout/prodcons.scala.runner
new file mode 100644
index 0000000000..d76c3736f7
--- /dev/null
+++ b/test/pending/shootout/prodcons.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(30000,70000,100000,150000)) prodcons.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/random.scala b/test/pending/shootout/random.scala
new file mode 100644
index 0000000000..9320b5aa95
--- /dev/null
+++ b/test/pending/shootout/random.scala
@@ -0,0 +1,32 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+
+object random {
+ def main(args: Array[String]) = {
+ var n = toPositiveInt(args);
+ var result: Double = _;
+
+ while (n>0) { result=generate(100.0); n=n-1; }
+
+ Console.printf("{0,number,#.000000000}\n")(result);
+ }
+
+ private val IM = 139968;
+ private val IA = 3877;
+ private val IC = 29573;
+ private var seed = 42;
+
+ def generate(max: Double) = {
+ seed = (seed * IA + IC) % IM;
+ max * seed / IM;
+ }
+
+ private def toPositiveInt(s: Array[String]) = {
+ val i =
+ try { Integer.parseInt(s(0)); }
+ catch { case _ => 1 }
+ if (i>0) i; else 1;
+ }
+}
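generate implements a fixed linear congruential generator, seed' = (seed*IA + IC) mod IM, scaled into [0, max). The same recurrence as a pure function, a sketch with the constants copied from above:

    def lcgStep(seed: Int): Int = (seed * 3877 + 29573) % 139968
    // Iterator.iterate(42)(lcgStep).drop(1).map(s => 100.0 * s / 139968)
    // reproduces the successive generate(100.0) values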
diff --git a/test/pending/shootout/random.scala.runner b/test/pending/shootout/random.scala.runner
new file mode 100644
index 0000000000..ffe62e5d40
--- /dev/null
+++ b/test/pending/shootout/random.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(9000,300000,600000,900000)) random.main(Array(n.toString))
+}
diff --git a/test/pending/shootout/revcomp.scala-2.check b/test/pending/shootout/revcomp.scala-2.check
new file mode 100644
index 0000000000..14d792ade8
--- /dev/null
+++ b/test/pending/shootout/revcomp.scala-2.check
@@ -0,0 +1,171 @@
+>ONE Homo sapiens alu
+CGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAAC
+CTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACA
+GGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCAT
+GTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAA
+AGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTC
+TGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGG
+GTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACC
+ACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTG
+GTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTA
+CAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCT
+GGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTC
+TCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAAT
+TTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCT
+GACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCA
+CCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGC
+GCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCC
+TCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTA
+GTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGAT
+CCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCT
+TTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTC
+ACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTG
+GGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGT
+TTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGG
+CCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAG
+TCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCG
+CCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGC
+GCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGG
+CCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGC
+TGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCG
+CCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCA
+AGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCC
+CGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTC
+GAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGC
+GTGAGCCACCGCGCCCGGCC
+>TWO IUB ambiguity codes
+TAGGDHACHATCRGTRGVTGAGWTATGYTGCTGTCABACDWVTRTAAGAVVAGATTTNDA
+GASMTCTGCATBYTTCAAKTTACMTATTACTTCATARGGYACMRTGTTTTYTATACVAAT
+TTCTAKGDACKADACTATATNTANTCGTTCACGBCGYSCBHTANGGTGATCGTAAAGTAA
+CTATBAAAAGATSTGWATBCSGAKHTTABBAACGTSYCATGCAAVATKTSKTASCGGAAT
+WVATTTNTCCTTCTTCTTDDAGTGGTTGGATACVGTTAYMTMTBTACTTTHAGCTAGBAA
+AAGAGKAAGTTRATWATCAGATTMDDTTTAAAVAAATATTKTCYTAAATTVCNKTTRACG
+ADTATATTTATGATSADSCAATAWAGCGRTAGTGTAAGTGACVGRADYGTGCTACHVSDT
+CTVCARCSYTTAATATARAAAATTTAATTTACDAATTGBACAGTAYAABATBTGCAGBVG
+TGATGGDCAAAATBNMSTTABKATTGGSTCCTAGBTTACTTGTTTAGTTTATHCGATSTA
+AAGTCGAKAAASTGTTTTAWAKCAGATATACTTTTMTTTTGBATAGAGGAGCMATGATRA
+AAGGNCAYDCCDDGAAAGTHGBTAATCKYTBTACBGTBCTTTTTGDTAASSWTAAWAARA
+TTGGCTAAGWGRADTYACATAGCTCBTAGATAWAGCAATNGTATMATGTTKMMAGTAWTC
+CCNTSGAAWATWCAAAAMACTGAADNTYGATNAATCCGAYWNCTAACGTTAGAGDTTTTC
+ATCTGGKRTAVGAABVCTGWGBTCTDVGKATTBTCTAAGGVADAAAVWTCTAGGGGAGGG
+TTAGAACAATTAAHTAATNAAATGCATKATCTAAYRTDTCAGSAYTTYHGATRTTWAVTA
+BGNTCDACAGBCCRCAGWCRTCABTGMMAWGMCTCAACCGATRTGBCAVAATCGTDWDAA
+CAYAWAATWCTGGTAHCCCTAAGATAACSCTTAGTGSAACAWTBGTCDTTDGACWDBAAC
+HTTTNGSKTYYAAYGGATNTGATTTAARTTAMBAATCTAAGTBTCATYTAACTTADTGTT
+TCGATACGAAHGGCYATATACCWDTKYATDCSHTDTCAAAATGTGBACTGSCCVGATGTA
+TCMMAGCCTTDAAABAATGAAGAGTAACTHATMGVTTAATAACCCGGTTVSANTGCAATT
+GTGAGATTTAMGTTTAMAAYGCTGACAYAAAAAGGCACAMYTAAGVGGCTGGAABVTACG
+GATTSTYGTBVAKTATWACCGTGTKAGTDTGTATGTTTAAAGGAAAAAGTAACATARAAA
+GGTYCAMNYAAABTATAGNTSATANAGTCATCCTATWADKAACTRGTMSACDGTATSAYT
+AAHSHGTAABYGACTYTATADTGSTATAGAGAAATCGNTAAAGGAAATCAGTTGTNCYMV
+TNACDRTATBNATATASTAGAAMSCGGGANRCKKMCAAACATTNAGTCTRMAATBMTACC
+CGTACTTCTBGDSYAATWGAAAATGACADDCHAKAAAYATATTKTTTTCACANACWAGAA
+AKATCCTTATTAYKHKCTAAACARTATTTTDATBTVWCYGCAATACTAGGKAAASTTDGA
+MGGCHTTHAATVCAHDRYAGGRCTATACGTCMAGAGAGCTBTHGNACARTCCBDCTAAGA
+GCGGCTTTARTAAAGAATCCNAGTAWBTGACTTGAATTACWTVACAGAAABCAATNAAAC
+CGTNTRANTTGAYCMAWBADTANABRGGTKTHTWTAGTTVCTMBKTAGMTVKCCAGCANT
+TVAGSWTTAGCCGCRHTTTCCTTHNTATTAAGAAGAATAGGMTRAARTCTABGTACDTTT
+TATAAVDHAHTATAGATCCTAGTAAGYTWATDWCATGAGGGATAGTAAMDMNGBASTWAM
+TSTATRBAYDABATGTATATYCGCACTGTTTTAACMCWBTATAWAGTATBTSTATVTTAR
+CCTMTTAAKADATCAACTAATYTSVTAKGDATTATGCKTCAYCAKAATACTTKAANGAGT
+ATTSDAGATCGGAAATACTTAAYAAVGTATMCGCTTGTGTDCTAATYTATTTTATTTWAA
+CAGWRCTATGTAGMTGTTTGTTYKTNGTTKTCAGAACNTRACCTACKTGSRATGTGGGGG
+CTGTCATTAAGTAAATNGSTTABCCCCTCGCAGCTCWHTCGCGAAGCAVATGCKACGHCA
+ACAKTTAATAACASAAADATTWNYTGTAATTGTTCGTMHACHTWATGTGCWTTTTGAAHY
+ACTTTGTAYAMSAAACTTAADAAATATAGTABMATATYAATGSGGTAGTTTGTGTBYGGT
+TWSGSVGWMATTDMTCCWWCABTCSVACAGBAATGTTKATBGTCAATAATCTTCTTAAAC
+ARVAATHAGYBWCTRWCABGTWWAATCTAAGTCASTAAAKTAAGVKBAATTBGABACGTA
+AGGTTAAATAAAAACTRMDTWBCTTTTTAATAAAAGATMGCCTACKAKNTBAGYRASTGT
+ASSTCGTHCGAAKTTATTATATTYTTTGTAGAACATGTCAAAACTWTWTHGKTCCYAATA
+AAGTGGAYTMCYTAARCSTAAATWAKTGAATTTRAGTCTSSATACGACWAKAASATDAAA
+TGYYACTSAACAAHAKTSHYARGASTATTATTHAGGYGGASTTTBGAKGATSANAACACD
+TRGSTTRAAAAAAAACAAGARTCVTAGTAAGATAWATGVHAAKATWGAAAAGTYAHVTAC
+TCTGRTGTCAWGATRVAAKTCGCAAVCGASWGGTTRTCSAMCCTAACASGWKKAWDAATG
+ACRCBACTATGTGTCTTCAAAHGSCTATATTTCGTVWAGAAGTAYCKGARAKSGKAGTAN
+TTTCYACATWATGTCTAAAADMDTWCAATSTKDACAMAADADBSAAATAGGCTHAHAGTA
+CGACVGAATTATAAAGAHCCVAYHGHTTTACATSTTTATGNCCMTAGCATATGATAVAAG
+>THREE Homo sapiens frequency
+ATATTTATCTTTTCACTTCCTACATTGGTCAGACCATTATTCGACACGTGGCGTCATTTT
+GTCATACCGGGTAATGTTGGAAACAAAACGTACTGATAAAATACTGAGTTGTAAACTCTA
+ATCAGATAACGCGCTTGGATATTAAGATTCACACAGGGGTTTCGGCTGTAAAAAAACTTG
+TGGAGCTGTTCTGGGACAGATAAGTTGTACCTCGTACTTAGCTAATTAATGAACCAACTG
+ATTACGATAGAACAATTCTGAGGCCGCCAGGACAGCCAAATTTTAATCTTATAAAGCTGG
+AAACAGCCGGTATTAGCTTCTCGCATACTTTGCCTGCATTGGTACCTTACAGATATCAGC
+GTAGTCATATACACCTCGGTCTCAGCTAAGCTTGTATCTCTTAGAGTAGTTCAAAGATAG
+TGGACAATACCTGTGGAATCGATTGCAGATATGGATTTATTTAACTACTGAGTCTCATTC
+ACAAGCTAAGCAAGGAGCACGTTTTGGTGCCGGCATACCGATTTGCTATCATGTCAGCAA
+ATTTGCGTTGTATTCCTAGTTGCACCCATTAAGGCCACACTCCGAACCTAATTATTACAT
+CGCAAAGACATGTACGAAGGACCCGATGTCGAATAGAAGGGAGGACTGTTCATTGGAAGC
+TAGACCAGAGGAATCGCAAAGATGCAACTCTTACAATAAAAATCTAATTTCAGTCAACAC
+GCAATTTCTATAAGGTTTCCGATAATAATGAACCGTCTTCCACAGGGGAATTTGCCATGC
+TCGTAAAAGTAGTTAATCCAAGTAGAAGAAATTTTGATAATGTTTTAAGTTGGCACGAAG
+GAATTCAGAGAGATCTTACCTAACAAAGGCATTAGTAGATGTTCCTTGGTTCACACTCGG
+TCAATCAGAGCACATACTACGGGCGATACCGGGAATGACACAACATCAATGAGATTGTTA
+AGTGAGGTAATTGACTTTAGAGGACTCGATCAGTATACTGTCACTATGAACATCGTATTA
+ATTGTTATCCGATATATACACCACCGATTTGCTTGTGCAAGGTTACAGACCCATTCGATA
+AATACAAACACGGAGCGATATTATTTAAGGAGTGCTGTCTTCAAAAGAATTATTCCCACA
+CCGACATAAGAACTTCGCTCCGTCATTCCAGATTTAAATAACATAACGTAACGCTTTGCT
+GATAACATAACATAACCGAGAATTTGCTTAGGAAATTTGGAGCAATATTGCATTGTTTCT
+CAGTCATCACAAGGCCCGCCAAAGAACTCTGAGAATCAGGATTCAACATGATTGGTAAGA
+CTCTATATATATAACTTAATTCTTGTGTCCGGAGATAGAAAGAGGACGAGAGATACTACG
+AAAGAAAGTGTACTTCGATGTATCAATTCAGACGCCTTCTCTATCATCAACATTATAGGT
+CTCGTATATGCTCGGCGCGATCTGCTTCTCTCCGCCAATAGCCCCATAGTGTATTTCAAG
+CGCAGTAACAGTGAAATCGTTACGAAGGTAGGGATGTTGCTTATAATTGTCGTAACTTAT
+CGCTTATGTATCTTTCAAGAATGAACGGCAGCATATACATACGTTCTACCTTTAGCTACA
+AAGCATCCATATACTCCCTCTCATGATTGAAACTCTTCCCTATTTTGTAGCCAATAGTGA
+AAGCGTATTAGTATAAATTCGTCGGTTTTTCACTCGCAACTGTTATACTCTGCAAACAAA
+CGAAAGCCTCATAGTACAAACCTAAAGCTACATACTTCATCATTGGCAGACCAGTGGCGG
+TATTTCTACGGAAGCATCACTATAGATATAAAGTTTCCCTTCATGTACGTCTGTTAACCA
+TATCACAAGAAACTGCTATCTCTGTCACGTAACAATTCACGCGCCTTATCGCCAAATGTT
+CATATATGCGCGGTATACGTATGAACGAATACTAATTAGTATAACGGAGGATTCACGGGA
+GGGATACTTGGGGCATTTATAAATCGTCTAAAAATTTTCTATCAGCACTTGCGGGTTATA
+GTGGATTACTAGGCAACATAATATTCTGTATTGGTCCAAATGACGCTATAGATAAATTAG
+CAAAATACATTGTTTCCATTTATGTAAGTCGAAACTCCAGGACTCCCGGGAACCAGTTAA
+ACCGTCTGGAAAAGACACATTGTGAGCGGGACTTCAATGATAGCTTTCAATGAGCTTCTC
+ATGCTTGGGGTCTGTACATATATGTTGGCGAAATTATCGTCTGTATTCTGTTATGCTTTG
+ATCATGGGTTATTAGTATAGTGTCCGGTTAAGTACCAATACCGCTAGAGACCCGACCTAA
+GTCGATAACTAACGATCATCGACGTAAGGATCGTCTCGATCAGTACTTCAGTCTAGATCT
+GGGAATAGTAACTCGTTAGTGAACTATGTCGTGTCATAACTCTAAAATGCAATCAAATCT
+TATTATTGAGTATTGATTATATAAAGCATCCGCTTAGCTTTACCCTCAAATGTTATATGC
+AATTTAAAGCGCTTGATATCGTCTACTCAAGTTCAGGTTTCACATGGCCGCAACGTGACG
+TTATTAGAGGTGGGTCATCATCTCTGAGGCTAGTGATGTTGAATACTCATTGAATGGGAA
+GTGGAATACCATGCTCGTAGGTAACAGCATGACCTATAAAATATACTATGGGTGTGTGGT
+AGATCAATATTGTTCAAGCATATCGTAACAATAACGGCTGAAATGTTACTGACATGAAAG
+AGGGAGTCCAAACCATTCTAACAGCTGATCAAGTCGTCTAAAAACGCCTGGTTCAGCCTT
+AAGAGTTATAAGCCAGACAAATTGTATCAATAGAGAATCCGTAAATTCCTCGGCCAACCT
+CTTGCAAAGACATCACTATCAATATACTACCGTGATCTTAATTAGTGAACTTATATAAAT
+ATCTACAACCAGATTCAACGGAAAAGCTTTAGTGGATTAGAAATTGCCAAGAATCACATT
+CATGTGGGTTCGAATGCTTTAGTAATACCATTTCGCCGAGTAGTCACTTCGCTGAACTGT
+CGTAAATTGCTATGACATAATCGAAAAGGATTGTCAAGAGTCGATTACTGCGGACTAATA
+ATCCCCACGGGGGTGGTCTCATGTCTCCCCAGGCGAGTGGGGACGGTTGATAAACACGCT
+GCATCGCGGACTGATGTTCCCAGTATTACATAGTCACATTGGATTGCGAGTAGTCTACCT
+ATTTATGAGCGAGAGATGCCTCTAACTACTTCGACTTTTAAAACCTTTCCACGCCAGTAT
+TCGGCGAAAGGGAAGTATTAAGGGTTGTCATAATTAAGCTGATACCACTTCAGACTTTGC
+TCTACTTCTGTCTTTCATTGGTTTAGTAAAGTCTGTCCATTCGTCGAGACCGTCTTTTGC
+AGCCTCATTCTACCAACTGCTCCGACTCTTAGTCTGCTTCTCCCAGCGTTATAACAAGAG
+GCATTTTGTCATCCTTAAAACAATAATAAAGAACTCGGAGCACTGATATAATGACTGAAT
+TAGAACCGCTTAAAAATACAACGAATAGATAAGACTATCGGATAAGATCTAATATGTAGT
+GATTAAGCCCTTTATTAATTAATAATAGTTACCCTTTCTGATGTAACGCGACATATTACG
+ATTTAGTGGCACGTCTGAATTGCAAAGCAGATCTCTACCCGATTTTTATTATAAATCCCG
+TATACATCTTGACTTGAGTAATTGTTCATCTTTTTATATCTCTTCGTACTACAAATAATT
+AATATCTCAACCCGTATTGTGTGATTCTAATTACCAACAGAATACGAGGAGGTTTTTGCT
+TAGGGCCATATATAATGAATCTATCTCGTTTATTCGCGGAACCCGAGATAACATTACGAT
+GTAACTATTTTAGAGAACTTAATACAAGAAACATTGCTGATTACTCATAACTAAATGCTT
+GGTAATATATCCTCAGTGCCCCTACCATCTTTTACGCAGGGATGTAATTACTTAGGATTC
+ATTGTGTAAGAATTACAATGAACGATGGATATGAAGGCATGTTGCGAGGTGTTCCTTGGT
+ATGTGAAGTTCGCAGGGCAACAAAAATTTCGCAGAATAGGCCTCAAAGTATTGGTAAAGA
+AGACAACTAATCATCACGAGCTTCTGATATCAATACGAACGAGTCCTGTGATGGATGAAA
+GAAAGTCGTATCGAAAATGTCAAGAGTCTGCCCAATGTAACTTACTTCAAAAAATAACGC
+TTCCGCCAAGTACGTTCGAATAAACGTAATTTTAAAAATACATAAGGGGTGTTAGAAAGT
+AAGCGACGGGATATAAGTTAGACTCAAGATTCCGCCGTAAAACGAGACTGATTCCGAAGA
+TTGTTCGTGGATCTGGTCATGACTTTCACTGAGTAAGGAGTTTCGACATATGTCAATAAA
+CACAAAAATAGAAGCTATTCGATCTGAAAAATATTAGGACAAGAAACTATCTCACGCTAG
+CCCAGAATATTCACTCACCCACGGGCGATACTAAAGCACTATATAGTCGCGTGATTACTA
+TACATATGGTACACATAAGAATCACGATCAGGTTCTCAATTTTCAACAATATATGTTTAT
+TTGCATAGGTAATATTAGGCCTTTAAGAGAAGGATGGGTGAGATACTCCGGGGATGGCGG
+CAATAAAGAAAAACACGATATGAGTAATAGGATCCTAATATCTTGGCGAGAGACTTAAGG
+TACGAATTTTGCGCAATCTATTTTTTACTTGGCCAGAATTCATGTATGGTATAAGTACGA
+ACTTTTTTGATCACTTTCATGGCTACCTGATTAGGATAGTTTGAGGAATTTCCCAAATAT
+ACCGATTTAATATACACTAGGGCTTGTCACTTTGAGTCAGAAAAAGAATATAATTACTTA
+GGGTAATGCTGCATACATATTCTTATATTGCAAAGGTTCTCTGGGTAATCTTGAGCCTTC
+ACGATACCTGGTGAAGTGTT
diff --git a/test/pending/shootout/revcomp.scala-2.scala b/test/pending/shootout/revcomp.scala-2.scala
new file mode 100644
index 0000000000..a8ea650cb9
--- /dev/null
+++ b/test/pending/shootout/revcomp.scala-2.scala
@@ -0,0 +1,92 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+import java.io._
+import scala.collection.mutable.Stack
+
+object revcomp {
+
+ val IUB = IUBCodeComplements
+
+ def IUBCodeComplements() = {
+ val code = "ABCDGHKMNRSTVWYabcdghkmnrstvwy".getBytes
+ val comp = "TVGHCDMKNYSABWRTVGHCDMKNYSABWR".getBytes
+ val a: Array[byte] = new Array( 'z'.toByte )
+
+ for (val indexValue <- code zip comp)
+ indexValue match { case Pair(i,v) => a(i) = v }
+
+ a
+ }
+
+
+ type LineStack = Stack[Array[byte]]
+
+ def main(args: Array[String]) = {
+ val r = new BufferedReader(new InputStreamReader(System.in))
+ val w = new BufferedOutputStream(System.out)
+
+ var lines: LineStack = new Stack
+ var desc = ""
+
+ var line = r.readLine
+ while (line != null) {
+ val c = line.charAt(0)
+ if (c == '>'){
+ if (desc.length > 0){
+ complementReverseWrite(desc, lines, w)
+ lines = new Stack
+ }
+ desc = line
+ } else {
+ if (c != ';') lines += line.getBytes
+ }
+ line = r.readLine
+ }
+ r.close
+
+ if (desc.length > 0) complementReverseWrite(desc, lines, w)
+ w.close
+ }
+
+
+ def complementReverseWrite(desc: String, lines: LineStack,
+ w: BufferedOutputStream) = {
+
+ def inplaceComplementReverse(b: Array[byte]) = {
+ var i = 0
+ var j = b.length - 1
+ while (i < j){
+ val swap = b(i)
+ b(i) = IUB( b(j) )
+ b(j) = IUB( swap )
+ i = i + 1
+ j = j - 1
+ }
+ if (i == j) b(i) = IUB( b(i) )
+ }
+
+ val nl = '\n'.toByte
+ w.write(desc.getBytes); w.write(nl)
+
+ val n = 60
+ val k = if (lines.isEmpty) 0 else lines.top.length
+ val isSplitLine = k < n
+ var isFirstLine = true
+
+ while (!lines.isEmpty) {
+ val line = lines.pop
+ inplaceComplementReverse(line)
+
+ if (isSplitLine){
+ if (isFirstLine){ w.write(line); isFirstLine = false }
+ else { w.write(line,0,n-k); w.write(nl); w.write(line,n-k,k) }
+ }
+ else { w.write(line); w.write(nl) }
+ }
+ if (isSplitLine && !isFirstLine) w.write(nl)
+ }
+
+}
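A note on the table built by IUBCodeComplements: it is indexed directly by the byte value of an IUB code and maps both letter cases to the uppercase complement, e.g. IUB('A'.toByte) == 'T'.toByte and IUB('g'.toByte) == 'C'.toByte. inplaceComplementReverse then complements and reverses a line in a single pass, the if (i == j) branch covering the middle byte of odd-length lines. For instance:

    // reverse-complementing the bytes of "AAC" yields "GTT",
    // while "ACGT" is its own reverse complement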
diff --git a/test/pending/shootout/revcomp.scala-2.scala.runner b/test/pending/shootout/revcomp.scala-2.scala.runner
new file mode 100644
index 0000000000..0f68f0da01
--- /dev/null
+++ b/test/pending/shootout/revcomp.scala-2.scala.runner
@@ -0,0 +1,6 @@
+object Test extends Application {
+ for(val n <- List(25000,250000,2500000)) {
+ System.setIn(new java.io.FileInputStream(System.getProperty("partest.cwd")+"/revcomp-input"+n+".txt"))
+ revcomp.main(Array(n.toString))
+ }
+}
diff --git a/test/pending/shootout/revcomp.scala-3.check b/test/pending/shootout/revcomp.scala-3.check
new file mode 100644
index 0000000000..14d792ade8
--- /dev/null
+++ b/test/pending/shootout/revcomp.scala-3.check
@@ -0,0 +1,171 @@
+>ONE Homo sapiens alu
+CGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAAC
+CTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACA
+GGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCAT
+GTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAA
+AGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTC
+TGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGG
+GTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACC
+ACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTG
+GTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTA
+CAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCT
+GGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTC
+TCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAAT
+TTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCT
+GACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCA
+CCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGC
+GCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCC
+TCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTA
+GTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGAT
+CCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCT
+TTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTC
+ACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTG
+GGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGT
+TTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGG
+CCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAG
+TCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCG
+CCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGC
+GCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGG
+CCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGC
+TGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCG
+CCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCA
+AGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCC
+CGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTC
+GAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGC
+GTGAGCCACCGCGCCCGGCC
+>TWO IUB ambiguity codes
+TAGGDHACHATCRGTRGVTGAGWTATGYTGCTGTCABACDWVTRTAAGAVVAGATTTNDA
+GASMTCTGCATBYTTCAAKTTACMTATTACTTCATARGGYACMRTGTTTTYTATACVAAT
+TTCTAKGDACKADACTATATNTANTCGTTCACGBCGYSCBHTANGGTGATCGTAAAGTAA
+CTATBAAAAGATSTGWATBCSGAKHTTABBAACGTSYCATGCAAVATKTSKTASCGGAAT
+WVATTTNTCCTTCTTCTTDDAGTGGTTGGATACVGTTAYMTMTBTACTTTHAGCTAGBAA
+AAGAGKAAGTTRATWATCAGATTMDDTTTAAAVAAATATTKTCYTAAATTVCNKTTRACG
+ADTATATTTATGATSADSCAATAWAGCGRTAGTGTAAGTGACVGRADYGTGCTACHVSDT
+CTVCARCSYTTAATATARAAAATTTAATTTACDAATTGBACAGTAYAABATBTGCAGBVG
+TGATGGDCAAAATBNMSTTABKATTGGSTCCTAGBTTACTTGTTTAGTTTATHCGATSTA
+AAGTCGAKAAASTGTTTTAWAKCAGATATACTTTTMTTTTGBATAGAGGAGCMATGATRA
+AAGGNCAYDCCDDGAAAGTHGBTAATCKYTBTACBGTBCTTTTTGDTAASSWTAAWAARA
+TTGGCTAAGWGRADTYACATAGCTCBTAGATAWAGCAATNGTATMATGTTKMMAGTAWTC
+CCNTSGAAWATWCAAAAMACTGAADNTYGATNAATCCGAYWNCTAACGTTAGAGDTTTTC
+ATCTGGKRTAVGAABVCTGWGBTCTDVGKATTBTCTAAGGVADAAAVWTCTAGGGGAGGG
+TTAGAACAATTAAHTAATNAAATGCATKATCTAAYRTDTCAGSAYTTYHGATRTTWAVTA
+BGNTCDACAGBCCRCAGWCRTCABTGMMAWGMCTCAACCGATRTGBCAVAATCGTDWDAA
+CAYAWAATWCTGGTAHCCCTAAGATAACSCTTAGTGSAACAWTBGTCDTTDGACWDBAAC
+HTTTNGSKTYYAAYGGATNTGATTTAARTTAMBAATCTAAGTBTCATYTAACTTADTGTT
+TCGATACGAAHGGCYATATACCWDTKYATDCSHTDTCAAAATGTGBACTGSCCVGATGTA
+TCMMAGCCTTDAAABAATGAAGAGTAACTHATMGVTTAATAACCCGGTTVSANTGCAATT
+GTGAGATTTAMGTTTAMAAYGCTGACAYAAAAAGGCACAMYTAAGVGGCTGGAABVTACG
+GATTSTYGTBVAKTATWACCGTGTKAGTDTGTATGTTTAAAGGAAAAAGTAACATARAAA
+GGTYCAMNYAAABTATAGNTSATANAGTCATCCTATWADKAACTRGTMSACDGTATSAYT
+AAHSHGTAABYGACTYTATADTGSTATAGAGAAATCGNTAAAGGAAATCAGTTGTNCYMV
+TNACDRTATBNATATASTAGAAMSCGGGANRCKKMCAAACATTNAGTCTRMAATBMTACC
+CGTACTTCTBGDSYAATWGAAAATGACADDCHAKAAAYATATTKTTTTCACANACWAGAA
+AKATCCTTATTAYKHKCTAAACARTATTTTDATBTVWCYGCAATACTAGGKAAASTTDGA
+MGGCHTTHAATVCAHDRYAGGRCTATACGTCMAGAGAGCTBTHGNACARTCCBDCTAAGA
+GCGGCTTTARTAAAGAATCCNAGTAWBTGACTTGAATTACWTVACAGAAABCAATNAAAC
+CGTNTRANTTGAYCMAWBADTANABRGGTKTHTWTAGTTVCTMBKTAGMTVKCCAGCANT
+TVAGSWTTAGCCGCRHTTTCCTTHNTATTAAGAAGAATAGGMTRAARTCTABGTACDTTT
+TATAAVDHAHTATAGATCCTAGTAAGYTWATDWCATGAGGGATAGTAAMDMNGBASTWAM
+TSTATRBAYDABATGTATATYCGCACTGTTTTAACMCWBTATAWAGTATBTSTATVTTAR
+CCTMTTAAKADATCAACTAATYTSVTAKGDATTATGCKTCAYCAKAATACTTKAANGAGT
+ATTSDAGATCGGAAATACTTAAYAAVGTATMCGCTTGTGTDCTAATYTATTTTATTTWAA
+CAGWRCTATGTAGMTGTTTGTTYKTNGTTKTCAGAACNTRACCTACKTGSRATGTGGGGG
+CTGTCATTAAGTAAATNGSTTABCCCCTCGCAGCTCWHTCGCGAAGCAVATGCKACGHCA
+ACAKTTAATAACASAAADATTWNYTGTAATTGTTCGTMHACHTWATGTGCWTTTTGAAHY
+ACTTTGTAYAMSAAACTTAADAAATATAGTABMATATYAATGSGGTAGTTTGTGTBYGGT
+TWSGSVGWMATTDMTCCWWCABTCSVACAGBAATGTTKATBGTCAATAATCTTCTTAAAC
+ARVAATHAGYBWCTRWCABGTWWAATCTAAGTCASTAAAKTAAGVKBAATTBGABACGTA
+AGGTTAAATAAAAACTRMDTWBCTTTTTAATAAAAGATMGCCTACKAKNTBAGYRASTGT
+ASSTCGTHCGAAKTTATTATATTYTTTGTAGAACATGTCAAAACTWTWTHGKTCCYAATA
+AAGTGGAYTMCYTAARCSTAAATWAKTGAATTTRAGTCTSSATACGACWAKAASATDAAA
+TGYYACTSAACAAHAKTSHYARGASTATTATTHAGGYGGASTTTBGAKGATSANAACACD
+TRGSTTRAAAAAAAACAAGARTCVTAGTAAGATAWATGVHAAKATWGAAAAGTYAHVTAC
+TCTGRTGTCAWGATRVAAKTCGCAAVCGASWGGTTRTCSAMCCTAACASGWKKAWDAATG
+ACRCBACTATGTGTCTTCAAAHGSCTATATTTCGTVWAGAAGTAYCKGARAKSGKAGTAN
+TTTCYACATWATGTCTAAAADMDTWCAATSTKDACAMAADADBSAAATAGGCTHAHAGTA
+CGACVGAATTATAAAGAHCCVAYHGHTTTACATSTTTATGNCCMTAGCATATGATAVAAG
+>THREE Homo sapiens frequency
+ATATTTATCTTTTCACTTCCTACATTGGTCAGACCATTATTCGACACGTGGCGTCATTTT
+GTCATACCGGGTAATGTTGGAAACAAAACGTACTGATAAAATACTGAGTTGTAAACTCTA
+ATCAGATAACGCGCTTGGATATTAAGATTCACACAGGGGTTTCGGCTGTAAAAAAACTTG
+TGGAGCTGTTCTGGGACAGATAAGTTGTACCTCGTACTTAGCTAATTAATGAACCAACTG
+ATTACGATAGAACAATTCTGAGGCCGCCAGGACAGCCAAATTTTAATCTTATAAAGCTGG
+AAACAGCCGGTATTAGCTTCTCGCATACTTTGCCTGCATTGGTACCTTACAGATATCAGC
+GTAGTCATATACACCTCGGTCTCAGCTAAGCTTGTATCTCTTAGAGTAGTTCAAAGATAG
+TGGACAATACCTGTGGAATCGATTGCAGATATGGATTTATTTAACTACTGAGTCTCATTC
+ACAAGCTAAGCAAGGAGCACGTTTTGGTGCCGGCATACCGATTTGCTATCATGTCAGCAA
+ATTTGCGTTGTATTCCTAGTTGCACCCATTAAGGCCACACTCCGAACCTAATTATTACAT
+CGCAAAGACATGTACGAAGGACCCGATGTCGAATAGAAGGGAGGACTGTTCATTGGAAGC
+TAGACCAGAGGAATCGCAAAGATGCAACTCTTACAATAAAAATCTAATTTCAGTCAACAC
+GCAATTTCTATAAGGTTTCCGATAATAATGAACCGTCTTCCACAGGGGAATTTGCCATGC
+TCGTAAAAGTAGTTAATCCAAGTAGAAGAAATTTTGATAATGTTTTAAGTTGGCACGAAG
+GAATTCAGAGAGATCTTACCTAACAAAGGCATTAGTAGATGTTCCTTGGTTCACACTCGG
+TCAATCAGAGCACATACTACGGGCGATACCGGGAATGACACAACATCAATGAGATTGTTA
+AGTGAGGTAATTGACTTTAGAGGACTCGATCAGTATACTGTCACTATGAACATCGTATTA
+ATTGTTATCCGATATATACACCACCGATTTGCTTGTGCAAGGTTACAGACCCATTCGATA
+AATACAAACACGGAGCGATATTATTTAAGGAGTGCTGTCTTCAAAAGAATTATTCCCACA
+CCGACATAAGAACTTCGCTCCGTCATTCCAGATTTAAATAACATAACGTAACGCTTTGCT
+GATAACATAACATAACCGAGAATTTGCTTAGGAAATTTGGAGCAATATTGCATTGTTTCT
+CAGTCATCACAAGGCCCGCCAAAGAACTCTGAGAATCAGGATTCAACATGATTGGTAAGA
+CTCTATATATATAACTTAATTCTTGTGTCCGGAGATAGAAAGAGGACGAGAGATACTACG
+AAAGAAAGTGTACTTCGATGTATCAATTCAGACGCCTTCTCTATCATCAACATTATAGGT
+CTCGTATATGCTCGGCGCGATCTGCTTCTCTCCGCCAATAGCCCCATAGTGTATTTCAAG
+CGCAGTAACAGTGAAATCGTTACGAAGGTAGGGATGTTGCTTATAATTGTCGTAACTTAT
+CGCTTATGTATCTTTCAAGAATGAACGGCAGCATATACATACGTTCTACCTTTAGCTACA
+AAGCATCCATATACTCCCTCTCATGATTGAAACTCTTCCCTATTTTGTAGCCAATAGTGA
+AAGCGTATTAGTATAAATTCGTCGGTTTTTCACTCGCAACTGTTATACTCTGCAAACAAA
+CGAAAGCCTCATAGTACAAACCTAAAGCTACATACTTCATCATTGGCAGACCAGTGGCGG
+TATTTCTACGGAAGCATCACTATAGATATAAAGTTTCCCTTCATGTACGTCTGTTAACCA
+TATCACAAGAAACTGCTATCTCTGTCACGTAACAATTCACGCGCCTTATCGCCAAATGTT
+CATATATGCGCGGTATACGTATGAACGAATACTAATTAGTATAACGGAGGATTCACGGGA
+GGGATACTTGGGGCATTTATAAATCGTCTAAAAATTTTCTATCAGCACTTGCGGGTTATA
+GTGGATTACTAGGCAACATAATATTCTGTATTGGTCCAAATGACGCTATAGATAAATTAG
+CAAAATACATTGTTTCCATTTATGTAAGTCGAAACTCCAGGACTCCCGGGAACCAGTTAA
+ACCGTCTGGAAAAGACACATTGTGAGCGGGACTTCAATGATAGCTTTCAATGAGCTTCTC
+ATGCTTGGGGTCTGTACATATATGTTGGCGAAATTATCGTCTGTATTCTGTTATGCTTTG
+ATCATGGGTTATTAGTATAGTGTCCGGTTAAGTACCAATACCGCTAGAGACCCGACCTAA
+GTCGATAACTAACGATCATCGACGTAAGGATCGTCTCGATCAGTACTTCAGTCTAGATCT
+GGGAATAGTAACTCGTTAGTGAACTATGTCGTGTCATAACTCTAAAATGCAATCAAATCT
+TATTATTGAGTATTGATTATATAAAGCATCCGCTTAGCTTTACCCTCAAATGTTATATGC
+AATTTAAAGCGCTTGATATCGTCTACTCAAGTTCAGGTTTCACATGGCCGCAACGTGACG
+TTATTAGAGGTGGGTCATCATCTCTGAGGCTAGTGATGTTGAATACTCATTGAATGGGAA
+GTGGAATACCATGCTCGTAGGTAACAGCATGACCTATAAAATATACTATGGGTGTGTGGT
+AGATCAATATTGTTCAAGCATATCGTAACAATAACGGCTGAAATGTTACTGACATGAAAG
+AGGGAGTCCAAACCATTCTAACAGCTGATCAAGTCGTCTAAAAACGCCTGGTTCAGCCTT
+AAGAGTTATAAGCCAGACAAATTGTATCAATAGAGAATCCGTAAATTCCTCGGCCAACCT
+CTTGCAAAGACATCACTATCAATATACTACCGTGATCTTAATTAGTGAACTTATATAAAT
+ATCTACAACCAGATTCAACGGAAAAGCTTTAGTGGATTAGAAATTGCCAAGAATCACATT
+CATGTGGGTTCGAATGCTTTAGTAATACCATTTCGCCGAGTAGTCACTTCGCTGAACTGT
+CGTAAATTGCTATGACATAATCGAAAAGGATTGTCAAGAGTCGATTACTGCGGACTAATA
+ATCCCCACGGGGGTGGTCTCATGTCTCCCCAGGCGAGTGGGGACGGTTGATAAACACGCT
+GCATCGCGGACTGATGTTCCCAGTATTACATAGTCACATTGGATTGCGAGTAGTCTACCT
+ATTTATGAGCGAGAGATGCCTCTAACTACTTCGACTTTTAAAACCTTTCCACGCCAGTAT
+TCGGCGAAAGGGAAGTATTAAGGGTTGTCATAATTAAGCTGATACCACTTCAGACTTTGC
+TCTACTTCTGTCTTTCATTGGTTTAGTAAAGTCTGTCCATTCGTCGAGACCGTCTTTTGC
+AGCCTCATTCTACCAACTGCTCCGACTCTTAGTCTGCTTCTCCCAGCGTTATAACAAGAG
+GCATTTTGTCATCCTTAAAACAATAATAAAGAACTCGGAGCACTGATATAATGACTGAAT
+TAGAACCGCTTAAAAATACAACGAATAGATAAGACTATCGGATAAGATCTAATATGTAGT
+GATTAAGCCCTTTATTAATTAATAATAGTTACCCTTTCTGATGTAACGCGACATATTACG
+ATTTAGTGGCACGTCTGAATTGCAAAGCAGATCTCTACCCGATTTTTATTATAAATCCCG
+TATACATCTTGACTTGAGTAATTGTTCATCTTTTTATATCTCTTCGTACTACAAATAATT
+AATATCTCAACCCGTATTGTGTGATTCTAATTACCAACAGAATACGAGGAGGTTTTTGCT
+TAGGGCCATATATAATGAATCTATCTCGTTTATTCGCGGAACCCGAGATAACATTACGAT
+GTAACTATTTTAGAGAACTTAATACAAGAAACATTGCTGATTACTCATAACTAAATGCTT
+GGTAATATATCCTCAGTGCCCCTACCATCTTTTACGCAGGGATGTAATTACTTAGGATTC
+ATTGTGTAAGAATTACAATGAACGATGGATATGAAGGCATGTTGCGAGGTGTTCCTTGGT
+ATGTGAAGTTCGCAGGGCAACAAAAATTTCGCAGAATAGGCCTCAAAGTATTGGTAAAGA
+AGACAACTAATCATCACGAGCTTCTGATATCAATACGAACGAGTCCTGTGATGGATGAAA
+GAAAGTCGTATCGAAAATGTCAAGAGTCTGCCCAATGTAACTTACTTCAAAAAATAACGC
+TTCCGCCAAGTACGTTCGAATAAACGTAATTTTAAAAATACATAAGGGGTGTTAGAAAGT
+AAGCGACGGGATATAAGTTAGACTCAAGATTCCGCCGTAAAACGAGACTGATTCCGAAGA
+TTGTTCGTGGATCTGGTCATGACTTTCACTGAGTAAGGAGTTTCGACATATGTCAATAAA
+CACAAAAATAGAAGCTATTCGATCTGAAAAATATTAGGACAAGAAACTATCTCACGCTAG
+CCCAGAATATTCACTCACCCACGGGCGATACTAAAGCACTATATAGTCGCGTGATTACTA
+TACATATGGTACACATAAGAATCACGATCAGGTTCTCAATTTTCAACAATATATGTTTAT
+TTGCATAGGTAATATTAGGCCTTTAAGAGAAGGATGGGTGAGATACTCCGGGGATGGCGG
+CAATAAAGAAAAACACGATATGAGTAATAGGATCCTAATATCTTGGCGAGAGACTTAAGG
+TACGAATTTTGCGCAATCTATTTTTTACTTGGCCAGAATTCATGTATGGTATAAGTACGA
+ACTTTTTTGATCACTTTCATGGCTACCTGATTAGGATAGTTTGAGGAATTTCCCAAATAT
+ACCGATTTAATATACACTAGGGCTTGTCACTTTGAGTCAGAAAAAGAATATAATTACTTA
+GGGTAATGCTGCATACATATTCTTATATTGCAAAGGTTCTCTGGGTAATCTTGAGCCTTC
+ACGATACCTGGTGAAGTGTT
diff --git a/test/pending/shootout/revcomp.scala-3.scala b/test/pending/shootout/revcomp.scala-3.scala
new file mode 100644
index 0000000000..fa875f4b22
--- /dev/null
+++ b/test/pending/shootout/revcomp.scala-3.scala
@@ -0,0 +1,147 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy
+*/
+
+import java.io._
+import scala.collection.mutable.Stack
+
+object revcomp {
+ def main(args: Array[String]) = {
+ val out = new FastaOutputStream(System.out)
+ val in = new FastaInputStream(System.in)
+
+ out.writeReverseComplement( in.readSequenceStack )
+ out.writeReverseComplement( in.readSequenceStack )
+ out.writeReverseComplement( in.readSequenceStack )
+
+ in.close
+ out.close
+ }
+}
+
+
+trait FastaByteStream {
+ val nl = '\n'.toByte
+
+ type Line = Array[byte]
+ type LineStack = Stack[Line]
+}
+
+
+// extend the Java BufferedInputStream class
+
+final class FastaInputStream(in: InputStream)
+ extends BufferedInputStream(in) with FastaByteStream {
+
+ val gt = '>'.toByte
+ val sc = ';'.toByte
+
+ def readSequenceStack(): Pair[Line,LineStack] = {
+ var header: Line = null
+ val lines: LineStack = new Stack
+
+ var line = readLine()
+ while (line != null) {
+ val c = line(0)
+ if (c == gt){ // '>'
+ if (header == null){
+ header = line
+ } else {
+ pos = pos - line.length - 1 // reposition to start of line
+ return Pair(header,lines)
+ }
+ } else {
+ if (c != sc) lines += line // ';'
+ }
+ line = readLine()
+ }
+ return Pair(header,lines)
+ }
+
+ def readLine() = {
+ var bytes: Line = null
+ if (in == null) bytes
+ else {
+ mark(128) // mark the start of the line
+ if (count == 0) read() // fill buffer
+
+ var i = markpos
+ while (i < count && buf(i) != nl) i = i + 1
+
+ if (i >= count){ // line extends past end of buffer
+ pos = i; read(); i = pos; // fill buffer again
+ while (i < count && buf(i) != nl) i = i + 1
+ }
+
+ if (i < count){
+ bytes = new Array(i - markpos)
+ System.arraycopy(buf, markpos, bytes, 0, i - markpos);
+ pos = i+1
+ }
+ }
+ bytes
+ }
+}
+
+
+// extend the Java BufferedOutputStream class
+
+final class FastaOutputStream(in: OutputStream)
+ extends BufferedOutputStream(in) with FastaByteStream {
+
+ private val IUB = IUBCodeComplements
+
+ private def IUBCodeComplements() = {
+ val code = "ABCDGHKMNRSTVWYabcdghkmnrstvwy".getBytes
+ val comp = "TVGHCDMKNYSABWRTVGHCDMKNYSABWR".getBytes
+ val iub: Array[byte] = new Array( 'z'.toByte )
+
+ for (val indexValue <- code zip comp)
+ indexValue match { case Pair(i,v) => iub(i) = v }
+
+ iub
+ }
+
+ def writeReverseComplement(sequence: Pair[Line,LineStack]) = {
+
+ def inplaceComplementReverse(b: Array[byte]) = {
+ var i = 0
+ var j = b.length - 1
+ while (i < j){
+ val swap = b(i)
+ b(i) = IUB( b(j) )
+ b(j) = IUB( swap )
+ i = i + 1
+ j = j - 1
+ }
+ if (i == j) b(i) = IUB( b(i) )
+ }
+
+ sequence match {
+ case Pair(header,lines) => {
+
+ write(header); write(nl)
+
+ val k = if (lines.isEmpty) 0 else lines.top.length
+ val LineLength = 60
+ val isSplitLine = k < LineLength
+ var isFirstLine = true
+
+ while (!lines.isEmpty) {
+ val line = lines.pop
+ inplaceComplementReverse(line)
+
+ if (isSplitLine){
+ if (isFirstLine){ write(line); isFirstLine = false }
+ else { write(line,0,LineLength-k); write(nl); write(line,LineLength-k,k) }
+ }
+ else { write(line); write(nl) }
+ }
+
+ if (isSplitLine && !isFirstLine) write(nl)
+ }
+ }
+ }
+
+}
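A usage note on the reader above: readSequenceStack returns one (header, lines) pair per call, and when it hits the next '>' header it rewinds the buffered position so that header is re-read by the following call; because the lines were pushed onto a Stack, they pop back out last-line-first, which is the order the reversed output is written in. Sketched with the same API as above:

    // val in = new FastaInputStream(System.in)
    // val Pair(header, lines) = in.readSequenceStack()   // first sequence
    // val next = in.readSequenceStack()                   // resumes at the pushed-back '>' line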
diff --git a/test/pending/shootout/revcomp.scala-3.scala.runner b/test/pending/shootout/revcomp.scala-3.scala.runner
new file mode 100644
index 0000000000..0f68f0da01
--- /dev/null
+++ b/test/pending/shootout/revcomp.scala-3.scala.runner
@@ -0,0 +1,6 @@
+object Test extends Application {
+ for(val n <- List(25000,250000,2500000)) {
+ System.setIn(new java.io.FileInputStream(System.getProperty("partest.cwd")+"/revcomp-input"+n+".txt"))
+ revcomp.main(Array(n.toString))
+ }
+}
diff --git a/test/pending/shootout/sieve.scala b/test/pending/shootout/sieve.scala
new file mode 100644
index 0000000000..06d7214ea6
--- /dev/null
+++ b/test/pending/shootout/sieve.scala
@@ -0,0 +1,43 @@
+/* The Computer Language Shootout
+ http://shootout.alioth.debian.org/
+ contributed by Isaac Gouy (Scala novice)
+*/
+
+object sieve {
+ def main(args: Array[String]) = {
+ var n = toPositiveInt(args);
+ val start = 2;
+ val stop = 8192;
+ val isPrime = new Array[Boolean](stop+1);
+ var count: Int = _;
+
+ while (n>0) {
+ count = 0;
+
+ for (val i <- Iterator.range(start,stop+1))
+ isPrime(i)=true;
+
+ for (val i <- Iterator.range(start,stop+1)) {
+ if( isPrime(i) ) {
+ var k = i+i;
+ while (k<=stop) { isPrime(k)=false; k=k+i; }
+ count = count+1;
+ }
+ }
+ n=n-1;
+ }
+
+ Console.println("Count: " + count);
+ }
+
+
+ private def toPositiveInt(s: Array[String]) = {
+ val i =
+ try { Integer.parseInt(s(0)); }
+ catch { case _ => 1 }
+ if (i>0) i; else 1;
+ }
+}
+
+
+
diff --git a/test/pending/shootout/sieve.scala.runner b/test/pending/shootout/sieve.scala.runner
new file mode 100644
index 0000000000..1c53d1dd72
--- /dev/null
+++ b/test/pending/shootout/sieve.scala.runner
@@ -0,0 +1,3 @@
+object Test extends Application {
+ for(val n <- List(300,600,900,1200)) sieve.main(Array(n.toString))
+}
diff --git a/test/postreview.py b/test/postreview.py
new file mode 100644
index 0000000000..2e2518f7ee
--- /dev/null
+++ b/test/postreview.py
@@ -0,0 +1,2540 @@
+#!/usr/bin/env python
+import cookielib
+import difflib
+import getpass
+import marshal
+import mimetools
+import ntpath
+import os
+import re
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import urllib
+import urllib2
+from optparse import OptionParser
+from tempfile import mkstemp
+from urlparse import urljoin, urlparse
+
+try:
+ from hashlib import md5
+except ImportError:
+ # Support Python versions before 2.5.
+ from md5 import md5
+
+try:
+ import json
+except ImportError:
+ import simplejson as json
+
+# This specific import is necessary to handle the paths for
+# cygwin enabled machines.
+if (sys.platform.startswith('win')
+ or sys.platform.startswith('cygwin')):
+ import ntpath as cpath
+else:
+ import posixpath as cpath
+
+###
+# Default configuration -- user-settable variables follow.
+###
+
+# The following settings usually aren't needed, but if your Review
+# Board crew has specific preferences and doesn't want to express
+# them with command line switches, set them here and you're done.
+# In particular, setting the REVIEWBOARD_URL variable will allow
+# you to make it easy for people to submit reviews regardless of
+# their SCM setup.
+#
+# Note that in order for this script to work with a reviewboard site
+# that uses local paths to access a repository, the 'Mirror path'
+# in the repository setup page must be set to the remote URL of the
+# repository.
+
+#
+# Reviewboard URL.
+#
+# Set this if you wish to hard-code a default server to always use.
+# It's generally recommended to set this using your SCM repository
+# (for those that support it -- currently only SVN, Git, and Perforce).
+#
+# For example, on SVN:
+# $ svn propset reviewboard:url http://reviewboard.example.com .
+#
+# Or with Git:
+# $ git config reviewboard.url http://reviewboard.example.com
+#
+# On Perforce servers version 2008.1 and above:
+# $ p4 counter reviewboard.url http://reviewboard.example.com
+#
+# Older Perforce servers only allow numerical counters, so embedding
+# the url in the counter name is also supported:
+# $ p4 counter reviewboard.url.http:\|\|reviewboard.example.com 1
+#
+# Note that slashes are not allowed in Perforce counter names, so replace them
+# with pipe characters (they are a safe substitute as they are not used
+# unencoded in URLs). You may need to escape them when issuing the p4 counter
+# command as above.
+#
+# If this is not possible or desired, setting the value here will let
+# you get started quickly.
+#
+# For all other repositories, a .reviewboardrc file present at the top of
+# the checkout will also work. For example:
+#
+# $ cat .reviewboardrc
+# REVIEWBOARD_URL = "http://reviewboard.example.com"
+#
+REVIEWBOARD_URL = None
+
+# Default submission arguments. These are all optional; run this
+# script with --help for descriptions of each argument.
+TARGET_GROUPS = None
+TARGET_PEOPLE = None
+SUBMIT_AS = None
+PUBLISH = False
+OPEN_BROWSER = False
+
+# Debugging. For development...
+DEBUG = False
+
+###
+# End user-settable variables.
+###
+
+
+VERSION = "0.8"
+
+user_config = None
+tempfiles = []
+options = None
+
+
+class APIError(Exception):
+ pass
+
+
+class RepositoryInfo:
+ """
+ A representation of a source code repository.
+ """
+ def __init__(self, path=None, base_path=None, supports_changesets=False,
+ supports_parent_diffs=False):
+ self.path = path
+ self.base_path = base_path
+ self.supports_changesets = supports_changesets
+ self.supports_parent_diffs = supports_parent_diffs
+ debug("repository info: %s" % self)
+
+ def __str__(self):
+ return "Path: %s, Base path: %s, Supports changesets: %s" % \
+ (self.path, self.base_path, self.supports_changesets)
+
+ def set_base_path(self, base_path):
+ if not base_path.startswith('/'):
+ base_path = '/' + base_path
+ debug("changing repository info base_path from %s to %s" % \
+ (self.base_path, base_path))
+ self.base_path = base_path
+
+ def find_server_repository_info(self, server):
+ """
+ Try to find the repository from the list of repositories on the server.
+ For Subversion, this could be a repository with a different URL. For
+ all other clients, this is a noop.
+ """
+ return self
+
+
+class SvnRepositoryInfo(RepositoryInfo):
+ """
+ A representation of an SVN source code repository. This version knows how to
+ find a matching repository on the server even if the URLs differ.
+ """
+ def __init__(self, path, base_path, uuid, supports_parent_diffs=False):
+ RepositoryInfo.__init__(self, path, base_path,
+ supports_parent_diffs=supports_parent_diffs)
+ self.uuid = uuid
+
+ def find_server_repository_info(self, server):
+ """
+ The point of this function is to find a repository on the server that
+ matches self, even if the paths aren't the same. (For example, if self
+ uses an 'http' path, but the server uses a 'file' path for the same
+ repository.) It does this by comparing repository UUIDs. If the
+ repositories use the same path, you'll get back self, otherwise you'll
+ get a different SvnRepositoryInfo object (with a different path).
+ """
+ repositories = server.get_repositories()
+
+ for repository in repositories:
+ if repository['tool'] != 'Subversion':
+ continue
+
+ info = self._get_repository_info(server, repository)
+
+ if not info or self.uuid != info['uuid']:
+ continue
+
+ repos_base_path = info['url'][len(info['root_url']):]
+ relpath = self._get_relative_path(self.base_path, repos_base_path)
+ if relpath:
+ return SvnRepositoryInfo(info['url'], relpath, self.uuid)
+
+ # We didn't find a matching repository on the server. We'll just return
+ # self and hope for the best.
+ return self
+
+ def _get_repository_info(self, server, repository):
+ try:
+ return server.get_repository_info(repository['id'])
+ except APIError, e:
+ # If the server couldn't fetch the repository info, it will return
+ # code 210. Ignore those.
+ # Other more serious errors should still be raised, though.
+ rsp = e.args[0]
+ if rsp['err']['code'] == 210:
+ return None
+
+ raise e
+
+ def _get_relative_path(self, path, root):
+ pathdirs = self._split_on_slash(path)
+ rootdirs = self._split_on_slash(root)
+
+ # root is empty, so anything relative to that is itself
+ if len(rootdirs) == 0:
+ return path
+
+ # If one of the directories doesn't match, then path is not relative
+ # to root.
+ if rootdirs != pathdirs:
+ return None
+
+ # All the directories matched, so the relative path is whatever
+ # directories are left over. The base_path can't be empty, though, so
+ # if the paths are the same, return '/'
+ if len(pathdirs) == len(rootdirs):
+ return '/'
+ else:
+ return '/'.join(pathdirs[len(rootdirs):])
+
+ def _split_on_slash(self, path):
+ # Split on slashes, but ignore multiple slashes and throw away any
+ # trailing slashes.
+ split = re.split('/*', path)
+ if split[-1] == '':
+ split = split[0:-1]
+ return split
+
+
+class ReviewBoardHTTPPasswordMgr(urllib2.HTTPPasswordMgr):
+ """
+ Adds HTTP authentication support for URLs.
+
+ Python 2.4's password manager has a bug in http authentication when the
+ target server uses a non-standard port. This works around that bug on
+ Python 2.4 installs. This also allows post-review to prompt for passwords
+ in a consistent way.
+
+ See: http://bugs.python.org/issue974757
+ """
+ def __init__(self, reviewboard_url):
+ self.passwd = {}
+ self.rb_url = reviewboard_url
+ self.rb_user = None
+ self.rb_pass = None
+
+ def find_user_password(self, realm, uri):
+ if uri.startswith(self.rb_url):
+ if self.rb_user is None or self.rb_pass is None:
+ print "==> HTTP Authentication Required"
+ print 'Enter username and password for "%s" at %s' % \
+ (realm, urlparse(uri)[1])
+ self.rb_user = raw_input('Username: ')
+ self.rb_pass = getpass.getpass('Password: ')
+
+ return self.rb_user, self.rb_pass
+ else:
+ # If this is an auth request for some other domain (since HTTP
+ # handlers are global), fall back to standard password management.
+ return urllib2.HTTPPasswordMgr.find_user_password(self, realm, uri)
+
+
+class ReviewBoardServer(object):
+ """
+ An instance of a Review Board server.
+ """
+ def __init__(self, url, info, cookie_file):
+ self.url = url
+ if self.url[-1] != '/':
+ self.url += '/'
+ self._info = info
+ self._server_info = None
+ self.cookie_file = cookie_file
+ self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
+
+ # Set up the HTTP libraries to support all of the features we need.
+ cookie_handler = urllib2.HTTPCookieProcessor(self.cookie_jar)
+ password_mgr = ReviewBoardHTTPPasswordMgr(self.url)
+ auth_handler = urllib2.HTTPBasicAuthHandler(password_mgr)
+
+ opener = urllib2.build_opener(cookie_handler, auth_handler)
+ opener.addheaders = [('User-agent', 'post-review/' + VERSION)]
+ urllib2.install_opener(opener)
+
+ def login(self, force=False):
+ """
+ Logs in to a Review Board server, prompting the user for login
+ information if needed.
+ """
+ if not force and self.has_valid_cookie():
+ return
+
+ print "==> Review Board Login Required"
+ print "Enter username and password for Review Board at %s" % self.url
+ if options.username:
+ username = options.username
+ elif options.submit_as:
+ username = options.submit_as
+ else:
+ username = raw_input('Username: ')
+
+ if not options.password:
+ password = getpass.getpass('Password: ')
+ else:
+ password = options.password
+
+ debug('Logging in with username "%s"' % username)
+ try:
+ self.api_post('api/json/accounts/login/', {
+ 'username': username,
+ 'password': password,
+ })
+ except APIError, e:
+ rsp, = e.args
+
+ die("Unable to log in: %s (%s)" % (rsp["err"]["msg"],
+ rsp["err"]["code"]))
+
+ debug("Logged in.")
+
+ def has_valid_cookie(self):
+ """
+ Load the user's cookie file and see if they have a valid
+ 'rbsessionid' cookie for the current Review Board server. Returns
+ true if so and false otherwise.
+ """
+ try:
+ parsed_url = urlparse(self.url)
+ host = parsed_url[1]
+ path = parsed_url[2] or '/'
+
+ # Cookie files don't store port numbers, unfortunately, so
+ # get rid of the port number if it's present.
+ host = host.split(":")[0]
+
+ debug("Looking for '%s %s' cookie in %s" % \
+ (host, path, self.cookie_file))
+ self.cookie_jar.load(self.cookie_file, ignore_expires=True)
+
+ try:
+ cookie = self.cookie_jar._cookies[host][path]['rbsessionid']
+
+ if not cookie.is_expired():
+ debug("Loaded valid cookie -- no login required")
+ return True
+
+ debug("Cookie file loaded, but cookie has expired")
+ except KeyError:
+ debug("Cookie file loaded, but no cookie for this server")
+ except IOError, error:
+ debug("Couldn't load cookie file: %s" % error)
+
+ return False
+
+ def new_review_request(self, changenum, submit_as=None):
+ """
+ Creates a review request on a Review Board server, updating an
+ existing one if the changeset number already exists.
+
+ If submit_as is provided, the specified user name will be recorded as
+ the submitter of the review request (given that the logged in user has
+ the appropriate permissions).
+ """
+ try:
+ debug("Attempting to create review request for %s" % changenum)
+ data = { 'repository_path': self.info.path }
+
+ if changenum:
+ data['changenum'] = changenum
+
+ if submit_as:
+ debug("Submitting the review request as %s" % submit_as)
+ data['submit_as'] = submit_as
+
+ rsp = self.api_post('api/json/reviewrequests/new/', data)
+ except APIError, e:
+ rsp, = e.args
+
+ if not options.diff_only:
+ if rsp['err']['code'] == 204: # Change number in use
+ debug("Review request already exists. Updating it...")
+ rsp = self.api_post(
+ 'api/json/reviewrequests/%s/update_from_changenum/' %
+ rsp['review_request']['id'])
+ else:
+ raise e
+
+ debug("Review request created")
+ return rsp['review_request']
+
+ def set_review_request_field(self, review_request, field, value):
+ """
+ Sets a field in a review request to the specified value.
+ """
+ rid = review_request['id']
+
+ debug("Attempting to set field '%s' to '%s' for review request '%s'" %
+ (field, value, rid))
+
+ self.api_post('api/json/reviewrequests/%s/draft/set/' % rid, {
+ field: value,
+ })
+
+ def get_review_request(self, rid):
+ """
+ Returns the review request with the specified ID.
+ """
+ rsp = self.api_get('api/json/reviewrequests/%s/' % rid)
+ return rsp['review_request']
+
+ def get_repositories(self):
+ """
+ Returns the list of repositories on this server.
+ """
+ rsp = self.api_get('/api/json/repositories/')
+ return rsp['repositories']
+
+ def get_repository_info(self, rid):
+ """
+ Returns detailed information about a specific repository.
+ """
+ rsp = self.api_get('/api/json/repositories/%s/info/' % rid)
+ return rsp['info']
+
+ def save_draft(self, review_request):
+ """
+ Saves a draft of a review request.
+ """
+ self.api_post("api/json/reviewrequests/%s/draft/save/" %
+ review_request['id'])
+ debug("Review request draft saved")
+
+ def upload_diff(self, review_request, diff_content, parent_diff_content):
+ """
+ Uploads a diff to a Review Board server.
+ """
+ debug("Uploading diff, size: %d" % len(diff_content))
+
+ if parent_diff_content:
+ debug("Uploading parent diff, size: %d" % len(parent_diff_content))
+
+ fields = {}
+ files = {}
+
+ if self.info.base_path:
+ fields['basedir'] = self.info.base_path
+
+ files['path'] = {
+ 'filename': 'diff',
+ 'content': diff_content
+ }
+
+ if parent_diff_content:
+ files['parent_diff_path'] = {
+ 'filename': 'parent_diff',
+ 'content': parent_diff_content
+ }
+
+ self.api_post('api/json/reviewrequests/%s/diff/new/' %
+ review_request['id'], fields, files)
+
+ def publish(self, review_request):
+ """
+ Publishes a review request.
+ """
+ debug("Publishing")
+ self.api_post('api/json/reviewrequests/%s/publish/' %
+ review_request['id'])
+
+ def _get_server_info(self):
+ if not self._server_info:
+ self._server_info = self._info.find_server_repository_info(self)
+
+ return self._server_info
+
+ info = property(_get_server_info)
+
+ def process_json(self, data):
+ """
+ Loads in a JSON file and returns the data if successful. On failure,
+ APIError is raised.
+ """
+ rsp = json.loads(data)
+
+ if rsp['stat'] == 'fail':
+ raise APIError, rsp
+
+ return rsp
+
+ def http_get(self, path):
+ """
+ Performs an HTTP GET on the specified path, storing any cookies that
+ were set.
+ """
+ debug('HTTP GETting %s' % path)
+
+ url = self._make_url(path)
+
+ try:
+ rsp = urllib2.urlopen(url).read()
+ self.cookie_jar.save(self.cookie_file)
+ return rsp
+ except urllib2.HTTPError, e:
+ print "Unable to access %s (%s). The host path may be invalid" % \
+ (url, e.code)
+ try:
+ debug(e.read())
+ except AttributeError:
+ pass
+ die()
+
+ def _make_url(self, path):
+ """Given a path on the server returns a full http:// style url"""
+ app = urlparse(self.url)[2]
+ if path[0] == '/':
+ url = urljoin(self.url, app[:-1] + path)
+ else:
+ url = urljoin(self.url, app + path)
+
+ if not url.startswith('http'):
+ url = 'http://%s' % url
+ return url
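+
+        # Illustrative sketch of the URL joining above (server URL and paths
+        # are hypothetical): with self.url = 'http://reviews.example.com/rb/',
+        # app is '/rb/', so
+        #   _make_url('api/json/repositories/') -> 'http://reviews.example.com/rb/api/json/repositories/'
+        #   _make_url('/r/42/')                 -> 'http://reviews.example.com/rb/r/42/'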
+
+ def api_get(self, path):
+ """
+ Performs an API call using HTTP GET at the specified path.
+ """
+ return self.process_json(self.http_get(path))
+
+ def http_post(self, path, fields, files=None):
+ """
+ Performs an HTTP POST on the specified path, storing any cookies that
+ were set.
+ """
+ if fields:
+ debug_fields = fields.copy()
+ else:
+ debug_fields = {}
+
+ if 'password' in debug_fields:
+ debug_fields["password"] = "**************"
+ url = self._make_url(path)
+ debug('HTTP POSTing to %s: %s' % (url, debug_fields))
+
+ content_type, body = self._encode_multipart_formdata(fields, files)
+ headers = {
+ 'Content-Type': content_type,
+ 'Content-Length': str(len(body))
+ }
+
+ try:
+ r = urllib2.Request(url, body, headers)
+ data = urllib2.urlopen(r).read()
+ self.cookie_jar.save(self.cookie_file)
+ return data
+ except urllib2.URLError, e:
+ try:
+ debug(e.read())
+ except AttributeError:
+ pass
+
+ die("Unable to access %s. The host path may be invalid\n%s" % \
+ (url, e))
+ except urllib2.HTTPError, e:
+ die("Unable to access %s (%s). The host path may be invalid\n%s" % \
+ (url, e.code, e.read()))
+
+ def api_post(self, path, fields=None, files=None):
+ """
+ Performs an API call using HTTP POST at the specified path.
+ """
+ return self.process_json(self.http_post(path, fields, files))
+
+ def _encode_multipart_formdata(self, fields, files):
+ """
+ Encodes data for use in an HTTP POST.
+ """
+ BOUNDARY = mimetools.choose_boundary()
+ content = ""
+
+ fields = fields or {}
+ files = files or {}
+
+ for key in fields:
+ content += "--" + BOUNDARY + "\r\n"
+ content += "Content-Disposition: form-data; name=\"%s\"\r\n" % key
+ content += "\r\n"
+ content += fields[key] + "\r\n"
+
+ for key in files:
+ filename = files[key]['filename']
+ value = files[key]['content']
+ content += "--" + BOUNDARY + "\r\n"
+ content += "Content-Disposition: form-data; name=\"%s\"; " % key
+ content += "filename=\"%s\"\r\n" % filename
+ content += "\r\n"
+ content += value + "\r\n"
+
+ content += "--" + BOUNDARY + "--\r\n"
+ content += "\r\n"
+
+ content_type = "multipart/form-data; boundary=%s" % BOUNDARY
+
+ return content_type, content
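+
+        # A rough sketch of the body built above for one field and one file
+        # (the boundary and values are illustrative, not what mimetools will
+        # actually generate):
+        #   --<boundary>
+        #   Content-Disposition: form-data; name="basedir"
+        #
+        #   /trunk
+        #   --<boundary>
+        #   Content-Disposition: form-data; name="path"; filename="diff"
+        #
+        #   <diff content>
+        #   --<boundary>--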
+
+
+class SCMClient(object):
+ """
+ A base representation of an SCM tool for fetching repository information
+ and generating diffs.
+ """
+ def get_repository_info(self):
+ return None
+
+ def scan_for_server(self, repository_info):
+ """
+        Scans from the current directory upward, looking for a .reviewboardrc
+        file containing the server path.
+ """
+ server_url = self._get_server_from_config(user_config, repository_info)
+ if server_url:
+ return server_url
+
+ for path in walk_parents(os.getcwd()):
+ filename = os.path.join(path, ".reviewboardrc")
+ if os.path.exists(filename):
+ config = load_config_file(filename)
+ server_url = self._get_server_from_config(config,
+ repository_info)
+ if server_url:
+ return server_url
+
+ return None
+
+ def diff(self, args):
+ """
+ Returns the generated diff and optional parent diff for this
+ repository.
+
+ The returned tuple is (diff_string, parent_diff_string)
+ """
+ return (None, None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Returns the generated diff between revisions in the repository.
+ """
+ return None
+
+ def _get_server_from_config(self, config, repository_info):
+ if 'REVIEWBOARD_URL' in config:
+ return config['REVIEWBOARD_URL']
+ elif 'TREES' in config:
+ trees = config['TREES']
+ if not isinstance(trees, dict):
+ die("Warning: 'TREES' in config file is not a dict!")
+
+ if repository_info.path in trees and \
+ 'REVIEWBOARD_URL' in trees[repository_info.path]:
+ return trees[repository_info.path]['REVIEWBOARD_URL']
+
+ return None
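+
+    # A minimal .reviewboardrc sketch that the lookup above understands (the
+    # server URL and repository path are hypothetical examples, not defaults):
+    #
+    #   REVIEWBOARD_URL = 'http://reviewboard.example.com/'
+    #
+    # or, to map individual repositories to servers:
+    #
+    #   TREES = {
+    #       'https://svn.example.com/repos/project': {
+    #           'REVIEWBOARD_URL': 'http://reviewboard.example.com/',
+    #       },
+    #   }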
+
+
+class CVSClient(SCMClient):
+ """
+ A wrapper around the cvs tool that fetches repository
+ information and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install("cvs"):
+ return None
+
+ cvsroot_path = os.path.join("CVS", "Root")
+
+ if not os.path.exists(cvsroot_path):
+ return None
+
+ fp = open(cvsroot_path, "r")
+ repository_path = fp.read().strip()
+ fp.close()
+
+ i = repository_path.find("@")
+ if i != -1:
+ repository_path = repository_path[i + 1:]
+
+ i = repository_path.find(":")
+ if i != -1:
+ host = repository_path[:i]
+ try:
+ canon = socket.getfqdn(host)
+ repository_path = repository_path.replace('%s:' % host,
+ '%s:' % canon)
+ except socket.error, msg:
+ debug("failed to get fqdn for %s, msg=%s" % (host, msg))
+
+ return RepositoryInfo(path=repository_path)
+
+ def diff(self, files):
+ """
+ Performs a diff across all modified files in a CVS repository.
+
+ CVS repositories do not support branches of branches in a way that
+ makes parent diffs possible, so we never return a parent diff
+ (the second value in the tuple).
+ """
+ return (self.do_diff(files), None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a CVS repository.
+ """
+ revs = []
+
+ for rev in revision_range.split(":"):
+ revs += ["-r", rev]
+
+ return self.do_diff(revs)
+
+ def do_diff(self, params):
+ """
+ Performs the actual diff operation through cvs diff, handling
+ fake errors generated by CVS.
+ """
+ # Diff returns "1" if differences were found.
+ return execute(["cvs", "diff", "-uN"] + params,
+ extra_ignore_errors=(1,))
+
+
+class ClearCaseClient(SCMClient):
+ """
+ A wrapper around the clearcase tool that fetches repository
+ information and generates compatible diffs.
+    This client assumes that Cygwin is installed on Windows.
+ """
+ ccroot_path = "/view/reviewboard.diffview/vobs/"
+ viewinfo = ""
+ viewtype = "snapshot"
+
+ def get_filename_hash(self, fname):
+        # Hash the filename string so it's easy to find the file later on.
+ return md5(fname).hexdigest()
+
+ def get_repository_info(self):
+ if not check_install('cleartool help'):
+ return None
+
+ # We must be running this from inside a view.
+ # Otherwise it doesn't make sense.
+ self.viewinfo = execute(["cleartool", "pwv", "-short"])
+        if self.viewinfo.startswith('** NONE'):
+ return None
+
+ # Returning the hardcoded clearcase root path to match the server
+        # repository path.
+ # There is no reason to have a dynamic path unless you have
+ # multiple clearcase repositories. This should be implemented.
+ return RepositoryInfo(path=self.ccroot_path,
+ base_path=self.ccroot_path,
+ supports_parent_diffs=False)
+
+ def get_previous_version(self, files):
+ file = []
+ curdir = os.getcwd()
+
+        # In the Cygwin case we must transform a Linux-like path into a
+        # Windows-like path, including the drive letter.
+ if 'cygdrive' in curdir:
+ where = curdir.index('cygdrive') + 9
+ drive_letter = curdir[where:where+1]
+ curdir = drive_letter + ":\\" + curdir[where+2:len(curdir)]
+
+ for key in files:
+ # Sometimes there is a quote in the filename. It must be removed.
+ key = key.replace('\'', '')
+ elem_path = cpath.normpath(os.path.join(curdir, key))
+
+ # Removing anything before the last /vobs
+ # because it may be repeated.
+ elem_path_idx = elem_path.rfind("/vobs")
+ if elem_path_idx != -1:
+ elem_path = elem_path[elem_path_idx:len(elem_path)].strip("\"")
+
+ # Call cleartool to get this version and the previous version
+ # of the element.
+ curr_version, pre_version = execute(
+ ["cleartool", "desc", "-pre", elem_path])
+ curr_version = cpath.normpath(curr_version)
+ pre_version = pre_version.split(':')[1].strip()
+
+ # If a specific version was given, remove it from the path
+ # to avoid version duplication
+ if "@@" in elem_path:
+ elem_path = elem_path[:elem_path.rfind("@@")]
+ file.append(elem_path + "@@" + pre_version)
+ file.append(curr_version)
+
+        # Determine if the view type is snapshot or dynamic.
+ if os.path.exists(file[0]):
+ self.viewtype = "dynamic"
+
+ return file
+
+ def get_extended_namespace(self, files):
+ """
+ Parses the file path to get the extended namespace
+ """
+ versions = self.get_previous_version(files)
+
+ evfiles = []
+ hlist = []
+
+ for vkey in versions:
+ # Verify if it is a checkedout file.
+ if "CHECKEDOUT" in vkey:
+ # For checkedout files just add it to the file list
+ # since it cannot be accessed outside the view.
+ splversions = vkey[:vkey.rfind("@@")]
+ evfiles.append(splversions)
+ else:
+ # For checkedin files.
+ ext_path = []
+ ver = []
+ fname = "" # fname holds the file name without the version.
+ (bpath, fpath) = cpath.splitdrive(vkey)
+ if bpath :
+ # Windows.
+ # The version (if specified like file.c@@/main/1)
+ # should be kept as a single string
+ # so split the path and concat the file name
+ # and version in the last position of the list.
+ ver = fpath.split("@@")
+ splversions = fpath[:vkey.rfind("@@")].split("\\")
+ fname = splversions.pop()
+ splversions.append(fname + ver[1])
+ else :
+ # Linux.
+ bpath = vkey[:vkey.rfind("vobs")+4]
+ fpath = vkey[vkey.rfind("vobs")+5:]
+ ver = fpath.split("@@")
+ splversions = ver[0][:vkey.rfind("@@")].split("/")
+ fname = splversions.pop()
+ splversions.append(fname + ver[1])
+
+ filename = splversions.pop()
+ bpath = cpath.normpath(bpath + "/")
+ elem_path = bpath
+
+ for key in splversions:
+ # For each element (directory) in the path,
+ # get its version from clearcase.
+ elem_path = cpath.join(elem_path, key)
+
+ # This is the version to be appended to the extended
+ # path list.
+ this_version = execute(
+ ["cleartool", "desc", "-fmt", "%Vn",
+ cpath.normpath(elem_path)])
+ if this_version:
+ ext_path.append(key + "/@@" + this_version + "/")
+ else:
+ ext_path.append(key + "/")
+
+ # This must be done in case we haven't specified
+ # the version on the command line.
+ ext_path.append(cpath.normpath(fname + "/@@" +
+ vkey[vkey.rfind("@@")+2:len(vkey)]))
+ epstr = cpath.join(bpath, cpath.normpath(''.join(ext_path)))
+ evfiles.append(epstr)
+
+ """
+                In Windows, there is a problem with long names (> 254).
+ In this case, we hash the string and copy the unextended
+ filename to a temp file whose name is the hash.
+ This way we can get the file later on for diff.
+ The same problem applies to snapshot views where the
+ extended name isn't available.
+ The previous file must be copied from the CC server
+ to a local dir.
+ """
+ if cpath.exists(epstr) :
+ pass
+ else:
+ if len(epstr) > 254 or self.viewtype == "snapshot":
+ name = self.get_filename_hash(epstr)
+ # Check if this hash is already in the list
+ try:
+ i = hlist.index(name)
+ die("ERROR: duplicate value %s : %s" %
+ (name, epstr))
+ except ValueError:
+ hlist.append(name)
+
+ normkey = cpath.normpath(vkey)
+ td = tempfile.gettempdir()
+ # Cygwin case must transform a linux-like path to
+ # windows like path including drive letter
+ if 'cygdrive' in td:
+ where = td.index('cygdrive') + 9
+ drive_letter = td[where:where+1] + ":"
+ td = cpath.join(drive_letter, td[where+1:])
+ tf = cpath.normpath(cpath.join(td, name))
+ if cpath.exists(tf):
+ debug("WARNING: FILE EXISTS")
+ os.unlink(tf)
+ execute(["cleartool", "get", "-to", tf, normkey])
+ else:
+ die("ERROR: FILE NOT FOUND : %s" % epstr)
+
+ return evfiles
+
+ def get_files_from_label(self, label):
+ voblist=[]
+ # Get the list of vobs for the current view
+ allvoblist = execute(["cleartool", "lsvob", "-short"]).split()
+ # For each vob, find if the label is present
+ for vob in allvoblist:
+ try:
+ execute(["cleartool", "describe", "-local",
+ "lbtype:%s@%s" % (label, vob)]).split()
+ voblist.append(vob)
+ except:
+ pass
+
+ filelist=[]
+ # For each vob containing the label, get the file list
+ for vob in voblist:
+ try:
+ res = execute(["cleartool", "find", vob, "-all", "-version",
+ "lbtype(%s)" % label, "-print"])
+ filelist.extend(res.split())
+ except :
+ pass
+
+        # Return only the unique items.
+ return set(filelist)
+
+ def diff(self, files):
+ """
+ Performs a diff of the specified file and its previous version.
+ """
+ # We must be running this from inside a view.
+ # Otherwise it doesn't make sense.
+ return self.do_diff(self.get_extended_namespace(files))
+
+ def diff_label(self, label):
+ """
+ Get the files that are attached to a label and diff them
+ TODO
+ """
+ return self.diff(self.get_files_from_label(label))
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a CC repository.
+ """
+ rev_str = ''
+
+ for rev in revision_range.split(":"):
+ rev_str += "-r %s " % rev
+
+ return self.do_diff(rev_str)
+
+ def do_diff(self, params):
+ # Diff returns "1" if differences were found.
+ # Add the view name and view type to the description
+ if options.description:
+ options.description = ("VIEW: " + self.viewinfo +
+ "VIEWTYPE: " + self.viewtype + "\n" + options.description)
+ else:
+ options.description = (self.viewinfo +
+ "VIEWTYPE: " + self.viewtype + "\n")
+
+ o = []
+ Feol = False
+ while len(params) > 0:
+ # Read both original and modified files.
+ onam = params.pop(0)
+ mnam = params.pop(0)
+ file_data = []
+ do_rem = False
+ # If the filename length is greater than 254 char for windows,
+ # we copied the file to a temp file
+ # because the open will not work for path greater than 254.
+ # This is valid for the original and
+ # modified files if the name size is > 254.
+ for filenam in (onam, mnam) :
+ if cpath.exists(filenam) and self.viewtype == "dynamic":
+ do_rem = False
+ fn = filenam
+ elif len(filenam) > 254 or self.viewtype == "snapshot":
+ fn = self.get_filename_hash(filenam)
+ fn = cpath.join(tempfile.gettempdir(), fn)
+ do_rem = True
+ fd = open(cpath.normpath(fn))
+ fdata = fd.readlines()
+ fd.close()
+ file_data.append(fdata)
+ # If the file was temp, it should be removed.
+ if do_rem:
+                    os.remove(fn)
+
+ modi = file_data.pop()
+ orig = file_data.pop()
+
+ # For snapshot views, the local directories must be removed because
+ # they will break the diff on the server. Just replacing
+ # everything before the view name (including the view name) for
+ # vobs do the work.
+ if (self.viewtype == "snapshot"
+ and (sys.platform.startswith('win')
+ or sys.platform.startswith('cygwin'))):
+ vinfo = self.viewinfo.rstrip("\r\n")
+ mnam = "c:\\\\vobs" + mnam[mnam.rfind(vinfo) + len(vinfo):]
+ onam = "c:\\\\vobs" + onam[onam.rfind(vinfo) + len(vinfo):]
+ # Call the diff lib to generate a diff.
+            # The dates are bogus, since they don't matter anyway. The only
+            # requirement is the two leading spaces, which the server needs in
+            # order to identify the headers correctly.
+ diff = difflib.unified_diff(orig, modi, onam, mnam,
+ ' 2002-02-21 23:30:39.942229878 -0800',
+ ' 2002-02-21 23:30:50.442260588 -0800', lineterm=' \n')
+ # Transform the generator output into a string output
+ # Use a comprehension instead of a generator,
+ # so 2.3.x doesn't fail to interpret.
+ diffstr = ''.join([str(l) for l in diff])
+ # Workaround for the difflib no new line at end of file
+ # problem.
+ if not diffstr.endswith('\n'):
+ diffstr = diffstr + ("\n\\ No newline at end of file\n")
+ o.append(diffstr)
+
+ ostr = ''.join(o)
+ return (ostr, None) # diff, parent_diff (not supported)
+
+
+class SVNClient(SCMClient):
+ """
+ A wrapper around the svn Subversion tool that fetches repository
+ information and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install('svn help'):
+ return None
+
+ # Get the SVN repository path (either via a working copy or
+ # a supplied URI)
+ svn_info_params = ["svn", "info"]
+ if options.repository_url:
+ svn_info_params.append(options.repository_url)
+ data = execute(svn_info_params,
+ ignore_errors=True)
+ m = re.search(r'^Repository Root: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ path = m.group(1)
+
+ m = re.search(r'^URL: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ base_path = m.group(1)[len(path):] or "/"
+
+ m = re.search(r'^Repository UUID: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ return SvnRepositoryInfo(path, base_path, m.group(1))
+
+ def scan_for_server(self, repository_info):
+ # Scan first for dot files, since it's faster and will cover the
+ # user's $HOME/.reviewboardrc
+ server_url = super(SVNClient, self).scan_for_server(repository_info)
+ if server_url:
+ return server_url
+
+ return self.scan_for_server_property(repository_info)
+
+ def scan_for_server_property(self, repository_info):
+ def get_url_prop(path):
+ url = execute(["svn", "propget", "reviewboard:url", path]).strip()
+ return url or None
+
+ for path in walk_parents(os.getcwd()):
+ if not os.path.exists(os.path.join(path, ".svn")):
+ break
+
+ prop = get_url_prop(path)
+ if prop:
+ return prop
+
+ return get_url_prop(repository_info.path)
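+
+    # The property checked above can be attached to the repository with plain
+    # svn commands; the server URL here is only an example:
+    #
+    #   svn propset reviewboard:url http://reviewboard.example.com .
+    #   svn commit -m "Point post-review at the Review Board server"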
+
+ def diff(self, files):
+ """
+ Performs a diff across all modified files in a Subversion repository.
+
+ SVN repositories do not support branches of branches in a way that
+ makes parent diffs possible, so we never return a parent diff
+ (the second value in the tuple).
+ """
+ return (self.do_diff(["svn", "diff", "--diff-cmd=diff"] + files),
+ None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a Subversion repository.
+ """
+ if options.repository_url:
+ revisions = revision_range.split(':')
+ if len(revisions) < 1:
+ return None
+ elif len(revisions) == 1:
+ revisions.append('HEAD')
+
+ # if a new path was supplied at the command line, set it
+ if len(args):
+ repository_info.set_base_path(args[0])
+
+ url = repository_info.path + repository_info.base_path
+
+ old_url = url + '@' + revisions[0]
+ new_url = url + '@' + revisions[1]
+
+ return self.do_diff(["svn", "diff", "--diff-cmd=diff", old_url,
+ new_url],
+ repository_info)
+ # Otherwise, perform the revision range diff using a working copy
+ else:
+ return self.do_diff(["svn", "diff", "--diff-cmd=diff", "-r",
+ revision_range],
+ repository_info)
+
+ def do_diff(self, cmd, repository_info=None):
+ """
+ Performs the actual diff operation, handling renames and converting
+ paths to absolute.
+ """
+ diff = execute(cmd, split_lines=True)
+ diff = self.handle_renames(diff)
+ diff = self.convert_to_absolute_paths(diff, repository_info)
+
+ return ''.join(diff)
+
+ def handle_renames(self, diff_content):
+ """
+ The output of svn diff is incorrect when the file in question came
+ into being via svn mv/cp. Although the patch for these files are
+ relative to its parent, the diff header doesn't reflect this.
+ This function fixes the relevant section headers of the patch to
+ portray this relationship.
+ """
+
+ # svn diff against a repository URL on two revisions appears to
+ # handle moved files properly, so only adjust the diff file names
+ # if they were created using a working copy.
+ if options.repository_url:
+ return diff_content
+
+ result = []
+
+ from_line = ""
+ for line in diff_content:
+ if line.startswith('--- '):
+ from_line = line
+ continue
+
+            # This is where we decide how to mangle the previous '--- '
+ if line.startswith('+++ '):
+ to_file, _ = self.parse_filename_header(line[4:])
+ info = self.svn_info(to_file)
+ if info.has_key("Copied From URL"):
+ url = info["Copied From URL"]
+ root = info["Repository Root"]
+ from_file = urllib.unquote(url[len(root):])
+ result.append(from_line.replace(to_file, from_file))
+ else:
+ result.append(from_line) #as is, no copy performed
+
+ # We only mangle '---' lines. All others get added straight to
+ # the output.
+ result.append(line)
+
+ return result
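+
+        # Illustrative example of the fix-up above (paths are hypothetical):
+        # for a file copied from /trunk/old.c, svn diff emits
+        #     --- new.c       (revision 0)
+        #     +++ new.c       (working copy)
+        # and the '--- ' line is rewritten to point at the copy source:
+        #     --- /trunk/old.c        (revision 0)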
+
+
+ def convert_to_absolute_paths(self, diff_content, repository_info):
+ """
+ Converts relative paths in a diff output to absolute paths.
+ This handles paths that have been svn switched to other parts of the
+ repository.
+ """
+
+ result = []
+
+ for line in diff_content:
+ front = None
+ if line.startswith('+++ ') or line.startswith('--- ') or line.startswith('Index: '):
+ front, line = line.split(" ", 1)
+
+ if front:
+ if line.startswith('/'): #already absolute
+ line = front + " " + line
+ else:
+ # filename and rest of line (usually the revision
+ # component)
+ file, rest = self.parse_filename_header(line)
+
+ # If working with a diff generated outside of a working
+ # copy, then file paths are already absolute, so just
+ # add initial slash.
+ if options.repository_url:
+ path = urllib.unquote(
+ "%s/%s" % (repository_info.base_path, file))
+ else:
+ info = self.svn_info(file)
+ url = info["URL"]
+ root = info["Repository Root"]
+ path = urllib.unquote(url[len(root):])
+
+ line = front + " " + path + rest
+
+ result.append(line)
+
+ return result
+
+ def svn_info(self, path):
+ """Return a dict which is the result of 'svn info' at a given path."""
+ svninfo = {}
+ for info in execute(["svn", "info", path],
+ split_lines=True):
+ parts = info.strip().split(": ", 1)
+ if len(parts) == 2:
+ key, value = parts
+ svninfo[key] = value
+
+ return svninfo
+
+ # Adapted from server code parser.py
+ def parse_filename_header(self, s):
+ parts = None
+ if "\t" in s:
+ # There's a \t separating the filename and info. This is the
+ # best case scenario, since it allows for filenames with spaces
+ # without much work.
+ parts = s.split("\t")
+
+ # There's spaces being used to separate the filename and info.
+ # This is technically wrong, so all we can do is assume that
+ # 1) the filename won't have multiple consecutive spaces, and
+ # 2) there's at least 2 spaces separating the filename and info.
+        if "  " in s:
+            parts = re.split(r"  +", s)
+
+ if parts:
+ parts[1] = '\t' + parts[1]
+ return parts
+
+ # strip off ending newline, and return it as the second component
+ return [s.split('\n')[0], '\n']
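+
+    # Illustrative behaviour of the parsing above (inputs are hypothetical):
+    #   'file.c\t(revision 4)\n'  ->  ['file.c', '\t(revision 4)\n']
+    #   'file.c\n'                ->  ['file.c', '\n']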
+
+
+class PerforceClient(SCMClient):
+ """
+ A wrapper around the p4 Perforce tool that fetches repository information
+ and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install('p4 help'):
+ return None
+
+ data = execute(["p4", "info"], ignore_errors=True)
+
+ m = re.search(r'^Server address: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ repository_path = m.group(1).strip()
+
+ try:
+ hostname, port = repository_path.split(":")
+ info = socket.gethostbyaddr(hostname)
+ repository_path = "%s:%s" % (info[0], port)
+ except (socket.gaierror, socket.herror):
+ pass
+
+ return RepositoryInfo(path=repository_path, supports_changesets=True)
+
+ def scan_for_server(self, repository_info):
+ # Scan first for dot files, since it's faster and will cover the
+ # user's $HOME/.reviewboardrc
+ server_url = \
+ super(PerforceClient, self).scan_for_server(repository_info)
+
+ if server_url:
+ return server_url
+
+ return self.scan_for_server_counter(repository_info)
+
+ def scan_for_server_counter(self, repository_info):
+ """
+ Checks the Perforce counters to see if the Review Board server's url
+ is specified. Since Perforce only started supporting non-numeric
+ counter values in server version 2008.1, we support both a normal
+ counter 'reviewboard.url' with a string value and embedding the url in
+ a counter name like 'reviewboard.url.http:||reviewboard.example.com'.
+ Note that forward slashes aren't allowed in counter names, so
+ pipe ('|') characters should be used. These should be safe because they
+ should not be used unencoded in urls.
+ """
+
+ counters_text = execute(["p4", "counters"])
+
+ # Try for a "reviewboard.url" counter first.
+ m = re.search(r'^reviewboard.url = (\S+)', counters_text, re.M)
+
+ if m:
+ return m.group(1)
+
+ # Next try for a counter of the form:
+        # reviewboard.url.http:||reviewboard.example.com
+ m2 = re.search(r'^reviewboard.url\.(\S+)', counters_text, re.M)
+
+ if m2:
+ return m2.group(1).replace('|', '/')
+
+ return None
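+
+    # Illustrative counter setups matching the scan above (URLs hypothetical):
+    #   p4 counter reviewboard.url http://reviewboard.example.com
+    # or, encoding the URL in the counter name with '|' in place of '/':
+    #   p4 counter reviewboard.url.http:||reviewboard.example.com 1
+    # which is decoded back to http://reviewboard.example.com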
+
+ def get_changenum(self, args):
+ if len(args) == 1:
+ try:
+ return str(int(args[0]))
+ except ValueError:
+ pass
+ return None
+
+ def diff(self, args):
+ """
+ Goes through the hard work of generating a diff on Perforce in order
+ to take into account adds/deletes and to provide the necessary
+ revision information.
+ """
+        # Set the P4 environment:
+ if options.p4_client:
+ os.environ['P4CLIENT'] = options.p4_client
+
+ if options.p4_port:
+ os.environ['P4PORT'] = options.p4_port
+
+ changenum = self.get_changenum(args)
+ if changenum is None:
+ return self._path_diff(args)
+ else:
+ return self._changenum_diff(changenum)
+
+
+ def _path_diff(self, args):
+ """
+ Process a path-style diff. See _changenum_diff for the alternate
+ version that handles specific change numbers.
+
+ Multiple paths may be specified in `args`. The path styles supported
+ are:
+
+ //path/to/file
+ Upload file as a "new" file.
+
+ //path/to/dir/...
+ Upload all files as "new" files.
+
+ //path/to/file[@#]rev
+ Upload file from that rev as a "new" file.
+
+ //path/to/file[@#]rev,[@#]rev
+ Upload a diff between revs.
+
+ //path/to/dir/...[@#]rev,[@#]rev
+ Upload a diff of all files between revs in that directory.
+ """
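+        # For example (hypothetical depot paths), a caller might pass:
+        #   //depot/proj/...          -> every file uploaded as "new"
+        #   //depot/proj/file.c#3,#5  -> diff of file.c between revisions 3 and 5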
+ r_revision_range = re.compile(r'^(?P<path>//[^@#]+)' +
+ r'(?P<revision1>[#@][^,]+)?' +
+ r'(?P<revision2>,[#@][^,]+)?$')
+
+ empty_filename = make_tempfile()
+ tmp_diff_from_filename = make_tempfile()
+ tmp_diff_to_filename = make_tempfile()
+
+ diff_lines = []
+
+ for path in args:
+ m = r_revision_range.match(path)
+
+ if not m:
+ die('Path %r does not match a valid Perforce path.' % (path,))
+ revision1 = m.group('revision1')
+ revision2 = m.group('revision2')
+ first_rev_path = m.group('path')
+
+ if revision1:
+ first_rev_path += revision1
+ records = self._run_p4(['files', first_rev_path])
+
+ # Make a map for convenience.
+ files = {}
+
+ # Records are:
+ # 'rev': '1'
+ # 'func': '...'
+ # 'time': '1214418871'
+ # 'action': 'edit'
+ # 'type': 'ktext'
+ # 'depotFile': '...'
+ # 'change': '123456'
+ for record in records:
+ if record['action'] != 'delete':
+ if revision2:
+ files[record['depotFile']] = [record, None]
+ else:
+ files[record['depotFile']] = [None, record]
+
+ if revision2:
+ # [1:] to skip the comma.
+ second_rev_path = m.group('path') + revision2[1:]
+ records = self._run_p4(['files', second_rev_path])
+ for record in records:
+ if record['action'] != 'delete':
+ try:
+ m = files[record['depotFile']]
+ m[1] = record
+ except KeyError:
+ files[record['depotFile']] = [None, record]
+
+ old_file = new_file = empty_filename
+ changetype_short = None
+
+ for depot_path, (first_record, second_record) in files.items():
+ old_file = new_file = empty_filename
+ if first_record is None:
+ self._write_file(depot_path + '#' + second_record['rev'],
+ tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+ changetype_short = 'A'
+ base_revision = 0
+ elif second_record is None:
+ self._write_file(depot_path + '#' + first_record['rev'],
+ tmp_diff_from_filename)
+ old_file = tmp_diff_from_filename
+ changetype_short = 'D'
+ base_revision = int(first_record['rev'])
+ else:
+ self._write_file(depot_path + '#' + first_record['rev'],
+ tmp_diff_from_filename)
+ self._write_file(depot_path + '#' + second_record['rev'],
+ tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+ old_file = tmp_diff_from_filename
+ changetype_short = 'M'
+ base_revision = int(first_record['rev'])
+
+ dl = self._do_diff(old_file, new_file, depot_path,
+ base_revision, changetype_short,
+ ignore_unmodified=True)
+ diff_lines += dl
+
+ os.unlink(empty_filename)
+ os.unlink(tmp_diff_from_filename)
+ os.unlink(tmp_diff_to_filename)
+ return (''.join(diff_lines), None)
+
+ def _run_p4(self, command):
+ """Execute a perforce command using the python marshal API.
+
+ - command: A list of strings of the command to execute.
+
+ The return type depends on the command being run.
+ """
+ command = ['p4', '-G'] + command
+ p = subprocess.Popen(command, stdout=subprocess.PIPE)
+ result = []
+ has_error = False
+
+ while 1:
+ try:
+ data = marshal.load(p.stdout)
+ except EOFError:
+ break
+ else:
+ result.append(data)
+ if data.get('code', None) == 'error':
+ has_error = True
+
+ rc = p.wait()
+
+ if rc or has_error:
+ for record in result:
+ if 'data' in record:
+ print record['data']
+ die('Failed to execute command: %s\n' % (command,))
+
+ return result
+
+ def _changenum_diff(self, changenum):
+ """
+ Process a diff for a particular change number. This handles both
+ pending and submitted changelists.
+
+ See _path_diff for the alternate version that does diffs of depot
+ paths.
+ """
+ # TODO: It might be a good idea to enhance PerforceDiffParser to
+ # understand that newFile could include a revision tag for post-submit
+ # reviewing.
+ cl_is_pending = False
+
+ debug("Generating diff for changenum %s" % changenum)
+
+ description = execute(["p4", "describe", "-s", changenum],
+ split_lines=True)
+
+ if '*pending*' in description[0]:
+ cl_is_pending = True
+
+ # Get the file list
+ for line_num, line in enumerate(description):
+ if 'Affected files ...' in line:
+ break
+ else:
+ # Got to the end of all the description lines and didn't find
+ # what we were looking for.
+ die("Couldn't find any affected files for this change.")
+
+ description = description[line_num+2:]
+
+ diff_lines = []
+
+ empty_filename = make_tempfile()
+ tmp_diff_from_filename = make_tempfile()
+ tmp_diff_to_filename = make_tempfile()
+
+ for line in description:
+ line = line.strip()
+ if not line:
+ continue
+
+ m = re.search(r'\.\.\. ([^#]+)#(\d+) (add|edit|delete|integrate|branch)', line)
+ if not m:
+ die("Unsupported line from p4 opened: %s" % line)
+
+ depot_path = m.group(1)
+ base_revision = int(m.group(2))
+ if not cl_is_pending:
+                # If the changelist is pending, our base revision is the one
+                # that's currently in the depot. If it's not pending, the base
+                # revision is actually the revision prior to this one.
+ base_revision -= 1
+
+ changetype = m.group(3)
+
+ debug('Processing %s of %s' % (changetype, depot_path))
+
+ old_file = new_file = empty_filename
+ old_depot_path = new_depot_path = None
+ changetype_short = None
+
+ if changetype == 'edit' or changetype == 'integrate':
+ # A big assumption
+ new_revision = base_revision + 1
+
+ # We have an old file, get p4 to take this old version from the
+ # depot and put it into a plain old temp file for us
+ old_depot_path = "%s#%s" % (depot_path, base_revision)
+ self._write_file(old_depot_path, tmp_diff_from_filename)
+ old_file = tmp_diff_from_filename
+
+ # Also print out the new file into a tmpfile
+ if cl_is_pending:
+ new_file = self._depot_to_local(depot_path)
+ else:
+ new_depot_path = "%s#%s" %(depot_path, new_revision)
+ self._write_file(new_depot_path, tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+
+ changetype_short = "M"
+
+ elif changetype == 'add' or changetype == 'branch':
+ # We have a new file, get p4 to put this new file into a pretty
+ # temp file for us. No old file to worry about here.
+ if cl_is_pending:
+ new_file = self._depot_to_local(depot_path)
+ else:
+ self._write_file(depot_path, tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+ changetype_short = "A"
+
+ elif changetype == 'delete':
+ # We've deleted a file, get p4 to put the deleted file into a temp
+ # file for us. The new file remains the empty file.
+ old_depot_path = "%s#%s" % (depot_path, base_revision)
+ self._write_file(old_depot_path, tmp_diff_from_filename)
+ old_file = tmp_diff_from_filename
+ changetype_short = "D"
+ else:
+ die("Unknown change type '%s' for %s" % (changetype, depot_path))
+
+ dl = self._do_diff(old_file, new_file, depot_path, base_revision, changetype_short)
+ diff_lines += dl
+
+ os.unlink(empty_filename)
+ os.unlink(tmp_diff_from_filename)
+ os.unlink(tmp_diff_to_filename)
+ return (''.join(diff_lines), None)
+
+ def _do_diff(self, old_file, new_file, depot_path, base_revision,
+ changetype_short, ignore_unmodified=False):
+ """
+ Do the work of producing a diff for Perforce.
+
+ old_file - The absolute path to the "old" file.
+ new_file - The absolute path to the "new" file.
+ depot_path - The depot path in Perforce for this file.
+ base_revision - The base perforce revision number of the old file as
+ an integer.
+ changetype_short - The change type as a single character string.
+ ignore_unmodified - If True, will return an empty list if the file
+ is not changed.
+
+ Returns a list of strings of diff lines.
+ """
+ if hasattr(os, 'uname') and os.uname()[0] == 'SunOS':
+ diff_cmd = ["gdiff", "-urNp", old_file, new_file]
+ else:
+ diff_cmd = ["diff", "-urNp", old_file, new_file]
+ # Diff returns "1" if differences were found.
+ dl = execute(diff_cmd, extra_ignore_errors=(1,2),
+ translate_newlines=False)
+
+ # If the input file has ^M characters at end of line, lets ignore them.
+ dl = dl.replace('\r\r\n', '\r\n')
+ dl = dl.splitlines(True)
+
+ cwd = os.getcwd()
+ if depot_path.startswith(cwd):
+ local_path = depot_path[len(cwd) + 1:]
+ else:
+ local_path = depot_path
+
+ # Special handling for the output of the diff tool on binary files:
+ # diff outputs "Files a and b differ"
+ # and the code below expects the output to start with
+ # "Binary files "
+ if len(dl) == 1 and \
+ dl[0] == ('Files %s and %s differ'% (old_file, new_file)):
+ dl = ['Binary files %s and %s differ'% (old_file, new_file)]
+
+ if dl == [] or dl[0].startswith("Binary files "):
+ if dl == []:
+ if ignore_unmodified:
+ return []
+ else:
+ print "Warning: %s in your changeset is unmodified" % \
+ local_path
+
+ dl.insert(0, "==== %s#%s ==%s== %s ====\n" % \
+ (depot_path, base_revision, changetype_short, local_path))
+ dl.append('\n')
+ else:
+ m = re.search(r'(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)', dl[1])
+ if m:
+ timestamp = m.group(1)
+ else:
+ # Thu Sep 3 11:24:48 2007
+ m = re.search(r'(\w+)\s+(\w+)\s+(\d+)\s+(\d\d:\d\d:\d\d)\s+(\d\d\d\d)', dl[1])
+ if not m:
+ die("Unable to parse diff header: %s" % dl[1])
+
+ month_map = {
+ "Jan": "01",
+ "Feb": "02",
+ "Mar": "03",
+ "Apr": "04",
+ "May": "05",
+ "Jun": "06",
+ "Jul": "07",
+ "Aug": "08",
+ "Sep": "09",
+ "Oct": "10",
+ "Nov": "11",
+ "Dec": "12",
+ }
+ month = month_map[m.group(2)]
+ day = m.group(3)
+ timestamp = m.group(4)
+ year = m.group(5)
+
+ timestamp = "%s-%s-%s %s" % (year, month, day, timestamp)
+
+ dl[0] = "--- %s\t%s#%s\n" % (local_path, depot_path, base_revision)
+ dl[1] = "+++ %s\t%s\n" % (local_path, timestamp)
+
+ return dl
+
+ def _write_file(self, depot_path, tmpfile):
+ """
+        Grabs a file from Perforce and writes it to a temp file. p4 print sets
+        the file read-only, which causes a later call to unlink to fail, so we
+        make the file read/write.
+ """
+ debug('Writing "%s" to "%s"' % (depot_path, tmpfile))
+ execute(["p4", "print", "-o", tmpfile, "-q", depot_path])
+ os.chmod(tmpfile, stat.S_IREAD | stat.S_IWRITE)
+
+ def _depot_to_local(self, depot_path):
+ """
+ Given a path in the depot return the path on the local filesystem to
+ the same file. If there are multiple results, take only the last
+ result from the where command.
+ """
+ where_output = self._run_p4(['where', depot_path])
+ return where_output[-1]['path']
+
+
+class MercurialClient(SCMClient):
+ """
+ A wrapper around the hg Mercurial tool that fetches repository
+ information and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install('hg --help'):
+ return None
+
+ data = execute(["hg", "root"], ignore_errors=True)
+ if data.startswith('abort:'):
+ # hg aborted => no mercurial repository here.
+ return None
+
+        # Otherwise, the hg root output gives us the repository path.
+
+ # We save data here to use it as a fallback. See below
+ local_data = data.strip()
+
+ svn = execute(["hg", "svn", "info", ], ignore_errors=True)
+
+ if (not svn.startswith('abort:') and
+ not svn.startswith("hg: unknown command")):
+ self.type = 'svn'
+ m = re.search(r'^Repository Root: (.+)$', svn, re.M)
+
+ if not m:
+ return None
+
+ path = m.group(1)
+ m2 = re.match(r'^(svn\+ssh|http|https)://([-a-zA-Z0-9.]*@)(.*)$',
+ path)
+ if m2:
+ path = '%s://%s' % (m2.group(1), m2.group(3))
+
+ m = re.search(r'^URL: (.+)$', svn, re.M)
+
+ if not m:
+ return None
+
+ base_path = m.group(1)[len(path):] or "/"
+ return RepositoryInfo(path=path,
+ base_path=base_path,
+ supports_parent_diffs=True)
+
+ self.type = 'hg'
+
+ # We are going to search .hg/hgrc for the default path.
+ file_name = os.path.join(local_data,'.hg', 'hgrc')
+
+ if not os.path.exists(file_name):
+ return RepositoryInfo(path=local_data, base_path='/',
+ supports_parent_diffs=True)
+
+ f = open(file_name)
+ data = f.read()
+ f.close()
+
+ m = re.search(r'^default\s+=\s+(.+)$', data, re.M)
+
+ if not m:
+ # Return the local path, if no default value is found.
+ return RepositoryInfo(path=local_data, base_path='/',
+ supports_parent_diffs=True)
+
+ path = m.group(1).strip()
+
+ return RepositoryInfo(path=path, base_path='',
+ supports_parent_diffs=True)
+
+ def diff(self, files):
+ """
+ Performs a diff across all modified files in a Mercurial repository.
+ """
+ # We don't support parent diffs with Mercurial yet, so we always
+ # return None for the parent diff.
+ if self.type == 'svn':
+ parent = execute(['hg', 'parent', '--svn', '--template',
+ '{node}\n']).strip()
+
+ if options.parent_branch:
+ parent = options.parent_branch
+
+ if options.guess_summary and not options.summary:
+ options.summary = execute(['hg', 'log', '-r.', '--template',
+ r'{desc|firstline}\n'])
+
+ if options.guess_description and not options.description:
+ numrevs = len(execute(['hg', 'log', '-r.:%s' % parent,
+ '--follow', '--template',
+ r'{rev}\n']).strip().split('\n'))
+ options.description = execute(['hg', 'log', '-r.:%s' % parent,
+ '--follow', '--template',
+ r'{desc}\n\n', '--limit',
+ str(numrevs-1)]).strip()
+
+ return (execute(["hg", "diff", "--svn", '-r%s:.' % parent]), None)
+
+ return (execute(["hg", "diff"] + files), None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a Mercurial repository.
+ """
+ if self.type != 'hg':
+ raise NotImplementedError
+
+ r1, r2 = revision_range.split(':')
+ return execute(["hg", "diff", "-r", r1, "-r", r2])
+
+
+class GitClient(SCMClient):
+ """
+ A wrapper around git that fetches repository information and generates
+ compatible diffs. This will attempt to generate a diff suitable for the
+ remote repository, whether git, SVN or Perforce.
+ """
+ def get_repository_info(self):
+ if not check_install('git --help'):
+ return None
+
+ git_dir = execute(["git", "rev-parse", "--git-dir"],
+ ignore_errors=True).strip()
+
+ if git_dir.startswith("fatal:") or not os.path.isdir(git_dir):
+ return None
+
+        # Running post-review in directories other than the top level of a
+        # work-tree would result in broken diffs on the server.
+ os.chdir(os.path.dirname(os.path.abspath(git_dir)))
+
+ # We know we have something we can work with. Let's find out
+ # what it is. We'll try SVN first.
+ data = execute(["git", "svn", "info"], ignore_errors=True)
+
+ m = re.search(r'^Repository Root: (.+)$', data, re.M)
+ if m:
+ path = m.group(1)
+ m = re.search(r'^URL: (.+)$', data, re.M)
+
+ if m:
+ base_path = m.group(1)[len(path):] or "/"
+ m = re.search(r'^Repository UUID: (.+)$', data, re.M)
+
+ if m:
+ uuid = m.group(1)
+ self.type = "svn"
+
+ return SvnRepositoryInfo(path=path, base_path=base_path,
+ uuid=uuid,
+ supports_parent_diffs=True)
+ else:
+ # Versions of git-svn before 1.5.4 don't (appear to) support
+            # 'git svn info'. If we fail here because of an older git install,
+            # figure out what version of git is installed and give
+ # the user a hint about what to do next.
+ version = execute(["git", "svn", "--version"], ignore_errors=True)
+ version_parts = re.search('version (\d+)\.(\d+)\.(\d+)',
+ version)
+ svn_remote = execute(["git", "config", "--get",
+ "svn-remote.svn.url"], ignore_errors=True)
+
+ if (version_parts and
+ not self.is_valid_version((int(version_parts.group(1)),
+ int(version_parts.group(2)),
+ int(version_parts.group(3))),
+ (1, 5, 4)) and
+ svn_remote):
+ die("Your installation of git-svn must be upgraded to " + \
+ "version 1.5.4 or later")
+
+ # Okay, maybe Perforce.
+ # TODO
+
+ # Nope, it's git then.
+ origin = execute(["git", "remote", "show", "origin"])
+ m = re.search(r'URL: (.+)', origin)
+ if m:
+ url = m.group(1).rstrip('/')
+ if url:
+ self.type = "git"
+ return RepositoryInfo(path=url, base_path='',
+ supports_parent_diffs=True)
+
+ return None
+
+ def is_valid_version(self, actual, expected):
+ """
+ Takes two tuples, both in the form:
+ (major_version, minor_version, micro_version)
+ Returns true if the actual version is greater than or equal to
+ the expected version, and false otherwise.
+ """
+ return (actual[0] > expected[0]) or \
+ (actual[0] == expected[0] and actual[1] > expected[1]) or \
+ (actual[0] == expected[0] and actual[1] == expected[1] and \
+ actual[2] >= expected[2])
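+
+        # e.g. (sketch) is_valid_version((1, 6, 0), (1, 5, 4)) -> True
+        #               is_valid_version((1, 5, 3), (1, 5, 4)) -> False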
+
+ def scan_for_server(self, repository_info):
+ # Scan first for dot files, since it's faster and will cover the
+ # user's $HOME/.reviewboardrc
+ server_url = super(GitClient, self).scan_for_server(repository_info)
+
+ if server_url:
+ return server_url
+
+ # TODO: Maybe support a server per remote later? Is that useful?
+ url = execute(["git", "config", "--get", "reviewboard.url"],
+ ignore_errors=True).strip()
+ if url:
+ return url
+
+ if self.type == "svn":
+ # Try using the reviewboard:url property on the SVN repo, if it
+ # exists.
+ prop = SVNClient().scan_for_server_property(repository_info)
+
+ if prop:
+ return prop
+
+ return None
+
+ def diff(self, args):
+ """
+ Performs a diff across all modified files in the branch, taking into
+ account a parent branch.
+ """
+ parent_branch = options.parent_branch or "master"
+
+ diff_lines = self.make_diff(parent_branch)
+
+ if parent_branch != "master":
+ parent_diff_lines = self.make_diff("master", parent_branch)
+ else:
+ parent_diff_lines = None
+
+ if options.guess_summary and not options.summary:
+ options.summary = execute(["git", "log", "--pretty=format:%s",
+ "HEAD^.."], ignore_errors=True).strip()
+
+ if options.guess_description and not options.description:
+ options.description = execute(
+ ["git", "log", "--pretty=format:%s%n%n%b", parent_branch + ".."],
+ ignore_errors=True).strip()
+
+ return (diff_lines, parent_diff_lines)
+
+ def make_diff(self, parent_branch, source_branch=""):
+ """
+ Performs a diff on a particular branch range.
+ """
+ if self.type == "svn":
+ diff_lines = execute(["git", "diff", "--no-color", "--no-prefix",
+ "-r", "-u", "%s..%s" % (parent_branch,
+ source_branch)],
+ split_lines=True)
+ return self.make_svn_diff(parent_branch, diff_lines)
+ elif self.type == "git":
+ return execute(["git", "diff", "--no-color", "--full-index",
+ parent_branch])
+
+ return None
+
+ def make_svn_diff(self, parent_branch, diff_lines):
+ """
+ Formats the output of git diff such that it's in a form that
+ svn diff would generate. This is needed so the SVNTool in Review
+ Board can properly parse this diff.
+ """
+ rev = execute(["git", "svn", "find-rev", "master"]).strip()
+
+ if not rev:
+ return None
+
+ diff_data = ""
+ filename = ""
+ revision = ""
+ newfile = False
+
+ for line in diff_lines:
+ if line.startswith("diff "):
+ # Grab the filename and then filter this out.
+ # This will be in the format of:
+ #
+ # diff --git a/path/to/file b/path/to/file
+ info = line.split(" ")
+ diff_data += "Index: %s\n" % info[2]
+ diff_data += "=" * 67
+ diff_data += "\n"
+ elif line.startswith("index "):
+ # Filter this out.
+ pass
+ elif line.strip() == "--- /dev/null":
+ # New file
+ newfile = True
+ elif line.startswith("--- "):
+ newfile = False
+ diff_data += "--- %s\t(revision %s)\n" % \
+ (line[4:].strip(), rev)
+ elif line.startswith("+++ "):
+ filename = line[4:].strip()
+ if newfile:
+ diff_data += "--- %s\t(revision 0)\n" % filename
+ diff_data += "+++ %s\t(revision 0)\n" % filename
+ else:
+ # We already printed the "--- " line.
+ diff_data += "+++ %s\t(working copy)\n" % filename
+ else:
+ diff_data += line
+
+ return diff_data
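+
+        # Illustrative example of the rewrite above (file name hypothetical):
+        # a git header pair
+        #   --- path/to/file.c
+        #   +++ path/to/file.c
+        # becomes, for a file that already exists at SVN revision <rev>,
+        #   Index: path/to/file.c
+        #   <row of 67 '=' characters>
+        #   --- path/to/file.c<TAB>(revision <rev>)
+        #   +++ path/to/file.c<TAB>(working copy)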
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ pass
+
+
+SCMCLIENTS = (
+ SVNClient(),
+ CVSClient(),
+ GitClient(),
+ MercurialClient(),
+ PerforceClient(),
+ ClearCaseClient(),
+)
+
+def debug(s):
+ """
+ Prints debugging information if post-review was run with --debug
+ """
+ if DEBUG or options and options.debug:
+ print ">>> %s" % s
+
+
+def make_tempfile():
+ """
+ Creates a temporary file and returns the path. The path is stored
+ in an array for later cleanup.
+ """
+ fd, tmpfile = mkstemp()
+ os.close(fd)
+ tempfiles.append(tmpfile)
+ return tmpfile
+
+
+def check_install(command):
+ """
+ Try executing an external command and return a boolean indicating whether
+ that command is installed or not. The 'command' argument should be
+ something that executes quickly, without hitting the network (for
+ instance, 'svn help' or 'git --version').
+ """
+ try:
+ p = subprocess.Popen(command.split(' '),
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ return True
+ except OSError:
+ return False
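+
+# A sketch of intended usage (the commands here are only examples):
+#   check_install('svn help')  -> True when an svn binary is on the PATH
+#   check_install('p4 help')   -> False when Perforce is not installed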
+
+
+def execute(command, env=None, split_lines=False, ignore_errors=False,
+ extra_ignore_errors=(), translate_newlines=True):
+ """
+ Utility function to execute a command and return the output.
+ """
+ if isinstance(command, list):
+ debug(subprocess.list2cmdline(command))
+ else:
+ debug(command)
+
+ if env:
+ env.update(os.environ)
+ else:
+ env = os.environ.copy()
+
+ env['LC_ALL'] = 'en_US.UTF-8'
+ env['LANGUAGE'] = 'en_US.UTF-8'
+
+ if sys.platform.startswith('win'):
+ p = subprocess.Popen(command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=False,
+ universal_newlines=translate_newlines,
+ env=env)
+ else:
+ p = subprocess.Popen(command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=False,
+ close_fds=True,
+ universal_newlines=translate_newlines,
+ env=env)
+ if split_lines:
+ data = p.stdout.readlines()
+ else:
+ data = p.stdout.read()
+ rc = p.wait()
+ if rc and not ignore_errors and rc not in extra_ignore_errors:
+ die('Failed to execute command: %s\n%s' % (command, data))
+
+ return data
+
+
+def die(msg=None):
+ """
+ Cleanly exits the program with an error message. Erases all remaining
+ temporary files.
+ """
+ for tmpfile in tempfiles:
+ try:
+ os.unlink(tmpfile)
+ except:
+ pass
+
+ if msg:
+ print msg
+
+ sys.exit(1)
+
+
+def walk_parents(path):
+ """
+ Walks up the tree to the root directory.
+ """
+ while os.path.splitdrive(path)[1] != os.sep:
+ yield path
+ path = os.path.dirname(path)
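+
+# e.g. (sketch, POSIX path) walk_parents('/home/user/src/project') yields
+# '/home/user/src/project', '/home/user/src', '/home/user' and '/home'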
+
+
+def load_config_file(filename):
+ """
+ Loads data from a config file.
+ """
+ config = {
+ 'TREES': {},
+ }
+
+ if os.path.exists(filename):
+ try:
+ execfile(filename, config)
+ except:
+ pass
+
+ return config
+
+
+def tempt_fate(server, tool, changenum, diff_content=None,
+ parent_diff_content=None, submit_as=None, retries=3):
+ """
+ Attempts to create a review request on a Review Board server and upload
+ a diff. On success, the review request path is displayed.
+ """
+ try:
+ save_draft = False
+
+ if options.rid:
+ review_request = server.get_review_request(options.rid)
+ else:
+ review_request = server.new_review_request(changenum, submit_as)
+
+ if options.target_groups:
+ server.set_review_request_field(review_request, 'target_groups',
+ options.target_groups)
+ save_draft = True
+
+ if options.target_people:
+ server.set_review_request_field(review_request, 'target_people',
+ options.target_people)
+ save_draft = True
+
+ if options.summary:
+ server.set_review_request_field(review_request, 'summary',
+ options.summary)
+ save_draft = True
+
+ if options.branch:
+ server.set_review_request_field(review_request, 'branch',
+ options.branch)
+ save_draft = True
+
+ if options.bugs_closed:
+ server.set_review_request_field(review_request, 'bugs_closed',
+ options.bugs_closed)
+ save_draft = True
+
+ if options.description:
+ server.set_review_request_field(review_request, 'description',
+ options.description)
+ save_draft = True
+
+ if options.testing_done:
+ server.set_review_request_field(review_request, 'testing_done',
+ options.testing_done)
+ save_draft = True
+
+ if save_draft:
+ server.save_draft(review_request)
+ except APIError, e:
+ rsp, = e.args
+ if rsp['err']['code'] == 103: # Not logged in
+ retries = retries - 1
+
+ # We had an odd issue where the server ended up a couple of
+ # years in the future. Login succeeds but the cookie date was
+ # "odd" so use of the cookie appeared to fail and eventually
+ # ended up at max recursion depth :-(. Check for a maximum
+ # number of retries.
+ if retries >= 0:
+ server.login(force=True)
+ tempt_fate(server, tool, changenum, diff_content,
+ parent_diff_content, submit_as, retries=retries)
+ return
+
+ if options.rid:
+ die("Error getting review request %s: %s (code %s)" % \
+ (options.rid, rsp['err']['msg'], rsp['err']['code']))
+ else:
+ die("Error creating review request: %s (code %s)" % \
+ (rsp['err']['msg'], rsp['err']['code']))
+
+
+ if not server.info.supports_changesets or not options.change_only:
+ try:
+ server.upload_diff(review_request, diff_content,
+ parent_diff_content)
+ except APIError, e:
+ rsp, = e.args
+ print "Error uploading diff: %s (%s)" % (rsp['err']['msg'],
+ rsp['err']['code'])
+ debug(rsp)
+ die("Your review request still exists, but the diff is not " +
+ "attached.")
+
+ if options.publish:
+ server.publish(review_request)
+
+ request_url = 'r/' + str(review_request['id'])
+ review_url = urljoin(server.url, request_url)
+
+ if not review_url.startswith('http'):
+ review_url = 'http://%s' % review_url
+
+ print "Review request #%s posted." % (review_request['id'],)
+ print
+ print review_url
+
+ return review_url
+
+
+def parse_options(args):
+ parser = OptionParser(usage="%prog [-pond] [-r review_id] [changenum]",
+ version="%prog " + VERSION)
+
+ parser.add_option("-p", "--publish",
+ dest="publish", action="store_true", default=PUBLISH,
+ help="publish the review request immediately after "
+ "submitting")
+ parser.add_option("-r", "--review-request-id",
+ dest="rid", metavar="ID", default=None,
+ help="existing review request ID to update")
+ parser.add_option("-o", "--open",
+ dest="open_browser", action="store_true",
+ default=OPEN_BROWSER,
+ help="open a web browser to the review request page")
+ parser.add_option("-n", "--output-diff",
+ dest="output_diff_only", action="store_true",
+ default=False,
+ help="outputs a diff to the console and exits. "
+ "Does not post")
+ parser.add_option("--server",
+ dest="server", default=REVIEWBOARD_URL,
+ metavar="SERVER",
+ help="specify a different Review Board server "
+ "to use")
+ parser.add_option("--diff-only",
+ dest="diff_only", action="store_true", default=False,
+ help="uploads a new diff, but does not update "
+ "info from changelist")
+ parser.add_option("--target-groups",
+ dest="target_groups", default=TARGET_GROUPS,
+ help="names of the groups who will perform "
+ "the review")
+ parser.add_option("--target-people",
+ dest="target_people", default=TARGET_PEOPLE,
+ help="names of the people who will perform "
+ "the review")
+ parser.add_option("--summary",
+ dest="summary", default=None,
+ help="summary of the review ")
+ parser.add_option("--description",
+ dest="description", default=None,
+ help="description of the review ")
+ parser.add_option("--description-file",
+ dest="description_file", default=None,
+ help="text file containing a description of the review")
+ parser.add_option("--guess-summary",
+ dest="guess_summary", action="store_true",
+ default=False,
+ help="guess summary from the latest commit (git/"
+ "hgsubversion only)")
+ parser.add_option("--guess-description",
+ dest="guess_description", action="store_true",
+ default=False,
+ help="guess description based on commits on this branch "
+ "(git/hgsubversion only)")
+ parser.add_option("--testing-done",
+ dest="testing_done", default=None,
+ help="details of testing done ")
+ parser.add_option("--testing-done-file",
+ dest="testing_file", default=None,
+ help="text file containing details of testing done ")
+ parser.add_option("--branch",
+ dest="branch", default=None,
+ help="affected branch ")
+ parser.add_option("--bugs-closed",
+ dest="bugs_closed", default=None,
+ help="list of bugs closed ")
+ parser.add_option("--revision-range",
+ dest="revision_range", default=None,
+ help="generate the diff for review based on given "
+ "revision range")
+ parser.add_option("--label",
+ dest="label", default=None,
+ help="label (ClearCase Only) ")
+ parser.add_option("--submit-as",
+ dest="submit_as", default=SUBMIT_AS, metavar="USERNAME",
+ help="user name to be recorded as the author of the "
+ "review request, instead of the logged in user")
+ parser.add_option("--username",
+ dest="username", default=None, metavar="USERNAME",
+ help="user name to be supplied to the reviewboard server")
+ parser.add_option("--password",
+ dest="password", default=None, metavar="PASSWORD",
+ help="password to be supplied to the reviewboard server")
+ parser.add_option("--change-only",
+ dest="change_only", action="store_true",
+ default=False,
+ help="updates info from changelist, but does "
+ "not upload a new diff (only available if your "
+ "repository supports changesets)")
+ parser.add_option("--parent",
+ dest="parent_branch", default=None,
+ metavar="PARENT_BRANCH",
+ help="the parent branch this diff should be against "
+ "(only available if your repository supports "
+ "parent diffs)")
+ parser.add_option("--p4-client",
+ dest="p4_client", default=None,
+ help="the Perforce client name that the review is in")
+ parser.add_option("--p4-port",
+ dest="p4_port", default=None,
+                      help="the Perforce server's IP address that the review is on")
+ parser.add_option("--repository-url",
+ dest="repository_url", default=None,
+ help="the url for a repository for creating a diff "
+ "outside of a working copy (currently only supported "
+ "by Subversion). Requires --revision-range")
+ parser.add_option("-d", "--debug",
+ action="store_true", dest="debug", default=DEBUG,
+ help="display debug output")
+
+ (globals()["options"], args) = parser.parse_args(args)
+
+ if options.description and options.description_file:
+ sys.stderr.write("The --description and --description-file options "
+ "are mutually exclusive.\n")
+ sys.exit(1)
+
+ if options.description_file:
+ if os.path.exists(options.description_file):
+ fp = open(options.description_file, "r")
+ options.description = fp.read()
+ fp.close()
+ else:
+ sys.stderr.write("The description file %s does not exist.\n" %
+ options.description_file)
+ sys.exit(1)
+
+ if options.testing_done and options.testing_file:
+ sys.stderr.write("The --testing-done and --testing-done-file options "
+ "are mutually exclusive.\n")
+ sys.exit(1)
+
+ if options.testing_file:
+ if os.path.exists(options.testing_file):
+ fp = open(options.testing_file, "r")
+ options.testing_done = fp.read()
+ fp.close()
+ else:
+ sys.stderr.write("The testing file %s does not exist.\n" %
+ options.testing_file)
+ sys.exit(1)
+
+ if options.repository_url and not options.revision_range:
+ sys.stderr.write("The --repository-url option requires the "
+ "--revision-range option.\n")
+ sys.exit(1)
+
+ return args
+
+def determine_client():
+
+ repository_info = None
+ tool = None
+
+ # Try to find the SCM Client we're going to be working with.
+ for tool in SCMCLIENTS:
+ repository_info = tool.get_repository_info()
+
+ if repository_info:
+ break
+
+ if not repository_info:
+ if options.repository_url:
+            print "No supported repository could be accessed at the supplied URL."
+ else:
+ print "The current directory does not contain a checkout from a"
+ print "supported source code repository."
+ sys.exit(1)
+
+ # Verify that options specific to an SCM Client have not been mis-used.
+ if options.change_only and not repository_info.supports_changesets:
+ sys.stderr.write("The --change-only option is not valid for the "
+ "current SCM client.\n")
+ sys.exit(1)
+
+ if options.parent_branch and not repository_info.supports_parent_diffs:
+ sys.stderr.write("The --parent option is not valid for the "
+ "current SCM client.\n")
+ sys.exit(1)
+
+ if ((options.p4_client or options.p4_port) and \
+ not isinstance(tool, PerforceClient)):
+ sys.stderr.write("The --p4-client and --p4-port options are not valid "
+ "for the current SCM client.\n")
+ sys.exit(1)
+
+ return (repository_info, tool)
+
+def main():
+ if 'USERPROFILE' in os.environ:
+ homepath = os.path.join(os.environ["USERPROFILE"], "Local Settings",
+ "Application Data")
+ elif 'HOME' in os.environ:
+ homepath = os.environ["HOME"]
+ else:
+ homepath = ''
+
+ # Load the config and cookie files
+ globals()['user_config'] = \
+ load_config_file(os.path.join(homepath, ".reviewboardrc"))
+ cookie_file = os.path.join(homepath, ".post-review-cookies.txt")
+
+ args = parse_options(sys.argv[1:])
+
+ repository_info, tool = determine_client()
+
+ # Try to find a valid Review Board server to use.
+ if options.server:
+ server_url = options.server
+ else:
+ server_url = tool.scan_for_server(repository_info)
+
+ if not server_url:
+ print "Unable to find a Review Board server for this source code tree."
+ sys.exit(1)
+
+ server = ReviewBoardServer(server_url, repository_info, cookie_file)
+
+ if repository_info.supports_changesets:
+ changenum = tool.get_changenum(args)
+ else:
+ changenum = None
+
+ if options.revision_range:
+ diff = tool.diff_between_revisions(options.revision_range, args,
+ repository_info)
+ parent_diff = None
+ elif options.label and isinstance(tool, ClearCaseClient):
+ diff, parent_diff = tool.diff_label(options.label)
+ else:
+ diff, parent_diff = tool.diff(args)
+
+ if options.output_diff_only:
+ print diff
+ sys.exit(0)
+
+ # Let's begin.
+ server.login()
+
+ review_url = tempt_fate(server, tool, changenum, diff_content=diff,
+ parent_diff_content=parent_diff,
+ submit_as=options.submit_as)
+
+ # Load the review up in the browser if requested to:
+ if options.open_browser:
+ try:
+ import webbrowser
+ if 'open_new_tab' in dir(webbrowser):
+ # open_new_tab is only in python 2.5+
+ webbrowser.open_new_tab(review_url)
+ elif 'open_new' in dir(webbrowser):
+ webbrowser.open_new(review_url)
+ else:
+ os.system( 'start %s' % review_url )
+ except:
+ print 'Error opening review URL: %s' % review_url
+
+
+if __name__ == "__main__":
+ main()
diff --git a/test/review b/test/review
new file mode 100755
index 0000000000..e1ccb9c0af
--- /dev/null
+++ b/test/review
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+if [ -z $1 ] || [ "$1" = "-h" ] || [ "$1" = "--help" ] || [ "$1" = "-help" ] || [ "$1" = "-?" ]; then
+ echo "Usage: `basename $0` [rev] [args]\n"
+ echo " [rev] : either the revision number without leading 'r' (post-commit),"
+ echo " or '-loc' to create a review from current local changes (pre-commit)\n"
+ echo " [args] : optional arguments:"
+ echo " -r ID existing review request ID to update\n"
+ exit 1
+fi
+
+POSTREVIEW=`dirname $0`/postreview.py
+
+if [ "$1" = "-loc" ]; then
+ echo "creating review request from local changes..."
+ REVARG=""
+ LOG=""
+ SUMMARY="local changes"
+ REPO=""
+else
+ REV=$1
+ PREV=`expr $REV - 1`
+ if [ $? -ne 0 ]; then
+ echo "argument revision not a number: $REV"
+ exit 1
+ fi
+
+ echo "creating review request for changeset $REV..."
+
+ LOG="`svn log http://lampsvn.epfl.ch/svn-repos/scala -c $REV`"
+ if [ $? -ne 0 ]; then
+ echo "could not get svn log for revision $REV"
+ exit 1
+ fi
+
+ REVARG="--revision-range=$PREV:$REV"
+ SUMMARY="r$REV"
+ REPO="--repository-url=http://lampsvn.epfl.ch/svn-repos/scala"
+fi
+
+
+shift # remove parameter $1 (revision)
+
+python $POSTREVIEW --server="https://chara2.epfl.ch" $REVARG --summary="$SUMMARY" --description="$LOG" $REPO -o $@
diff --git a/test/simplejson/__init__.py b/test/simplejson/__init__.py
new file mode 100644
index 0000000000..d5b4d39913
--- /dev/null
+++ b/test/simplejson/__init__.py
@@ -0,0 +1,318 @@
+r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
+JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
+interchange format.
+
+:mod:`simplejson` exposes an API familiar to users of the standard library
+:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
+version of the :mod:`json` library contained in Python 2.6, but maintains
+compatibility with Python 2.4 and Python 2.5 and (currently) has
+significant performance advantages, even without using the optional C
+extension for speedups.
+
+Encoding basic Python object hierarchies::
+
+ >>> import simplejson as json
+ >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+ '["foo", {"bar": ["baz", null, 1.0, 2]}]'
+ >>> print json.dumps("\"foo\bar")
+ "\"foo\bar"
+ >>> print json.dumps(u'\u1234')
+ "\u1234"
+ >>> print json.dumps('\\')
+ "\\"
+ >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+ {"a": 0, "b": 0, "c": 0}
+ >>> from StringIO import StringIO
+ >>> io = StringIO()
+ >>> json.dump(['streaming API'], io)
+ >>> io.getvalue()
+ '["streaming API"]'
+
+Compact encoding::
+
+ >>> import simplejson as json
+ >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+ '[1,2,3,{"4":5,"6":7}]'
+
+Pretty printing::
+
+ >>> import simplejson as json
+ >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+ >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
+ {
+ "4": 5,
+ "6": 7
+ }
+
+Decoding JSON::
+
+ >>> import simplejson as json
+ >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+ >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
+ True
+ >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
+ True
+ >>> from StringIO import StringIO
+ >>> io = StringIO('["streaming API"]')
+ >>> json.load(io)[0] == 'streaming API'
+ True
+
+Specializing JSON object decoding::
+
+ >>> import simplejson as json
+ >>> def as_complex(dct):
+ ... if '__complex__' in dct:
+ ... return complex(dct['real'], dct['imag'])
+ ... return dct
+ ...
+ >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
+ ... object_hook=as_complex)
+ (1+2j)
+ >>> import decimal
+ >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
+ True
+
+Specializing JSON object encoding::
+
+ >>> import simplejson as json
+ >>> def encode_complex(obj):
+ ... if isinstance(obj, complex):
+ ... return [obj.real, obj.imag]
+ ... raise TypeError(repr(obj) + " is not JSON serializable")
+ ...
+ >>> json.dumps(2 + 1j, default=encode_complex)
+ '[2.0, 1.0]'
+ >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
+ '[2.0, 1.0]'
+ >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
+ '[2.0, 1.0]'
+
+
+Using simplejson.tool from the shell to validate and pretty-print::
+
+ $ echo '{"json":"obj"}' | python -m simplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -m simplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+"""
+__version__ = '2.0.9'
+__all__ = [
+ 'dump', 'dumps', 'load', 'loads',
+ 'JSONDecoder', 'JSONEncoder',
+]
+
+__author__ = 'Bob Ippolito <bob@redivi.com>'
+
+from decoder import JSONDecoder
+from encoder import JSONEncoder
+
+_default_encoder = JSONEncoder(
+ skipkeys=False,
+ ensure_ascii=True,
+ check_circular=True,
+ allow_nan=True,
+ indent=None,
+ separators=None,
+ encoding='utf-8',
+ default=None,
+)
+
+def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
+ ``.write()``-supporting file-like object).
+
+ If ``skipkeys`` is true then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is false, then some chunks written to ``fp``
+ may be ``unicode`` instances, subject to normal Python ``str`` to
+ ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
+ understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
+ to cause an error.
+
+ If ``check_circular`` is false, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is false, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
+ in strict compliance with the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and object
+ members will be pretty-printed with that indent level. An indent level
+ of 0 will only insert newlines. ``None`` is the most compact representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (not skipkeys and ensure_ascii and
+ check_circular and allow_nan and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ iterable = _default_encoder.iterencode(obj)
+ else:
+ if cls is None:
+ cls = JSONEncoder
+ iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding,
+ default=default, **kw).iterencode(obj)
+ # could accelerate with writelines in some versions of Python, at
+ # a debuggability cost
+ for chunk in iterable:
+ fp.write(chunk)
+
+
+def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` to a JSON formatted ``str``.
+
+ If ``skipkeys`` is true then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is false, then the return value will be a
+ ``unicode`` instance subject to normal Python ``str`` to ``unicode``
+ coercion rules instead of being escaped to an ASCII ``str``.
+
+ If ``check_circular`` is false, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is false, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
+ strict compliance with the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and
+ object members will be pretty-printed with that indent level. An indent
+ level of 0 will only insert newlines. ``None`` is the most compact
+ representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (not skipkeys and ensure_ascii and
+ check_circular and allow_nan and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ return _default_encoder.encode(obj)
+ if cls is None:
+ cls = JSONEncoder
+ return cls(
+ skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding, default=default,
+ **kw).encode(obj)
+
+
+_default_decoder = JSONDecoder(encoding=None, object_hook=None)
+
+
+def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
+ a JSON document) to a Python object.
+
+ If the contents of ``fp`` are encoded with an ASCII based encoding other
+ than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
+ be specified. Encodings that are not ASCII based (such as UCS-2) are
+ not allowed; ``fp`` should be wrapped with
+ ``codecs.getreader(encoding)(fp)``, or simply decoded to a ``unicode``
+ object and passed to ``loads()``.
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ return loads(fp.read(),
+ encoding=encoding, cls=cls, object_hook=object_hook,
+ parse_float=parse_float, parse_int=parse_int,
+ parse_constant=parse_constant, **kw)
+
+
+def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
+ document) to a Python object.
+
+ If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
+ other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
+ must be specified. Encodings that are not ASCII based (such as UCS-2)
+ are not allowed and should be decoded to ``unicode`` first.
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ if (cls is None and encoding is None and object_hook is None and
+ parse_int is None and parse_float is None and
+ parse_constant is None and not kw):
+ return _default_decoder.decode(s)
+ if cls is None:
+ cls = JSONDecoder
+ if object_hook is not None:
+ kw['object_hook'] = object_hook
+ if parse_float is not None:
+ kw['parse_float'] = parse_float
+ if parse_int is not None:
+ kw['parse_int'] = parse_int
+ if parse_constant is not None:
+ kw['parse_constant'] = parse_constant
+ return cls(encoding=encoding, **kw).decode(s)
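For illustration, a minimal sketch of the ``parse_constant`` hook documented
above, assuming only the ``simplejson`` package added by this patch is on the
path; it rejects the non-finite float literals that ``loads`` would otherwise
accept::

    import simplejson as json

    def reject_constant(name):
        # called with one of '-Infinity', 'Infinity', 'NaN'
        raise ValueError("non-finite JSON number not allowed: %s" % name)

    print json.loads('[1.5, 2.5]', parse_constant=reject_constant)  # [1.5, 2.5]
    try:
        json.loads('[NaN]', parse_constant=reject_constant)
    except ValueError, e:
        print e  # non-finite JSON number not allowed: NaN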
diff --git a/test/simplejson/decoder.py b/test/simplejson/decoder.py
new file mode 100644
index 0000000000..b769ea486c
--- /dev/null
+++ b/test/simplejson/decoder.py
@@ -0,0 +1,354 @@
+"""Implementation of JSONDecoder
+"""
+import re
+import sys
+import struct
+
+from simplejson.scanner import make_scanner
+try:
+ from simplejson._speedups import scanstring as c_scanstring
+except ImportError:
+ c_scanstring = None
+
+__all__ = ['JSONDecoder']
+
+FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+
+def _floatconstants():
+ _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+ if sys.byteorder != 'big':
+ _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
+ nan, inf = struct.unpack('dd', _BYTES)
+ return nan, inf, -inf
+
+NaN, PosInf, NegInf = _floatconstants()
+
+
+def linecol(doc, pos):
+ lineno = doc.count('\n', 0, pos) + 1
+ if lineno == 1:
+ colno = pos
+ else:
+ colno = pos - doc.rindex('\n', 0, pos)
+ return lineno, colno
+
+
+def errmsg(msg, doc, pos, end=None):
+ # Note that this function is called from _speedups
+ lineno, colno = linecol(doc, pos)
+ if end is None:
+ #fmt = '{0}: line {1} column {2} (char {3})'
+ #return fmt.format(msg, lineno, colno, pos)
+ fmt = '%s: line %d column %d (char %d)'
+ return fmt % (msg, lineno, colno, pos)
+ endlineno, endcolno = linecol(doc, end)
+ #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
+ #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
+ fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
+ return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
+
+
+_CONSTANTS = {
+ '-Infinity': NegInf,
+ 'Infinity': PosInf,
+ 'NaN': NaN,
+}
+
+STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
+BACKSLASH = {
+ '"': u'"', '\\': u'\\', '/': u'/',
+ 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+}
+
+DEFAULT_ENCODING = "utf-8"
+
+def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
+ """Scan the string s for a JSON string. End is the index of the
+ character in s after the quote that started the JSON string.
+ Unescapes all valid JSON string escape sequences and raises ValueError
+ on attempt to decode an invalid string. If strict is False then literal
+ control characters are allowed in the string.
+
+ Returns a tuple of the decoded string and the index of the character in s
+ after the end quote."""
+ if encoding is None:
+ encoding = DEFAULT_ENCODING
+ chunks = []
+ _append = chunks.append
+ begin = end - 1
+ while 1:
+ chunk = _m(s, end)
+ if chunk is None:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ end = chunk.end()
+ content, terminator = chunk.groups()
+ # Content contains zero or more unescaped string characters
+ if content:
+ if not isinstance(content, unicode):
+ content = unicode(content, encoding)
+ _append(content)
+ # Terminator is the end of string, a literal control character,
+ # or a backslash denoting that an escape sequence follows
+ if terminator == '"':
+ break
+ elif terminator != '\\':
+ if strict:
+ msg = "Invalid control character %r at" % (terminator,)
+ #msg = "Invalid control character {0!r} at".format(terminator)
+ raise ValueError(errmsg(msg, s, end))
+ else:
+ _append(terminator)
+ continue
+ try:
+ esc = s[end]
+ except IndexError:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ # If not a unicode escape sequence, must be in the lookup table
+ if esc != 'u':
+ try:
+ char = _b[esc]
+ except KeyError:
+ msg = "Invalid \\escape: " + repr(esc)
+ raise ValueError(errmsg(msg, s, end))
+ end += 1
+ else:
+ # Unicode escape sequence
+ esc = s[end + 1:end + 5]
+ next_end = end + 5
+ if len(esc) != 4:
+ msg = "Invalid \\uXXXX escape"
+ raise ValueError(errmsg(msg, s, end))
+ uni = int(esc, 16)
+ # Check for surrogate pair on UCS-4 systems
+ if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
+ msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
+ if not s[end + 5:end + 7] == '\\u':
+ raise ValueError(errmsg(msg, s, end))
+ esc2 = s[end + 7:end + 11]
+ if len(esc2) != 4:
+ raise ValueError(errmsg(msg, s, end))
+ uni2 = int(esc2, 16)
+ uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
+ next_end += 6
+ char = unichr(uni)
+ end = next_end
+ # Append the unescaped character
+ _append(char)
+ return u''.join(chunks), end
+
+
+# Use speedup if available
+scanstring = c_scanstring or py_scanstring
+
+WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
+WHITESPACE_STR = ' \t\n\r'
+
+def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ pairs = {}
+ # Use a slice to prevent IndexError from being raised, the following
+ # check will raise a more specific ValueError if the string is empty
+ nextchar = s[end:end + 1]
+ # Normally we expect nextchar == '"'
+ if nextchar != '"':
+ if nextchar in _ws:
+ end = _w(s, end).end()
+ nextchar = s[end:end + 1]
+ # Trivial empty object
+ if nextchar == '}':
+ return pairs, end + 1
+ elif nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end))
+ end += 1
+ while True:
+ key, end = scanstring(s, end, encoding, strict)
+
+ # To skip some function call overhead we optimize the fast paths where
+ # the JSON key separator is ": " or just ":".
+ if s[end:end + 1] != ':':
+ end = _w(s, end).end()
+ if s[end:end + 1] != ':':
+ raise ValueError(errmsg("Expecting : delimiter", s, end))
+
+ end += 1
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ pairs[key] = value
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+ end += 1
+
+ if nextchar == '}':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end += 1
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+
+ end += 1
+ if nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end - 1))
+
+ if object_hook is not None:
+ pairs = object_hook(pairs)
+ return pairs, end
+
+def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ values = []
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ # Look-ahead for trivial empty array
+ if nextchar == ']':
+ return values, end + 1
+ _append = values.append
+ while True:
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ _append(value)
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ end += 1
+ if nextchar == ']':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end))
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ return values, end
+
+class JSONDecoder(object):
+ """Simple JSON <http://json.org> decoder
+
+ Performs the following translations in decoding by default:
+
+ +---------------+-------------------+
+ | JSON | Python |
+ +===============+===================+
+ | object | dict |
+ +---------------+-------------------+
+ | array | list |
+ +---------------+-------------------+
+ | string | unicode |
+ +---------------+-------------------+
+ | number (int) | int, long |
+ +---------------+-------------------+
+ | number (real) | float |
+ +---------------+-------------------+
+ | true | True |
+ +---------------+-------------------+
+ | false | False |
+ +---------------+-------------------+
+ | null | None |
+ +---------------+-------------------+
+
+ It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
+ their corresponding ``float`` values, which is outside the JSON spec.
+
+ """
+
+ def __init__(self, encoding=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, strict=True):
+ """``encoding`` determines the encoding used to interpret any ``str``
+ objects decoded by this instance (utf-8 by default). It has no
+ effect when decoding ``unicode`` objects.
+
+ Note that currently only encodings that are a superset of ASCII work;
+ strings of other encodings should be passed in as ``unicode``.
+
+ ``object_hook``, if specified, will be called with the result
+ of every JSON object decoded and its return value will be used in
+ place of the given ``dict``. This can be used to provide custom
+ deserializations (e.g. to support JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ """
+ self.encoding = encoding
+ self.object_hook = object_hook
+ self.parse_float = parse_float or float
+ self.parse_int = parse_int or int
+ self.parse_constant = parse_constant or _CONSTANTS.__getitem__
+ self.strict = strict
+ self.parse_object = JSONObject
+ self.parse_array = JSONArray
+ self.parse_string = scanstring
+ self.scan_once = make_scanner(self)
+
+ def decode(self, s, _w=WHITESPACE.match):
+ """Return the Python representation of ``s`` (a ``str`` or ``unicode``
+ instance containing a JSON document)
+
+ """
+ obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+ end = _w(s, end).end()
+ if end != len(s):
+ raise ValueError(errmsg("Extra data", s, end, len(s)))
+ return obj
+
+ def raw_decode(self, s, idx=0):
+ """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
+ with a JSON document) and return a 2-tuple of the Python
+ representation and the index in ``s`` where the document ended.
+
+ This can be used to decode a JSON document from a string that may
+ have extraneous data at the end.
+
+ """
+ try:
+ obj, end = self.scan_once(s, idx)
+ except StopIteration:
+ raise ValueError("No JSON object could be decoded")
+ return obj, end
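As a sketch of the ``raw_decode`` behaviour described in the docstring above
(assuming the decoder from this patch), it peels one JSON document off the
front of a buffer and reports where that document ended::

    from simplejson.decoder import JSONDecoder

    decoder = JSONDecoder()
    buf = '{"a": 1} trailing data'
    obj, end = decoder.raw_decode(buf)
    print obj        # {u'a': 1}
    print buf[end:]  # ' trailing data'
    # decoder.decode(buf) would raise ValueError("Extra data: ...") instead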
diff --git a/test/simplejson/encoder.py b/test/simplejson/encoder.py
new file mode 100644
index 0000000000..cf58290366
--- /dev/null
+++ b/test/simplejson/encoder.py
@@ -0,0 +1,440 @@
+"""Implementation of JSONEncoder
+"""
+import re
+
+try:
+ from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
+except ImportError:
+ c_encode_basestring_ascii = None
+try:
+ from simplejson._speedups import make_encoder as c_make_encoder
+except ImportError:
+ c_make_encoder = None
+
+ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
+ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
+HAS_UTF8 = re.compile(r'[\x80-\xff]')
+ESCAPE_DCT = {
+ '\\': '\\\\',
+ '"': '\\"',
+ '\b': '\\b',
+ '\f': '\\f',
+ '\n': '\\n',
+ '\r': '\\r',
+ '\t': '\\t',
+}
+for i in range(0x20):
+ #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
+ ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+
+# Assume this produces an infinity on all machines (probably not guaranteed)
+INFINITY = float('1e66666')
+FLOAT_REPR = repr
+
+def encode_basestring(s):
+ """Return a JSON representation of a Python string
+
+ """
+ def replace(match):
+ return ESCAPE_DCT[match.group(0)]
+ return '"' + ESCAPE.sub(replace, s) + '"'
+
+
+def py_encode_basestring_ascii(s):
+ """Return an ASCII-only JSON representation of a Python string
+
+ """
+ if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+ s = s.decode('utf-8')
+ def replace(match):
+ s = match.group(0)
+ try:
+ return ESCAPE_DCT[s]
+ except KeyError:
+ n = ord(s)
+ if n < 0x10000:
+ #return '\\u{0:04x}'.format(n)
+ return '\\u%04x' % (n,)
+ else:
+ # surrogate pair
+ n -= 0x10000
+ s1 = 0xd800 | ((n >> 10) & 0x3ff)
+ s2 = 0xdc00 | (n & 0x3ff)
+ #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
+ return '\\u%04x\\u%04x' % (s1, s2)
+ return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+
+
+encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
+
+class JSONEncoder(object):
+ """Extensible JSON <http://json.org> encoder for Python data structures.
+
+ Supports the following objects and types by default:
+
+ +-------------------+---------------+
+ | Python | JSON |
+ +===================+===============+
+ | dict | object |
+ +-------------------+---------------+
+ | list, tuple | array |
+ +-------------------+---------------+
+ | str, unicode | string |
+ +-------------------+---------------+
+ | int, long, float | number |
+ +-------------------+---------------+
+ | True | true |
+ +-------------------+---------------+
+ | False | false |
+ +-------------------+---------------+
+ | None | null |
+ +-------------------+---------------+
+
+ To extend this to recognize other objects, subclass and implement a
+ ``.default()`` method that returns a serializable object for ``o`` if
+ possible; otherwise it should call the superclass implementation (to
+ raise ``TypeError``).
+
+ """
+ item_separator = ', '
+ key_separator = ': '
+ def __init__(self, skipkeys=False, ensure_ascii=True,
+ check_circular=True, allow_nan=True, sort_keys=False,
+ indent=None, separators=None, encoding='utf-8', default=None):
+ """Constructor for JSONEncoder, with sensible defaults.
+
+ If skipkeys is false, then it is a TypeError to attempt
+ encoding of keys that are not str, int, long, float or None. If
+ skipkeys is True, such items are simply skipped.
+
+ If ensure_ascii is true, the output is guaranteed to be str
+ objects with all incoming unicode characters escaped. If
+ ensure_ascii is false, the output will be a unicode object.
+
+ If check_circular is true, then lists, dicts, and custom encoded
+ objects will be checked for circular references during encoding to
+ prevent an infinite recursion (which would cause an OverflowError).
+ Otherwise, no such check takes place.
+
+ If allow_nan is true, then NaN, Infinity, and -Infinity will be
+ encoded as such. This behavior is not JSON specification compliant,
+ but is consistent with most JavaScript based encoders and decoders.
+ Otherwise, it will be a ValueError to encode such floats.
+
+ If sort_keys is true, then the output of dictionaries will be
+ sorted by key; this is useful for regression tests to ensure
+ that JSON serializations can be compared on a day-to-day basis.
+
+ If indent is a non-negative integer, then JSON array
+ elements and object members will be pretty-printed with that
+ indent level. An indent level of 0 will only insert newlines.
+ None is the most compact representation.
+
+ If specified, separators should be an (item_separator, key_separator)
+ tuple. The default is (', ', ': '). To get the most compact JSON
+ representation you should specify (',', ':') to eliminate whitespace.
+
+ If specified, default is a function that gets called for objects
+ that can't otherwise be serialized. It should return a JSON encodable
+ version of the object or raise a ``TypeError``.
+
+ If encoding is not None, then all input strings will be
+ transformed into unicode using that encoding prior to JSON-encoding.
+ The default is UTF-8.
+
+ """
+
+ self.skipkeys = skipkeys
+ self.ensure_ascii = ensure_ascii
+ self.check_circular = check_circular
+ self.allow_nan = allow_nan
+ self.sort_keys = sort_keys
+ self.indent = indent
+ if separators is not None:
+ self.item_separator, self.key_separator = separators
+ if default is not None:
+ self.default = default
+ self.encoding = encoding
+
+ def default(self, o):
+ """Implement this method in a subclass such that it returns
+ a serializable object for ``o``, or calls the base implementation
+ (to raise a ``TypeError``).
+
+ For example, to support arbitrary iterators, you could
+ implement default like this::
+
+ def default(self, o):
+ try:
+ iterable = iter(o)
+ except TypeError:
+ pass
+ else:
+ return list(iterable)
+ return JSONEncoder.default(self, o)
+
+ """
+ raise TypeError(repr(o) + " is not JSON serializable")
+
+ def encode(self, o):
+ """Return a JSON string representation of a Python data structure.
+
+ >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
+ '{"foo": ["bar", "baz"]}'
+
+ """
+ # This is for extremely simple cases and benchmarks.
+ if isinstance(o, basestring):
+ if isinstance(o, str):
+ _encoding = self.encoding
+ if (_encoding is not None
+ and not (_encoding == 'utf-8')):
+ o = o.decode(_encoding)
+ if self.ensure_ascii:
+ return encode_basestring_ascii(o)
+ else:
+ return encode_basestring(o)
+ # This doesn't pass the iterator directly to ''.join() because the
+ # exceptions aren't as detailed. The list call should be roughly
+ # equivalent to the PySequence_Fast that ''.join() would do.
+ chunks = self.iterencode(o, _one_shot=True)
+ if not isinstance(chunks, (list, tuple)):
+ chunks = list(chunks)
+ return ''.join(chunks)
+
+ def iterencode(self, o, _one_shot=False):
+ """Encode the given object and yield each string
+ representation as available.
+
+ For example::
+
+ for chunk in JSONEncoder().iterencode(bigobject):
+ mysocket.write(chunk)
+
+ """
+ if self.check_circular:
+ markers = {}
+ else:
+ markers = None
+ if self.ensure_ascii:
+ _encoder = encode_basestring_ascii
+ else:
+ _encoder = encode_basestring
+ if self.encoding != 'utf-8':
+ def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
+ if isinstance(o, str):
+ o = o.decode(_encoding)
+ return _orig_encoder(o)
+
+ def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
+ # Check for specials. Note that this type of test is processor- and/or
+ # platform-specific, so do tests which don't depend on the internals.
+
+ if o != o:
+ text = 'NaN'
+ elif o == _inf:
+ text = 'Infinity'
+ elif o == _neginf:
+ text = '-Infinity'
+ else:
+ return _repr(o)
+
+ if not allow_nan:
+ raise ValueError(
+ "Out of range float values are not JSON compliant: " +
+ repr(o))
+
+ return text
+
+
+ if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
+ _iterencode = c_make_encoder(
+ markers, self.default, _encoder, self.indent,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, self.allow_nan)
+ else:
+ _iterencode = _make_iterencode(
+ markers, self.default, _encoder, self.indent, floatstr,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, _one_shot)
+ return _iterencode(o, 0)
+
+def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
+ ## HACK: hand-optimized bytecode; turn globals into locals
+ False=False,
+ True=True,
+ ValueError=ValueError,
+ basestring=basestring,
+ dict=dict,
+ float=float,
+ id=id,
+ int=int,
+ isinstance=isinstance,
+ list=list,
+ long=long,
+ str=str,
+ tuple=tuple,
+ ):
+
+ def _iterencode_list(lst, _current_indent_level):
+ if not lst:
+ yield '[]'
+ return
+ if markers is not None:
+ markerid = id(lst)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = lst
+ buf = '['
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ separator = _item_separator + newline_indent
+ buf += newline_indent
+ else:
+ newline_indent = None
+ separator = _item_separator
+ first = True
+ for value in lst:
+ if first:
+ first = False
+ else:
+ buf = separator
+ if isinstance(value, basestring):
+ yield buf + _encoder(value)
+ elif value is None:
+ yield buf + 'null'
+ elif value is True:
+ yield buf + 'true'
+ elif value is False:
+ yield buf + 'false'
+ elif isinstance(value, (int, long)):
+ yield buf + str(value)
+ elif isinstance(value, float):
+ yield buf + _floatstr(value)
+ else:
+ yield buf
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield ']'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode_dict(dct, _current_indent_level):
+ if not dct:
+ yield '{}'
+ return
+ if markers is not None:
+ markerid = id(dct)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = dct
+ yield '{'
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ item_separator = _item_separator + newline_indent
+ yield newline_indent
+ else:
+ newline_indent = None
+ item_separator = _item_separator
+ first = True
+ if _sort_keys:
+ items = dct.items()
+ items.sort(key=lambda kv: kv[0])
+ else:
+ items = dct.iteritems()
+ for key, value in items:
+ if isinstance(key, basestring):
+ pass
+ # JavaScript is weakly typed for these, so it makes sense to
+ # also allow them. Many encoders seem to do something like this.
+ elif isinstance(key, float):
+ key = _floatstr(key)
+ elif key is True:
+ key = 'true'
+ elif key is False:
+ key = 'false'
+ elif key is None:
+ key = 'null'
+ elif isinstance(key, (int, long)):
+ key = str(key)
+ elif _skipkeys:
+ continue
+ else:
+ raise TypeError("key " + repr(key) + " is not a string")
+ if first:
+ first = False
+ else:
+ yield item_separator
+ yield _encoder(key)
+ yield _key_separator
+ if isinstance(value, basestring):
+ yield _encoder(value)
+ elif value is None:
+ yield 'null'
+ elif value is True:
+ yield 'true'
+ elif value is False:
+ yield 'false'
+ elif isinstance(value, (int, long)):
+ yield str(value)
+ elif isinstance(value, float):
+ yield _floatstr(value)
+ else:
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '}'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode(o, _current_indent_level):
+ if isinstance(o, basestring):
+ yield _encoder(o)
+ elif o is None:
+ yield 'null'
+ elif o is True:
+ yield 'true'
+ elif o is False:
+ yield 'false'
+ elif isinstance(o, (int, long)):
+ yield str(o)
+ elif isinstance(o, float):
+ yield _floatstr(o)
+ elif isinstance(o, (list, tuple)):
+ for chunk in _iterencode_list(o, _current_indent_level):
+ yield chunk
+ elif isinstance(o, dict):
+ for chunk in _iterencode_dict(o, _current_indent_level):
+ yield chunk
+ else:
+ if markers is not None:
+ markerid = id(o)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = o
+ o = _default(o)
+ for chunk in _iterencode(o, _current_indent_level):
+ yield chunk
+ if markers is not None:
+ del markers[markerid]
+
+ return _iterencode
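The long ``_make_iterencode`` signature above pre-binds builtins such as
``isinstance`` and ``str`` as keyword defaults so the encoding loops do local
rather than global name lookups. A minimal sketch of that idiom, using a
hypothetical helper, looks like::

    # Hypothetical example of the same default-argument binding trick:
    # isinstance and str become fast local names inside the function.
    def stringify_ints(values, isinstance=isinstance, str=str):
        out = []
        for v in values:
            if isinstance(v, int):
                out.append(str(v))
        return out

    print stringify_ints([1, 'x', 2])  # ['1', '2']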
diff --git a/test/simplejson/scanner.py b/test/simplejson/scanner.py
new file mode 100644
index 0000000000..adbc6ec979
--- /dev/null
+++ b/test/simplejson/scanner.py
@@ -0,0 +1,65 @@
+"""JSON token scanner
+"""
+import re
+try:
+ from simplejson._speedups import make_scanner as c_make_scanner
+except ImportError:
+ c_make_scanner = None
+
+__all__ = ['make_scanner']
+
+NUMBER_RE = re.compile(
+ r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
+ (re.VERBOSE | re.MULTILINE | re.DOTALL))
+
+def py_make_scanner(context):
+ parse_object = context.parse_object
+ parse_array = context.parse_array
+ parse_string = context.parse_string
+ match_number = NUMBER_RE.match
+ encoding = context.encoding
+ strict = context.strict
+ parse_float = context.parse_float
+ parse_int = context.parse_int
+ parse_constant = context.parse_constant
+ object_hook = context.object_hook
+
+ def _scan_once(string, idx):
+ try:
+ nextchar = string[idx]
+ except IndexError:
+ raise StopIteration
+
+ if nextchar == '"':
+ return parse_string(string, idx + 1, encoding, strict)
+ elif nextchar == '{':
+ return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
+ elif nextchar == '[':
+ return parse_array((string, idx + 1), _scan_once)
+ elif nextchar == 'n' and string[idx:idx + 4] == 'null':
+ return None, idx + 4
+ elif nextchar == 't' and string[idx:idx + 4] == 'true':
+ return True, idx + 4
+ elif nextchar == 'f' and string[idx:idx + 5] == 'false':
+ return False, idx + 5
+
+ m = match_number(string, idx)
+ if m is not None:
+ integer, frac, exp = m.groups()
+ if frac or exp:
+ res = parse_float(integer + (frac or '') + (exp or ''))
+ else:
+ res = parse_int(integer)
+ return res, m.end()
+ elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
+ return parse_constant('NaN'), idx + 3
+ elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
+ return parse_constant('Infinity'), idx + 8
+ elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
+ return parse_constant('-Infinity'), idx + 9
+ else:
+ raise StopIteration
+
+ return _scan_once
+
+make_scanner = c_make_scanner or py_make_scanner
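The ``make_scanner = c_make_scanner or py_make_scanner`` line is the same
optional-speedup fallback used in decoder.py and encoder.py: try to import
the C implementation, fall back to pure Python. A generic sketch of the
idiom, with a hypothetical module name, is::

    # Hypothetical fallback: prefer a C extension when importable,
    # otherwise use the pure-Python implementation.
    try:
        from _fastmod import transform as c_transform
    except ImportError:
        c_transform = None

    def py_transform(value):
        return value * 2

    transform = c_transform or py_transform
    print transform(21)  # 42 unless the _fastmod extension is present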
diff --git a/test/simplejson/tool.py b/test/simplejson/tool.py
new file mode 100644
index 0000000000..90443317b2
--- /dev/null
+++ b/test/simplejson/tool.py
@@ -0,0 +1,37 @@
+r"""Command-line tool to validate and pretty-print JSON
+
+Usage::
+
+ $ echo '{"json":"obj"}' | python -m simplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -m simplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+
+"""
+import sys
+import simplejson
+
+def main():
+ if len(sys.argv) == 1:
+ infile = sys.stdin
+ outfile = sys.stdout
+ elif len(sys.argv) == 2:
+ infile = open(sys.argv[1], 'rb')
+ outfile = sys.stdout
+ elif len(sys.argv) == 3:
+ infile = open(sys.argv[1], 'rb')
+ outfile = open(sys.argv[2], 'wb')
+ else:
+ raise SystemExit(sys.argv[0] + " [infile [outfile]]")
+ try:
+ obj = simplejson.load(infile)
+ except ValueError, e:
+ raise SystemExit(e)
+ simplejson.dump(obj, outfile, sort_keys=True, indent=4)
+ outfile.write('\n')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/files/jvm/NestedAnnotations.java b/test/support/annotations/NestedAnnotations.java
index 8f2327dcce..c4a98a0af3 100644
--- a/test/files/jvm/NestedAnnotations.java
+++ b/test/support/annotations/NestedAnnotations.java
@@ -10,14 +10,14 @@ public class NestedAnnotations {
@OuterAnno(inner=@InnerAnno(name="inner"))
String field;
-
- @Target({FIELD})
+
+ @Target({FIELD})
@Retention(RUNTIME)
public static @interface InnerAnno {
String name();
}
-
- @Target({FIELD})
+
+ @Target({FIELD})
@Retention(RUNTIME)
public static @interface OuterAnno {
InnerAnno inner();
diff --git a/test/files/jvm/OuterEnum.java b/test/support/annotations/OuterEnum.java
index 75d3f34223..75d3f34223 100644
--- a/test/files/jvm/OuterEnum.java
+++ b/test/support/annotations/OuterEnum.java
diff --git a/test/files/jvm/OuterTParams.java b/test/support/annotations/OuterTParams.java
index 1d3db49fcf..1d3db49fcf 100644
--- a/test/files/jvm/OuterTParams.java
+++ b/test/support/annotations/OuterTParams.java
diff --git a/test/files/jvm/SourceAnnotation.java b/test/support/annotations/SourceAnnotation.java
index 047751ddfe..047751ddfe 100644
--- a/test/files/jvm/SourceAnnotation.java
+++ b/test/support/annotations/SourceAnnotation.java
diff --git a/test/files/jvm/mkAnnotationsJar.sh b/test/support/annotations/mkAnnotationsJar.sh
index 3d69351165..3d69351165 100755
--- a/test/files/jvm/mkAnnotationsJar.sh
+++ b/test/support/annotations/mkAnnotationsJar.sh
diff --git a/tools/abspath b/tools/abspath
new file mode 100755
index 0000000000..a2d1410b9b
--- /dev/null
+++ b/tools/abspath
@@ -0,0 +1,9 @@
+#!/bin/sh
+#
+# print the absolute path of each argument
+
+for relpath in $* ; do
+ D=`dirname "$relpath"`
+ B=`basename "$relpath"`
+ echo "`cd \"$D\" 2>/dev/null && pwd || echo \"$D\"`/$B"
+done \ No newline at end of file
diff --git a/tools/cpof b/tools/cpof
new file mode 100755
index 0000000000..ab5a42b4fb
--- /dev/null
+++ b/tools/cpof
@@ -0,0 +1,30 @@
+#!/bin/sh
+#
+# Creates a classpath out of the contents of each directory
+# given as an argument.
+
+if [ $# == 0 ] ; then
+ echo "Usage: $0 [dir1 dir2 ...]"
+ exit 1
+fi
+
+THISDIR=`dirname $0`
+ABSCMD="${THISDIR}/abspath"
+CPRES=""
+
+for dir in $* ; do
+ absdir=`${ABSCMD} $dir`
+ LS=`ls -1 ${absdir}`
+
+ for x in $LS ; do
+ ABS=`${ABSCMD} "${absdir}/${x}"`
+ CPRES="${CPRES}:${ABS}"
+ done
+done
+
+# shaving the : off the beginning. Applause to /bin/sh for
+# keeping us humble about how far we've come.
+LEN=$(( ${#CPRES} - 1 ))
+result=${CPRES:1:${LEN}}
+
+echo $result
diff --git a/tools/diffPickled b/tools/diffPickled
new file mode 100755
index 0000000000..b4a345dc7d
--- /dev/null
+++ b/tools/diffPickled
@@ -0,0 +1,51 @@
+#!/bin/sh
+#
+# Shows the difference in pickler output between two variations on a class.
+#
+# If quick and strap are built normally you can run
+#
+# diffPickled foo.bar.Baz
+#
+# to see any differences between them in that class.
+
+USAGE="Usage: $0 classpath1 classpath2 class"
+TOOLSDIR=`dirname $0`
+BUILDDIR="${TOOLSDIR}/../build"
+QUICKDIR="${BUILDDIR}/quick"
+STRAPDIR="${BUILDDIR}/strap"
+
+CP1=""
+CP2=""
+CLASS=""
+
+if [ $# == 1 ] ; then
+ if [ -e ${QUICKDIR} ] && [ -e ${STRAPDIR} ] ; then
+ CP1=`${TOOLSDIR}/quickcp`
+ CP2=`${TOOLSDIR}/strapcp`
+ CLASS=$1
+ else
+ echo $USAGE
+ echo "(If only one argument is given, $QUICKDIR and $STRAPDIR must exist.)"
+ exit 1
+ fi
+elif [ $# == 3 ] ; then
+ CP1=$1
+ CP2=$2
+ CLASS=$3
+else
+ echo $USAGE
+ exit 1
+fi
+
+TMPDIR="/tmp/scala_pickle_diff${RANDOM}"
+
+if mkdir -m 0700 "$TMPDIR" 2>/dev/null ; then
+ ${TOOLSDIR}/showPickled -cp $CP1 $CLASS > "${TMPDIR}/out1.txt"
+ ${TOOLSDIR}/showPickled -cp $CP2 $CLASS > "${TMPDIR}/out2.txt"
+ diff "${TMPDIR}/out1.txt" "${TMPDIR}/out2.txt"
+ rm -rf ${TMPDIR}
+else
+ echo "Failed to create temporary directory ${TMPDIR}."
+ exit 1
+fi
+
diff --git a/tools/epfl-build b/tools/epfl-build
new file mode 100755
index 0000000000..dd66307de3
--- /dev/null
+++ b/tools/epfl-build
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+#
+# builds nightlies
+
+[[ $# -gt 0 ]] || {
+ cat <<EOM
+Usage: $0 <version> [opt opt ...]
+
+ Everything after the version is supplied to scalac and partest.
+ Example: $0 -Xcheckinit -Ycheck:all
+
+Environment variables:
+ extra_ant_targets Additional ant targets to run after nightly
+
+EOM
+ exit 0
+}
+
+# version isn't actually used at present.
+scalaVersion="$1" && shift
+scalaArgs="-Dscalac.args=\"$@\" -Dpartest.scalac_opts=\"$@\""
+
+ant all.clean && ./pull-binary-libs.sh
+
+ant $scalaArgs build-opt &&
+ant $scalaArgs nightly &&
+for target in $extra_ant_targets; do ant $target ; done
+# [[ -n "$BUILD_DOCSCOMP" ]] && ant docscomp
diff --git a/tools/epfl-build-2.x.x b/tools/epfl-build-2.x.x
new file mode 100755
index 0000000000..7bc884c162
--- /dev/null
+++ b/tools/epfl-build-2.x.x
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+#
+
+[[ $# -gt 0 ]] || {
+ echo "Usage: $0 <version> [publish destination]"
+ echo ""
+ exit 0
+}
+
+version="$1"
+shift
+rsyncDest="$1"
+
+# should not be hardcoded
+mavenSettings="/home/linuxsoft/apps/hudson-maven-settings/settings.xml"
+
+# main build sequence
+ant all.clean
+./pull-binary-libs.sh
+ant nightly
+ant docscomp
+
+# publish nightly build
+if [ -n "$rsyncDest" ]; then
+ echo "Copying nightly build to $rsyncDest"
+ # Archive Scala nightly distribution
+ rsync -az dists/archives/ "$rsyncDest/distributions"
+ # SKIP PUBLISHING DOCS IN 2.8.X BRANCH
+ if [[ $version != "2.8.x" ]]; then
+ rsync -az build/scaladoc/ "$rsyncDest/docs"
+ fi
+ rsync -az dists/sbaz/ "$rsyncDest/sbaz"
+ # Deploy the maven artifacts on scala-tools.org
+ ( cd dists/maven/latest && ant deploy.snapshot -Dsettings.file="$mavenSettings" )
+fi
diff --git a/tools/epfl-publish b/tools/epfl-publish
new file mode 100755
index 0000000000..e9cd97b3d2
--- /dev/null
+++ b/tools/epfl-publish
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+#
+# publishes nightly build if $publish_to is set in environment.
+# alternate maven settings.xml file given in $maven_settings.
+#
+
+[[ $# -eq 1 ]] || {
+ cat <<EOM
+Usage: $0 <scala version>
+
+Environment variables:
+ publish_to rsync destination
+EOM
+ exit 0
+}
+version="$1"
+
+[[ -d dists/archives ]] || {
+ echo "Can't find build, has it completed? No directory at dists/archives"
+ exit 1
+}
+
+# should not be hardcoded
+# adds -Dsettings.file= if fixed path is present
+mavenSettingsOption () {
+ hardcoded_path="/home/linuxsoft/apps/hudson-maven-settings/settings.xml"
+
+ # environment variable
+ if [[ -n $maven_settings ]]; then
+ echo -Dsettings.file="$maven_settings"
+ elif [[ -f $hardcoded_path ]]; then
+ echo -Dsettings.file="$hardcoded_path"
+ fi
+}
+
+mavenSettings=${maven_settings:-findMavenSettings}
+
+if [[ -z $publish_to ]]; then
+ echo "Nothing to publish."
+else
+ echo "Publishing nightly build to $publish_to"
+ # Archive Scala nightly distribution
+ rsync -az dists/archives/ "$publish_to/distributions"
+ # don't publish docs in 2.8.x
+ [[ $version == "2.8.x" ]] || rsync -az build/scaladoc/ "$publish_to/docs"
+ # sbaz
+ [[ -d dists/sbaz ]] && rsync -az dists/sbaz/ "$publish_to/sbaz"
+ # Deploy the maven artifacts on scala-tools.org
+ ( cd dists/maven/latest && ant deploy.snapshot $(mavenSettingsOption) )
+fi
diff --git a/tools/git-get-rev b/tools/git-get-rev
new file mode 100755
index 0000000000..9adda35ca7
--- /dev/null
+++ b/tools/git-get-rev
@@ -0,0 +1,5 @@
+#!/bin/sh
+#
+
+GIT_PAGER=cat
+git log -10 | grep git-svn-id | head -1 \ No newline at end of file
diff --git a/tools/packcp b/tools/packcp
new file mode 100755
index 0000000000..42bce9e266
--- /dev/null
+++ b/tools/packcp
@@ -0,0 +1,5 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+${THISDIR}/cpof ${THISDIR}/../build/pack/lib
diff --git a/tools/pathResolver b/tools/pathResolver
new file mode 100755
index 0000000000..efff45ea62
--- /dev/null
+++ b/tools/pathResolver
@@ -0,0 +1,11 @@
+#!/bin/sh
+#
+
+WHICH=`which scala`
+BASE=`dirname $WHICH`
+LIBDIR=$BASE/../lib
+
+echo Using ${WHICH}.
+echo
+
+java -cp "${LIBDIR}/*" scala.tools.util.PathResolver $*
diff --git a/tools/quickcp b/tools/quickcp
new file mode 100755
index 0000000000..0bfcad1941
--- /dev/null
+++ b/tools/quickcp
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+cp=`${THISDIR}/cpof ${THISDIR}/../build/quick/classes`
+fjbg=`${THISDIR}/abspath ${THISDIR}/../lib/fjbg.jar`
+
+echo ${cp}:${fjbg}
diff --git a/tools/scalawhich b/tools/scalawhich
new file mode 100755
index 0000000000..6a4b1788a8
--- /dev/null
+++ b/tools/scalawhich
@@ -0,0 +1,4 @@
+#!/bin/sh
+#
+
+scala scala.tools.util.Which $*
diff --git a/tools/scmp b/tools/scmp
new file mode 100755
index 0000000000..f6acea5ab1
--- /dev/null
+++ b/tools/scmp
@@ -0,0 +1,4 @@
+#!/bin/sh
+#
+
+scala scala.tools.cmd.program.Scmp "$@"
diff --git a/tools/showPickled b/tools/showPickled
new file mode 100755
index 0000000000..27421c3ae5
--- /dev/null
+++ b/tools/showPickled
@@ -0,0 +1,32 @@
+#!/bin/sh
+#
+# Shows the pickled scala data in a classfile.
+
+if [ $# == 0 ] ; then
+ echo "Usage: $0 [--bare] [-cp classpath] <class*>"
+ exit 1
+fi
+
+TOOLSDIR=`dirname $0`
+CPOF="$TOOLSDIR/cpof"
+
+PACK="$TOOLSDIR/../build/pack/lib"
+QUICK="$TOOLSDIR/../build/quick/classes"
+STARR="$TOOLSDIR/../lib"
+CP=""
+
+if [ -f "${PACK}/scala-library.jar" ] ; then
+ CP=`${TOOLSDIR}/packcp`
+elif [ -d "${QUICK}/library" ] ; then
+ CP=`${TOOLSDIR}/quickcp`
+else
+ CP=`${TOOLSDIR}/starrcp`
+fi
+
+if [ "$1" == "-cp" ] ; then
+ shift
+ CP="${1}:${CP}"
+ shift
+fi
+
+java -cp "$CP" scala.tools.nsc.util.ShowPickled $*
diff --git a/tools/starrcp b/tools/starrcp
new file mode 100755
index 0000000000..6add5665b5
--- /dev/null
+++ b/tools/starrcp
@@ -0,0 +1,5 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+${THISDIR}/cpof ${THISDIR}/../lib \ No newline at end of file
diff --git a/tools/strapcp b/tools/strapcp
new file mode 100755
index 0000000000..61e4a61b2c
--- /dev/null
+++ b/tools/strapcp
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+cp=`${THISDIR}/cpof ${THISDIR}/../build/strap/classes`
+fjbg=`${THISDIR}/abspath ${THISDIR}/../lib/fjbg.jar`
+
+echo ${cp}:${fjbg}
diff --git a/tools/tokens b/tools/tokens
new file mode 100755
index 0000000000..b910fb29cc
--- /dev/null
+++ b/tools/tokens
@@ -0,0 +1,4 @@
+#!/bin/sh
+#
+
+scala scala.tools.cmd.program.Tokens "$@"
diff --git a/tools/truncate b/tools/truncate
new file mode 100755
index 0000000000..b7f410e25d
--- /dev/null
+++ b/tools/truncate
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+find . -type f -not -path "*.svn*" -name "*.scala" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
+find . -type f -not -path "*.svn*" -name "*.java" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
+find . -type f -not -path "*.svn*" -name "*.cs" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
+find . -type f -not -path "*.svn*" -name "*.js" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
+find . -type f -not -path "*.svn*" -name "*.scala.disabled" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;